/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com) and
                  Andreas Krebbel (Andreas.Krebbel@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "recog.h"
#include "expr.h"
#include "reload.h"
#include "toplev.h"
#include "basic-block.h"
#include "integrate.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "optabs.h"
#include "gimple.h"
#include "df.h"
/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;     /* cost of multiplication in TFmode (MXBR).  */
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;     /* cost of division in TFmode (DXBR).  */
  const int ddbr;     /* cost of division in DFmode (DDBR).  */
  const int debr;     /* cost of division in SFmode (DEBR).  */
  const int dlgr;     /* cost of a DLGR instruction.  */
  const int dlr;      /* cost of a DLR instruction.  */
  const int dr;       /* cost of a DR instruction.  */
  const int dsgfr;    /* cost of a DSGFR instruction.  */
  const int dsgr;     /* cost of a DSGR instruction.  */
};

const struct processor_costs *s390_cost;
static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),     /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (4),     /* MHI   */
  COSTS_N_INSNS (5),     /* ML    */
  COSTS_N_INSNS (5),     /* MR    */
  COSTS_N_INSNS (4),     /* MS    */
  COSTS_N_INSNS (15),    /* MSG   */
  COSTS_N_INSNS (7),     /* MSGF  */
  COSTS_N_INSNS (7),     /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (7),     /* multiplication in DFmode */
  COSTS_N_INSNS (13),    /* MXBR  */
  COSTS_N_INSNS (136),   /* SQXBR */
  COSTS_N_INSNS (44),    /* SQDBR */
  COSTS_N_INSNS (35),    /* SQEBR */
  COSTS_N_INSNS (18),    /* MADBR */
  COSTS_N_INSNS (13),    /* MAEBR */
  COSTS_N_INSNS (134),   /* DXBR  */
  COSTS_N_INSNS (30),    /* DDBR  */
  COSTS_N_INSNS (27),    /* DEBR  */
  COSTS_N_INSNS (220),   /* DLGR  */
  COSTS_N_INSNS (34),    /* DLR   */
  COSTS_N_INSNS (34),    /* DR    */
  COSTS_N_INSNS (32),    /* DSGFR */
  COSTS_N_INSNS (32),    /* DSGR  */
};

static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (176),   /* DLGR  */
  COSTS_N_INSNS (31),    /* DLR   */
  COSTS_N_INSNS (31),    /* DR    */
  COSTS_N_INSNS (31),    /* DSGFR */
  COSTS_N_INSNS (31),    /* DSGR  */
};

static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (30),    /* DLGR  */
  COSTS_N_INSNS (23),    /* DLR   */
  COSTS_N_INSNS (23),    /* DR    */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR  */
};

static const
struct processor_costs z10_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (30),    /* DLGR  */
  COSTS_N_INSNS (23),    /* DLR   */
  COSTS_N_INSNS (23),    /* DR    */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR  */
};
extern int reload_completed;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;

/* Save the result of a compare_and_swap until the branch or scc is
   emitted.  */
rtx s390_compare_emitted = NULL_RTX;

/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
        base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */
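/* Illustrative example (not part of the original comment): the RX-format
   operand "8(%r2,%r3)" of an L instruction would decompose into base %r3,
   index %r2 and displacement 8.  */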
struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};
/* Which cpu are we tuning for.  */
enum processor_type s390_tune = PROCESSOR_max;
enum processor_flags s390_tune_flags;
/* Which instruction set architecture to use.  */
enum processor_type s390_arch;
enum processor_flags s390_arch_flags;

HOST_WIDE_INT s390_warn_framesize = 0;
HOST_WIDE_INT s390_stack_size = 0;
HOST_WIDE_INT s390_stack_guard = 0;
/* The following structure is embedded in the machine
   specific part of struct function.  */

struct s390_frame_layout GTY (())
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Number of first and last gpr where slots in the register
     save area are reserved for.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};
/* Define the structure for the machine field in struct function.  */

struct machine_function GTY(())
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* True during final stage of literal pool processing.  */
  bool decomposed_literal_pool_addresses_ok_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;

  bool has_landing_pad_p;
};
/* Few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_WORD)
#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
  (1 << (BITNUM)))
#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
  (1 << (BITNUM))))

/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT? 4 : 2)

/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")

#define REGNO_PAIR_OK(REGNO, MODE)                               \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))
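/* Illustrative note (added): a multiword value such as a TImode integer
   occupies an even/odd register pair, so REGNO_PAIR_OK rejects an odd
   starting REGNO whenever HARD_REGNO_NREGS is greater than one.  */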
static enum machine_mode
s390_libgcc_cmp_return_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static enum machine_mode
s390_libgcc_shift_count_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

/* Return true if the back end supports mode MODE.  */
static bool
s390_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Set the has_landing_pad_p flag in struct machine_function to VALUE.  */

void
s390_set_has_landing_pad_p (bool value)
{
  cfun->machine->has_landing_pad_p = value;
}
/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static enum machine_mode
s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
	  || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
	return m2;
      return VOIDmode;

    case CCSmode:
    case CCUmode:
    case CCTmode:
    case CCSRmode:
    case CCURmode:
    case CCZ1mode:
      if (m2 == CCZmode)
	return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}
423 /* Return true if SET either doesn't set the CC register, or else
424 the source and destination have matching CC modes and that
425 CC mode is at least as constrained as REQ_MODE. */
427 static bool
428 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
430 enum machine_mode set_mode;
432 gcc_assert (GET_CODE (set) == SET);
434 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
435 return 1;
437 set_mode = GET_MODE (SET_DEST (set));
438 switch (set_mode)
440 case CCSmode:
441 case CCSRmode:
442 case CCUmode:
443 case CCURmode:
444 case CCLmode:
445 case CCL1mode:
446 case CCL2mode:
447 case CCL3mode:
448 case CCT1mode:
449 case CCT2mode:
450 case CCT3mode:
451 if (req_mode != set_mode)
452 return 0;
453 break;
455 case CCZmode:
456 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
457 && req_mode != CCSRmode && req_mode != CCURmode)
458 return 0;
459 break;
461 case CCAPmode:
462 case CCANmode:
463 if (req_mode != CCAmode)
464 return 0;
465 break;
467 default:
468 gcc_unreachable ();
471 return (GET_MODE (SET_SRC (set)) == set_mode);
474 /* Return true if every SET in INSN that sets the CC register
475 has source and destination with matching CC modes and that
476 CC mode is at least as constrained as REQ_MODE.
477 If REQ_MODE is VOIDmode, always return false. */
479 bool
480 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
482 int i;
484 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
485 if (req_mode == VOIDmode)
486 return false;
488 if (GET_CODE (PATTERN (insn)) == SET)
489 return s390_match_ccmode_set (PATTERN (insn), req_mode);
491 if (GET_CODE (PATTERN (insn)) == PARALLEL)
492 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
494 rtx set = XVECEXP (PATTERN (insn), 0, i);
495 if (GET_CODE (set) == SET)
496 if (!s390_match_ccmode_set (set, req_mode))
497 return false;
500 return true;
503 /* If a test-under-mask instruction can be used to implement
504 (compare (and ... OP1) OP2), return the CC mode required
505 to do that. Otherwise, return VOIDmode.
506 MIXED is true if the instruction can distinguish between
507 CC1 and CC2 for mixed selected bits (TMxx), it is false
508 if the instruction cannot (TM). */
510 enum machine_mode
511 s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
513 int bit0, bit1;
515 /* ??? Fixme: should work on CONST_DOUBLE as well. */
516 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
517 return VOIDmode;
519 /* Selected bits all zero: CC0.
520 e.g.: int a; if ((a & (16 + 128)) == 0) */
521 if (INTVAL (op2) == 0)
522 return CCTmode;
524 /* Selected bits all one: CC3.
525 e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
526 if (INTVAL (op2) == INTVAL (op1))
527 return CCT3mode;
529 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
530 int a;
531 if ((a & (16 + 128)) == 16) -> CCT1
532 if ((a & (16 + 128)) == 128) -> CCT2 */
533 if (mixed)
535 bit1 = exact_log2 (INTVAL (op2));
536 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
537 if (bit0 != -1 && bit1 != -1)
538 return bit0 > bit1 ? CCT1mode : CCT2mode;
541 return VOIDmode;
544 /* Given a comparison code OP (EQ, NE, etc.) and the operands
545 OP0 and OP1 of a COMPARE, return the mode to be used for the
546 comparison. */
548 enum machine_mode
549 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
551 switch (code)
553 case EQ:
554 case NE:
555 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
556 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
557 return CCAPmode;
558 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
559 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
560 return CCAPmode;
561 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
562 || GET_CODE (op1) == NEG)
563 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
564 return CCLmode;
566 if (GET_CODE (op0) == AND)
568 /* Check whether we can potentially do it via TM. */
569 enum machine_mode ccmode;
570 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
571 if (ccmode != VOIDmode)
573 /* Relax CCTmode to CCZmode to allow fall-back to AND
574 if that turns out to be beneficial. */
575 return ccmode == CCTmode ? CCZmode : ccmode;
579 if (register_operand (op0, HImode)
580 && GET_CODE (op1) == CONST_INT
581 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
582 return CCT3mode;
583 if (register_operand (op0, QImode)
584 && GET_CODE (op1) == CONST_INT
585 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
586 return CCT3mode;
588 return CCZmode;
590 case LE:
591 case LT:
592 case GE:
593 case GT:
594 /* The only overflow condition of NEG and ABS happens when
595 -INT_MAX is used as parameter, which stays negative. So
596 we have an overflow from a positive value to a negative.
597 Using CCAP mode the resulting cc can be used for comparisons. */
598 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
599 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
600 return CCAPmode;
602 /* If constants are involved in an add instruction it is possible to use
603 the resulting cc for comparisons with zero. Knowing the sign of the
604 constant the overflow behavior gets predictable. e.g.:
605 int a, b; if ((b = a + c) > 0)
606 with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP */
607 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
608 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
610 if (INTVAL (XEXP((op0), 1)) < 0)
611 return CCANmode;
612 else
613 return CCAPmode;
615 /* Fall through. */
616 case UNORDERED:
617 case ORDERED:
618 case UNEQ:
619 case UNLE:
620 case UNLT:
621 case UNGE:
622 case UNGT:
623 case LTGT:
624 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
625 && GET_CODE (op1) != CONST_INT)
626 return CCSRmode;
627 return CCSmode;
629 case LTU:
630 case GEU:
631 if (GET_CODE (op0) == PLUS
632 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
633 return CCL1mode;
635 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
636 && GET_CODE (op1) != CONST_INT)
637 return CCURmode;
638 return CCUmode;
640 case LEU:
641 case GTU:
642 if (GET_CODE (op0) == MINUS
643 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
644 return CCL2mode;
646 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
647 && GET_CODE (op1) != CONST_INT)
648 return CCURmode;
649 return CCUmode;
651 default:
652 gcc_unreachable ();
656 /* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
657 that we can implement more efficiently. */
659 void
660 s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
662 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
663 if ((*code == EQ || *code == NE)
664 && *op1 == const0_rtx
665 && GET_CODE (*op0) == ZERO_EXTRACT
666 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
667 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
668 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
670 rtx inner = XEXP (*op0, 0);
671 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
672 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
673 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
675 if (len > 0 && len < modesize
676 && pos >= 0 && pos + len <= modesize
677 && modesize <= HOST_BITS_PER_WIDE_INT)
679 unsigned HOST_WIDE_INT block;
680 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
681 block <<= modesize - pos - len;
683 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
684 gen_int_mode (block, GET_MODE (inner)));
688 /* Narrow AND of memory against immediate to enable TM. */
689 if ((*code == EQ || *code == NE)
690 && *op1 == const0_rtx
691 && GET_CODE (*op0) == AND
692 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
693 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
695 rtx inner = XEXP (*op0, 0);
696 rtx mask = XEXP (*op0, 1);
698 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
699 if (GET_CODE (inner) == SUBREG
700 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
701 && (GET_MODE_SIZE (GET_MODE (inner))
702 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
703 && ((INTVAL (mask)
704 & GET_MODE_MASK (GET_MODE (inner))
705 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
706 == 0))
707 inner = SUBREG_REG (inner);
709 /* Do not change volatile MEMs. */
710 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
712 int part = s390_single_part (XEXP (*op0, 1),
713 GET_MODE (inner), QImode, 0);
714 if (part >= 0)
716 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
717 inner = adjust_address_nv (inner, QImode, part);
718 *op0 = gen_rtx_AND (QImode, inner, mask);
723 /* Narrow comparisons against 0xffff to HImode if possible. */
724 if ((*code == EQ || *code == NE)
725 && GET_CODE (*op1) == CONST_INT
726 && INTVAL (*op1) == 0xffff
727 && SCALAR_INT_MODE_P (GET_MODE (*op0))
728 && (nonzero_bits (*op0, GET_MODE (*op0))
729 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
731 *op0 = gen_lowpart (HImode, *op0);
732 *op1 = constm1_rtx;
735 /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible. */
736 if (GET_CODE (*op0) == UNSPEC
737 && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
738 && XVECLEN (*op0, 0) == 1
739 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
740 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
741 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
742 && *op1 == const0_rtx)
744 enum rtx_code new_code = UNKNOWN;
745 switch (*code)
747 case EQ: new_code = EQ; break;
748 case NE: new_code = NE; break;
749 case LT: new_code = GTU; break;
750 case GT: new_code = LTU; break;
751 case LE: new_code = GEU; break;
752 case GE: new_code = LEU; break;
753 default: break;
756 if (new_code != UNKNOWN)
758 *op0 = XVECEXP (*op0, 0, 0);
759 *code = new_code;
763 /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible. */
764 if (GET_CODE (*op0) == UNSPEC
765 && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
766 && XVECLEN (*op0, 0) == 1
767 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
768 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
769 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
770 && *op1 == const0_rtx)
772 enum rtx_code new_code = UNKNOWN;
773 switch (*code)
775 case EQ: new_code = EQ; break;
776 case NE: new_code = NE; break;
777 default: break;
780 if (new_code != UNKNOWN)
782 *op0 = XVECEXP (*op0, 0, 0);
783 *code = new_code;
787 /* Simplify cascaded EQ, NE with const0_rtx. */
788 if ((*code == NE || *code == EQ)
789 && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
790 && GET_MODE (*op0) == SImode
791 && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
792 && REG_P (XEXP (*op0, 0))
793 && XEXP (*op0, 1) == const0_rtx
794 && *op1 == const0_rtx)
796 if ((*code == EQ && GET_CODE (*op0) == NE)
797 || (*code == NE && GET_CODE (*op0) == EQ))
798 *code = EQ;
799 else
800 *code = NE;
801 *op0 = XEXP (*op0, 0);
804 /* Prefer register over memory as first operand. */
805 if (MEM_P (*op0) && REG_P (*op1))
807 rtx tem = *op0; *op0 = *op1; *op1 = tem;
808 *code = swap_condition (*code);
/* Emit a compare instruction suitable to implement the comparison
   OP0 CODE OP1.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */
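/* Illustrative note (added): the conditional-branch expanders typically
   feed the condition returned here straight into s390_emit_jump.  */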
rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
819 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
820 rtx ret = NULL_RTX;
822 /* Do not output a redundant compare instruction if a compare_and_swap
823 pattern already computed the result and the machine modes are compatible. */
824 if (s390_compare_emitted
825 && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode)
826 == GET_MODE (s390_compare_emitted)))
827 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
828 else
830 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
832 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
833 ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
835 s390_compare_emitted = NULL_RTX;
836 return ret;
839 /* Emit a SImode compare and swap instruction setting MEM to NEW if OLD
840 matches CMP.
841 Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
842 conditional branch testing the result. */
844 static rtx
845 s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new)
847 rtx ret;
849 emit_insn (gen_sync_compare_and_swap_ccsi (old, mem, cmp, new));
850 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
852 s390_compare_emitted = NULL_RTX;
854 return ret;
857 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
858 unconditional jump, else a conditional jump under condition COND. */
860 void
861 s390_emit_jump (rtx target, rtx cond)
863 rtx insn;
865 target = gen_rtx_LABEL_REF (VOIDmode, target);
866 if (cond)
867 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
869 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
870 emit_jump_insn (insn);
873 /* Return branch condition mask to implement a branch
874 specified by CODE. Return -1 for invalid comparisons. */
static int
s390_branch_condition_mask (rtx code)
{
879 const int CC0 = 1 << 3;
880 const int CC1 = 1 << 2;
881 const int CC2 = 1 << 1;
882 const int CC3 = 1 << 0;
884 gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
885 gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
886 gcc_assert (XEXP (code, 1) == const0_rtx);
888 switch (GET_MODE (XEXP (code, 0)))
890 case CCZmode:
891 case CCZ1mode:
892 switch (GET_CODE (code))
894 case EQ: return CC0;
895 case NE: return CC1 | CC2 | CC3;
896 default: return -1;
898 break;
900 case CCT1mode:
901 switch (GET_CODE (code))
903 case EQ: return CC1;
904 case NE: return CC0 | CC2 | CC3;
905 default: return -1;
907 break;
909 case CCT2mode:
910 switch (GET_CODE (code))
912 case EQ: return CC2;
913 case NE: return CC0 | CC1 | CC3;
914 default: return -1;
916 break;
918 case CCT3mode:
919 switch (GET_CODE (code))
921 case EQ: return CC3;
922 case NE: return CC0 | CC1 | CC2;
923 default: return -1;
925 break;
927 case CCLmode:
928 switch (GET_CODE (code))
930 case EQ: return CC0 | CC2;
931 case NE: return CC1 | CC3;
932 default: return -1;
934 break;
936 case CCL1mode:
937 switch (GET_CODE (code))
939 case LTU: return CC2 | CC3; /* carry */
940 case GEU: return CC0 | CC1; /* no carry */
941 default: return -1;
943 break;
945 case CCL2mode:
946 switch (GET_CODE (code))
948 case GTU: return CC0 | CC1; /* borrow */
949 case LEU: return CC2 | CC3; /* no borrow */
950 default: return -1;
952 break;
954 case CCL3mode:
955 switch (GET_CODE (code))
957 case EQ: return CC0 | CC2;
958 case NE: return CC1 | CC3;
959 case LTU: return CC1;
960 case GTU: return CC3;
961 case LEU: return CC1 | CC2;
962 case GEU: return CC2 | CC3;
963 default: return -1;
966 case CCUmode:
967 switch (GET_CODE (code))
969 case EQ: return CC0;
970 case NE: return CC1 | CC2 | CC3;
971 case LTU: return CC1;
972 case GTU: return CC2;
973 case LEU: return CC0 | CC1;
974 case GEU: return CC0 | CC2;
975 default: return -1;
977 break;
979 case CCURmode:
980 switch (GET_CODE (code))
982 case EQ: return CC0;
983 case NE: return CC2 | CC1 | CC3;
984 case LTU: return CC2;
985 case GTU: return CC1;
986 case LEU: return CC0 | CC2;
987 case GEU: return CC0 | CC1;
988 default: return -1;
990 break;
992 case CCAPmode:
993 switch (GET_CODE (code))
995 case EQ: return CC0;
996 case NE: return CC1 | CC2 | CC3;
997 case LT: return CC1 | CC3;
998 case GT: return CC2;
999 case LE: return CC0 | CC1 | CC3;
1000 case GE: return CC0 | CC2;
1001 default: return -1;
1003 break;
1005 case CCANmode:
1006 switch (GET_CODE (code))
1008 case EQ: return CC0;
1009 case NE: return CC1 | CC2 | CC3;
1010 case LT: return CC1;
1011 case GT: return CC2 | CC3;
1012 case LE: return CC0 | CC1;
1013 case GE: return CC0 | CC2 | CC3;
1014 default: return -1;
1016 break;
1018 case CCSmode:
1019 switch (GET_CODE (code))
1021 case EQ: return CC0;
1022 case NE: return CC1 | CC2 | CC3;
1023 case LT: return CC1;
1024 case GT: return CC2;
1025 case LE: return CC0 | CC1;
1026 case GE: return CC0 | CC2;
1027 case UNORDERED: return CC3;
1028 case ORDERED: return CC0 | CC1 | CC2;
1029 case UNEQ: return CC0 | CC3;
1030 case UNLT: return CC1 | CC3;
1031 case UNGT: return CC2 | CC3;
1032 case UNLE: return CC0 | CC1 | CC3;
1033 case UNGE: return CC0 | CC2 | CC3;
1034 case LTGT: return CC1 | CC2;
1035 default: return -1;
1037 break;
1039 case CCSRmode:
1040 switch (GET_CODE (code))
1042 case EQ: return CC0;
1043 case NE: return CC2 | CC1 | CC3;
1044 case LT: return CC2;
1045 case GT: return CC1;
1046 case LE: return CC0 | CC2;
1047 case GE: return CC0 | CC1;
1048 case UNORDERED: return CC3;
1049 case ORDERED: return CC0 | CC2 | CC1;
1050 case UNEQ: return CC0 | CC3;
1051 case UNLT: return CC2 | CC3;
1052 case UNGT: return CC1 | CC3;
1053 case UNLE: return CC0 | CC2 | CC3;
1054 case UNGE: return CC0 | CC1 | CC3;
1055 case LTGT: return CC2 | CC1;
1056 default: return -1;
1058 break;
1060 default:
1061 return -1;
1066 /* Return branch condition mask to implement a compare and branch
1067 specified by CODE. Return -1 for invalid comparisons. */
static int
s390_compare_and_branch_condition_mask (rtx code)
{
1072 const int CC0 = 1 << 3;
1073 const int CC1 = 1 << 2;
1074 const int CC2 = 1 << 1;
1076 switch (GET_CODE (code))
1078 case EQ:
1079 return CC0;
1080 case NE:
1081 return CC1 | CC2;
1082 case LT:
1083 case LTU:
1084 return CC1;
1085 case GT:
1086 case GTU:
1087 return CC2;
1088 case LE:
1089 case LEU:
1090 return CC0 | CC1;
1091 case GE:
1092 case GEU:
1093 return CC0 | CC2;
1094 default:
1095 gcc_unreachable ();
1097 return -1;
/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */
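/* Illustrative example (added): a GE comparison in CCSmode selects
   CC0 | CC2, i.e. mask 10, and mnemonic[10] below is "he"
   ("branch on high or equal").  */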
static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  int mask;

  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  if (GET_CODE (XEXP (code, 0)) == REG
      && REGNO (XEXP (code, 0)) == CC_REGNUM
      && XEXP (code, 1) == const0_rtx)
    mask = s390_branch_condition_mask (code);
  else
    mask = s390_compare_and_branch_condition_mask (code);

  gcc_assert (mask >= 0);

  if (inv)
    mask ^= 15;

  gcc_assert (mask >= 1 && mask <= 14);

  return mnemonic[mask];
}
/* Return the part of op which has a value different from def.
   The size of the part is determined by mode.
   Use this function only if you already know that op really
   contains such a part.  */
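/* Illustration (added, assuming a 64-bit HOST_WIDE_INT): for
   OP = 0x0000ffff00000000, MODE = HImode and DEF = 0 the non-default
   part is 0xffff.  */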
1139 unsigned HOST_WIDE_INT
1140 s390_extract_part (rtx op, enum machine_mode mode, int def)
1142 unsigned HOST_WIDE_INT value = 0;
1143 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
1144 int part_bits = GET_MODE_BITSIZE (mode);
1145 unsigned HOST_WIDE_INT part_mask
1146 = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
1147 int i;
1149 for (i = 0; i < max_parts; i++)
1151 if (i == 0)
1152 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1153 else
1154 value >>= part_bits;
1156 if ((value & part_mask) != (def & part_mask))
1157 return value & part_mask;
1160 gcc_unreachable ();
1163 /* If OP is an integer constant of mode MODE with exactly one
1164 part of mode PART_MODE unequal to DEF, return the number of that
1165 part. Otherwise, return -1. */
int
s390_single_part (rtx op,
1169 enum machine_mode mode,
1170 enum machine_mode part_mode,
1171 int def)
1173 unsigned HOST_WIDE_INT value = 0;
1174 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
1175 unsigned HOST_WIDE_INT part_mask
1176 = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
1177 int i, part = -1;
1179 if (GET_CODE (op) != CONST_INT)
1180 return -1;
1182 for (i = 0; i < n_parts; i++)
1184 if (i == 0)
1185 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1186 else
1187 value >>= GET_MODE_BITSIZE (part_mode);
1189 if ((value & part_mask) != (def & part_mask))
1191 if (part != -1)
1192 return -1;
1193 else
1194 part = i;
1197 return part == -1 ? -1 : n_parts - 1 - part;
/* Return true if IN contains a contiguous bitfield in the lower SIZE
   bits and no other bits are set in IN.  POS and LENGTH can be used
   to obtain the start position and the length of the bitfield.

   POS gives the position of the first bit of the bitfield counting
   from the lowest order bit starting with zero.  In order to use this
   value for S/390 instructions this has to be converted to "bits big
   endian" style.  */
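/* Illustration (added): for IN = 0x78 and SIZE = 32 the function returns
   true with *POS = 3 and *LENGTH = 4, since bits 3 through 6 are set.  */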
1209 bool
1210 s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT in, int size,
1211 int *pos, int *length)
1213 int tmp_pos = 0;
1214 int tmp_length = 0;
1215 int i;
1216 unsigned HOST_WIDE_INT mask = 1ULL;
1217 bool contiguous = false;
1219 for (i = 0; i < size; mask <<= 1, i++)
1221 if (contiguous)
1223 if (mask & in)
1224 tmp_length++;
1225 else
1226 break;
1228 else
1230 if (mask & in)
1232 contiguous = true;
1233 tmp_length++;
1235 else
1236 tmp_pos++;
1240 if (!tmp_length)
1241 return false;
1243 /* Calculate a mask for all bits beyond the contiguous bits. */
1244 mask = (-1LL & ~(((1ULL << (tmp_length + tmp_pos - 1)) << 1) - 1));
1246 if (mask & in)
1247 return false;
1249 if (tmp_length + tmp_pos - 1 > size)
1250 return false;
1252 if (length)
1253 *length = tmp_length;
1255 if (pos)
1256 *pos = tmp_pos;
1258 return true;
1261 /* Check whether we can (and want to) split a double-word
1262 move in mode MODE from SRC to DST into two single-word
1263 moves, moving the subword FIRST_SUBWORD first. */
1265 bool
1266 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
1268 /* Floating point registers cannot be split. */
1269 if (FP_REG_P (src) || FP_REG_P (dst))
1270 return false;
1272 /* We don't need to split if operands are directly accessible. */
1273 if (s_operand (src, mode) || s_operand (dst, mode))
1274 return false;
1276 /* Non-offsettable memory references cannot be split. */
1277 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1278 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1279 return false;
1281 /* Moving the first subword must not clobber a register
1282 needed to move the second subword. */
1283 if (register_operand (dst, mode))
1285 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1286 if (reg_overlap_mentioned_p (subreg, src))
1287 return false;
1290 return true;
1293 /* Return true if it can be proven that [MEM1, MEM1 + SIZE]
1294 and [MEM2, MEM2 + SIZE] do overlap and false
1295 otherwise. */
1297 bool
1298 s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
1300 rtx addr1, addr2, addr_delta;
1301 HOST_WIDE_INT delta;
1303 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1304 return true;
1306 if (size == 0)
1307 return false;
1309 addr1 = XEXP (mem1, 0);
1310 addr2 = XEXP (mem2, 0);
1312 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1314 /* This overlapping check is used by peepholes merging memory block operations.
1315 Overlapping operations would otherwise be recognized by the S/390 hardware
1316 and would fall back to a slower implementation. Allowing overlapping
1317 operations would lead to slow code but not to wrong code. Therefore we are
1318 somewhat optimistic if we cannot prove that the memory blocks are
1319 overlapping.
1320 That's why we return false here although this may accept operations on
1321 overlapping memory areas. */
1322 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
1323 return false;
1325 delta = INTVAL (addr_delta);
1327 if (delta == 0
1328 || (delta > 0 && delta < size)
1329 || (delta < 0 && -delta < size))
1330 return true;
1332 return false;
1335 /* Check whether the address of memory reference MEM2 equals exactly
1336 the address of memory reference MEM1 plus DELTA. Return true if
1337 we can prove this to be the case, false otherwise. */
1339 bool
1340 s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1342 rtx addr1, addr2, addr_delta;
1344 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1345 return false;
1347 addr1 = XEXP (mem1, 0);
1348 addr2 = XEXP (mem2, 0);
1350 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1351 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1352 return false;
1354 return true;
1357 /* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1359 void
1360 s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
1361 rtx *operands)
1363 enum machine_mode wmode = mode;
1364 rtx dst = operands[0];
1365 rtx src1 = operands[1];
1366 rtx src2 = operands[2];
1367 rtx op, clob, tem;
1369 /* If we cannot handle the operation directly, use a temp register. */
1370 if (!s390_logical_operator_ok_p (operands))
1371 dst = gen_reg_rtx (mode);
1373 /* QImode and HImode patterns make sense only if we have a destination
1374 in memory. Otherwise perform the operation in SImode. */
1375 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
1376 wmode = SImode;
1378 /* Widen operands if required. */
1379 if (mode != wmode)
1381 if (GET_CODE (dst) == SUBREG
1382 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
1383 dst = tem;
1384 else if (REG_P (dst))
1385 dst = gen_rtx_SUBREG (wmode, dst, 0);
1386 else
1387 dst = gen_reg_rtx (wmode);
1389 if (GET_CODE (src1) == SUBREG
1390 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
1391 src1 = tem;
1392 else if (GET_MODE (src1) != VOIDmode)
1393 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
1395 if (GET_CODE (src2) == SUBREG
1396 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
1397 src2 = tem;
1398 else if (GET_MODE (src2) != VOIDmode)
1399 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
1402 /* Emit the instruction. */
1403 op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
1404 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
1405 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
1407 /* Fix up the destination if needed. */
1408 if (dst != operands[0])
1409 emit_move_insn (operands[0], gen_lowpart (mode, dst));
1412 /* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1414 bool
1415 s390_logical_operator_ok_p (rtx *operands)
1417 /* If the destination operand is in memory, it needs to coincide
1418 with one of the source operands. After reload, it has to be
1419 the first source operand. */
1420 if (GET_CODE (operands[0]) == MEM)
1421 return rtx_equal_p (operands[0], operands[1])
1422 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1424 return true;
1427 /* Narrow logical operation CODE of memory operand MEMOP with immediate
1428 operand IMMOP to switch from SS to SI type instructions. */
1430 void
1431 s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
1433 int def = code == AND ? -1 : 0;
1434 HOST_WIDE_INT mask;
1435 int part;
1437 gcc_assert (GET_CODE (*memop) == MEM);
1438 gcc_assert (!MEM_VOLATILE_P (*memop));
1440 mask = s390_extract_part (*immop, QImode, def);
1441 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1442 gcc_assert (part >= 0);
1444 *memop = adjust_address (*memop, QImode, part);
1445 *immop = gen_int_mode (mask, QImode);
1449 /* How to allocate a 'struct machine_function'. */
1451 static struct machine_function *
1452 s390_init_machine_status (void)
1454 return GGC_CNEW (struct machine_function);
1457 /* Change optimizations to be performed, depending on the
1458 optimization level.
1460 LEVEL is the optimization level specified; 2 if `-O2' is
1461 specified, 1 if `-O' is specified, and 0 if neither is specified.
1463 SIZE is nonzero if `-Os' is specified and zero otherwise. */
1465 void
1466 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1468 /* ??? There are apparently still problems with -fcaller-saves. */
1469 flag_caller_saves = 0;
1471 /* By default, always emit DWARF-2 unwind info. This allows debugging
1472 without maintaining a stack frame back-chain. */
1473 flag_asynchronous_unwind_tables = 1;
1475 /* Use MVCLE instructions to decrease code size if requested. */
1476 if (size != 0)
1477 target_flags |= MASK_MVCLE;
1480 /* Return true if ARG is the name of a processor. Set *TYPE and *FLAGS
1481 to the associated processor_type and processor_flags if so. */
1483 static bool
1484 s390_handle_arch_option (const char *arg,
1485 enum processor_type *type,
1486 enum processor_flags *flags)
1488 static struct pta
1490 const char *const name; /* processor name or nickname. */
1491 const enum processor_type processor;
1492 const enum processor_flags flags;
1494 const processor_alias_table[] =
1496 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1497 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1498 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
1499 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
1500 | PF_LONG_DISPLACEMENT},
1501 {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1502 | PF_LONG_DISPLACEMENT | PF_EXTIMM},
1503 {"z9-ec", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1504 | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP },
1505 {"z10", PROCESSOR_2097_Z10, PF_IEEE_FLOAT | PF_ZARCH
1506 | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP | PF_Z10},
1508 size_t i;
1510 for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
1511 if (strcmp (arg, processor_alias_table[i].name) == 0)
1513 *type = processor_alias_table[i].processor;
1514 *flags = processor_alias_table[i].flags;
1515 return true;
1517 return false;
1520 /* Implement TARGET_HANDLE_OPTION. */
1522 static bool
1523 s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
1525 switch (code)
1527 case OPT_march_:
1528 return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);
1530 case OPT_mstack_guard_:
1531 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
1532 return false;
1533 if (exact_log2 (s390_stack_guard) == -1)
1534 error ("stack guard value must be an exact power of 2");
1535 return true;
1537 case OPT_mstack_size_:
1538 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
1539 return false;
1540 if (exact_log2 (s390_stack_size) == -1)
1541 error ("stack size must be an exact power of 2");
1542 return true;
1544 case OPT_mtune_:
1545 return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);
1547 case OPT_mwarn_framesize_:
1548 return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;
1550 default:
1551 return true;
1555 void
1556 override_options (void)
1558 /* Set up function hooks. */
1559 init_machine_status = s390_init_machine_status;
1561 /* Architecture mode defaults according to ABI. */
1562 if (!(target_flags_explicit & MASK_ZARCH))
1564 if (TARGET_64BIT)
1565 target_flags |= MASK_ZARCH;
1566 else
1567 target_flags &= ~MASK_ZARCH;
1570 /* Determine processor architectural level. */
1571 if (!s390_arch_string)
1573 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1574 s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
1577 /* Determine processor to tune for. */
1578 if (s390_tune == PROCESSOR_max)
1580 s390_tune = s390_arch;
1581 s390_tune_flags = s390_arch_flags;
1584 /* Sanity checks. */
1585 if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
1586 error ("z/Architecture mode not supported on %s", s390_arch_string);
1587 if (TARGET_64BIT && !TARGET_ZARCH)
1588 error ("64-bit ABI not supported in ESA/390 mode");
1590 if (TARGET_HARD_DFP && !TARGET_DFP)
1592 if (target_flags_explicit & MASK_HARD_DFP)
1594 if (!TARGET_CPU_DFP)
1595 error ("Hardware decimal floating point instructions"
1596 " not available on %s", s390_arch_string);
1597 if (!TARGET_ZARCH)
1598 error ("Hardware decimal floating point instructions"
1599 " not available in ESA/390 mode");
1601 else
1602 target_flags &= ~MASK_HARD_DFP;
1605 if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
1607 if ((target_flags_explicit & MASK_HARD_DFP) && TARGET_HARD_DFP)
1608 error ("-mhard-dfp can't be used in conjunction with -msoft-float");
1610 target_flags &= ~MASK_HARD_DFP;
1613 /* Set processor cost function. */
1614 switch (s390_tune)
1616 case PROCESSOR_2084_Z990:
1617 s390_cost = &z990_cost;
1618 break;
1619 case PROCESSOR_2094_Z9_109:
1620 s390_cost = &z9_109_cost;
1621 break;
1622 case PROCESSOR_2097_Z10:
1623 s390_cost = &z10_cost;
1624 break;
1625 default:
1626 s390_cost = &z900_cost;
1629 if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
1630 error ("-mbackchain -mpacked-stack -mhard-float are not supported "
1631 "in combination");
1633 if (s390_stack_size)
1635 if (s390_stack_guard >= s390_stack_size)
1636 error ("stack size must be greater than the stack guard value");
1637 else if (s390_stack_size > 1 << 16)
1638 error ("stack size must not be greater than 64k");
1640 else if (s390_stack_guard)
1641 error ("-mstack-guard implies use of -mstack-size");
1643 #ifdef TARGET_DEFAULT_LONG_DOUBLE_128
1644 if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
1645 target_flags |= MASK_LONG_DOUBLE_128;
1646 #endif
1649 /* Map for smallest class containing reg regno. */
1651 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1652 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1653 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1654 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1655 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1656 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1657 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1658 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1659 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1660 ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS,
1661 ACCESS_REGS, ACCESS_REGS
1664 /* Return attribute type of insn. */
1666 static enum attr_type
1667 s390_safe_attr_type (rtx insn)
1669 if (recog_memoized (insn) >= 0)
1670 return get_attr_type (insn);
1671 else
1672 return TYPE_NONE;
1675 /* Return true if DISP is a valid short displacement. */
1677 static bool
1678 s390_short_displacement (rtx disp)
1680 /* No displacement is OK. */
1681 if (!disp)
1682 return true;
1684 /* Integer displacement in range. */
1685 if (GET_CODE (disp) == CONST_INT)
1686 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1688 /* GOT offset is not OK, the GOT can be large. */
1689 if (GET_CODE (disp) == CONST
1690 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1691 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
1692 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
1693 return false;
1695 /* All other symbolic constants are literal pool references,
1696 which are OK as the literal pool must be small. */
1697 if (GET_CODE (disp) == CONST)
1698 return true;
1700 return false;
/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns false if ADDR is not a valid memory address, true
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */
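/* Illustration (added): (plus (reg 2) (const_int 100)) decomposes into
   base = (reg 2), no index, and displacement 100.  */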
1714 static int
1715 s390_decompose_address (rtx addr, struct s390_address *out)
1717 HOST_WIDE_INT offset = 0;
1718 rtx base = NULL_RTX;
1719 rtx indx = NULL_RTX;
1720 rtx disp = NULL_RTX;
1721 rtx orig_disp;
1722 bool pointer = false;
1723 bool base_ptr = false;
1724 bool indx_ptr = false;
1725 bool literal_pool = false;
1727 /* We may need to substitute the literal pool base register into the address
1728 below. However, at this point we do not know which register is going to
1729 be used as base, so we substitute the arg pointer register. This is going
1730 to be treated as holding a pointer below -- it shouldn't be used for any
1731 other purpose. */
1732 rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);
1734 /* Decompose address into base + index + displacement. */
1736 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1737 base = addr;
1739 else if (GET_CODE (addr) == PLUS)
1741 rtx op0 = XEXP (addr, 0);
1742 rtx op1 = XEXP (addr, 1);
1743 enum rtx_code code0 = GET_CODE (op0);
1744 enum rtx_code code1 = GET_CODE (op1);
1746 if (code0 == REG || code0 == UNSPEC)
1748 if (code1 == REG || code1 == UNSPEC)
1750 indx = op0; /* index + base */
1751 base = op1;
1754 else
1756 base = op0; /* base + displacement */
1757 disp = op1;
1761 else if (code0 == PLUS)
1763 indx = XEXP (op0, 0); /* index + base + disp */
1764 base = XEXP (op0, 1);
1765 disp = op1;
1768 else
1770 return false;
1774 else
1775 disp = addr; /* displacement */
1777 /* Extract integer part of displacement. */
1778 orig_disp = disp;
1779 if (disp)
1781 if (GET_CODE (disp) == CONST_INT)
1783 offset = INTVAL (disp);
1784 disp = NULL_RTX;
1786 else if (GET_CODE (disp) == CONST
1787 && GET_CODE (XEXP (disp, 0)) == PLUS
1788 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1790 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1791 disp = XEXP (XEXP (disp, 0), 0);
1795 /* Strip off CONST here to avoid special case tests later. */
1796 if (disp && GET_CODE (disp) == CONST)
1797 disp = XEXP (disp, 0);
1799 /* We can convert literal pool addresses to
1800 displacements by basing them off the base register. */
1801 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
1803 /* Either base or index must be free to hold the base register. */
1804 if (!base)
1805 base = fake_pool_base, literal_pool = true;
1806 else if (!indx)
1807 indx = fake_pool_base, literal_pool = true;
1808 else
1809 return false;
1811 /* Mark up the displacement. */
1812 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
1813 UNSPEC_LTREL_OFFSET);
1816 /* Validate base register. */
1817 if (base)
1819 if (GET_CODE (base) == UNSPEC)
1820 switch (XINT (base, 1))
1822 case UNSPEC_LTREF:
1823 if (!disp)
1824 disp = gen_rtx_UNSPEC (Pmode,
1825 gen_rtvec (1, XVECEXP (base, 0, 0)),
1826 UNSPEC_LTREL_OFFSET);
1827 else
1828 return false;
1830 base = XVECEXP (base, 0, 1);
1831 break;
1833 case UNSPEC_LTREL_BASE:
1834 if (XVECLEN (base, 0) == 1)
1835 base = fake_pool_base, literal_pool = true;
1836 else
1837 base = XVECEXP (base, 0, 1);
1838 break;
1840 default:
1841 return false;
1844 if (!REG_P (base)
1845 || (GET_MODE (base) != SImode
1846 && GET_MODE (base) != Pmode))
1847 return false;
1849 if (REGNO (base) == STACK_POINTER_REGNUM
1850 || REGNO (base) == FRAME_POINTER_REGNUM
1851 || ((reload_completed || reload_in_progress)
1852 && frame_pointer_needed
1853 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1854 || REGNO (base) == ARG_POINTER_REGNUM
1855 || (flag_pic
1856 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1857 pointer = base_ptr = true;
1859 if ((reload_completed || reload_in_progress)
1860 && base == cfun->machine->base_reg)
1861 pointer = base_ptr = literal_pool = true;
1864 /* Validate index register. */
1865 if (indx)
1867 if (GET_CODE (indx) == UNSPEC)
1868 switch (XINT (indx, 1))
1870 case UNSPEC_LTREF:
1871 if (!disp)
1872 disp = gen_rtx_UNSPEC (Pmode,
1873 gen_rtvec (1, XVECEXP (indx, 0, 0)),
1874 UNSPEC_LTREL_OFFSET);
1875 else
1876 return false;
1878 indx = XVECEXP (indx, 0, 1);
1879 break;
1881 case UNSPEC_LTREL_BASE:
1882 if (XVECLEN (indx, 0) == 1)
1883 indx = fake_pool_base, literal_pool = true;
1884 else
1885 indx = XVECEXP (indx, 0, 1);
1886 break;
1888 default:
1889 return false;
1892 if (!REG_P (indx)
1893 || (GET_MODE (indx) != SImode
1894 && GET_MODE (indx) != Pmode))
1895 return false;
1897 if (REGNO (indx) == STACK_POINTER_REGNUM
1898 || REGNO (indx) == FRAME_POINTER_REGNUM
1899 || ((reload_completed || reload_in_progress)
1900 && frame_pointer_needed
1901 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1902 || REGNO (indx) == ARG_POINTER_REGNUM
1903 || (flag_pic
1904 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1905 pointer = indx_ptr = true;
1907 if ((reload_completed || reload_in_progress)
1908 && indx == cfun->machine->base_reg)
1909 pointer = indx_ptr = literal_pool = true;
1912 /* Prefer to use pointer as base, not index. */
1913 if (base && indx && !base_ptr
1914 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
1916 rtx tmp = base;
1917 base = indx;
1918 indx = tmp;
1921 /* Validate displacement. */
1922 if (!disp)
1924 /* If virtual registers are involved, the displacement will change later
1925 anyway as the virtual registers get eliminated. This could make a
1926 valid displacement invalid, but it is more likely to make an invalid
1927 displacement valid, because we sometimes access the register save area
1928 via negative offsets to one of those registers.
1929 Thus we don't check the displacement for validity here. If after
1930 elimination the displacement turns out to be invalid after all,
1931 this is fixed up by reload in any case. */
1932 if (base != arg_pointer_rtx
1933 && indx != arg_pointer_rtx
1934 && base != return_address_pointer_rtx
1935 && indx != return_address_pointer_rtx
1936 && base != frame_pointer_rtx
1937 && indx != frame_pointer_rtx
1938 && base != virtual_stack_vars_rtx
1939 && indx != virtual_stack_vars_rtx)
1940 if (!DISP_IN_RANGE (offset))
1941 return false;
1943 else
1945 /* All the special cases are pointers. */
1946 pointer = true;
1948 /* In the small-PIC case, the linker converts @GOT
1949 and @GOTNTPOFF offsets to possible displacements. */
1950 if (GET_CODE (disp) == UNSPEC
1951 && (XINT (disp, 1) == UNSPEC_GOT
1952 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
1953 && flag_pic == 1)
1958 /* Accept chunkified literal pool symbol references. */
1959 else if (cfun && cfun->machine
1960 && cfun->machine->decomposed_literal_pool_addresses_ok_p
1961 && GET_CODE (disp) == MINUS
1962 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
1963 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
1968 /* Accept literal pool references. */
1969 else if (GET_CODE (disp) == UNSPEC
1970 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
1972 orig_disp = gen_rtx_CONST (Pmode, disp);
1973 if (offset)
1975 /* If we have an offset, make sure it does not
1976 exceed the size of the constant pool entry. */
1977 rtx sym = XVECEXP (disp, 0, 0);
1978 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
1979 return false;
1981 orig_disp = plus_constant (orig_disp, offset);
1985 else
1986 return false;
1989 if (!base && !indx)
1990 pointer = true;
1992 if (out)
1994 out->base = base;
1995 out->indx = indx;
1996 out->disp = orig_disp;
1997 out->pointer = pointer;
1998 out->literal_pool = literal_pool;
2001 return true;
2004 /* Decompose a RTL expression OP for a shift count into its components,
2005 and return the base register in BASE and the offset in OFFSET.
2007 Return true if OP is a valid shift count, false if not. */
2009 bool
2010 s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
2012 HOST_WIDE_INT off = 0;
2014 /* We can have an integer constant, an address register,
2015 or a sum of the two. */
2016 if (GET_CODE (op) == CONST_INT)
2018 off = INTVAL (op);
2019 op = NULL_RTX;
2021 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
2023 off = INTVAL (XEXP (op, 1));
2024 op = XEXP (op, 0);
2026 while (op && GET_CODE (op) == SUBREG)
2027 op = SUBREG_REG (op);
2029 if (op && GET_CODE (op) != REG)
2030 return false;
2032 if (offset)
2033 *offset = off;
2034 if (base)
2035 *base = op;
2037 return true;
2041 /* Return true if CODE is a valid address without index. */
2043 bool
2044 s390_legitimate_address_without_index_p (rtx op)
2046 struct s390_address addr;
2048 if (!s390_decompose_address (XEXP (op, 0), &addr))
2049 return false;
2050 if (addr.indx)
2051 return false;
2053 return true;
2057 /* Evaluates constraint strings described by the regular expression
2058 ([A|B](Q|R|S|T))|U|W and returns 1 if OP is a valid operand for the
2059 constraint given in STR, or 0 else. */
int
s390_mem_constraint (const char *str, rtx op)
{
2064 struct s390_address addr;
2065 char c = str[0];
2067 /* Check for offsettable variants of memory constraints. */
2068 if (c == 'A')
2070 /* Only accept non-volatile MEMs. */
2071 if (!MEM_P (op) || MEM_VOLATILE_P (op))
2072 return 0;
2074 if ((reload_completed || reload_in_progress)
2075 ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
2076 return 0;
2078 c = str[1];
2081 /* Check for non-literal-pool variants of memory constraints. */
2082 else if (c == 'B')
2084 if (GET_CODE (op) != MEM)
2085 return 0;
2086 if (!s390_decompose_address (XEXP (op, 0), &addr))
2087 return 0;
2088 if (addr.literal_pool)
2089 return 0;
2091 c = str[1];
2094 switch (c)
2096 case 'Q':
2097 if (GET_CODE (op) != MEM)
2098 return 0;
2099 if (!s390_decompose_address (XEXP (op, 0), &addr))
2100 return 0;
2101 if (addr.indx)
2102 return 0;
2104 if (TARGET_LONG_DISPLACEMENT)
2106 if (!s390_short_displacement (addr.disp))
2107 return 0;
2109 break;
2111 case 'R':
2112 if (GET_CODE (op) != MEM)
2113 return 0;
2115 if (TARGET_LONG_DISPLACEMENT)
2117 if (!s390_decompose_address (XEXP (op, 0), &addr))
2118 return 0;
2119 if (!s390_short_displacement (addr.disp))
2120 return 0;
2122 break;
2124 case 'S':
2125 if (!TARGET_LONG_DISPLACEMENT)
2126 return 0;
2127 if (GET_CODE (op) != MEM)
2128 return 0;
2129 if (!s390_decompose_address (XEXP (op, 0), &addr))
2130 return 0;
2131 if (addr.indx)
2132 return 0;
2133 if (s390_short_displacement (addr.disp))
2134 return 0;
2135 break;
2137 case 'T':
2138 if (!TARGET_LONG_DISPLACEMENT)
2139 return 0;
2140 if (GET_CODE (op) != MEM)
2141 return 0;
2142 if (!s390_decompose_address (XEXP (op, 0), &addr))
2143 return 0;
2144 if (s390_short_displacement (addr.disp))
2145 return 0;
2146 break;
2148 case 'U':
2149 if (TARGET_LONG_DISPLACEMENT)
2151 if (!s390_decompose_address (op, &addr))
2152 return 0;
2153 if (!s390_short_displacement (addr.disp))
2154 return 0;
2156 break;
2158 case 'W':
2159 if (!TARGET_LONG_DISPLACEMENT)
2160 return 0;
2161 if (!s390_decompose_address (op, &addr))
2162 return 0;
2163 if (s390_short_displacement (addr.disp))
2164 return 0;
2165 break;
2167 case 'Y':
2168 /* Simply check for the basic form of a shift count. Reload will
2169 take care of making sure we have a proper base register. */
2170 if (!s390_decompose_shift_count (op, NULL, NULL))
2171 return 0;
2172 break;
2174 default:
2175 return 0;
2178 return 1;
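/* To illustrate the scheme above: a two-letter constraint of the
   'A'-prefixed form, say "AQ", first demands a non-volatile,
   offsettable MEM and then applies the 'Q' test, i.e. an address
   without an index register whose displacement is short on
   TARGET_LONG_DISPLACEMENT machines.  The constraint strings actually
   in use are defined in the machine description; "AQ" merely serves
   as an example here.  */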
2183 /* Evaluates constraint strings starting with letter O. Input
2184 parameter C is the letter that follows the "O" in the constraint
2185 string. Returns 1 if VALUE meets the respective constraint and 0
2186 otherwise. */
2189 s390_O_constraint_str (const char c, HOST_WIDE_INT value)
2191 if (!TARGET_EXTIMM)
2192 return 0;
2194 switch (c)
2196 case 's':
2197 return trunc_int_for_mode (value, SImode) == value;
2199 case 'p':
2200 return value == 0
2201 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
2203 case 'n':
2204 return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;
2206 default:
2207 gcc_unreachable ();
2212 /* Evaluates constraint strings starting with letter N. Parameter STR
2213 contains the letters following letter "N" in the constraint string.
2214 Returns true if VALUE matches the constraint. */
2217 s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
2219 enum machine_mode mode, part_mode;
2220 int def;
2221 int part, part_goal;
2224 if (str[0] == 'x')
2225 part_goal = -1;
2226 else
2227 part_goal = str[0] - '0';
2229 switch (str[1])
2231 case 'Q':
2232 part_mode = QImode;
2233 break;
2234 case 'H':
2235 part_mode = HImode;
2236 break;
2237 case 'S':
2238 part_mode = SImode;
2239 break;
2240 default:
2241 return 0;
2244 switch (str[2])
2246 case 'H':
2247 mode = HImode;
2248 break;
2249 case 'S':
2250 mode = SImode;
2251 break;
2252 case 'D':
2253 mode = DImode;
2254 break;
2255 default:
2256 return 0;
2259 switch (str[3])
2261 case '0':
2262 def = 0;
2263 break;
2264 case 'F':
2265 def = -1;
2266 break;
2267 default:
2268 return 0;
2271 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
2272 return 0;
2274 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
2275 if (part < 0)
2276 return 0;
2277 if (part_goal != -1 && part_goal != part)
2278 return 0;
2280 return 1;
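/* As an illustration, a (hypothetical) suffix string "xQD0" would be
   decoded by the code above as: any single part may differ
   (part_goal = -1), the differing part is QImode wide, VALUE is
   interpreted in DImode, and all remaining parts must equal 0
   (def = 0).  The suffixes actually used are those appearing in the
   machine description.  */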
2284 /* Returns true if the input parameter VALUE is a float zero. */
2287 s390_float_const_zero_p (rtx value)
2289 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
2290 && value == CONST0_RTX (GET_MODE (value)));
2294 /* Compute a (partial) cost for rtx X. Return true if the complete
2295 cost has been computed, and false if subexpressions should be
2296 scanned. In either case, *TOTAL contains the cost result.
2297 CODE contains GET_CODE (x), OUTER_CODE contains the code
2298 of the superexpression of x. */
2300 static bool
2301 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
2303 switch (code)
2305 case CONST:
2306 case CONST_INT:
2307 case LABEL_REF:
2308 case SYMBOL_REF:
2309 case CONST_DOUBLE:
2310 case MEM:
2311 *total = 0;
2312 return true;
2314 case ASHIFT:
2315 case ASHIFTRT:
2316 case LSHIFTRT:
2317 case ROTATE:
2318 case ROTATERT:
2319 case AND:
2320 case IOR:
2321 case XOR:
2322 case NEG:
2323 case NOT:
2324 *total = COSTS_N_INSNS (1);
2325 return false;
2327 case PLUS:
2328 case MINUS:
2329 /* Check for multiply and add. */
2330 if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
2331 && GET_CODE (XEXP (x, 0)) == MULT
2332 && TARGET_HARD_FLOAT && TARGET_FUSED_MADD)
2334 /* This is the multiply and add case. */
2335 if (GET_MODE (x) == DFmode)
2336 *total = s390_cost->madbr;
2337 else
2338 *total = s390_cost->maebr;
2339 *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
2340 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT)
2341 + rtx_cost (XEXP (x, 1), code);
2342 return true; /* Do not do an additional recursive descent. */
2344 *total = COSTS_N_INSNS (1);
2345 return false;
2347 case MULT:
2348 switch (GET_MODE (x))
2350 case SImode:
2352 rtx left = XEXP (x, 0);
2353 rtx right = XEXP (x, 1);
2354 if (GET_CODE (right) == CONST_INT
2355 && CONST_OK_FOR_K (INTVAL (right)))
2356 *total = s390_cost->mhi;
2357 else if (GET_CODE (left) == SIGN_EXTEND)
2358 *total = s390_cost->mh;
2359 else
2360 *total = s390_cost->ms; /* msr, ms, msy */
2361 break;
2363 case DImode:
2365 rtx left = XEXP (x, 0);
2366 rtx right = XEXP (x, 1);
2367 if (TARGET_64BIT)
2369 if (GET_CODE (right) == CONST_INT
2370 && CONST_OK_FOR_K (INTVAL (right)))
2371 *total = s390_cost->mghi;
2372 else if (GET_CODE (left) == SIGN_EXTEND)
2373 *total = s390_cost->msgf;
2374 else
2375 *total = s390_cost->msg; /* msgr, msg */
2377 else /* TARGET_31BIT */
2379 if (GET_CODE (left) == SIGN_EXTEND
2380 && GET_CODE (right) == SIGN_EXTEND)
2381 /* mulsidi case: mr, m */
2382 *total = s390_cost->m;
2383 else if (GET_CODE (left) == ZERO_EXTEND
2384 && GET_CODE (right) == ZERO_EXTEND
2385 && TARGET_CPU_ZARCH)
2386 /* umulsidi case: ml, mlr */
2387 *total = s390_cost->ml;
2388 else
2389 /* Complex calculation is required. */
2390 *total = COSTS_N_INSNS (40);
2392 break;
2394 case SFmode:
2395 case DFmode:
2396 *total = s390_cost->mult_df;
2397 break;
2398 case TFmode:
2399 *total = s390_cost->mxbr;
2400 break;
2401 default:
2402 return false;
2404 return false;
2406 case UDIV:
2407 case UMOD:
2408 if (GET_MODE (x) == TImode) /* 128 bit division */
2409 *total = s390_cost->dlgr;
2410 else if (GET_MODE (x) == DImode)
2412 rtx right = XEXP (x, 1);
2413 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2414 *total = s390_cost->dlr;
2415 else /* 64 by 64 bit division */
2416 *total = s390_cost->dlgr;
2418 else if (GET_MODE (x) == SImode) /* 32 bit division */
2419 *total = s390_cost->dlr;
2420 return false;
2422 case DIV:
2423 case MOD:
2424 if (GET_MODE (x) == DImode)
2426 rtx right = XEXP (x, 1);
2427 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2428 if (TARGET_64BIT)
2429 *total = s390_cost->dsgfr;
2430 else
2431 *total = s390_cost->dr;
2432 else /* 64 by 64 bit division */
2433 *total = s390_cost->dsgr;
2435 else if (GET_MODE (x) == SImode) /* 32 bit division */
2436 *total = s390_cost->dlr;
2437 else if (GET_MODE (x) == SFmode)
2439 *total = s390_cost->debr;
2441 else if (GET_MODE (x) == DFmode)
2443 *total = s390_cost->ddbr;
2445 else if (GET_MODE (x) == TFmode)
2447 *total = s390_cost->dxbr;
2449 return false;
2451 case SQRT:
2452 if (GET_MODE (x) == SFmode)
2453 *total = s390_cost->sqebr;
2454 else if (GET_MODE (x) == DFmode)
2455 *total = s390_cost->sqdbr;
2456 else /* TFmode */
2457 *total = s390_cost->sqxbr;
2458 return false;
2460 case SIGN_EXTEND:
2461 case ZERO_EXTEND:
2462 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2463 || outer_code == PLUS || outer_code == MINUS
2464 || outer_code == COMPARE)
2465 *total = 0;
2466 return false;
2468 case COMPARE:
2469 *total = COSTS_N_INSNS (1);
2470 if (GET_CODE (XEXP (x, 0)) == AND
2471 && GET_CODE (XEXP (x, 1)) == CONST_INT
2472 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2474 rtx op0 = XEXP (XEXP (x, 0), 0);
2475 rtx op1 = XEXP (XEXP (x, 0), 1);
2476 rtx op2 = XEXP (x, 1);
2478 if (memory_operand (op0, GET_MODE (op0))
2479 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2480 return true;
2481 if (register_operand (op0, GET_MODE (op0))
2482 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2483 return true;
2485 return false;
2487 default:
2488 return false;
2492 /* Return the cost of an address rtx ADDR. */
2494 static int
2495 s390_address_cost (rtx addr)
2497 struct s390_address ad;
2498 if (!s390_decompose_address (addr, &ad))
2499 return 1000;
2501 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
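/* For example, (plus (reg) (const_int 4000)) decomposes without an
   index register and is rated COSTS_N_INSNS (1); a base + index +
   displacement address costs one unit more, and an address that
   cannot be decomposed at all gets the prohibitive cost of 1000.  */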
2504 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2505 otherwise return 0. */
2508 tls_symbolic_operand (rtx op)
2510 if (GET_CODE (op) != SYMBOL_REF)
2511 return 0;
2512 return SYMBOL_REF_TLS_MODEL (op);
2515 /* Split DImode access register reference REG (on 64-bit) into its constituent
2516 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2517 gen_highpart cannot be used as they assume all registers are word-sized,
2518 while our access registers have only half that size. */
2520 void
2521 s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2523 gcc_assert (TARGET_64BIT);
2524 gcc_assert (ACCESS_REG_P (reg));
2525 gcc_assert (GET_MODE (reg) == DImode);
2526 gcc_assert (!(REGNO (reg) & 1));
2528 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2529 *hi = gen_rtx_REG (SImode, REGNO (reg));
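/* For instance, a DImode value kept in the %a0/%a1 access register
   pair is split so that *HI refers to %a0 and *LO to %a1; the
   register with the higher number holds the low-order word.  */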
2532 /* Return true if OP contains a symbol reference. */
2534 bool
2535 symbolic_reference_mentioned_p (rtx op)
2537 const char *fmt;
2538 int i;
2540 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2541 return 1;
2543 fmt = GET_RTX_FORMAT (GET_CODE (op));
2544 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2546 if (fmt[i] == 'E')
2548 int j;
2550 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2551 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2552 return 1;
2555 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2556 return 1;
2559 return 0;
2562 /* Return true if OP contains a reference to a thread-local symbol. */
2564 bool
2565 tls_symbolic_reference_mentioned_p (rtx op)
2567 const char *fmt;
2568 int i;
2570 if (GET_CODE (op) == SYMBOL_REF)
2571 return tls_symbolic_operand (op);
2573 fmt = GET_RTX_FORMAT (GET_CODE (op));
2574 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2576 if (fmt[i] == 'E')
2578 int j;
2580 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2581 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2582 return true;
2585 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2586 return true;
2589 return false;
2593 /* Return true if OP is a legitimate general operand when
2594 generating PIC code. It is given that flag_pic is on
2595 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2598 legitimate_pic_operand_p (rtx op)
2600 /* Accept all non-symbolic constants. */
2601 if (!SYMBOLIC_CONST (op))
2602 return 1;
2604 /* Reject everything else; must be handled
2605 via emit_symbolic_move. */
2606 return 0;
2609 /* Returns true if the constant value OP is a legitimate general operand.
2610 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2613 legitimate_constant_p (rtx op)
2615 /* Accept all non-symbolic constants. */
2616 if (!SYMBOLIC_CONST (op))
2617 return 1;
2619 /* Accept immediate LARL operands. */
2620 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2621 return 1;
2623 /* Thread-local symbols are never legal constants. This is
2624 so that emit_call knows that computing such addresses
2625 might require a function call. */
2626 if (TLS_SYMBOLIC_CONST (op))
2627 return 0;
2629 /* In the PIC case, symbolic constants must *not* be
2630 forced into the literal pool. We accept them here,
2631 so that they will be handled by emit_symbolic_move. */
2632 if (flag_pic)
2633 return 1;
2635 /* All remaining non-PIC symbolic constants are
2636 forced into the literal pool. */
2637 return 0;
2640 /* Determine if it's legal to put X into the constant pool. This
2641 is not possible if X contains the address of a symbol that is
2642 not constant (TLS) or not known at final link time (PIC). */
2644 static bool
2645 s390_cannot_force_const_mem (rtx x)
2647 switch (GET_CODE (x))
2649 case CONST_INT:
2650 case CONST_DOUBLE:
2651 /* Accept all non-symbolic constants. */
2652 return false;
2654 case LABEL_REF:
2655 /* Labels are OK iff we are non-PIC. */
2656 return flag_pic != 0;
2658 case SYMBOL_REF:
2659 /* 'Naked' TLS symbol references are never OK,
2660 non-TLS symbols are OK iff we are non-PIC. */
2661 if (tls_symbolic_operand (x))
2662 return true;
2663 else
2664 return flag_pic != 0;
2666 case CONST:
2667 return s390_cannot_force_const_mem (XEXP (x, 0));
2668 case PLUS:
2669 case MINUS:
2670 return s390_cannot_force_const_mem (XEXP (x, 0))
2671 || s390_cannot_force_const_mem (XEXP (x, 1));
2673 case UNSPEC:
2674 switch (XINT (x, 1))
2676 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2677 case UNSPEC_LTREL_OFFSET:
2678 case UNSPEC_GOT:
2679 case UNSPEC_GOTOFF:
2680 case UNSPEC_PLTOFF:
2681 case UNSPEC_TLSGD:
2682 case UNSPEC_TLSLDM:
2683 case UNSPEC_NTPOFF:
2684 case UNSPEC_DTPOFF:
2685 case UNSPEC_GOTNTPOFF:
2686 case UNSPEC_INDNTPOFF:
2687 return false;
2689 /* If the literal pool shares the code section, execute
2690 template placeholders may be put into the pool as well. */
2691 case UNSPEC_INSN:
2692 return TARGET_CPU_ZARCH;
2694 default:
2695 return true;
2697 break;
2699 default:
2700 gcc_unreachable ();
2704 /* Returns true if the constant value OP is a legitimate general
2705 operand during and after reload. The difference to
2706 legitimate_constant_p is that this function will not accept
2707 a constant that would need to be forced to the literal pool
2708 before it can be used as operand. */
2710 bool
2711 legitimate_reload_constant_p (rtx op)
2713 /* Accept la(y) operands. */
2714 if (GET_CODE (op) == CONST_INT
2715 && DISP_IN_RANGE (INTVAL (op)))
2716 return true;
2718 /* Accept l(g)hi/l(g)fi operands. */
2719 if (GET_CODE (op) == CONST_INT
2720 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
2721 return true;
2723 /* Accept lliXX operands. */
2724 if (TARGET_ZARCH
2725 && GET_CODE (op) == CONST_INT
2726 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2727 && s390_single_part (op, word_mode, HImode, 0) >= 0)
2728 return true;
2730 if (TARGET_EXTIMM
2731 && GET_CODE (op) == CONST_INT
2732 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2733 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2734 return true;
2736 /* Accept larl operands. */
2737 if (TARGET_CPU_ZARCH
2738 && larl_operand (op, VOIDmode))
2739 return true;
2741 /* Accept lzXX operands. */
2742 if (GET_CODE (op) == CONST_DOUBLE
2743 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2744 return true;
2746 /* Accept double-word operands that can be split. */
2747 if (GET_CODE (op) == CONST_INT
2748 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2750 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2751 rtx hi = operand_subword (op, 0, 0, dword_mode);
2752 rtx lo = operand_subword (op, 1, 0, dword_mode);
2753 return legitimate_reload_constant_p (hi)
2754 && legitimate_reload_constant_p (lo);
2757 /* Everything else cannot be handled without reload. */
2758 return false;
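/* Example for the double-word case: on a 31-bit target the DImode
   constant 0x200000003 does not fit in a single word, but its two
   SImode halves, 2 and 3, are both valid reload constants (each fits
   an la displacement), so the constant is accepted here and can be
   split into two word moves later.  */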
2761 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2762 return the class of reg to actually use. */
2764 enum reg_class
2765 s390_preferred_reload_class (rtx op, enum reg_class class)
2767 switch (GET_CODE (op))
2769 /* Constants we cannot reload must be forced into the
2770 literal pool. */
2772 case CONST_DOUBLE:
2773 case CONST_INT:
2774 if (legitimate_reload_constant_p (op))
2775 return class;
2776 else
2777 return NO_REGS;
2779 /* If a symbolic constant or a PLUS is reloaded,
2780 it is most likely being used as an address, so
2781 prefer ADDR_REGS. If 'class' is not a superset
2782 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2783 case PLUS:
2784 case LABEL_REF:
2785 case SYMBOL_REF:
2786 case CONST:
2787 if (reg_class_subset_p (ADDR_REGS, class))
2788 return ADDR_REGS;
2789 else
2790 return NO_REGS;
2792 default:
2793 break;
2796 return class;
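/* Two examples of the rules above: reloading (symbol_ref "x") for
   class GENERAL_REGS returns ADDR_REGS, since the symbol is most
   likely an address; reloading the same rtx for FP_REGS returns
   NO_REGS because ADDR_REGS is not a subset of FP_REGS.  */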
2799 /* Return true if ADDR is of kind symbol_ref or symbol_ref + const_int
2800 and return these parts in SYMREF and ADDEND. You can pass NULL in
2801 SYMREF and/or ADDEND if you are not interested in these values. */
2803 static bool
2804 s390_symref_operand_p (rtx addr, rtx *symref, HOST_WIDE_INT *addend)
2806 HOST_WIDE_INT tmpaddend = 0;
2808 if (GET_CODE (addr) == CONST)
2809 addr = XEXP (addr, 0);
2811 if (GET_CODE (addr) == PLUS)
2813 if (GET_CODE (XEXP (addr, 0)) == SYMBOL_REF
2814 && CONST_INT_P (XEXP (addr, 1)))
2816 tmpaddend = INTVAL (XEXP (addr, 1));
2817 addr = XEXP (addr, 0);
2819 else
2820 return false;
2822 else
2823 if (GET_CODE (addr) != SYMBOL_REF)
2824 return false;
2826 if (symref)
2827 *symref = addr;
2828 if (addend)
2829 *addend = tmpaddend;
2831 return true;
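/* For example, (const (plus (symbol_ref "x") (const_int 12))) is
   accepted with *SYMREF = (symbol_ref "x") and *ADDEND = 12; a bare
   (symbol_ref "x") yields the same symbol with a zero addend, and
   any other shape makes the function return false.  */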
2834 /* Return true if ADDR is SYMBOL_REF + addend with addend being a
2835 multiple of ALIGNMENT and the SYMBOL_REF being naturally
2836 aligned. */
2838 bool
2839 s390_check_symref_alignment (rtx addr, HOST_WIDE_INT alignment)
2841 HOST_WIDE_INT addend;
2842 rtx symref;
2844 if (!s390_symref_operand_p (addr, &symref, &addend))
2845 return false;
2847 return (!SYMBOL_REF_NOT_NATURALLY_ALIGNED_P (symref)
2848 && !(addend & (alignment - 1)));
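/* E.g. with ALIGNMENT == 4, a naturally aligned symbol plus an
   addend of 8 passes the check, while the same symbol plus 2 fails
   because 2 is not a multiple of 4.  */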
2851 /* ADDR is moved into REG using larl. If ADDR isn't a valid larl
2852 operand, SCRATCH is used to load the even part of the address first;
2853 one is then added via LA. */
2855 void
2856 s390_reload_larl_operand (rtx reg, rtx addr, rtx scratch)
2858 HOST_WIDE_INT addend;
2859 rtx symref;
2861 if (!s390_symref_operand_p (addr, &symref, &addend))
2862 gcc_unreachable ();
2864 if (!(addend & 1))
2865 /* Easy case. The addend is even so larl will do fine. */
2866 emit_move_insn (reg, addr);
2867 else
2869 /* We can leave the scratch register untouched if the target
2870 register is a valid base register. */
2871 if (REGNO (reg) < FIRST_PSEUDO_REGISTER
2872 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS)
2873 scratch = reg;
2875 gcc_assert (REGNO (scratch) < FIRST_PSEUDO_REGISTER);
2876 gcc_assert (REGNO_REG_CLASS (REGNO (scratch)) == ADDR_REGS);
2878 if (addend != 1)
2879 emit_move_insn (scratch,
2880 gen_rtx_CONST (Pmode,
2881 gen_rtx_PLUS (Pmode, symref,
2882 GEN_INT (addend - 1))));
2883 else
2884 emit_move_insn (scratch, symref);
2886 /* Increment the address using la in order to avoid clobbering cc. */
2887 emit_move_insn (reg, gen_rtx_PLUS (Pmode, scratch, const1_rtx));
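/* Sketch of the odd-addend path above, for ADDR == sym + 5 (the
   mnemonics are only illustrative; the real output comes from the
   reload patterns):
       larl  SCRATCH, sym+4
       la    REG, 1(SCRATCH)
   The PC-relative larl thus only ever sees an even offset, and the
   trailing la adds the remaining 1 without clobbering the condition
   code.  */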
2891 /* Generate what is necessary to move between REG and MEM using
2892 SCRATCH. The direction is given by TOMEM. */
2894 void
2895 s390_reload_symref_address (rtx reg, rtx mem, rtx scratch, bool tomem)
2897 /* Reload might have pulled a constant out of the literal pool.
2898 Force it back in. */
2899 if (CONST_INT_P (mem) || GET_CODE (mem) == CONST_DOUBLE
2900 || GET_CODE (mem) == CONST)
2901 mem = force_const_mem (GET_MODE (reg), mem);
2903 gcc_assert (MEM_P (mem));
2905 /* For a load from memory we can leave the scratch register
2906 untouched if the target register is a valid base register. */
2907 if (!tomem
2908 && REGNO (reg) < FIRST_PSEUDO_REGISTER
2909 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS
2910 && GET_MODE (reg) == GET_MODE (scratch))
2911 scratch = reg;
2913 /* Load address into scratch register. Since we can't have a
2914 secondary reload for a secondary reload we have to cover the case
2915 where larl would need a secondary reload here as well. */
2916 s390_reload_larl_operand (scratch, XEXP (mem, 0), scratch);
2918 /* Now we can use a standard load/store to do the move. */
2919 if (tomem)
2920 emit_move_insn (replace_equiv_address (mem, scratch), reg);
2921 else
2922 emit_move_insn (reg, replace_equiv_address (mem, scratch));
2925 /* Inform reload about cases where moving X with a mode MODE to a register in
2926 CLASS requires an extra scratch or immediate register. Return the class
2927 needed for the immediate register. */
2929 static enum reg_class
2930 s390_secondary_reload (bool in_p, rtx x, enum reg_class class,
2931 enum machine_mode mode, secondary_reload_info *sri)
2933 /* Intermediate register needed. */
2934 if (reg_classes_intersect_p (CC_REGS, class))
2935 return GENERAL_REGS;
2937 if (TARGET_Z10)
2939 /* On z10 several optimizer steps may generate larl operands with
2940 an odd addend. */
2941 if (in_p
2942 && s390_symref_operand_p (x, NULL, NULL)
2943 && mode == Pmode
2944 && !s390_check_symref_alignment (x, 2))
2945 sri->icode = ((mode == DImode) ? CODE_FOR_reloaddi_larl_odd_addend_z10
2946 : CODE_FOR_reloadsi_larl_odd_addend_z10);
2948 /* On z10 we need a scratch register when moving QI, TI or floating
2949 point mode values from or to a memory location with a SYMBOL_REF
2950 or if the symref addend of an HI, SI or DI move is not aligned to the
2951 width of the access. */
2952 if (MEM_P (x)
2953 && s390_symref_operand_p (XEXP (x, 0), NULL, NULL)
2954 && (mode == QImode || mode == TImode || FLOAT_MODE_P (mode)
2955 || (!TARGET_64BIT && mode == DImode)
2956 || ((mode == HImode || mode == SImode || mode == DImode)
2957 && (!s390_check_symref_alignment (XEXP (x, 0),
2958 GET_MODE_SIZE (mode))))))
2960 #define __SECONDARY_RELOAD_CASE(M,m) \
2961 case M##mode: \
2962 if (TARGET_64BIT) \
2963 sri->icode = in_p ? CODE_FOR_reload##m##di_toreg_z10 : \
2964 CODE_FOR_reload##m##di_tomem_z10; \
2965 else \
2966 sri->icode = in_p ? CODE_FOR_reload##m##si_toreg_z10 : \
2967 CODE_FOR_reload##m##si_tomem_z10; \
2968 break;
2970 switch (GET_MODE (x))
2972 __SECONDARY_RELOAD_CASE (QI, qi);
2973 __SECONDARY_RELOAD_CASE (HI, hi);
2974 __SECONDARY_RELOAD_CASE (SI, si);
2975 __SECONDARY_RELOAD_CASE (DI, di);
2976 __SECONDARY_RELOAD_CASE (TI, ti);
2977 __SECONDARY_RELOAD_CASE (SF, sf);
2978 __SECONDARY_RELOAD_CASE (DF, df);
2979 __SECONDARY_RELOAD_CASE (TF, tf);
2980 __SECONDARY_RELOAD_CASE (SD, sd);
2981 __SECONDARY_RELOAD_CASE (DD, dd);
2982 __SECONDARY_RELOAD_CASE (TD, td);
2984 default:
2985 gcc_unreachable ();
2987 #undef __SECONDARY_RELOAD_CASE
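/* For reference, the QImode arm of the macro above expands, for an
   input reload on a 64-bit target, to
       sri->icode = CODE_FOR_reloadqidi_toreg_z10;
   i.e. the mode letter and the address-mode suffix are pasted
   directly into the insn code name.  */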
2991 /* We need a scratch register when loading a PLUS expression which
2992 is not a legitimate operand of the LOAD ADDRESS instruction. */
2993 if (in_p && s390_plus_operand (x, mode))
2994 sri->icode = (TARGET_64BIT ?
2995 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
2997 /* Performing a multiword move from or to memory we have to make sure the
2998 second chunk in memory is addressable without causing a displacement
2999 overflow. If that would be the case we calculate the address in
3000 a scratch register. */
3001 if (MEM_P (x)
3002 && GET_CODE (XEXP (x, 0)) == PLUS
3003 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3004 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
3005 + GET_MODE_SIZE (mode) - 1))
3007 /* For GENERAL_REGS a displacement overflow is no problem if occurring
3008 in an s_operand address since we may fall back to lm/stm. So we only
3009 have to care about overflows in the b+i+d case. */
3010 if ((reg_classes_intersect_p (GENERAL_REGS, class)
3011 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
3012 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
3013 /* For FP_REGS no lm/stm is available so this check is triggered
3014 for displacement overflows in b+i+d and b+d like addresses. */
3015 || (reg_classes_intersect_p (FP_REGS, class)
3016 && s390_class_max_nregs (FP_REGS, mode) > 1))
3018 if (in_p)
3019 sri->icode = (TARGET_64BIT ?
3020 CODE_FOR_reloaddi_nonoffmem_in :
3021 CODE_FOR_reloadsi_nonoffmem_in);
3022 else
3023 sri->icode = (TARGET_64BIT ?
3024 CODE_FOR_reloaddi_nonoffmem_out :
3025 CODE_FOR_reloadsi_nonoffmem_out);
3029 /* A scratch address register is needed when a symbolic constant is
3030 copied to r0 when compiling with -fPIC. In other cases the target
3031 register might be used as temporary (see legitimize_pic_address). */
3032 if (in_p && SYMBOLIC_CONST (x) && flag_pic == 2 && class != ADDR_REGS)
3033 sri->icode = (TARGET_64BIT ?
3034 CODE_FOR_reloaddi_PIC_addr :
3035 CODE_FOR_reloadsi_PIC_addr);
3037 /* Either scratch or no register needed. */
3038 return NO_REGS;
3041 /* Generate code to load SRC, which is PLUS that is not a
3042 legitimate operand for the LA instruction, into TARGET.
3043 SCRATCH may be used as scratch register. */
3045 void
3046 s390_expand_plus_operand (rtx target, rtx src,
3047 rtx scratch)
3049 rtx sum1, sum2;
3050 struct s390_address ad;
3052 /* src must be a PLUS; get its two operands. */
3053 gcc_assert (GET_CODE (src) == PLUS);
3054 gcc_assert (GET_MODE (src) == Pmode);
3056 /* Check if either of the two operands is already scheduled
3057 for replacement by reload. This can happen e.g. when
3058 float registers occur in an address. */
3059 sum1 = find_replacement (&XEXP (src, 0));
3060 sum2 = find_replacement (&XEXP (src, 1));
3061 src = gen_rtx_PLUS (Pmode, sum1, sum2);
3063 /* If the address is already strictly valid, there's nothing to do. */
3064 if (!s390_decompose_address (src, &ad)
3065 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3066 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
3068 /* Otherwise, one of the operands cannot be an address register;
3069 we reload its value into the scratch register. */
3070 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
3072 emit_move_insn (scratch, sum1);
3073 sum1 = scratch;
3075 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
3077 emit_move_insn (scratch, sum2);
3078 sum2 = scratch;
3081 /* According to the way these invalid addresses are generated
3082 in reload.c, it should never happen (at least on s390) that
3083 *neither* of the PLUS components, after find_replacements
3084 was applied, is an address register. */
3085 if (sum1 == scratch && sum2 == scratch)
3087 debug_rtx (src);
3088 gcc_unreachable ();
3091 src = gen_rtx_PLUS (Pmode, sum1, sum2);
3094 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
3095 is only ever performed on addresses, so we can mark the
3096 sum as legitimate for LA in any case. */
3097 s390_load_address (target, src);
3101 /* Return true if ADDR is a valid memory address.
3102 STRICT specifies whether strict register checking applies. */
3104 bool
3105 legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
3107 struct s390_address ad;
3109 if (TARGET_Z10
3110 && larl_operand (addr, VOIDmode)
3111 && (mode == VOIDmode
3112 || s390_check_symref_alignment (addr, GET_MODE_SIZE (mode))))
3113 return true;
3115 if (!s390_decompose_address (addr, &ad))
3116 return false;
3118 if (strict)
3120 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3121 return false;
3123 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
3124 return false;
3126 else
3128 if (ad.base
3129 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
3130 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
3131 return false;
3133 if (ad.indx
3134 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
3135 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
3136 return false;
3138 return true;
3141 /* Return true if OP is a valid operand for the LA instruction.
3142 In 31-bit, we need to prove that the result is used as an
3143 address, as LA performs only a 31-bit addition. */
3145 bool
3146 legitimate_la_operand_p (rtx op)
3148 struct s390_address addr;
3149 if (!s390_decompose_address (op, &addr))
3150 return false;
3152 return (TARGET_64BIT || addr.pointer);
3155 /* Return true if it is valid *and* preferable to use LA to
3156 compute the sum of OP1 and OP2. */
3158 bool
3159 preferred_la_operand_p (rtx op1, rtx op2)
3161 struct s390_address addr;
3163 if (op2 != const0_rtx)
3164 op1 = gen_rtx_PLUS (Pmode, op1, op2);
3166 if (!s390_decompose_address (op1, &addr))
3167 return false;
3168 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
3169 return false;
3170 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
3171 return false;
3173 if (!TARGET_64BIT && !addr.pointer)
3174 return false;
3176 if (addr.pointer)
3177 return true;
3179 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
3180 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
3181 return true;
3183 return false;
3186 /* Emit a forced load-address operation to load SRC into DST.
3187 This will use the LOAD ADDRESS instruction even in situations
3188 where legitimate_la_operand_p (SRC) returns false. */
3190 void
3191 s390_load_address (rtx dst, rtx src)
3193 if (TARGET_64BIT)
3194 emit_move_insn (dst, src);
3195 else
3196 emit_insn (gen_force_la_31 (dst, src));
3199 /* Return a legitimate reference for ORIG (an address) using the
3200 register REG. If REG is 0, a new pseudo is generated.
3202 There are two types of references that must be handled:
3204 1. Global data references must load the address from the GOT, via
3205 the PIC reg. An insn is emitted to do this load, and the reg is
3206 returned.
3208 2. Static data references, constant pool addresses, and code labels
3209 compute the address as an offset from the GOT, whose base is in
3210 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
3211 differentiate them from global data objects. The returned
3212 address is the PIC reg + an unspec constant.
3214 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
3215 reg also appears in the address. */
3218 legitimize_pic_address (rtx orig, rtx reg)
3220 rtx addr = orig;
3221 rtx new = orig;
3222 rtx base;
3224 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
3226 if (GET_CODE (addr) == LABEL_REF
3227 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
3229 /* This is a local symbol. */
3230 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
3232 /* Access local symbols PC-relative via LARL.
3233 This is the same as in the non-PIC case, so it is
3234 handled automatically ... */
3236 else
3238 /* Access local symbols relative to the GOT. */
3240 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3242 if (reload_in_progress || reload_completed)
3243 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3245 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
3246 addr = gen_rtx_CONST (Pmode, addr);
3247 addr = force_const_mem (Pmode, addr);
3248 emit_move_insn (temp, addr);
3250 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3251 if (reg != 0)
3253 s390_load_address (reg, new);
3254 new = reg;
3258 else if (GET_CODE (addr) == SYMBOL_REF)
3260 if (reg == 0)
3261 reg = gen_reg_rtx (Pmode);
3263 if (flag_pic == 1)
3265 /* Assume GOT offset < 4k. This is handled the same way
3266 in both 31- and 64-bit code (@GOT). */
3268 if (reload_in_progress || reload_completed)
3269 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3271 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
3272 new = gen_rtx_CONST (Pmode, new);
3273 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3274 new = gen_const_mem (Pmode, new);
3275 emit_move_insn (reg, new);
3276 new = reg;
3278 else if (TARGET_CPU_ZARCH)
3280 /* If the GOT offset might be >= 4k, we determine the position
3281 of the GOT entry via a PC-relative LARL (@GOTENT). */
3283 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3285 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
3286 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
3288 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
3289 new = gen_rtx_CONST (Pmode, new);
3290 emit_move_insn (temp, new);
3292 new = gen_const_mem (Pmode, temp);
3293 emit_move_insn (reg, new);
3294 new = reg;
3296 else
3298 /* If the GOT offset might be >= 4k, we have to load it
3299 from the literal pool (@GOT). */
3301 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3303 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
3304 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
3306 if (reload_in_progress || reload_completed)
3307 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3309 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
3310 addr = gen_rtx_CONST (Pmode, addr);
3311 addr = force_const_mem (Pmode, addr);
3312 emit_move_insn (temp, addr);
3314 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3315 new = gen_const_mem (Pmode, new);
3316 emit_move_insn (reg, new);
3317 new = reg;
3320 else
3322 if (GET_CODE (addr) == CONST)
3324 addr = XEXP (addr, 0);
3325 if (GET_CODE (addr) == UNSPEC)
3327 gcc_assert (XVECLEN (addr, 0) == 1);
3328 switch (XINT (addr, 1))
3330 /* If someone moved a GOT-relative UNSPEC
3331 out of the literal pool, force it back in. */
3332 case UNSPEC_GOTOFF:
3333 case UNSPEC_PLTOFF:
3334 new = force_const_mem (Pmode, orig);
3335 break;
3337 /* @GOT is OK as is if small. */
3338 case UNSPEC_GOT:
3339 if (flag_pic == 2)
3340 new = force_const_mem (Pmode, orig);
3341 break;
3343 /* @GOTENT is OK as is. */
3344 case UNSPEC_GOTENT:
3345 break;
3347 /* @PLT is OK as is on 64-bit, must be converted to
3348 GOT-relative @PLTOFF on 31-bit. */
3349 case UNSPEC_PLT:
3350 if (!TARGET_CPU_ZARCH)
3352 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3354 if (reload_in_progress || reload_completed)
3355 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3357 addr = XVECEXP (addr, 0, 0);
3358 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
3359 UNSPEC_PLTOFF);
3360 addr = gen_rtx_CONST (Pmode, addr);
3361 addr = force_const_mem (Pmode, addr);
3362 emit_move_insn (temp, addr);
3364 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3365 if (reg != 0)
3367 s390_load_address (reg, new);
3368 new = reg;
3371 break;
3373 /* Everything else cannot happen. */
3374 default:
3375 gcc_unreachable ();
3378 else
3379 gcc_assert (GET_CODE (addr) == PLUS);
3381 if (GET_CODE (addr) == PLUS)
3383 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
3385 gcc_assert (!TLS_SYMBOLIC_CONST (op0));
3386 gcc_assert (!TLS_SYMBOLIC_CONST (op1));
3388 /* Check first to see if this is a constant offset
3389 from a local symbol reference. */
3390 if ((GET_CODE (op0) == LABEL_REF
3391 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
3392 && GET_CODE (op1) == CONST_INT)
3394 if (TARGET_CPU_ZARCH
3395 && larl_operand (op0, VOIDmode)
3396 && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
3397 && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
3399 if (INTVAL (op1) & 1)
3401 /* LARL can't handle odd offsets, so emit a
3402 pair of LARL and LA. */
3403 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3405 if (!DISP_IN_RANGE (INTVAL (op1)))
3407 HOST_WIDE_INT even = INTVAL (op1) - 1;
3408 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
3409 op0 = gen_rtx_CONST (Pmode, op0);
3410 op1 = const1_rtx;
3413 emit_move_insn (temp, op0);
3414 new = gen_rtx_PLUS (Pmode, temp, op1);
3416 if (reg != 0)
3418 s390_load_address (reg, new);
3419 new = reg;
3422 else
3424 /* If the offset is even, we can just use LARL.
3425 This will happen automatically. */
3428 else
3430 /* Access local symbols relative to the GOT. */
3432 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3434 if (reload_in_progress || reload_completed)
3435 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3437 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
3438 UNSPEC_GOTOFF);
3439 addr = gen_rtx_PLUS (Pmode, addr, op1);
3440 addr = gen_rtx_CONST (Pmode, addr);
3441 addr = force_const_mem (Pmode, addr);
3442 emit_move_insn (temp, addr);
3444 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3445 if (reg != 0)
3447 s390_load_address (reg, new);
3448 new = reg;
3453 /* Now, check whether it is a GOT relative symbol plus offset
3454 that was pulled out of the literal pool. Force it back in. */
3456 else if (GET_CODE (op0) == UNSPEC
3457 && GET_CODE (op1) == CONST_INT
3458 && XINT (op0, 1) == UNSPEC_GOTOFF)
3460 gcc_assert (XVECLEN (op0, 0) == 1);
3462 new = force_const_mem (Pmode, orig);
3465 /* Otherwise, compute the sum. */
3466 else
3468 base = legitimize_pic_address (XEXP (addr, 0), reg);
3469 new = legitimize_pic_address (XEXP (addr, 1),
3470 base == reg ? NULL_RTX : reg);
3471 if (GET_CODE (new) == CONST_INT)
3472 new = plus_constant (base, INTVAL (new));
3473 else
3475 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3477 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3478 new = XEXP (new, 1);
3480 new = gen_rtx_PLUS (Pmode, base, new);
3483 if (GET_CODE (new) == CONST)
3484 new = XEXP (new, 0);
3485 new = force_operand (new, 0);
3489 return new;
3492 /* Load the thread pointer into a register. */
3495 s390_get_thread_pointer (void)
3497 rtx tp = gen_reg_rtx (Pmode);
3499 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
3500 mark_reg_pointer (tp, BITS_PER_WORD);
3502 return tp;
3505 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
3506 in s390_tls_symbol which always refers to __tls_get_offset.
3507 The returned offset is written to RESULT_REG and a USE rtx is
3508 generated for TLS_CALL. */
3510 static GTY(()) rtx s390_tls_symbol;
3512 static void
3513 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3515 rtx insn;
3517 gcc_assert (flag_pic);
3519 if (!s390_tls_symbol)
3520 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3522 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3523 gen_rtx_REG (Pmode, RETURN_REGNUM));
3525 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3526 RTL_CONST_CALL_P (insn) = 1;
3529 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3530 this (thread-local) address. REG may be used as temporary. */
3532 static rtx
3533 legitimize_tls_address (rtx addr, rtx reg)
3535 rtx new, tls_call, temp, base, r2, insn;
3537 if (GET_CODE (addr) == SYMBOL_REF)
3538 switch (tls_symbolic_operand (addr))
3540 case TLS_MODEL_GLOBAL_DYNAMIC:
3541 start_sequence ();
3542 r2 = gen_rtx_REG (Pmode, 2);
3543 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3544 new = gen_rtx_CONST (Pmode, tls_call);
3545 new = force_const_mem (Pmode, new);
3546 emit_move_insn (r2, new);
3547 s390_emit_tls_call_insn (r2, tls_call);
3548 insn = get_insns ();
3549 end_sequence ();
3551 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3552 temp = gen_reg_rtx (Pmode);
3553 emit_libcall_block (insn, temp, r2, new);
3555 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3556 if (reg != 0)
3558 s390_load_address (reg, new);
3559 new = reg;
3561 break;
3563 case TLS_MODEL_LOCAL_DYNAMIC:
3564 start_sequence ();
3565 r2 = gen_rtx_REG (Pmode, 2);
3566 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3567 new = gen_rtx_CONST (Pmode, tls_call);
3568 new = force_const_mem (Pmode, new);
3569 emit_move_insn (r2, new);
3570 s390_emit_tls_call_insn (r2, tls_call);
3571 insn = get_insns ();
3572 end_sequence ();
3574 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3575 temp = gen_reg_rtx (Pmode);
3576 emit_libcall_block (insn, temp, r2, new);
3578 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3579 base = gen_reg_rtx (Pmode);
3580 s390_load_address (base, new);
3582 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3583 new = gen_rtx_CONST (Pmode, new);
3584 new = force_const_mem (Pmode, new);
3585 temp = gen_reg_rtx (Pmode);
3586 emit_move_insn (temp, new);
3588 new = gen_rtx_PLUS (Pmode, base, temp);
3589 if (reg != 0)
3591 s390_load_address (reg, new);
3592 new = reg;
3594 break;
3596 case TLS_MODEL_INITIAL_EXEC:
3597 if (flag_pic == 1)
3599 /* Assume GOT offset < 4k. This is handled the same way
3600 in both 31- and 64-bit code. */
3602 if (reload_in_progress || reload_completed)
3603 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3605 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3606 new = gen_rtx_CONST (Pmode, new);
3607 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3608 new = gen_const_mem (Pmode, new);
3609 temp = gen_reg_rtx (Pmode);
3610 emit_move_insn (temp, new);
3612 else if (TARGET_CPU_ZARCH)
3614 /* If the GOT offset might be >= 4k, we determine the position
3615 of the GOT entry via a PC-relative LARL. */
3617 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3618 new = gen_rtx_CONST (Pmode, new);
3619 temp = gen_reg_rtx (Pmode);
3620 emit_move_insn (temp, new);
3622 new = gen_const_mem (Pmode, temp);
3623 temp = gen_reg_rtx (Pmode);
3624 emit_move_insn (temp, new);
3626 else if (flag_pic)
3628 /* If the GOT offset might be >= 4k, we have to load it
3629 from the literal pool. */
3631 if (reload_in_progress || reload_completed)
3632 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3634 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3635 new = gen_rtx_CONST (Pmode, new);
3636 new = force_const_mem (Pmode, new);
3637 temp = gen_reg_rtx (Pmode);
3638 emit_move_insn (temp, new);
3640 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3641 new = gen_const_mem (Pmode, new);
3643 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3644 temp = gen_reg_rtx (Pmode);
3645 emit_insn (gen_rtx_SET (Pmode, temp, new));
3647 else
3649 /* In position-dependent code, load the absolute address of
3650 the GOT entry from the literal pool. */
3652 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3653 new = gen_rtx_CONST (Pmode, new);
3654 new = force_const_mem (Pmode, new);
3655 temp = gen_reg_rtx (Pmode);
3656 emit_move_insn (temp, new);
3658 new = temp;
3659 new = gen_const_mem (Pmode, new);
3660 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3661 temp = gen_reg_rtx (Pmode);
3662 emit_insn (gen_rtx_SET (Pmode, temp, new));
3665 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3666 if (reg != 0)
3668 s390_load_address (reg, new);
3669 new = reg;
3671 break;
3673 case TLS_MODEL_LOCAL_EXEC:
3674 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3675 new = gen_rtx_CONST (Pmode, new);
3676 new = force_const_mem (Pmode, new);
3677 temp = gen_reg_rtx (Pmode);
3678 emit_move_insn (temp, new);
3680 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3681 if (reg != 0)
3683 s390_load_address (reg, new);
3684 new = reg;
3686 break;
3688 default:
3689 gcc_unreachable ();
3692 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3694 switch (XINT (XEXP (addr, 0), 1))
3696 case UNSPEC_INDNTPOFF:
3697 gcc_assert (TARGET_CPU_ZARCH);
3698 new = addr;
3699 break;
3701 default:
3702 gcc_unreachable ();
3706 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3707 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3709 new = XEXP (XEXP (addr, 0), 0);
3710 if (GET_CODE (new) != SYMBOL_REF)
3711 new = gen_rtx_CONST (Pmode, new);
3713 new = legitimize_tls_address (new, reg);
3714 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3715 new = force_operand (new, 0);
3718 else
3719 gcc_unreachable (); /* for now ... */
3721 return new;
3724 /* Emit insns making the address in operands[1] valid for a standard
3725 move to operands[0]. operands[1] is replaced by an address which
3726 should be used instead of the former RTX to emit the move
3727 pattern. */
3729 void
3730 emit_symbolic_move (rtx *operands)
3732 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
3734 if (GET_CODE (operands[0]) == MEM)
3735 operands[1] = force_reg (Pmode, operands[1]);
3736 else if (TLS_SYMBOLIC_CONST (operands[1]))
3737 operands[1] = legitimize_tls_address (operands[1], temp);
3738 else if (flag_pic)
3739 operands[1] = legitimize_pic_address (operands[1], temp);
3742 /* Try machine-dependent ways of modifying an illegitimate address X
3743 to be legitimate. If we find one, return the new, valid address.
3745 OLDX is the address as it was before break_out_memory_refs was called.
3746 In some cases it is useful to look at this to decide what needs to be done.
3748 MODE is the mode of the operand pointed to by X.
3750 When -fpic is used, special handling is needed for symbolic references.
3751 See comments by legitimize_pic_address for details. */
3754 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3755 enum machine_mode mode ATTRIBUTE_UNUSED)
3757 rtx constant_term = const0_rtx;
3759 if (TLS_SYMBOLIC_CONST (x))
3761 x = legitimize_tls_address (x, 0);
3763 if (legitimate_address_p (mode, x, FALSE))
3764 return x;
3766 else if (GET_CODE (x) == PLUS
3767 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
3768 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
3770 return x;
3772 else if (flag_pic)
3774 if (SYMBOLIC_CONST (x)
3775 || (GET_CODE (x) == PLUS
3776 && (SYMBOLIC_CONST (XEXP (x, 0))
3777 || SYMBOLIC_CONST (XEXP (x, 1)))))
3778 x = legitimize_pic_address (x, 0);
3780 if (legitimate_address_p (mode, x, FALSE))
3781 return x;
3784 x = eliminate_constant_term (x, &constant_term);
3786 /* Optimize loading of large displacements by splitting them
3787 into the multiple of 4K and the rest; this allows the
3788 former to be CSE'd if possible.
3790 Don't do this if the displacement is added to a register
3791 pointing into the stack frame, as the offsets will
3792 change later anyway. */
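/* A concrete illustration of the split performed below: for a
   constant term of 0x12345,
     lower = 0x12345 & 0xfff = 0x345
     upper = 0x12345 ^ lower = 0x12000
   so 0x12000 is materialized in a register (and may be CSE'd across
   several accesses) while 0x345 remains as an in-range
   displacement.  */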
3794 if (GET_CODE (constant_term) == CONST_INT
3795 && !TARGET_LONG_DISPLACEMENT
3796 && !DISP_IN_RANGE (INTVAL (constant_term))
3797 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3799 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3800 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3802 rtx temp = gen_reg_rtx (Pmode);
3803 rtx val = force_operand (GEN_INT (upper), temp);
3804 if (val != temp)
3805 emit_move_insn (temp, val);
3807 x = gen_rtx_PLUS (Pmode, x, temp);
3808 constant_term = GEN_INT (lower);
3811 if (GET_CODE (x) == PLUS)
3813 if (GET_CODE (XEXP (x, 0)) == REG)
3815 rtx temp = gen_reg_rtx (Pmode);
3816 rtx val = force_operand (XEXP (x, 1), temp);
3817 if (val != temp)
3818 emit_move_insn (temp, val);
3820 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3823 else if (GET_CODE (XEXP (x, 1)) == REG)
3825 rtx temp = gen_reg_rtx (Pmode);
3826 rtx val = force_operand (XEXP (x, 0), temp);
3827 if (val != temp)
3828 emit_move_insn (temp, val);
3830 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3834 if (constant_term != const0_rtx)
3835 x = gen_rtx_PLUS (Pmode, x, constant_term);
3837 return x;
3840 /* Try a machine-dependent way of reloading an illegitimate address AD
3841 operand. If we find one, push the reload and return the new address.
3843 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3844 and TYPE is the reload type of the current reload. */
3846 rtx
3847 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3848 int opnum, int type)
3850 if (!optimize || TARGET_LONG_DISPLACEMENT)
3851 return NULL_RTX;
3853 if (GET_CODE (ad) == PLUS)
3855 rtx tem = simplify_binary_operation (PLUS, Pmode,
3856 XEXP (ad, 0), XEXP (ad, 1));
3857 if (tem)
3858 ad = tem;
3861 if (GET_CODE (ad) == PLUS
3862 && GET_CODE (XEXP (ad, 0)) == REG
3863 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3864 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3866 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3867 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3868 rtx cst, tem, new;
3870 cst = GEN_INT (upper);
3871 if (!legitimate_reload_constant_p (cst))
3872 cst = force_const_mem (Pmode, cst);
3874 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3875 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3877 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3878 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3879 opnum, (enum reload_type) type);
3880 return new;
3883 return NULL_RTX;
3886 /* Emit code to move LEN bytes from SRC to DST. */
3888 void
3889 s390_expand_movmem (rtx dst, rtx src, rtx len)
3891 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3893 if (INTVAL (len) > 0)
3894 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3897 else if (TARGET_MVCLE)
3899 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3902 else
3904 rtx dst_addr, src_addr, count, blocks, temp;
3905 rtx loop_start_label = gen_label_rtx ();
3906 rtx loop_end_label = gen_label_rtx ();
3907 rtx end_label = gen_label_rtx ();
3908 enum machine_mode mode;
3910 mode = GET_MODE (len);
3911 if (mode == VOIDmode)
3912 mode = Pmode;
3914 dst_addr = gen_reg_rtx (Pmode);
3915 src_addr = gen_reg_rtx (Pmode);
3916 count = gen_reg_rtx (mode);
3917 blocks = gen_reg_rtx (mode);
3919 convert_move (count, len, 1);
3920 emit_cmp_and_jump_insns (count, const0_rtx,
3921 EQ, NULL_RTX, mode, 1, end_label);
3923 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3924 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3925 dst = change_address (dst, VOIDmode, dst_addr);
3926 src = change_address (src, VOIDmode, src_addr);
3928 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3929 if (temp != count)
3930 emit_move_insn (count, temp);
3932 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3933 if (temp != blocks)
3934 emit_move_insn (blocks, temp);
3936 emit_cmp_and_jump_insns (blocks, const0_rtx,
3937 EQ, NULL_RTX, mode, 1, loop_end_label);
3939 emit_label (loop_start_label);
3941 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3942 s390_load_address (dst_addr,
3943 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3944 s390_load_address (src_addr,
3945 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3947 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3948 if (temp != blocks)
3949 emit_move_insn (blocks, temp);
3951 emit_cmp_and_jump_insns (blocks, const0_rtx,
3952 EQ, NULL_RTX, mode, 1, loop_end_label);
3954 emit_jump (loop_start_label);
3955 emit_label (loop_end_label);
3957 emit_insn (gen_movmem_short (dst, src,
3958 convert_to_mode (Pmode, count, 1)));
3959 emit_label (end_label);
3963 /* Emit code to set LEN bytes at DST to VAL.
3964 Make use of clrmem if VAL is zero. */
3966 void
3967 s390_expand_setmem (rtx dst, rtx len, rtx val)
3969 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
3970 return;
3972 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
3974 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
3976 if (val == const0_rtx && INTVAL (len) <= 256)
3977 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3978 else
3980 /* Initialize memory by storing the first byte. */
3981 emit_move_insn (adjust_address (dst, QImode, 0), val);
3983 if (INTVAL (len) > 1)
3985 /* Initiate 1 byte overlap move.
3986 The first byte of DST is propagated through DSTP1.
3987 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
3988 DST is set to size 1 so the rest of the memory location
3989 does not count as source operand. */
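/* E.g. for LEN == 10 the byte store above fills DST[0] with VAL and
   the following movmem copies DST[0..8] onto DST[1..9]; because
   source and destination overlap by exactly one byte, the initial
   value is propagated through the whole 10-byte range.  */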
3990 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
3991 set_mem_size (dst, const1_rtx);
3993 emit_insn (gen_movmem_short (dstp1, dst,
3994 GEN_INT (INTVAL (len) - 2)));
3999 else if (TARGET_MVCLE)
4001 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
4002 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
4005 else
4007 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
4008 rtx loop_start_label = gen_label_rtx ();
4009 rtx loop_end_label = gen_label_rtx ();
4010 rtx end_label = gen_label_rtx ();
4011 enum machine_mode mode;
4013 mode = GET_MODE (len);
4014 if (mode == VOIDmode)
4015 mode = Pmode;
4017 dst_addr = gen_reg_rtx (Pmode);
4018 src_addr = gen_reg_rtx (Pmode);
4019 count = gen_reg_rtx (mode);
4020 blocks = gen_reg_rtx (mode);
4022 convert_move (count, len, 1);
4023 emit_cmp_and_jump_insns (count, const0_rtx,
4024 EQ, NULL_RTX, mode, 1, end_label);
4026 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
4027 dst = change_address (dst, VOIDmode, dst_addr);
4029 if (val == const0_rtx)
4030 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
4031 else
4033 dstp1 = adjust_address (dst, VOIDmode, 1);
4034 set_mem_size (dst, const1_rtx);
4036 /* Initialize memory by storing the first byte. */
4037 emit_move_insn (adjust_address (dst, QImode, 0), val);
4039 /* If count is 1 we are done. */
4040 emit_cmp_and_jump_insns (count, const1_rtx,
4041 EQ, NULL_RTX, mode, 1, end_label);
4043 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0);
4045 if (temp != count)
4046 emit_move_insn (count, temp);
4048 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
4049 if (temp != blocks)
4050 emit_move_insn (blocks, temp);
4052 emit_cmp_and_jump_insns (blocks, const0_rtx,
4053 EQ, NULL_RTX, mode, 1, loop_end_label);
4055 emit_label (loop_start_label);
4057 if (val == const0_rtx)
4058 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
4059 else
4060 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
4061 s390_load_address (dst_addr,
4062 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
4064 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
4065 if (temp != blocks)
4066 emit_move_insn (blocks, temp);
4068 emit_cmp_and_jump_insns (blocks, const0_rtx,
4069 EQ, NULL_RTX, mode, 1, loop_end_label);
4071 emit_jump (loop_start_label);
4072 emit_label (loop_end_label);
4074 if (val == const0_rtx)
4075 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
4076 else
4077 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
4078 emit_label (end_label);
4082 /* Emit code to compare LEN bytes at OP0 with those at OP1,
4083 and return the result in TARGET. */
4085 void
4086 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
4088 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
4089 rtx tmp;
4091 /* As the result of CMPINT is inverted compared to what we need,
4092 we have to swap the operands. */
4093 tmp = op0; op0 = op1; op1 = tmp;
4095 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
4097 if (INTVAL (len) > 0)
4099 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
4100 emit_insn (gen_cmpint (target, ccreg));
4102 else
4103 emit_move_insn (target, const0_rtx);
4105 else if (TARGET_MVCLE)
4107 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
4108 emit_insn (gen_cmpint (target, ccreg));
4110 else
4112 rtx addr0, addr1, count, blocks, temp;
4113 rtx loop_start_label = gen_label_rtx ();
4114 rtx loop_end_label = gen_label_rtx ();
4115 rtx end_label = gen_label_rtx ();
4116 enum machine_mode mode;
4118 mode = GET_MODE (len);
4119 if (mode == VOIDmode)
4120 mode = Pmode;
4122 addr0 = gen_reg_rtx (Pmode);
4123 addr1 = gen_reg_rtx (Pmode);
4124 count = gen_reg_rtx (mode);
4125 blocks = gen_reg_rtx (mode);
4127 convert_move (count, len, 1);
4128 emit_cmp_and_jump_insns (count, const0_rtx,
4129 EQ, NULL_RTX, mode, 1, end_label);
4131 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
4132 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
4133 op0 = change_address (op0, VOIDmode, addr0);
4134 op1 = change_address (op1, VOIDmode, addr1);
4136 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
4137 if (temp != count)
4138 emit_move_insn (count, temp);
4140 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
4141 if (temp != blocks)
4142 emit_move_insn (blocks, temp);
4144 emit_cmp_and_jump_insns (blocks, const0_rtx,
4145 EQ, NULL_RTX, mode, 1, loop_end_label);
4147 emit_label (loop_start_label);
4149 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
4150 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
4151 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
4152 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
4153 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
4154 emit_jump_insn (temp);
4156 s390_load_address (addr0,
4157 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
4158 s390_load_address (addr1,
4159 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
4161 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
4162 if (temp != blocks)
4163 emit_move_insn (blocks, temp);
4165 emit_cmp_and_jump_insns (blocks, const0_rtx,
4166 EQ, NULL_RTX, mode, 1, loop_end_label);
4168 emit_jump (loop_start_label);
4169 emit_label (loop_end_label);
4171 emit_insn (gen_cmpmem_short (op0, op1,
4172 convert_to_mode (Pmode, count, 1)));
4173 emit_label (end_label);
4175 emit_insn (gen_cmpint (target, ccreg));
4180 /* Expand conditional increment or decrement using alc/slb instructions.
4181 Should generate code setting DST to either SRC or SRC + INCREMENT,
4182 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
4183 Returns true if successful, false otherwise.
4185 That makes it possible to implement some if-constructs without jumps e.g.:
4186 (borrow = CC0 | CC1 and carry = CC2 | CC3)
4187 unsigned int a, b, c;
4188 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
4189 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
4190 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
4191 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
4193 Checks for EQ and NE with a nonzero value need an additional XOR, e.g.:
4194 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
4195 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
4196 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
4197 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
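/* As a concrete sketch of the ALC path below: for "if (a < b) c++" with
   SImode operands, the comparison operands are swapped (LTU -> GTU), a
   CCUmode compare of b against a is emitted, and the final insn is
   roughly
     (parallel [(set c (plus (plus (gtu (reg:CCU 33) (const_int 0)) c)
                             (const_int 0)))
                (clobber (reg:CC 33))])
   which is what the add-with-carry patterns in s390.md are written to
   match.  Register 33 stands for CC_REGNUM; the exact RTL shape is
   illustrative only.  */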
4199 bool
4200 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
4201 rtx dst, rtx src, rtx increment)
4203 enum machine_mode cmp_mode;
4204 enum machine_mode cc_mode;
4205 rtx op_res;
4206 rtx insn;
4207 rtvec p;
4208 int ret;
4210 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
4211 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
4212 cmp_mode = SImode;
4213 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
4214 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
4215 cmp_mode = DImode;
4216 else
4217 return false;
4219 /* Try ADD LOGICAL WITH CARRY. */
4220 if (increment == const1_rtx)
4222 /* Determine CC mode to use. */
4223 if (cmp_code == EQ || cmp_code == NE)
4225 if (cmp_op1 != const0_rtx)
4227 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
4228 NULL_RTX, 0, OPTAB_WIDEN);
4229 cmp_op1 = const0_rtx;
4232 cmp_code = cmp_code == EQ ? LEU : GTU;
4235 if (cmp_code == LTU || cmp_code == LEU)
4237 rtx tem = cmp_op0;
4238 cmp_op0 = cmp_op1;
4239 cmp_op1 = tem;
4240 cmp_code = swap_condition (cmp_code);
4243 switch (cmp_code)
4245 case GTU:
4246 cc_mode = CCUmode;
4247 break;
4249 case GEU:
4250 cc_mode = CCL3mode;
4251 break;
4253 default:
4254 return false;
4257 /* Emit comparison instruction pattern. */
4258 if (!register_operand (cmp_op0, cmp_mode))
4259 cmp_op0 = force_reg (cmp_mode, cmp_op0);
4261 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
4262 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
4263 /* We use insn_invalid_p here to add clobbers if required. */
4264 ret = insn_invalid_p (emit_insn (insn));
4265 gcc_assert (!ret);
4267 /* Emit ALC instruction pattern. */
4268 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4269 gen_rtx_REG (cc_mode, CC_REGNUM),
4270 const0_rtx);
4272 if (src != const0_rtx)
4274 if (!register_operand (src, GET_MODE (dst)))
4275 src = force_reg (GET_MODE (dst), src);
4277 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
4278 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
4281 p = rtvec_alloc (2);
4282 RTVEC_ELT (p, 0) =
4283 gen_rtx_SET (VOIDmode, dst, op_res);
4284 RTVEC_ELT (p, 1) =
4285 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4286 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4288 return true;
4291 /* Try SUBTRACT LOGICAL WITH BORROW. */
4292 if (increment == constm1_rtx)
4294 /* Determine CC mode to use. */
4295 if (cmp_code == EQ || cmp_code == NE)
4297 if (cmp_op1 != const0_rtx)
4299 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
4300 NULL_RTX, 0, OPTAB_WIDEN);
4301 cmp_op1 = const0_rtx;
4304 cmp_code = cmp_code == EQ ? LEU : GTU;
4307 if (cmp_code == GTU || cmp_code == GEU)
4309 rtx tem = cmp_op0;
4310 cmp_op0 = cmp_op1;
4311 cmp_op1 = tem;
4312 cmp_code = swap_condition (cmp_code);
4315 switch (cmp_code)
4317 case LEU:
4318 cc_mode = CCUmode;
4319 break;
4321 case LTU:
4322 cc_mode = CCL3mode;
4323 break;
4325 default:
4326 return false;
4329 /* Emit comparison instruction pattern. */
4330 if (!register_operand (cmp_op0, cmp_mode))
4331 cmp_op0 = force_reg (cmp_mode, cmp_op0);
4333 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
4334 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
4335 /* We use insn_invalid_p here to add clobbers if required. */
4336 ret = insn_invalid_p (emit_insn (insn));
4337 gcc_assert (!ret);
4339 /* Emit SLB instruction pattern. */
4340 if (!register_operand (src, GET_MODE (dst)))
4341 src = force_reg (GET_MODE (dst), src);
4343 op_res = gen_rtx_MINUS (GET_MODE (dst),
4344 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
4345 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4346 gen_rtx_REG (cc_mode, CC_REGNUM),
4347 const0_rtx));
4348 p = rtvec_alloc (2);
4349 RTVEC_ELT (p, 0) =
4350 gen_rtx_SET (VOIDmode, dst, op_res);
4351 RTVEC_ELT (p, 1) =
4352 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4353 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4355 return true;
4358 return false;
4361 /* Expand code for the insv template. Return true if successful. */
4363 bool
4364 s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
4366 int bitsize = INTVAL (op1);
4367 int bitpos = INTVAL (op2);
4369 /* On z10 we can use the risbg instruction to implement insv. */
4370 if (TARGET_Z10
4371 && ((GET_MODE (dest) == DImode && GET_MODE (src) == DImode)
4372 || (GET_MODE (dest) == SImode && GET_MODE (src) == SImode)))
4374 rtx op;
4375 rtx clobber;
4377 op = gen_rtx_SET (GET_MODE(src),
4378 gen_rtx_ZERO_EXTRACT (GET_MODE (dest), dest, op1, op2),
4379 src);
4380 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4381 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));
4383 return true;
4386 /* We need byte alignment. */
4387 if (bitsize % BITS_PER_UNIT)
4388 return false;
4390 if (bitpos == 0
4391 && memory_operand (dest, VOIDmode)
4392 && (register_operand (src, word_mode)
4393 || const_int_operand (src, VOIDmode)))
4395 /* Emit standard pattern if possible. */
4396 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
4397 if (GET_MODE_BITSIZE (mode) == bitsize)
4398 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
4400 /* (set (ze (mem)) (const_int)). */
4401 else if (const_int_operand (src, VOIDmode))
4403 int size = bitsize / BITS_PER_UNIT;
4404 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
4405 GET_MODE_SIZE (word_mode) - size);
4407 dest = adjust_address (dest, BLKmode, 0);
4408 set_mem_size (dest, GEN_INT (size));
4409 s390_expand_movmem (dest, src_mem, GEN_INT (size));
4412 /* (set (ze (mem)) (reg)). */
4413 else if (register_operand (src, word_mode))
4415 if (bitsize <= GET_MODE_BITSIZE (SImode))
4416 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
4417 const0_rtx), src);
4418 else
4420 /* Emit st,stcmh sequence. */
4421 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
4422 int size = stcmh_width / BITS_PER_UNIT;
4424 emit_move_insn (adjust_address (dest, SImode, size),
4425 gen_lowpart (SImode, src));
4426 set_mem_size (dest, GEN_INT (size));
4427 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
4428 (stcmh_width), const0_rtx),
4429 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
4430 (GET_MODE_BITSIZE (SImode))));
4433 else
4434 return false;
4436 return true;
4439 /* (set (ze (reg)) (const_int)). */
4440 if (TARGET_ZARCH
4441 && register_operand (dest, word_mode)
4442 && (bitpos % 16) == 0
4443 && (bitsize % 16) == 0
4444 && const_int_operand (src, VOIDmode))
4446 HOST_WIDE_INT val = INTVAL (src);
4447 int regpos = bitpos + bitsize;
4449 while (regpos > bitpos)
4451 enum machine_mode putmode;
4452 int putsize;
4454 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
4455 putmode = SImode;
4456 else
4457 putmode = HImode;
4459 putsize = GET_MODE_BITSIZE (putmode);
4460 regpos -= putsize;
4461 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
4462 GEN_INT (putsize),
4463 GEN_INT (regpos)),
4464 gen_int_mode (val, putmode));
4465 val >>= putsize;
4467 gcc_assert (regpos == bitpos);
4468 return true;
4471 return false;
4474 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
4475 register that holds VAL of mode MODE shifted by COUNT bits. */
4477 static inline rtx
4478 s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
4480 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
4481 NULL_RTX, 1, OPTAB_DIRECT);
4482 return expand_simple_binop (SImode, ASHIFT, val, count,
4483 NULL_RTX, 1, OPTAB_DIRECT);
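/* For example (values chosen for illustration): with MODE == QImode and
   COUNT == (const_int 8), a VAL of 0x1ff is first masked to 0xff and then
   shifted to 0xff00, i.e. the byte value positioned at bits 8..15 of the
   SImode word.  */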
4486 /* Structure to hold the initial parameters for a compare_and_swap operation
4487 in HImode and QImode. */
4489 struct alignment_context
4491 rtx memsi; /* SI aligned memory location. */
4492 rtx shift; /* Bit offset with regard to lsb. */
4493 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
4494 rtx modemaski; /* ~modemask */
4495 bool aligned; /* True if memory is aligned, false otherwise. */
4498 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
4499 structure AC for transparent simplification if the memory alignment is known
4500 to be at least 32 bits. MEM is the memory location for the actual operation
4501 and MODE its mode. */
4503 static void
4504 init_alignment_context (struct alignment_context *ac, rtx mem,
4505 enum machine_mode mode)
4507 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
4508 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
4510 if (ac->aligned)
4511 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
4512 else
4514 /* Alignment is unknown. */
4515 rtx byteoffset, addr, align;
4517 /* Force the address into a register. */
4518 addr = force_reg (Pmode, XEXP (mem, 0));
4520 /* Align it to SImode. */
4521 align = expand_simple_binop (Pmode, AND, addr,
4522 GEN_INT (-GET_MODE_SIZE (SImode)),
4523 NULL_RTX, 1, OPTAB_DIRECT);
4524 /* Generate MEM. */
4525 ac->memsi = gen_rtx_MEM (SImode, align);
4526 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
4527 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
4528 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
4530 /* Calculate shiftcount. */
4531 byteoffset = expand_simple_binop (Pmode, AND, addr,
4532 GEN_INT (GET_MODE_SIZE (SImode) - 1),
4533 NULL_RTX, 1, OPTAB_DIRECT);
4534 /* As we already have some offset, evaluate the remaining distance. */
4535 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
4536 NULL_RTX, 1, OPTAB_DIRECT);
4539 /* Shift is the byte count, but we need the bitcount. */
4540 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT),
4541 NULL_RTX, 1, OPTAB_DIRECT);
4542 /* Calculate masks. */
4543 ac->modemask = expand_simple_binop (SImode, ASHIFT,
4544 GEN_INT (GET_MODE_MASK (mode)), ac->shift,
4545 NULL_RTX, 1, OPTAB_DIRECT);
4546 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
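/* Worked example for the unaligned case (addresses are illustrative):
   a QImode access at address 0x1002 gives align = 0x1000, byteoffset = 2
   and shift = ((4 - 1) - 2) * BITS_PER_UNIT = 8; since s390 is big-endian
   the byte indeed occupies bits 8..15 of the containing SImode word, and
   modemask becomes 0xff << 8.  */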
4549 /* Expand an atomic compare and swap operation for HImode and QImode. MEM is
4550 the memory location, CMP the old value to compare MEM with and NEW the value
4551 to set if CMP == MEM.
4552 CMP is never in memory for compare_and_swap_cc because
4553 expand_bool_compare_and_swap puts it into a register for later compare. */
4555 void
4556 s390_expand_cs_hqi (enum machine_mode mode, rtx target, rtx mem, rtx cmp, rtx new)
4558 struct alignment_context ac;
4559 rtx cmpv, newv, val, resv, cc;
4560 rtx res = gen_reg_rtx (SImode);
4561 rtx csloop = gen_label_rtx ();
4562 rtx csend = gen_label_rtx ();
4564 gcc_assert (register_operand (target, VOIDmode));
4565 gcc_assert (MEM_P (mem));
4567 init_alignment_context (&ac, mem, mode);
4569 /* Shift the values to the correct bit positions. */
4570 if (!(ac.aligned && MEM_P (cmp)))
4571 cmp = s390_expand_mask_and_shift (cmp, mode, ac.shift);
4572 if (!(ac.aligned && MEM_P (new)))
4573 new = s390_expand_mask_and_shift (new, mode, ac.shift);
4575 /* Load full word. Subsequent loads are performed by CS. */
4576 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
4577 NULL_RTX, 1, OPTAB_DIRECT);
4579 /* Start CS loop. */
4580 emit_label (csloop);
4581 /* val = "<mem>00..0<mem>"
4582 * cmp = "00..0<cmp>00..0"
4583 * new = "00..0<new>00..0"
4586 /* Patch cmp and new with val at correct position. */
4587 if (ac.aligned && MEM_P (cmp))
4589 cmpv = force_reg (SImode, val);
4590 store_bit_field (cmpv, GET_MODE_BITSIZE (mode), 0, SImode, cmp);
4592 else
4593 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
4594 NULL_RTX, 1, OPTAB_DIRECT));
4595 if (ac.aligned && MEM_P (new))
4597 newv = force_reg (SImode, val);
4598 store_bit_field (newv, GET_MODE_BITSIZE (mode), 0, SImode, new);
4600 else
4601 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
4602 NULL_RTX, 1, OPTAB_DIRECT));
4604 /* Jump to end if we're done (likely?). */
4605 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi,
4606 cmpv, newv));
4608 /* Check for changes outside mode. */
4609 resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
4610 NULL_RTX, 1, OPTAB_DIRECT);
4611 cc = s390_emit_compare (NE, resv, val);
4612 emit_move_insn (val, resv);
4613 /* Loop internal if so. */
4614 s390_emit_jump (csloop, cc);
4616 emit_label (csend);
4618 /* Return the correct part of the bitfield. */
4619 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
4620 NULL_RTX, 1, OPTAB_DIRECT), 1);
4623 /* Expand an atomic operation CODE of mode MODE. MEM is the memory location
4624 and VAL the value to play with. If AFTER is true then store the value
4625 MEM holds after the operation, if AFTER is false then store the value MEM
4626 holds before the operation. If TARGET is zero then discard that value, else
4627 store it to TARGET. */
4629 void
4630 s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
4631 rtx target, rtx mem, rtx val, bool after)
4633 struct alignment_context ac;
4634 rtx cmp;
4635 rtx new = gen_reg_rtx (SImode);
4636 rtx orig = gen_reg_rtx (SImode);
4637 rtx csloop = gen_label_rtx ();
4639 gcc_assert (!target || register_operand (target, VOIDmode));
4640 gcc_assert (MEM_P (mem));
4642 init_alignment_context (&ac, mem, mode);
4644 /* Shift val to the correct bit positions.
4645 Preserve "icm", but prevent "ex icm". */
4646 if (!(ac.aligned && code == SET && MEM_P (val)))
4647 val = s390_expand_mask_and_shift (val, mode, ac.shift);
4649 /* Further preparation insns. */
4650 if (code == PLUS || code == MINUS)
4651 emit_move_insn (orig, val);
4652 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
4653 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
4654 NULL_RTX, 1, OPTAB_DIRECT);
4656 /* Load full word. Subsequent loads are performed by CS. */
4657 cmp = force_reg (SImode, ac.memsi);
4659 /* Start CS loop. */
4660 emit_label (csloop);
4661 emit_move_insn (new, cmp);
4663 /* Patch new with val at correct position. */
4664 switch (code)
4666 case PLUS:
4667 case MINUS:
4668 val = expand_simple_binop (SImode, code, new, orig,
4669 NULL_RTX, 1, OPTAB_DIRECT);
4670 val = expand_simple_binop (SImode, AND, val, ac.modemask,
4671 NULL_RTX, 1, OPTAB_DIRECT);
4672 /* FALLTHRU */
4673 case SET:
4674 if (ac.aligned && MEM_P (val))
4675 store_bit_field (new, GET_MODE_BITSIZE (mode), 0, SImode, val);
4676 else
4678 new = expand_simple_binop (SImode, AND, new, ac.modemaski,
4679 NULL_RTX, 1, OPTAB_DIRECT);
4680 new = expand_simple_binop (SImode, IOR, new, val,
4681 NULL_RTX, 1, OPTAB_DIRECT);
4683 break;
4684 case AND:
4685 case IOR:
4686 case XOR:
4687 new = expand_simple_binop (SImode, code, new, val,
4688 NULL_RTX, 1, OPTAB_DIRECT);
4689 break;
4690 case MULT: /* NAND */
4691 new = expand_simple_binop (SImode, XOR, new, ac.modemask,
4692 NULL_RTX, 1, OPTAB_DIRECT);
4693 new = expand_simple_binop (SImode, AND, new, val,
4694 NULL_RTX, 1, OPTAB_DIRECT);
4695 break;
4696 default:
4697 gcc_unreachable ();
4700 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
4701 ac.memsi, cmp, new));
4703 /* Return the correct part of the bitfield. */
4704 if (target)
4705 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
4706 after ? new : cmp, ac.shift,
4707 NULL_RTX, 1, OPTAB_DIRECT), 1);
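/* Illustrative (assumed) use: expanding a __sync_fetch_and_add on a
   QImode object is expected to reach this routine with CODE == PLUS and
   AFTER == false, so TARGET receives the value the byte held before the
   addition, extracted from CMP once the CS loop has succeeded.  */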
4710 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
4711 We need to emit DTP-relative relocations. */
4713 static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
4715 static void
4716 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
4718 switch (size)
4720 case 4:
4721 fputs ("\t.long\t", file);
4722 break;
4723 case 8:
4724 fputs ("\t.quad\t", file);
4725 break;
4726 default:
4727 gcc_unreachable ();
4729 output_addr_const (file, x);
4730 fputs ("@DTPOFF", file);
4733 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
4734 /* Implement TARGET_MANGLE_TYPE. */
4736 static const char *
4737 s390_mangle_type (const_tree type)
4739 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
4740 && TARGET_LONG_DOUBLE_128)
4741 return "g";
4743 /* For all other types, use normal C++ mangling. */
4744 return NULL;
4746 #endif
4748 /* In the name of slightly smaller debug output, and to cater to
4749 general assembler lossage, recognize various UNSPEC sequences
4750 and turn them back into a direct symbol reference. */
4752 static rtx
4753 s390_delegitimize_address (rtx orig_x)
4755 rtx x = orig_x, y;
4757 if (GET_CODE (x) != MEM)
4758 return orig_x;
4760 x = XEXP (x, 0);
4761 if (GET_CODE (x) == PLUS
4762 && GET_CODE (XEXP (x, 1)) == CONST
4763 && GET_CODE (XEXP (x, 0)) == REG
4764 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4766 y = XEXP (XEXP (x, 1), 0);
4767 if (GET_CODE (y) == UNSPEC
4768 && XINT (y, 1) == UNSPEC_GOT)
4769 return XVECEXP (y, 0, 0);
4770 return orig_x;
4773 if (GET_CODE (x) == CONST)
4775 y = XEXP (x, 0);
4776 if (GET_CODE (y) == UNSPEC
4777 && XINT (y, 1) == UNSPEC_GOTENT)
4778 return XVECEXP (y, 0, 0);
4779 return orig_x;
4782 return orig_x;
4785 /* Output operand OP to stdio stream FILE.
4786 OP is an address (register + offset) which is not used to address data;
4787 instead the rightmost bits are interpreted as the value. */
4789 static void
4790 print_shift_count_operand (FILE *file, rtx op)
4792 HOST_WIDE_INT offset;
4793 rtx base;
4795 /* Extract base register and offset. */
4796 if (!s390_decompose_shift_count (op, &base, &offset))
4797 gcc_unreachable ();
4799 /* Sanity check. */
4800 if (base)
4802 gcc_assert (GET_CODE (base) == REG);
4803 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4804 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
4807 /* Offsets are restricted to twelve bits. */
4808 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
4809 if (base)
4810 fprintf (file, "(%s)", reg_names[REGNO (base)]);
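/* Example output (operand chosen for illustration): for the address-style
   operand (plus (reg %r2) (const_int 7)) this prints "7(%r2)", while a
   plain (const_int 3) prints just "3".  */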
4813 /* See 'get_some_local_dynamic_name'. */
4815 static int
4816 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
4818 rtx x = *px;
4820 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4822 x = get_pool_constant (x);
4823 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4826 if (GET_CODE (x) == SYMBOL_REF
4827 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4829 cfun->machine->some_ld_name = XSTR (x, 0);
4830 return 1;
4833 return 0;
4836 /* Locate some local-dynamic symbol still in use by this function
4837 so that we can print its name in local-dynamic base patterns. */
4839 static const char *
4840 get_some_local_dynamic_name (void)
4842 rtx insn;
4844 if (cfun->machine->some_ld_name)
4845 return cfun->machine->some_ld_name;
4847 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4848 if (INSN_P (insn)
4849 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4850 return cfun->machine->some_ld_name;
4852 gcc_unreachable ();
4855 /* Output machine-dependent UNSPECs occurring in address constant X
4856 in assembler syntax to stdio stream FILE. Returns true if the
4857 constant X could be recognized, false otherwise. */
4859 bool
4860 s390_output_addr_const_extra (FILE *file, rtx x)
4862 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4863 switch (XINT (x, 1))
4865 case UNSPEC_GOTENT:
4866 output_addr_const (file, XVECEXP (x, 0, 0));
4867 fprintf (file, "@GOTENT");
4868 return true;
4869 case UNSPEC_GOT:
4870 output_addr_const (file, XVECEXP (x, 0, 0));
4871 fprintf (file, "@GOT");
4872 return true;
4873 case UNSPEC_GOTOFF:
4874 output_addr_const (file, XVECEXP (x, 0, 0));
4875 fprintf (file, "@GOTOFF");
4876 return true;
4877 case UNSPEC_PLT:
4878 output_addr_const (file, XVECEXP (x, 0, 0));
4879 fprintf (file, "@PLT");
4880 return true;
4881 case UNSPEC_PLTOFF:
4882 output_addr_const (file, XVECEXP (x, 0, 0));
4883 fprintf (file, "@PLTOFF");
4884 return true;
4885 case UNSPEC_TLSGD:
4886 output_addr_const (file, XVECEXP (x, 0, 0));
4887 fprintf (file, "@TLSGD");
4888 return true;
4889 case UNSPEC_TLSLDM:
4890 assemble_name (file, get_some_local_dynamic_name ());
4891 fprintf (file, "@TLSLDM");
4892 return true;
4893 case UNSPEC_DTPOFF:
4894 output_addr_const (file, XVECEXP (x, 0, 0));
4895 fprintf (file, "@DTPOFF");
4896 return true;
4897 case UNSPEC_NTPOFF:
4898 output_addr_const (file, XVECEXP (x, 0, 0));
4899 fprintf (file, "@NTPOFF");
4900 return true;
4901 case UNSPEC_GOTNTPOFF:
4902 output_addr_const (file, XVECEXP (x, 0, 0));
4903 fprintf (file, "@GOTNTPOFF");
4904 return true;
4905 case UNSPEC_INDNTPOFF:
4906 output_addr_const (file, XVECEXP (x, 0, 0));
4907 fprintf (file, "@INDNTPOFF");
4908 return true;
4911 return false;
4914 /* Output address operand ADDR in assembler syntax to
4915 stdio stream FILE. */
4917 void
4918 print_operand_address (FILE *file, rtx addr)
4920 struct s390_address ad;
4922 if (s390_symref_operand_p (addr, NULL, NULL))
4924 gcc_assert (TARGET_Z10);
4925 output_addr_const (file, addr);
4926 return;
4929 if (!s390_decompose_address (addr, &ad)
4930 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4931 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
4932 output_operand_lossage ("cannot decompose address");
4934 if (ad.disp)
4935 output_addr_const (file, ad.disp);
4936 else
4937 fprintf (file, "0");
4939 if (ad.base && ad.indx)
4940 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4941 reg_names[REGNO (ad.base)]);
4942 else if (ad.base)
4943 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4946 /* Output operand X in assembler syntax to stdio stream FILE.
4947 CODE specified the format flag. The following format flags
4948 are recognized:
4950 'C': print opcode suffix for branch condition.
4951 'D': print opcode suffix for inverse branch condition.
4952 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4953 'G': print the size of the operand in bytes.
4954 'O': print only the displacement of a memory reference.
4955 'R': print only the base register of a memory reference.
4956 'S': print S-type memory reference (base+displacement).
4957 'N': print the second word of a DImode operand.
4958 'M': print the second word of a TImode operand.
4959 'Y': print shift count operand.
4961 'b': print integer X as if it's an unsigned byte.
4962 'c': print integer X as if it's a signed byte.
4963 'x': print integer X as if it's an unsigned halfword.
4964 'h': print integer X as if it's a signed halfword.
4965 'i': print the first nonzero HImode part of X.
4966 'j': print the first HImode part unequal to -1 of X.
4967 'k': print the first nonzero SImode part of X.
4968 'm': print the first SImode part unequal to -1 of X.
4969 'o': print integer X as if it's an unsigned 32bit word. */
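/* For instance, with X == (const_int 0x12348765) the 'x' flag prints
   34661 (0x8765 zero-extended) and the 'h' flag prints -30875 (the same
   halfword sign-extended); the constant is only an illustration.  */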
4971 void
4972 print_operand (FILE *file, rtx x, int code)
4974 switch (code)
4976 case 'C':
4977 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4978 return;
4980 case 'D':
4981 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4982 return;
4984 case 'J':
4985 if (GET_CODE (x) == SYMBOL_REF)
4987 fprintf (file, "%s", ":tls_load:");
4988 output_addr_const (file, x);
4990 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4992 fprintf (file, "%s", ":tls_gdcall:");
4993 output_addr_const (file, XVECEXP (x, 0, 0));
4995 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4997 fprintf (file, "%s", ":tls_ldcall:");
4998 assemble_name (file, get_some_local_dynamic_name ());
5000 else
5001 gcc_unreachable ();
5002 return;
5004 case 'G':
5005 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
5006 return;
5008 case 'O':
5010 struct s390_address ad;
5011 int ret;
5013 gcc_assert (GET_CODE (x) == MEM);
5014 ret = s390_decompose_address (XEXP (x, 0), &ad);
5015 gcc_assert (ret);
5016 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5017 gcc_assert (!ad.indx);
5019 if (ad.disp)
5020 output_addr_const (file, ad.disp);
5021 else
5022 fprintf (file, "0");
5024 return;
5026 case 'R':
5028 struct s390_address ad;
5029 int ret;
5031 gcc_assert (GET_CODE (x) == MEM);
5032 ret = s390_decompose_address (XEXP (x, 0), &ad);
5033 gcc_assert (ret);
5034 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5035 gcc_assert (!ad.indx);
5037 if (ad.base)
5038 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
5039 else
5040 fprintf (file, "0");
5042 return;
5044 case 'S':
5046 struct s390_address ad;
5047 int ret;
5049 gcc_assert (GET_CODE (x) == MEM);
5050 ret = s390_decompose_address (XEXP (x, 0), &ad);
5051 gcc_assert (ret);
5052 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5053 gcc_assert (!ad.indx);
5055 if (ad.disp)
5056 output_addr_const (file, ad.disp);
5057 else
5058 fprintf (file, "0");
5060 if (ad.base)
5061 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
5063 return;
5065 case 'N':
5066 if (GET_CODE (x) == REG)
5067 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
5068 else if (GET_CODE (x) == MEM)
5069 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
5070 else
5071 gcc_unreachable ();
5072 break;
5074 case 'M':
5075 if (GET_CODE (x) == REG)
5076 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
5077 else if (GET_CODE (x) == MEM)
5078 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
5079 else
5080 gcc_unreachable ();
5081 break;
5083 case 'Y':
5084 print_shift_count_operand (file, x);
5085 return;
5088 switch (GET_CODE (x))
5090 case REG:
5091 fprintf (file, "%s", reg_names[REGNO (x)]);
5092 break;
5094 case MEM:
5095 output_address (XEXP (x, 0));
5096 break;
5098 case CONST:
5099 case CODE_LABEL:
5100 case LABEL_REF:
5101 case SYMBOL_REF:
5102 output_addr_const (file, x);
5103 break;
5105 case CONST_INT:
5106 if (code == 'b')
5107 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
5108 else if (code == 'c')
5109 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xff) ^ 0x80) - 0x80);
5110 else if (code == 'x')
5111 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
5112 else if (code == 'h')
5113 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
5114 else if (code == 'i')
5115 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5116 s390_extract_part (x, HImode, 0));
5117 else if (code == 'j')
5118 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5119 s390_extract_part (x, HImode, -1));
5120 else if (code == 'k')
5121 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5122 s390_extract_part (x, SImode, 0));
5123 else if (code == 'm')
5124 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5125 s390_extract_part (x, SImode, -1));
5126 else if (code == 'o')
5127 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
5128 else
5129 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
5130 break;
5132 case CONST_DOUBLE:
5133 gcc_assert (GET_MODE (x) == VOIDmode);
5134 if (code == 'b')
5135 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
5136 else if (code == 'x')
5137 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
5138 else if (code == 'h')
5139 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
5140 else
5141 gcc_unreachable ();
5142 break;
5144 default:
5145 fatal_insn ("UNKNOWN in print_operand !?", x);
5146 break;
5150 /* Target hook for assembling integer objects. We need to define it
5151 here to work around a bug in some versions of GAS, which couldn't
5152 handle values smaller than INT_MIN when printed in decimal. */
5154 static bool
5155 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
5157 if (size == 8 && aligned_p
5158 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
5160 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
5161 INTVAL (x));
5162 return true;
5164 return default_assemble_integer (x, size, aligned_p);
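/* E.g. (assuming a 64-bit HOST_WIDE_INT) a DImode constant of
   -0x4000000000000000 is emitted as "\t.quad\t0xc000000000000000"
   in hexadecimal rather than as a huge negative decimal number.  */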
5167 /* Returns true if register REGNO is used for forming
5168 a memory address in expression X. */
5170 static bool
5171 reg_used_in_mem_p (int regno, rtx x)
5173 enum rtx_code code = GET_CODE (x);
5174 int i, j;
5175 const char *fmt;
5177 if (code == MEM)
5179 if (refers_to_regno_p (regno, regno+1,
5180 XEXP (x, 0), 0))
5181 return true;
5183 else if (code == SET
5184 && GET_CODE (SET_DEST (x)) == PC)
5186 if (refers_to_regno_p (regno, regno+1,
5187 SET_SRC (x), 0))
5188 return true;
5191 fmt = GET_RTX_FORMAT (code);
5192 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5194 if (fmt[i] == 'e'
5195 && reg_used_in_mem_p (regno, XEXP (x, i)))
5196 return true;
5198 else if (fmt[i] == 'E')
5199 for (j = 0; j < XVECLEN (x, i); j++)
5200 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
5201 return true;
5203 return false;
5206 /* Returns true if expression DEP_RTX sets an address register
5207 used by instruction INSN to address memory. */
5209 static bool
5210 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
5212 rtx target, pat;
5214 if (GET_CODE (dep_rtx) == INSN)
5215 dep_rtx = PATTERN (dep_rtx);
5217 if (GET_CODE (dep_rtx) == SET)
5219 target = SET_DEST (dep_rtx);
5220 if (GET_CODE (target) == STRICT_LOW_PART)
5221 target = XEXP (target, 0);
5222 while (GET_CODE (target) == SUBREG)
5223 target = SUBREG_REG (target);
5225 if (GET_CODE (target) == REG)
5227 int regno = REGNO (target);
5229 if (s390_safe_attr_type (insn) == TYPE_LA)
5231 pat = PATTERN (insn);
5232 if (GET_CODE (pat) == PARALLEL)
5234 gcc_assert (XVECLEN (pat, 0) == 2);
5235 pat = XVECEXP (pat, 0, 0);
5237 gcc_assert (GET_CODE (pat) == SET);
5238 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
5240 else if (get_attr_atype (insn) == ATYPE_AGEN)
5241 return reg_used_in_mem_p (regno, PATTERN (insn));
5244 return false;
5247 /* Return 1 if dep_insn sets a register used by insn in the agen unit. */
5250 s390_agen_dep_p (rtx dep_insn, rtx insn)
5252 rtx dep_rtx = PATTERN (dep_insn);
5253 int i;
5255 if (GET_CODE (dep_rtx) == SET
5256 && addr_generation_dependency_p (dep_rtx, insn))
5257 return 1;
5258 else if (GET_CODE (dep_rtx) == PARALLEL)
5260 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
5262 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
5263 return 1;
5266 return 0;
5269 /* A C statement (sans semicolon) to update the integer scheduling priority
5270 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
5271 reduce the priority to execute INSN later. Do not define this macro if
5272 you do not need to adjust the scheduling priorities of insns.
5274 A STD instruction should be scheduled earlier,
5275 in order to use the bypass. */
5277 static int
5278 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
5280 if (! INSN_P (insn))
5281 return priority;
5283 if (s390_tune != PROCESSOR_2084_Z990
5284 && s390_tune != PROCESSOR_2094_Z9_109)
5285 return priority;
5287 switch (s390_safe_attr_type (insn))
5289 case TYPE_FSTOREDF:
5290 case TYPE_FSTORESF:
5291 priority = priority << 3;
5292 break;
5293 case TYPE_STORE:
5294 case TYPE_STM:
5295 priority = priority << 1;
5296 break;
5297 default:
5298 break;
5300 return priority;
5303 /* The number of instructions that can be issued per cycle. */
5305 static int
5306 s390_issue_rate (void)
5308 switch (s390_tune)
5310 case PROCESSOR_2084_Z990:
5311 case PROCESSOR_2094_Z9_109:
5312 return 3;
5313 case PROCESSOR_2097_Z10:
5314 return 2;
5315 default:
5316 return 1;
5320 static int
5321 s390_first_cycle_multipass_dfa_lookahead (void)
5323 return 4;
5327 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
5328 Fix up MEMs as required. */
5330 static void
5331 annotate_constant_pool_refs (rtx *x)
5333 int i, j;
5334 const char *fmt;
5336 gcc_assert (GET_CODE (*x) != SYMBOL_REF
5337 || !CONSTANT_POOL_ADDRESS_P (*x));
5339 /* Literal pool references can only occur inside a MEM ... */
5340 if (GET_CODE (*x) == MEM)
5342 rtx memref = XEXP (*x, 0);
5344 if (GET_CODE (memref) == SYMBOL_REF
5345 && CONSTANT_POOL_ADDRESS_P (memref))
5347 rtx base = cfun->machine->base_reg;
5348 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
5349 UNSPEC_LTREF);
5351 *x = replace_equiv_address (*x, addr);
5352 return;
5355 if (GET_CODE (memref) == CONST
5356 && GET_CODE (XEXP (memref, 0)) == PLUS
5357 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
5358 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
5359 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
5361 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
5362 rtx sym = XEXP (XEXP (memref, 0), 0);
5363 rtx base = cfun->machine->base_reg;
5364 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5365 UNSPEC_LTREF);
5367 *x = replace_equiv_address (*x, plus_constant (addr, off));
5368 return;
5372 /* ... or a load-address type pattern. */
5373 if (GET_CODE (*x) == SET)
5375 rtx addrref = SET_SRC (*x);
5377 if (GET_CODE (addrref) == SYMBOL_REF
5378 && CONSTANT_POOL_ADDRESS_P (addrref))
5380 rtx base = cfun->machine->base_reg;
5381 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
5382 UNSPEC_LTREF);
5384 SET_SRC (*x) = addr;
5385 return;
5388 if (GET_CODE (addrref) == CONST
5389 && GET_CODE (XEXP (addrref, 0)) == PLUS
5390 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
5391 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
5392 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
5394 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
5395 rtx sym = XEXP (XEXP (addrref, 0), 0);
5396 rtx base = cfun->machine->base_reg;
5397 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5398 UNSPEC_LTREF);
5400 SET_SRC (*x) = plus_constant (addr, off);
5401 return;
5405 /* Annotate LTREL_BASE as well. */
5406 if (GET_CODE (*x) == UNSPEC
5407 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5409 rtx base = cfun->machine->base_reg;
5410 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
5411 UNSPEC_LTREL_BASE);
5412 return;
5415 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5416 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5418 if (fmt[i] == 'e')
5420 annotate_constant_pool_refs (&XEXP (*x, i));
5422 else if (fmt[i] == 'E')
5424 for (j = 0; j < XVECLEN (*x, i); j++)
5425 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
5430 /* Split all branches that exceed the maximum distance.
5431 Returns true if this created a new literal pool entry. */
5433 static int
5434 s390_split_branches (void)
5436 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5437 int new_literal = 0, ret;
5438 rtx insn, pat, tmp, target;
5439 rtx *label;
5441 /* We need correct insn addresses. */
5443 shorten_branches (get_insns ());
5445 /* Find all branches that exceed 64KB, and split them. */
5447 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5449 if (GET_CODE (insn) != JUMP_INSN)
5450 continue;
5452 pat = PATTERN (insn);
5453 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5454 pat = XVECEXP (pat, 0, 0);
5455 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
5456 continue;
5458 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
5460 label = &SET_SRC (pat);
5462 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
5464 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
5465 label = &XEXP (SET_SRC (pat), 1);
5466 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
5467 label = &XEXP (SET_SRC (pat), 2);
5468 else
5469 continue;
5471 else
5472 continue;
5474 if (get_attr_length (insn) <= 4)
5475 continue;
5477 /* We are going to use the return register as scratch register,
5478 make sure it will be saved/restored by the prologue/epilogue. */
5479 cfun_frame_layout.save_return_addr_p = 1;
5481 if (!flag_pic)
5483 new_literal = 1;
5484 tmp = force_const_mem (Pmode, *label);
5485 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
5486 INSN_ADDRESSES_NEW (tmp, -1);
5487 annotate_constant_pool_refs (&PATTERN (tmp));
5489 target = temp_reg;
5491 else
5493 new_literal = 1;
5494 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
5495 UNSPEC_LTREL_OFFSET);
5496 target = gen_rtx_CONST (Pmode, target);
5497 target = force_const_mem (Pmode, target);
5498 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
5499 INSN_ADDRESSES_NEW (tmp, -1);
5500 annotate_constant_pool_refs (&PATTERN (tmp));
5502 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
5503 cfun->machine->base_reg),
5504 UNSPEC_LTREL_BASE);
5505 target = gen_rtx_PLUS (Pmode, temp_reg, target);
5508 ret = validate_change (insn, label, target, 0);
5509 gcc_assert (ret);
5512 return new_literal;
5516 /* Find an annotated literal pool symbol referenced in RTX X,
5517 and store it at REF. Will abort if X contains references to
5518 more than one such pool symbol; multiple references to the same
5519 symbol are allowed, however.
5521 The rtx pointed to by REF must be initialized to NULL_RTX
5522 by the caller before calling this routine. */
5524 static void
5525 find_constant_pool_ref (rtx x, rtx *ref)
5527 int i, j;
5528 const char *fmt;
5530 /* Ignore LTREL_BASE references. */
5531 if (GET_CODE (x) == UNSPEC
5532 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5533 return;
5534 /* Likewise POOL_ENTRY insns. */
5535 if (GET_CODE (x) == UNSPEC_VOLATILE
5536 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
5537 return;
5539 gcc_assert (GET_CODE (x) != SYMBOL_REF
5540 || !CONSTANT_POOL_ADDRESS_P (x));
5542 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
5544 rtx sym = XVECEXP (x, 0, 0);
5545 gcc_assert (GET_CODE (sym) == SYMBOL_REF
5546 && CONSTANT_POOL_ADDRESS_P (sym));
5548 if (*ref == NULL_RTX)
5549 *ref = sym;
5550 else
5551 gcc_assert (*ref == sym);
5553 return;
5556 fmt = GET_RTX_FORMAT (GET_CODE (x));
5557 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5559 if (fmt[i] == 'e')
5561 find_constant_pool_ref (XEXP (x, i), ref);
5563 else if (fmt[i] == 'E')
5565 for (j = 0; j < XVECLEN (x, i); j++)
5566 find_constant_pool_ref (XVECEXP (x, i, j), ref);
5571 /* Replace every reference to the annotated literal pool
5572 symbol REF in X by its base plus OFFSET. */
5574 static void
5575 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
5577 int i, j;
5578 const char *fmt;
5580 gcc_assert (*x != ref);
5582 if (GET_CODE (*x) == UNSPEC
5583 && XINT (*x, 1) == UNSPEC_LTREF
5584 && XVECEXP (*x, 0, 0) == ref)
5586 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
5587 return;
5590 if (GET_CODE (*x) == PLUS
5591 && GET_CODE (XEXP (*x, 1)) == CONST_INT
5592 && GET_CODE (XEXP (*x, 0)) == UNSPEC
5593 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
5594 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
5596 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
5597 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
5598 return;
5601 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5602 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5604 if (fmt[i] == 'e')
5606 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
5608 else if (fmt[i] == 'E')
5610 for (j = 0; j < XVECLEN (*x, i); j++)
5611 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
5616 /* Check whether X contains an UNSPEC_LTREL_BASE.
5617 Return its constant pool symbol if found, NULL_RTX otherwise. */
5619 static rtx
5620 find_ltrel_base (rtx x)
5622 int i, j;
5623 const char *fmt;
5625 if (GET_CODE (x) == UNSPEC
5626 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5627 return XVECEXP (x, 0, 0);
5629 fmt = GET_RTX_FORMAT (GET_CODE (x));
5630 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5632 if (fmt[i] == 'e')
5634 rtx fnd = find_ltrel_base (XEXP (x, i));
5635 if (fnd)
5636 return fnd;
5638 else if (fmt[i] == 'E')
5640 for (j = 0; j < XVECLEN (x, i); j++)
5642 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
5643 if (fnd)
5644 return fnd;
5649 return NULL_RTX;
5652 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
5654 static void
5655 replace_ltrel_base (rtx *x)
5657 int i, j;
5658 const char *fmt;
5660 if (GET_CODE (*x) == UNSPEC
5661 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5663 *x = XVECEXP (*x, 0, 1);
5664 return;
5667 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5668 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5670 if (fmt[i] == 'e')
5672 replace_ltrel_base (&XEXP (*x, i));
5674 else if (fmt[i] == 'E')
5676 for (j = 0; j < XVECLEN (*x, i); j++)
5677 replace_ltrel_base (&XVECEXP (*x, i, j));
5683 /* We keep a list of constants which we have to add to internal
5684 constant tables in the middle of large functions. */
5686 #define NR_C_MODES 11
5687 enum machine_mode constant_modes[NR_C_MODES] =
5689 TFmode, TImode, TDmode,
5690 DFmode, DImode, DDmode,
5691 SFmode, SImode, SDmode,
5692 HImode,
5693 QImode
5696 struct constant
5698 struct constant *next;
5699 rtx value;
5700 rtx label;
5703 struct constant_pool
5705 struct constant_pool *next;
5706 rtx first_insn;
5707 rtx pool_insn;
5708 bitmap insns;
5709 rtx emit_pool_after;
5711 struct constant *constants[NR_C_MODES];
5712 struct constant *execute;
5713 rtx label;
5714 int size;
5717 /* Allocate new constant_pool structure. */
5719 static struct constant_pool *
5720 s390_alloc_pool (void)
5722 struct constant_pool *pool;
5723 int i;
5725 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5726 pool->next = NULL;
5727 for (i = 0; i < NR_C_MODES; i++)
5728 pool->constants[i] = NULL;
5730 pool->execute = NULL;
5731 pool->label = gen_label_rtx ();
5732 pool->first_insn = NULL_RTX;
5733 pool->pool_insn = NULL_RTX;
5734 pool->insns = BITMAP_ALLOC (NULL);
5735 pool->size = 0;
5736 pool->emit_pool_after = NULL_RTX;
5738 return pool;
5741 /* Create new constant pool covering instructions starting at INSN
5742 and chain it to the end of POOL_LIST. */
5744 static struct constant_pool *
5745 s390_start_pool (struct constant_pool **pool_list, rtx insn)
5747 struct constant_pool *pool, **prev;
5749 pool = s390_alloc_pool ();
5750 pool->first_insn = insn;
5752 for (prev = pool_list; *prev; prev = &(*prev)->next)
5754 *prev = pool;
5756 return pool;
5759 /* End range of instructions covered by POOL at INSN and emit
5760 placeholder insn representing the pool. */
5762 static void
5763 s390_end_pool (struct constant_pool *pool, rtx insn)
5765 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
5767 if (!insn)
5768 insn = get_last_insn ();
5770 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
5771 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5774 /* Add INSN to the list of insns covered by POOL. */
5776 static void
5777 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
5779 bitmap_set_bit (pool->insns, INSN_UID (insn));
5782 /* Return pool out of POOL_LIST that covers INSN. */
5784 static struct constant_pool *
5785 s390_find_pool (struct constant_pool *pool_list, rtx insn)
5787 struct constant_pool *pool;
5789 for (pool = pool_list; pool; pool = pool->next)
5790 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
5791 break;
5793 return pool;
5796 /* Add constant VAL of mode MODE to the constant pool POOL. */
5798 static void
5799 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
5801 struct constant *c;
5802 int i;
5804 for (i = 0; i < NR_C_MODES; i++)
5805 if (constant_modes[i] == mode)
5806 break;
5807 gcc_assert (i != NR_C_MODES);
5809 for (c = pool->constants[i]; c != NULL; c = c->next)
5810 if (rtx_equal_p (val, c->value))
5811 break;
5813 if (c == NULL)
5815 c = (struct constant *) xmalloc (sizeof *c);
5816 c->value = val;
5817 c->label = gen_label_rtx ();
5818 c->next = pool->constants[i];
5819 pool->constants[i] = c;
5820 pool->size += GET_MODE_SIZE (mode);
5824 /* Find constant VAL of mode MODE in the constant pool POOL.
5825 Return an RTX describing the distance from the start of
5826 the pool to the location of the new constant. */
5828 static rtx
5829 s390_find_constant (struct constant_pool *pool, rtx val,
5830 enum machine_mode mode)
5832 struct constant *c;
5833 rtx offset;
5834 int i;
5836 for (i = 0; i < NR_C_MODES; i++)
5837 if (constant_modes[i] == mode)
5838 break;
5839 gcc_assert (i != NR_C_MODES);
5841 for (c = pool->constants[i]; c != NULL; c = c->next)
5842 if (rtx_equal_p (val, c->value))
5843 break;
5845 gcc_assert (c);
5847 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5848 gen_rtx_LABEL_REF (Pmode, pool->label));
5849 offset = gen_rtx_CONST (Pmode, offset);
5850 return offset;
5853 /* Check whether INSN is an execute. Return the label_ref to its
5854 execute target template if so, NULL_RTX otherwise. */
5856 static rtx
5857 s390_execute_label (rtx insn)
5859 if (GET_CODE (insn) == INSN
5860 && GET_CODE (PATTERN (insn)) == PARALLEL
5861 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5862 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5863 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5865 return NULL_RTX;
5868 /* Add execute target for INSN to the constant pool POOL. */
5870 static void
5871 s390_add_execute (struct constant_pool *pool, rtx insn)
5873 struct constant *c;
5875 for (c = pool->execute; c != NULL; c = c->next)
5876 if (INSN_UID (insn) == INSN_UID (c->value))
5877 break;
5879 if (c == NULL)
5881 c = (struct constant *) xmalloc (sizeof *c);
5882 c->value = insn;
5883 c->label = gen_label_rtx ();
5884 c->next = pool->execute;
5885 pool->execute = c;
5886 pool->size += 6;
5890 /* Find execute target for INSN in the constant pool POOL.
5891 Return an RTX describing the distance from the start of
5892 the pool to the location of the execute target. */
5894 static rtx
5895 s390_find_execute (struct constant_pool *pool, rtx insn)
5897 struct constant *c;
5898 rtx offset;
5900 for (c = pool->execute; c != NULL; c = c->next)
5901 if (INSN_UID (insn) == INSN_UID (c->value))
5902 break;
5904 gcc_assert (c);
5906 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5907 gen_rtx_LABEL_REF (Pmode, pool->label));
5908 offset = gen_rtx_CONST (Pmode, offset);
5909 return offset;
5912 /* For an execute INSN, extract the execute target template. */
5914 static rtx
5915 s390_execute_target (rtx insn)
5917 rtx pattern = PATTERN (insn);
5918 gcc_assert (s390_execute_label (insn));
5920 if (XVECLEN (pattern, 0) == 2)
5922 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5924 else
5926 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5927 int i;
5929 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5930 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5932 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5935 return pattern;
5938 /* Indicate that INSN cannot be duplicated. This is the case for
5939 execute insns that carry a unique label. */
5941 static bool
5942 s390_cannot_copy_insn_p (rtx insn)
5944 rtx label = s390_execute_label (insn);
5945 return label && label != const0_rtx;
5948 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5949 do not emit the pool base label. */
5951 static void
5952 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5954 struct constant *c;
5955 rtx insn = pool->pool_insn;
5956 int i;
5958 /* Switch to rodata section. */
5959 if (TARGET_CPU_ZARCH)
5961 insn = emit_insn_after (gen_pool_section_start (), insn);
5962 INSN_ADDRESSES_NEW (insn, -1);
5965 /* Ensure minimum pool alignment. */
5966 if (TARGET_CPU_ZARCH)
5967 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5968 else
5969 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5970 INSN_ADDRESSES_NEW (insn, -1);
5972 /* Emit pool base label. */
5973 if (!remote_label)
5975 insn = emit_label_after (pool->label, insn);
5976 INSN_ADDRESSES_NEW (insn, -1);
5979 /* Dump constants in descending alignment requirement order,
5980 ensuring proper alignment for every constant. */
5981 for (i = 0; i < NR_C_MODES; i++)
5982 for (c = pool->constants[i]; c; c = c->next)
5984 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5985 rtx value = copy_rtx (c->value);
5986 if (GET_CODE (value) == CONST
5987 && GET_CODE (XEXP (value, 0)) == UNSPEC
5988 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5989 && XVECLEN (XEXP (value, 0), 0) == 1)
5991 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5992 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5993 value = gen_rtx_CONST (VOIDmode, value);
5996 insn = emit_label_after (c->label, insn);
5997 INSN_ADDRESSES_NEW (insn, -1);
5999 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
6000 gen_rtvec (1, value),
6001 UNSPECV_POOL_ENTRY);
6002 insn = emit_insn_after (value, insn);
6003 INSN_ADDRESSES_NEW (insn, -1);
6006 /* Ensure minimum alignment for instructions. */
6007 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
6008 INSN_ADDRESSES_NEW (insn, -1);
6010 /* Output in-pool execute template insns. */
6011 for (c = pool->execute; c; c = c->next)
6013 insn = emit_label_after (c->label, insn);
6014 INSN_ADDRESSES_NEW (insn, -1);
6016 insn = emit_insn_after (s390_execute_target (c->value), insn);
6017 INSN_ADDRESSES_NEW (insn, -1);
6020 /* Switch back to previous section. */
6021 if (TARGET_CPU_ZARCH)
6023 insn = emit_insn_after (gen_pool_section_end (), insn);
6024 INSN_ADDRESSES_NEW (insn, -1);
6027 insn = emit_barrier_after (insn);
6028 INSN_ADDRESSES_NEW (insn, -1);
6030 /* Remove placeholder insn. */
6031 remove_insn (pool->pool_insn);
6034 /* Free all memory used by POOL. */
6036 static void
6037 s390_free_pool (struct constant_pool *pool)
6039 struct constant *c, *next;
6040 int i;
6042 for (i = 0; i < NR_C_MODES; i++)
6043 for (c = pool->constants[i]; c; c = next)
6045 next = c->next;
6046 free (c);
6049 for (c = pool->execute; c; c = next)
6051 next = c->next;
6052 free (c);
6055 BITMAP_FREE (pool->insns);
6056 free (pool);
6060 /* Collect main literal pool. Return NULL on overflow. */
6062 static struct constant_pool *
6063 s390_mainpool_start (void)
6065 struct constant_pool *pool;
6066 rtx insn;
6068 pool = s390_alloc_pool ();
6070 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6072 if (GET_CODE (insn) == INSN
6073 && GET_CODE (PATTERN (insn)) == SET
6074 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
6075 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
6077 gcc_assert (!pool->pool_insn);
6078 pool->pool_insn = insn;
6081 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
6083 s390_add_execute (pool, insn);
6085 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6087 rtx pool_ref = NULL_RTX;
6088 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6089 if (pool_ref)
6091 rtx constant = get_pool_constant (pool_ref);
6092 enum machine_mode mode = get_pool_mode (pool_ref);
6093 s390_add_constant (pool, constant, mode);
6097 /* If hot/cold partitioning is enabled we have to make sure that
6098 the literal pool is emitted in the same section where the
6099 initialization of the literal pool base pointer takes place.
6100 emit_pool_after is only used in the non-overflow case on
6101 non-Z CPUs where we can emit the literal pool at the end of the
6102 function body within the text section. */
6103 if (NOTE_P (insn)
6104 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS
6105 && !pool->emit_pool_after)
6106 pool->emit_pool_after = PREV_INSN (insn);
6109 gcc_assert (pool->pool_insn || pool->size == 0);
6111 if (pool->size >= 4096)
6113 /* We're going to chunkify the pool, so remove the main
6114 pool placeholder insn. */
6115 remove_insn (pool->pool_insn);
6117 s390_free_pool (pool);
6118 pool = NULL;
6121 /* If the function ends with the section where the literal pool
6122 should be emitted, set the marker to its end. */
6123 if (pool && !pool->emit_pool_after)
6124 pool->emit_pool_after = get_last_insn ();
6126 return pool;
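/* The 4096-byte overflow check above corresponds to the 12-bit
   displacement limit of base + displacement addressing: once the pool
   grows beyond that, entries near its end could no longer be reached
   from a single pool base register, so the chunkified pools set up by
   s390_chunkify_start are used instead.  */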
6129 /* POOL holds the main literal pool as collected by s390_mainpool_start.
6130 Modify the current function to output the pool constants as well as
6131 the pool register setup instruction. */
6133 static void
6134 s390_mainpool_finish (struct constant_pool *pool)
6136 rtx base_reg = cfun->machine->base_reg;
6137 rtx insn;
6139 /* If the pool is empty, we're done. */
6140 if (pool->size == 0)
6142 /* We don't actually need a base register after all. */
6143 cfun->machine->base_reg = NULL_RTX;
6145 if (pool->pool_insn)
6146 remove_insn (pool->pool_insn);
6147 s390_free_pool (pool);
6148 return;
6151 /* We need correct insn addresses. */
6152 shorten_branches (get_insns ());
6154 /* On zSeries, we use a LARL to load the pool register. The pool is
6155 located in the .rodata section, so we emit it after the function. */
6156 if (TARGET_CPU_ZARCH)
6158 insn = gen_main_base_64 (base_reg, pool->label);
6159 insn = emit_insn_after (insn, pool->pool_insn);
6160 INSN_ADDRESSES_NEW (insn, -1);
6161 remove_insn (pool->pool_insn);
6163 insn = get_last_insn ();
6164 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6165 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6167 s390_dump_pool (pool, 0);
6170 /* On S/390, if the total size of the function's code plus literal pool
6171 does not exceed 4096 bytes, we use BASR to set up a function base
6172 pointer, and emit the literal pool at the end of the function. */
6173 else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
6174 + pool->size + 8 /* alignment slop */ < 4096)
6176 insn = gen_main_base_31_small (base_reg, pool->label);
6177 insn = emit_insn_after (insn, pool->pool_insn);
6178 INSN_ADDRESSES_NEW (insn, -1);
6179 remove_insn (pool->pool_insn);
6181 insn = emit_label_after (pool->label, insn);
6182 INSN_ADDRESSES_NEW (insn, -1);
6184 /* emit_pool_after will be set by s390_mainpool_start to the
6185 last insn of the section where the literal pool should be
6186 emitted. */
6187 insn = pool->emit_pool_after;
6189 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6190 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6192 s390_dump_pool (pool, 1);
6195 /* Otherwise, we emit an inline literal pool and use BASR to branch
6196 over it, setting up the pool register at the same time. */
6197 else
6199 rtx pool_end = gen_label_rtx ();
6201 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
6202 insn = emit_insn_after (insn, pool->pool_insn);
6203 INSN_ADDRESSES_NEW (insn, -1);
6204 remove_insn (pool->pool_insn);
6206 insn = emit_label_after (pool->label, insn);
6207 INSN_ADDRESSES_NEW (insn, -1);
6209 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6210 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6212 insn = emit_label_after (pool_end, pool->pool_insn);
6213 INSN_ADDRESSES_NEW (insn, -1);
6215 s390_dump_pool (pool, 1);
6219 /* Replace all literal pool references. */
6221 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6223 if (INSN_P (insn))
6224 replace_ltrel_base (&PATTERN (insn));
6226 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6228 rtx addr, pool_ref = NULL_RTX;
6229 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6230 if (pool_ref)
6232 if (s390_execute_label (insn))
6233 addr = s390_find_execute (pool, insn);
6234 else
6235 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
6236 get_pool_mode (pool_ref));
6238 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6239 INSN_CODE (insn) = -1;
6245 /* Free the pool. */
6246 s390_free_pool (pool);
6249 /* POOL holds the main literal pool as collected by s390_mainpool_start.
6250 We have decided we cannot use this pool, so revert all changes
6251 to the current function that were done by s390_mainpool_start. */
6252 static void
6253 s390_mainpool_cancel (struct constant_pool *pool)
6255 /* We didn't actually change the instruction stream, so simply
6256 free the pool memory. */
6257 s390_free_pool (pool);
6261 /* Chunkify the literal pool. */
6263 #define S390_POOL_CHUNK_MIN 0xc00
6264 #define S390_POOL_CHUNK_MAX 0xe00
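/* These chunk limits (3072 and 3584 bytes) stay below the 4096-byte
   range reachable with a 12-bit displacement, which presumably leaves
   room for alignment padding, execute templates and the base reload
   code inserted while chunkifying.  */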
6266 static struct constant_pool *
6267 s390_chunkify_start (void)
6269 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
6270 int extra_size = 0;
6271 bitmap far_labels;
6272 rtx pending_ltrel = NULL_RTX;
6273 rtx insn;
6275 rtx (*gen_reload_base) (rtx, rtx) =
6276 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
6279 /* We need correct insn addresses. */
6281 shorten_branches (get_insns ());
6283 /* Scan all insns and move literals to pool chunks. */
6285 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6287 bool section_switch_p = false;
6289 /* Check for pending LTREL_BASE. */
6290 if (INSN_P (insn))
6292 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
6293 if (ltrel_base)
6295 gcc_assert (ltrel_base == pending_ltrel);
6296 pending_ltrel = NULL_RTX;
6300 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
6302 if (!curr_pool)
6303 curr_pool = s390_start_pool (&pool_list, insn);
6305 s390_add_execute (curr_pool, insn);
6306 s390_add_pool_insn (curr_pool, insn);
6308 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6310 rtx pool_ref = NULL_RTX;
6311 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6312 if (pool_ref)
6314 rtx constant = get_pool_constant (pool_ref);
6315 enum machine_mode mode = get_pool_mode (pool_ref);
6317 if (!curr_pool)
6318 curr_pool = s390_start_pool (&pool_list, insn);
6320 s390_add_constant (curr_pool, constant, mode);
6321 s390_add_pool_insn (curr_pool, insn);
6323 /* Don't split the pool chunk between a LTREL_OFFSET load
6324 and the corresponding LTREL_BASE. */
6325 if (GET_CODE (constant) == CONST
6326 && GET_CODE (XEXP (constant, 0)) == UNSPEC
6327 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
6329 gcc_assert (!pending_ltrel);
6330 pending_ltrel = pool_ref;
6335 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
6337 if (curr_pool)
6338 s390_add_pool_insn (curr_pool, insn);
6339 /* An LTREL_BASE must follow within the same basic block. */
6340 gcc_assert (!pending_ltrel);
6343 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
6344 section_switch_p = true;
6346 if (!curr_pool
6347 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
6348 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
6349 continue;
6351 if (TARGET_CPU_ZARCH)
6353 if (curr_pool->size < S390_POOL_CHUNK_MAX)
6354 continue;
6356 s390_end_pool (curr_pool, NULL_RTX);
6357 curr_pool = NULL;
6359 else
6361 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
6362 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
6363 + extra_size;
6365 /* We will later have to insert base register reload insns.
6366 Those will have an effect on code size, which we need to
6367 consider here. This calculation makes rather pessimistic
6368 worst-case assumptions. */
6369 if (GET_CODE (insn) == CODE_LABEL)
6370 extra_size += 6;
6372 if (chunk_size < S390_POOL_CHUNK_MIN
6373 && curr_pool->size < S390_POOL_CHUNK_MIN
6374 && !section_switch_p)
6375 continue;
6377 /* Pool chunks can only be inserted after BARRIERs ... */
6378 if (GET_CODE (insn) == BARRIER)
6380 s390_end_pool (curr_pool, insn);
6381 curr_pool = NULL;
6382 extra_size = 0;
6385 /* ... so if we don't find one in time, create one. */
6386 else if (chunk_size > S390_POOL_CHUNK_MAX
6387 || curr_pool->size > S390_POOL_CHUNK_MAX
6388 || section_switch_p)
6390 rtx label, jump, barrier;
6392 if (!section_switch_p)
6394 /* We can insert the barrier only after a 'real' insn. */
6395 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
6396 continue;
6397 if (get_attr_length (insn) == 0)
6398 continue;
6399 /* Don't separate LTREL_BASE from the corresponding
6400 LTREL_OFFSET load. */
6401 if (pending_ltrel)
6402 continue;
6404 else
6406 gcc_assert (!pending_ltrel);
6408 /* The old pool has to end before the section switch
6409 note in order to make it part of the current
6410 section. */
6411 insn = PREV_INSN (insn);
6414 label = gen_label_rtx ();
6415 jump = emit_jump_insn_after (gen_jump (label), insn);
6416 barrier = emit_barrier_after (jump);
6417 insn = emit_label_after (label, barrier);
6418 JUMP_LABEL (jump) = label;
6419 LABEL_NUSES (label) = 1;
6421 INSN_ADDRESSES_NEW (jump, -1);
6422 INSN_ADDRESSES_NEW (barrier, -1);
6423 INSN_ADDRESSES_NEW (insn, -1);
6425 s390_end_pool (curr_pool, barrier);
6426 curr_pool = NULL;
6427 extra_size = 0;
6432 if (curr_pool)
6433 s390_end_pool (curr_pool, NULL_RTX);
6434 gcc_assert (!pending_ltrel);
6436 /* Find all labels that are branched into
6437 from an insn belonging to a different chunk. */
6439 far_labels = BITMAP_ALLOC (NULL);
6441 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6443 /* Labels marked with LABEL_PRESERVE_P can be targets
6444 of non-local jumps, so we have to mark them.
6445 The same holds for named labels.
6447 Don't do that, however, if it is the label before
6448 a jump table. */
6450 if (GET_CODE (insn) == CODE_LABEL
6451 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
6453 rtx vec_insn = next_real_insn (insn);
6454 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6455 PATTERN (vec_insn) : NULL_RTX;
6456 if (!vec_pat
6457 || !(GET_CODE (vec_pat) == ADDR_VEC
6458 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6459 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
6462 /* If we have a direct jump (conditional or unconditional)
6463 or a casesi jump, check all potential targets. */
6464 else if (GET_CODE (insn) == JUMP_INSN)
6466 rtx pat = PATTERN (insn);
6467 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
6468 pat = XVECEXP (pat, 0, 0);
6470 if (GET_CODE (pat) == SET)
6472 rtx label = JUMP_LABEL (insn);
6473 if (label)
6475 if (s390_find_pool (pool_list, label)
6476 != s390_find_pool (pool_list, insn))
6477 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6480 else if (GET_CODE (pat) == PARALLEL
6481 && XVECLEN (pat, 0) == 2
6482 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
6483 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
6484 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
6486 /* Find the jump table used by this casesi jump. */
6487 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
6488 rtx vec_insn = next_real_insn (vec_label);
6489 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6490 PATTERN (vec_insn) : NULL_RTX;
6491 if (vec_pat
6492 && (GET_CODE (vec_pat) == ADDR_VEC
6493 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6495 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
6497 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
6499 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
6501 if (s390_find_pool (pool_list, label)
6502 != s390_find_pool (pool_list, insn))
6503 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6510 /* Insert base register reload insns before every pool. */
6512 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6514 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6515 curr_pool->label);
6516 rtx insn = curr_pool->first_insn;
6517 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
6520 /* Insert base register reload insns at every far label. */
6522 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6523 if (GET_CODE (insn) == CODE_LABEL
6524 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
6526 struct constant_pool *pool = s390_find_pool (pool_list, insn);
6527 if (pool)
6529 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6530 pool->label);
6531 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
6536 BITMAP_FREE (far_labels);
6539 /* Recompute insn addresses. */
6541 init_insn_lengths ();
6542 shorten_branches (get_insns ());
6544 return pool_list;
6547 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6548 After we have decided to use this list, finish implementing
6549 all changes to the current function as required. */
6551 static void
6552 s390_chunkify_finish (struct constant_pool *pool_list)
6554 struct constant_pool *curr_pool = NULL;
6555 rtx insn;
6558 /* Replace all literal pool references. */
6560 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6562 if (INSN_P (insn))
6563 replace_ltrel_base (&PATTERN (insn));
6565 curr_pool = s390_find_pool (pool_list, insn);
6566 if (!curr_pool)
6567 continue;
6569 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6571 rtx addr, pool_ref = NULL_RTX;
6572 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6573 if (pool_ref)
6575 if (s390_execute_label (insn))
6576 addr = s390_find_execute (curr_pool, insn);
6577 else
6578 addr = s390_find_constant (curr_pool,
6579 get_pool_constant (pool_ref),
6580 get_pool_mode (pool_ref));
6582 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6583 INSN_CODE (insn) = -1;
6588 /* Dump out all literal pools. */
6590 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6591 s390_dump_pool (curr_pool, 0);
6593 /* Free pool list. */
6595 while (pool_list)
6597 struct constant_pool *next = pool_list->next;
6598 s390_free_pool (pool_list);
6599 pool_list = next;
6603 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6604 We have decided we cannot use this list, so revert all changes
6605 to the current function that were done by s390_chunkify_start. */
6607 static void
6608 s390_chunkify_cancel (struct constant_pool *pool_list)
6610 struct constant_pool *curr_pool = NULL;
6611 rtx insn;
6613 /* Remove all pool placeholder insns. */
6615 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6617 /* Did we insert an extra barrier? Remove it. */
6618 rtx barrier = PREV_INSN (curr_pool->pool_insn);
6619 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
6620 rtx label = NEXT_INSN (curr_pool->pool_insn);
6622 if (jump && GET_CODE (jump) == JUMP_INSN
6623 && barrier && GET_CODE (barrier) == BARRIER
6624 && label && GET_CODE (label) == CODE_LABEL
6625 && GET_CODE (PATTERN (jump)) == SET
6626 && SET_DEST (PATTERN (jump)) == pc_rtx
6627 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
6628 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
6630 remove_insn (jump);
6631 remove_insn (barrier);
6632 remove_insn (label);
6635 remove_insn (curr_pool->pool_insn);
6638 /* Remove all base register reload insns. */
6640 for (insn = get_insns (); insn; )
6642 rtx next_insn = NEXT_INSN (insn);
6644 if (GET_CODE (insn) == INSN
6645 && GET_CODE (PATTERN (insn)) == SET
6646 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
6647 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
6648 remove_insn (insn);
6650 insn = next_insn;
6653 /* Free pool list. */
6655 while (pool_list)
6657 struct constant_pool *next = pool_list->next;
6658 s390_free_pool (pool_list);
6659 pool_list = next;
6664 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
6666 void
6667 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
6669 REAL_VALUE_TYPE r;
6671 switch (GET_MODE_CLASS (mode))
6673 case MODE_FLOAT:
6674 case MODE_DECIMAL_FLOAT:
6675 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
6677 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
6678 assemble_real (r, mode, align);
6679 break;
6681 case MODE_INT:
6682 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
6683 break;
6685 default:
6686 gcc_unreachable ();
6691 /* Return an RTL expression representing the value of the return address
6692 for the frame COUNT steps up from the current frame. FRAME is the
6693 frame pointer of that frame. */
6695 rtx
6696 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6698 int offset;
6699 rtx addr;
6701 /* Without backchain, we fail for all but the current frame. */
6703 if (!TARGET_BACKCHAIN && count > 0)
6704 return NULL_RTX;
6706 /* For the current frame, we need to make sure the initial
6707 value of RETURN_REGNUM is actually saved. */
6709 if (count == 0)
6711 /* On non-z architectures branch splitting could overwrite r14. */
6712 if (TARGET_CPU_ZARCH)
6713 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
6714 else
6716 cfun_frame_layout.save_return_addr_p = true;
6717 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6721 if (TARGET_PACKED_STACK)
6722 offset = -2 * UNITS_PER_WORD;
6723 else
6724 offset = RETURN_REGNUM * UNITS_PER_WORD;
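/* With the standard layout this is 14 * UNITS_PER_WORD, i.e. 56 bytes
   on 31 bit or 112 bytes on 64 bit: the %r14 save slot within that
   frame's register save area.  */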
6726 addr = plus_constant (frame, offset);
6727 addr = memory_address (Pmode, addr);
6728 return gen_rtx_MEM (Pmode, addr);
6731 /* Return an RTL expression representing the back chain stored in
6732 the current stack frame. */
6734 rtx
6735 s390_back_chain_rtx (void)
6737 rtx chain;
6739 gcc_assert (TARGET_BACKCHAIN);
6741 if (TARGET_PACKED_STACK)
6742 chain = plus_constant (stack_pointer_rtx,
6743 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6744 else
6745 chain = stack_pointer_rtx;
6747 chain = gen_rtx_MEM (Pmode, chain);
6748 return chain;
6751 /* Find the first call-clobbered register unused in the current function.
6752 It could be used as the base register in a leaf function
6753 or for holding the return address before the epilogue. */
6755 static int
6756 find_unused_clobbered_reg (void)
6758 int i;
6759 for (i = 0; i < 6; i++)
6760 if (!df_regs_ever_live_p (i))
6761 return i;
6762 return 0;
6766 /* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
6767 clobbered hard regs in SETREG. */
6769 static void
6770 s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
6772 int *regs_ever_clobbered = (int *)data;
6773 unsigned int i, regno;
6774 enum machine_mode mode = GET_MODE (setreg);
6776 if (GET_CODE (setreg) == SUBREG)
6778 rtx inner = SUBREG_REG (setreg);
6779 if (!GENERAL_REG_P (inner))
6780 return;
6781 regno = subreg_regno (setreg);
6783 else if (GENERAL_REG_P (setreg))
6784 regno = REGNO (setreg);
6785 else
6786 return;
6788 for (i = regno;
6789 i < regno + HARD_REGNO_NREGS (regno, mode);
6790 i++)
6791 regs_ever_clobbered[i] = 1;
6794 /* Walks through all basic blocks of the current function looking
6795 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
6796 of the passed integer array REGS_EVER_CLOBBERED are set to one for
6797 each of those regs. */
6799 static void
6800 s390_regs_ever_clobbered (int *regs_ever_clobbered)
6802 basic_block cur_bb;
6803 rtx cur_insn;
6804 unsigned int i;
6806 memset (regs_ever_clobbered, 0, 16 * sizeof (int));
6808 /* For non-leaf functions we have to consider all call clobbered regs to be
6809 clobbered. */
6810 if (!current_function_is_leaf)
6812 for (i = 0; i < 16; i++)
6813 regs_ever_clobbered[i] = call_really_used_regs[i];
6816 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
6817 this work is done by liveness analysis (mark_regs_live_at_end).
6818 Special care is needed for functions containing landing pads. Landing pads
6819 may use the eh registers, but the code which sets these registers is not
6820 contained in that function. Hence s390_regs_ever_clobbered is not able to
6821 deal with this automatically. */
6822 if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p)
6823 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
6824 if (crtl->calls_eh_return
6825 || (cfun->machine->has_landing_pad_p
6826 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
6827 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
6829 /* For nonlocal gotos all call-saved registers have to be saved.
6830 This flag is also set for the unwinding code in libgcc.
6831 See expand_builtin_unwind_init. For regs_ever_live this is done by
6832 reload. */
6833 if (cfun->has_nonlocal_label)
6834 for (i = 0; i < 16; i++)
6835 if (!call_really_used_regs[i])
6836 regs_ever_clobbered[i] = 1;
6838 FOR_EACH_BB (cur_bb)
6840 FOR_BB_INSNS (cur_bb, cur_insn)
6842 if (INSN_P (cur_insn))
6843 note_stores (PATTERN (cur_insn),
6844 s390_reg_clobbered_rtx,
6845 regs_ever_clobbered);
6850 /* Determine the frame area which actually has to be accessed
6851 in the function epilogue. The values are stored at the
6852 given pointers AREA_BOTTOM (address of the lowest used stack
6853 address) and AREA_TOP (address of the first item which does
6854 not belong to the stack frame). */
6856 static void
6857 s390_frame_area (int *area_bottom, int *area_top)
6859 int b, t;
6860 int i;
6862 b = INT_MAX;
6863 t = INT_MIN;
6865 if (cfun_frame_layout.first_restore_gpr != -1)
6867 b = (cfun_frame_layout.gprs_offset
6868 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6869 t = b + (cfun_frame_layout.last_restore_gpr
6870 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6873 if (TARGET_64BIT && cfun_save_high_fprs_p)
6875 b = MIN (b, cfun_frame_layout.f8_offset);
6876 t = MAX (t, (cfun_frame_layout.f8_offset
6877 + cfun_frame_layout.high_fprs * 8));
6880 if (!TARGET_64BIT)
6881 for (i = 2; i < 4; i++)
6882 if (cfun_fpr_bit_p (i))
6884 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6885 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6888 *area_bottom = b;
6889 *area_top = t;
6892 /* Fill cfun->machine with info about register usage of current function.
6893 Return in CLOBBERED_REGS which GPRs are currently considered set. */
6895 static void
6896 s390_register_info (int clobbered_regs[])
6898 int i, j;
6900 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6901 cfun_frame_layout.fpr_bitmap = 0;
6902 cfun_frame_layout.high_fprs = 0;
6903 if (TARGET_64BIT)
6904 for (i = 24; i < 32; i++)
6905 if (df_regs_ever_live_p (i) && !global_regs[i])
6907 cfun_set_fpr_bit (i - 16);
6908 cfun_frame_layout.high_fprs++;
6911 /* Find first and last gpr to be saved. We trust regs_ever_live
6912 data, except that we don't save and restore global registers.
6914 Also, all registers with special meaning to the compiler need
6915 to be handled separately. */
6917 s390_regs_ever_clobbered (clobbered_regs);
6919 for (i = 0; i < 16; i++)
6920 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];
6922 if (frame_pointer_needed)
6923 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;
6925 if (flag_pic)
6926 clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
6927 |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);
6929 clobbered_regs[BASE_REGNUM]
6930 |= (cfun->machine->base_reg
6931 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
6933 clobbered_regs[RETURN_REGNUM]
6934 |= (!current_function_is_leaf
6935 || TARGET_TPF_PROFILING
6936 || cfun->machine->split_branches_pending_p
6937 || cfun_frame_layout.save_return_addr_p
6938 || crtl->calls_eh_return
6939 || cfun->stdarg);
6941 clobbered_regs[STACK_POINTER_REGNUM]
6942 |= (!current_function_is_leaf
6943 || TARGET_TPF_PROFILING
6944 || cfun_save_high_fprs_p
6945 || get_frame_size () > 0
6946 || cfun->calls_alloca
6947 || cfun->stdarg);
6949 for (i = 6; i < 16; i++)
6950 if (df_regs_ever_live_p (i) || clobbered_regs[i])
6951 break;
6952 for (j = 15; j > i; j--)
6953 if (df_regs_ever_live_p (j) || clobbered_regs[j])
6954 break;
6956 if (i == 16)
6958 /* Nothing to save/restore. */
6959 cfun_frame_layout.first_save_gpr_slot = -1;
6960 cfun_frame_layout.last_save_gpr_slot = -1;
6961 cfun_frame_layout.first_save_gpr = -1;
6962 cfun_frame_layout.first_restore_gpr = -1;
6963 cfun_frame_layout.last_save_gpr = -1;
6964 cfun_frame_layout.last_restore_gpr = -1;
6966 else
6968 /* Save slots for gprs from i to j. */
6969 cfun_frame_layout.first_save_gpr_slot = i;
6970 cfun_frame_layout.last_save_gpr_slot = j;
6972 for (i = cfun_frame_layout.first_save_gpr_slot;
6973 i < cfun_frame_layout.last_save_gpr_slot + 1;
6974 i++)
6975 if (clobbered_regs[i])
6976 break;
6978 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
6979 if (clobbered_regs[j])
6980 break;
6982 if (i == cfun_frame_layout.last_save_gpr_slot + 1)
6984 /* Nothing to save/restore. */
6985 cfun_frame_layout.first_save_gpr = -1;
6986 cfun_frame_layout.first_restore_gpr = -1;
6987 cfun_frame_layout.last_save_gpr = -1;
6988 cfun_frame_layout.last_restore_gpr = -1;
6990 else
6992 /* Save / Restore from gpr i to j. */
6993 cfun_frame_layout.first_save_gpr = i;
6994 cfun_frame_layout.first_restore_gpr = i;
6995 cfun_frame_layout.last_save_gpr = j;
6996 cfun_frame_layout.last_restore_gpr = j;
7000 if (cfun->stdarg)
7002 /* Varargs functions need to save gprs 2 to 6. */
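/* For example, for `int f (int a, int b, ...)' two GPRs are consumed by
   named arguments, so (assuming the whole va_list register save area is
   needed) save slots get set up for %r4 through %r6 below.  */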
7003 if (cfun->va_list_gpr_size
7004 && crtl->args.info.gprs < GP_ARG_NUM_REG)
7006 int min_gpr = crtl->args.info.gprs;
7007 int max_gpr = min_gpr + cfun->va_list_gpr_size;
7008 if (max_gpr > GP_ARG_NUM_REG)
7009 max_gpr = GP_ARG_NUM_REG;
7011 if (cfun_frame_layout.first_save_gpr == -1
7012 || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
7014 cfun_frame_layout.first_save_gpr = 2 + min_gpr;
7015 cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
7018 if (cfun_frame_layout.last_save_gpr == -1
7019 || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
7021 cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
7022 cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
7026 /* Mark f0 and f2 (31 bit) resp. f0, f2, f4 and f6 (64 bit) to be saved. */
7027 if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
7028 && crtl->args.info.fprs < FP_ARG_NUM_REG)
7030 int min_fpr = crtl->args.info.fprs;
7031 int max_fpr = min_fpr + cfun->va_list_fpr_size;
7032 if (max_fpr > FP_ARG_NUM_REG)
7033 max_fpr = FP_ARG_NUM_REG;
7035 /* ??? This is currently required to ensure proper location
7036 of the fpr save slots within the va_list save area. */
7037 if (TARGET_PACKED_STACK)
7038 min_fpr = 0;
7040 for (i = min_fpr; i < max_fpr; i++)
7041 cfun_set_fpr_bit (i);
7045 if (!TARGET_64BIT)
7046 for (i = 2; i < 4; i++)
7047 if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
7048 cfun_set_fpr_bit (i);
7051 /* Fill cfun->machine with info about frame of current function. */
7053 static void
7054 s390_frame_info (void)
7056 int i;
7058 cfun_frame_layout.frame_size = get_frame_size ();
7059 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
7060 fatal_error ("total size of local variables exceeds architecture limit");
7062 if (!TARGET_PACKED_STACK)
7064 cfun_frame_layout.backchain_offset = 0;
7065 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
7066 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
7067 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
7068 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
7069 * UNITS_PER_WORD);
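/* For example, with %r6 in the first save slot this places the GPR save
   area at offset 24, %f0/%f2 at 64 and %f4/%f6 at 80 on 31 bit
   (48, 128 and 144 on 64 bit), matching the standard S/390 stack
   frame layout.  */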
7071 else if (TARGET_BACKCHAIN) /* kernel stack layout */
7073 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
7074 - UNITS_PER_WORD);
7075 cfun_frame_layout.gprs_offset
7076 = (cfun_frame_layout.backchain_offset
7077 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
7078 * UNITS_PER_WORD);
7080 if (TARGET_64BIT)
7082 cfun_frame_layout.f4_offset
7083 = (cfun_frame_layout.gprs_offset
7084 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7086 cfun_frame_layout.f0_offset
7087 = (cfun_frame_layout.f4_offset
7088 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7090 else
7092 /* On 31 bit we have to take care of the alignment of the
7093 floating point register save slots to provide fastest access. */
7094 cfun_frame_layout.f0_offset
7095 = ((cfun_frame_layout.gprs_offset
7096 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
7097 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7099 cfun_frame_layout.f4_offset
7100 = (cfun_frame_layout.f0_offset
7101 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7104 else /* no backchain */
7106 cfun_frame_layout.f4_offset
7107 = (STACK_POINTER_OFFSET
7108 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7110 cfun_frame_layout.f0_offset
7111 = (cfun_frame_layout.f4_offset
7112 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7114 cfun_frame_layout.gprs_offset
7115 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
7118 if (current_function_is_leaf
7119 && !TARGET_TPF_PROFILING
7120 && cfun_frame_layout.frame_size == 0
7121 && !cfun_save_high_fprs_p
7122 && !cfun->calls_alloca
7123 && !cfun->stdarg)
7124 return;
7126 if (!TARGET_PACKED_STACK)
7127 cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
7128 + crtl->outgoing_args_size
7129 + cfun_frame_layout.high_fprs * 8);
7130 else
7132 if (TARGET_BACKCHAIN)
7133 cfun_frame_layout.frame_size += UNITS_PER_WORD;
7135 /* No alignment trouble here because f8-f15 are only saved under
7136 64 bit. */
7137 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
7138 cfun_frame_layout.f4_offset),
7139 cfun_frame_layout.gprs_offset)
7140 - cfun_frame_layout.high_fprs * 8);
7142 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
7144 for (i = 0; i < 8; i++)
7145 if (cfun_fpr_bit_p (i))
7146 cfun_frame_layout.frame_size += 8;
7148 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
7150 /* If, under 31 bit, an odd number of gprs has to be saved, we have to adjust
7151 the frame size to sustain 8-byte alignment of stack frames. */
7152 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
7153 STACK_BOUNDARY / BITS_PER_UNIT - 1)
7154 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
7156 cfun_frame_layout.frame_size += crtl->outgoing_args_size;
7160 /* Generate frame layout. Fills in register and frame data for the current
7161 function in cfun->machine. This routine can be called multiple times;
7162 it will re-do the complete frame layout every time. */
7164 static void
7165 s390_init_frame_layout (void)
7167 HOST_WIDE_INT frame_size;
7168 int base_used;
7169 int clobbered_regs[16];
7171 /* On S/390 machines, we may need to perform branch splitting, which
7172 will require both base and return address register. We have no
7173 choice but to assume we're going to need them until right at the
7174 end of the machine dependent reorg phase. */
7175 if (!TARGET_CPU_ZARCH)
7176 cfun->machine->split_branches_pending_p = true;
7180 frame_size = cfun_frame_layout.frame_size;
7182 /* Try to predict whether we'll need the base register. */
7183 base_used = cfun->machine->split_branches_pending_p
7184 || crtl->uses_const_pool
7185 || (!DISP_IN_RANGE (frame_size)
7186 && !CONST_OK_FOR_K (frame_size));
7188 /* Decide which register to use as literal pool base. In small
7189 leaf functions, try to use an unused call-clobbered register
7190 as base register to avoid save/restore overhead. */
7191 if (!base_used)
7192 cfun->machine->base_reg = NULL_RTX;
7193 else if (current_function_is_leaf && !df_regs_ever_live_p (5))
7194 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
7195 else
7196 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
7198 s390_register_info (clobbered_regs);
7199 s390_frame_info ();
7201 while (frame_size != cfun_frame_layout.frame_size);
7204 /* Update frame layout. Recompute actual register save data based on
7205 current info and update regs_ever_live for the special registers.
7206 May be called multiple times, but may never cause *more* registers
7207 to be saved than s390_init_frame_layout allocated room for. */
7209 static void
7210 s390_update_frame_layout (void)
7212 int clobbered_regs[16];
7214 s390_register_info (clobbered_regs);
7216 df_set_regs_ever_live (BASE_REGNUM,
7217 clobbered_regs[BASE_REGNUM] ? true : false);
7218 df_set_regs_ever_live (RETURN_REGNUM,
7219 clobbered_regs[RETURN_REGNUM] ? true : false);
7220 df_set_regs_ever_live (STACK_POINTER_REGNUM,
7221 clobbered_regs[STACK_POINTER_REGNUM] ? true : false);
7223 if (cfun->machine->base_reg)
7224 df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
7227 /* Return true if it is legal to put a value with MODE into REGNO. */
7229 bool
7230 s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
7232 switch (REGNO_REG_CLASS (regno))
7234 case FP_REGS:
7235 if (REGNO_PAIR_OK (regno, mode))
7237 if (mode == SImode || mode == DImode)
7238 return true;
7240 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
7241 return true;
7243 break;
7244 case ADDR_REGS:
7245 if (FRAME_REGNO_P (regno) && mode == Pmode)
7246 return true;
7248 /* fallthrough */
7249 case GENERAL_REGS:
7250 if (REGNO_PAIR_OK (regno, mode))
7252 if (TARGET_64BIT
7253 || (mode != TFmode && mode != TCmode && mode != TDmode))
7254 return true;
7256 break;
7257 case CC_REGS:
7258 if (GET_MODE_CLASS (mode) == MODE_CC)
7259 return true;
7260 break;
7261 case ACCESS_REGS:
7262 if (REGNO_PAIR_OK (regno, mode))
7264 if (mode == SImode || mode == Pmode)
7265 return true;
7267 break;
7268 default:
7269 return false;
7272 return false;
7275 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7277 bool
7278 s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
7280 /* Once we've decided upon a register to use as base register, it must
7281 no longer be used for any other purpose. */
7282 if (cfun->machine->base_reg)
7283 if (REGNO (cfun->machine->base_reg) == old_reg
7284 || REGNO (cfun->machine->base_reg) == new_reg)
7285 return false;
7287 return true;
7290 /* Maximum number of registers to represent a value of mode MODE
7291 in a register of class CLASS. */
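/* For instance, a TFmode value (16 bytes) occupies two FP_REGS, and on
   31 bit a DImode value occupies two GENERAL_REGS.  */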
7293 int
7294 s390_class_max_nregs (enum reg_class class, enum machine_mode mode)
7296 switch (class)
7298 case FP_REGS:
7299 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7300 return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
7301 else
7302 return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
7303 case ACCESS_REGS:
7304 return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
7305 default:
7306 break;
7308 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
7311 /* Return true if register FROM can be eliminated via register TO. */
7313 bool
7314 s390_can_eliminate (int from, int to)
7316 /* On zSeries machines, we have not marked the base register as fixed.
7317 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
7318 If a function requires the base register, we say here that this
7319 elimination cannot be performed. This will cause reload to free
7320 up the base register (as if it were fixed). On the other hand,
7321 if the current function does *not* require the base register, we
7322 say here the elimination succeeds, which in turn allows reload
7323 to allocate the base register for any other purpose. */
7324 if (from == BASE_REGNUM && to == BASE_REGNUM)
7326 if (TARGET_CPU_ZARCH)
7328 s390_init_frame_layout ();
7329 return cfun->machine->base_reg == NULL_RTX;
7332 return false;
7335 /* Everything else must point into the stack frame. */
7336 gcc_assert (to == STACK_POINTER_REGNUM
7337 || to == HARD_FRAME_POINTER_REGNUM);
7339 gcc_assert (from == FRAME_POINTER_REGNUM
7340 || from == ARG_POINTER_REGNUM
7341 || from == RETURN_ADDRESS_POINTER_REGNUM);
7343 /* Make sure we actually saved the return address. */
7344 if (from == RETURN_ADDRESS_POINTER_REGNUM)
7345 if (!crtl->calls_eh_return
7346 && !cfun->stdarg
7347 && !cfun_frame_layout.save_return_addr_p)
7348 return false;
7350 return true;
7353 /* Return offset between register FROM and TO initially after prolog. */
7355 HOST_WIDE_INT
7356 s390_initial_elimination_offset (int from, int to)
7358 HOST_WIDE_INT offset;
7359 int index;
7361 /* ??? Why are we called for non-eliminable pairs? */
7362 if (!s390_can_eliminate (from, to))
7363 return 0;
7365 switch (from)
7367 case FRAME_POINTER_REGNUM:
7368 offset = (get_frame_size()
7369 + STACK_POINTER_OFFSET
7370 + crtl->outgoing_args_size);
7371 break;
7373 case ARG_POINTER_REGNUM:
7374 s390_init_frame_layout ();
7375 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
7376 break;
7378 case RETURN_ADDRESS_POINTER_REGNUM:
7379 s390_init_frame_layout ();
7380 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
7381 gcc_assert (index >= 0);
7382 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
7383 offset += index * UNITS_PER_WORD;
7384 break;
7386 case BASE_REGNUM:
7387 offset = 0;
7388 break;
7390 default:
7391 gcc_unreachable ();
7394 return offset;
7397 /* Emit insn to save fpr REGNUM at offset OFFSET relative
7398 to register BASE. Return generated insn. */
7400 static rtx
7401 save_fpr (rtx base, int offset, int regnum)
7403 rtx addr;
7404 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
7406 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
7407 set_mem_alias_set (addr, get_varargs_alias_set ());
7408 else
7409 set_mem_alias_set (addr, get_frame_alias_set ());
7411 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
7414 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
7415 to register BASE. Return generated insn. */
7417 static rtx
7418 restore_fpr (rtx base, int offset, int regnum)
7420 rtx addr;
7421 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
7422 set_mem_alias_set (addr, get_frame_alias_set ());
7424 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
7427 /* Generate insn to save registers FIRST to LAST into
7428 the register save area located at offset OFFSET
7429 relative to register BASE. */
7431 static rtx
7432 save_gprs (rtx base, int offset, int first, int last)
7434 rtx addr, insn, note;
7435 int i;
7437 addr = plus_constant (base, offset);
7438 addr = gen_rtx_MEM (Pmode, addr);
7440 set_mem_alias_set (addr, get_frame_alias_set ());
7442 /* Special-case single register. */
7443 if (first == last)
7445 if (TARGET_64BIT)
7446 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
7447 else
7448 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
7450 RTX_FRAME_RELATED_P (insn) = 1;
7451 return insn;
7455 insn = gen_store_multiple (addr,
7456 gen_rtx_REG (Pmode, first),
7457 GEN_INT (last - first + 1));
7459 if (first <= 6 && cfun->stdarg)
7460 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7462 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
7464 if (first + i <= 6)
7465 set_mem_alias_set (mem, get_varargs_alias_set ());
7468 /* We need to set the FRAME_RELATED flag on all SETs
7469 inside the store-multiple pattern.
7471 However, we must not emit DWARF records for registers 2..5
7472 if they are stored for use by variable arguments ...
7474 ??? Unfortunately, it is not enough to simply not set the
7475 FRAME_RELATED flags for those SETs, because the first SET
7476 of the PARALLEL is always treated as if it had the flag
7477 set, even if it does not. Therefore we emit a new pattern
7478 without those registers as REG_FRAME_RELATED_EXPR note. */
7480 if (first >= 6)
7482 rtx pat = PATTERN (insn);
7484 for (i = 0; i < XVECLEN (pat, 0); i++)
7485 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
7486 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
7488 RTX_FRAME_RELATED_P (insn) = 1;
7490 else if (last >= 6)
7492 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
7493 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
7494 gen_rtx_REG (Pmode, 6),
7495 GEN_INT (last - 6 + 1));
7496 note = PATTERN (note);
7498 REG_NOTES (insn) =
7499 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7500 note, REG_NOTES (insn));
7502 for (i = 0; i < XVECLEN (note, 0); i++)
7503 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
7504 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
7506 RTX_FRAME_RELATED_P (insn) = 1;
7509 return insn;
7512 /* Generate insn to restore registers FIRST to LAST from
7513 the register save area located at offset OFFSET
7514 relative to register BASE. */
7516 static rtx
7517 restore_gprs (rtx base, int offset, int first, int last)
7519 rtx addr, insn;
7521 addr = plus_constant (base, offset);
7522 addr = gen_rtx_MEM (Pmode, addr);
7523 set_mem_alias_set (addr, get_frame_alias_set ());
7525 /* Special-case single register. */
7526 if (first == last)
7528 if (TARGET_64BIT)
7529 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
7530 else
7531 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
7533 return insn;
7536 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
7537 addr,
7538 GEN_INT (last - first + 1));
7539 return insn;
7542 /* Return insn sequence to load the GOT register. */
7544 static GTY(()) rtx got_symbol;
7545 rtx
7546 s390_load_got (void)
7548 rtx insns;
7550 if (!got_symbol)
7552 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7553 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
7556 start_sequence ();
7558 if (TARGET_CPU_ZARCH)
7560 emit_move_insn (pic_offset_table_rtx, got_symbol);
7562 else
7564 rtx offset;
7566 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
7567 UNSPEC_LTREL_OFFSET);
7568 offset = gen_rtx_CONST (Pmode, offset);
7569 offset = force_const_mem (Pmode, offset);
7571 emit_move_insn (pic_offset_table_rtx, offset);
7573 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
7574 UNSPEC_LTREL_BASE);
7575 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
7577 emit_move_insn (pic_offset_table_rtx, offset);
7580 insns = get_insns ();
7581 end_sequence ();
7582 return insns;
7585 /* This ties together stack memory (MEM with an alias set of frame_alias_set)
7586 and the change to the stack pointer. */
7588 static void
7589 s390_emit_stack_tie (void)
7591 rtx mem = gen_frame_mem (BLKmode,
7592 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
7594 emit_insn (gen_stack_tie (mem));
7597 /* Expand the prologue into a bunch of separate insns. */
7599 void
7600 s390_emit_prologue (void)
7602 rtx insn, addr;
7603 rtx temp_reg;
7604 int i;
7605 int offset;
7606 int next_fpr = 0;
7608 /* Complete frame layout. */
7610 s390_update_frame_layout ();
7612 /* Annotate all constant pool references to let the scheduler know
7613 they implicitly use the base register. */
7615 push_topmost_sequence ();
7617 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7618 if (INSN_P (insn))
7620 annotate_constant_pool_refs (&PATTERN (insn));
7621 df_insn_rescan (insn);
7624 pop_topmost_sequence ();
7626 /* Choose the best register to use as a temporary within the prologue.
7627 See below for why TPF must use register 1. */
7629 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
7630 && !current_function_is_leaf
7631 && !TARGET_TPF_PROFILING)
7632 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7633 else
7634 temp_reg = gen_rtx_REG (Pmode, 1);
7636 /* Save call saved gprs. */
7637 if (cfun_frame_layout.first_save_gpr != -1)
7639 insn = save_gprs (stack_pointer_rtx,
7640 cfun_frame_layout.gprs_offset +
7641 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr
7642 - cfun_frame_layout.first_save_gpr_slot),
7643 cfun_frame_layout.first_save_gpr,
7644 cfun_frame_layout.last_save_gpr);
7645 emit_insn (insn);
7648 /* Dummy insn to mark literal pool slot. */
7650 if (cfun->machine->base_reg)
7651 emit_insn (gen_main_pool (cfun->machine->base_reg));
7653 offset = cfun_frame_layout.f0_offset;
7655 /* Save f0 and f2. */
7656 for (i = 0; i < 2; i++)
7658 if (cfun_fpr_bit_p (i))
7660 save_fpr (stack_pointer_rtx, offset, i + 16);
7661 offset += 8;
7663 else if (!TARGET_PACKED_STACK)
7664 offset += 8;
7667 /* Save f4 and f6. */
7668 offset = cfun_frame_layout.f4_offset;
7669 for (i = 2; i < 4; i++)
7671 if (cfun_fpr_bit_p (i))
7673 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7674 offset += 8;
7676 /* If f4 and f6 are call clobbered they are saved due to stdargs and
7677 therefore are not frame related. */
7678 if (!call_really_used_regs[i + 16])
7679 RTX_FRAME_RELATED_P (insn) = 1;
7681 else if (!TARGET_PACKED_STACK)
7682 offset += 8;
7685 if (TARGET_PACKED_STACK
7686 && cfun_save_high_fprs_p
7687 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
7689 offset = (cfun_frame_layout.f8_offset
7690 + (cfun_frame_layout.high_fprs - 1) * 8);
7692 for (i = 15; i > 7 && offset >= 0; i--)
7693 if (cfun_fpr_bit_p (i))
7695 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7697 RTX_FRAME_RELATED_P (insn) = 1;
7698 offset -= 8;
7700 if (offset >= cfun_frame_layout.f8_offset)
7701 next_fpr = i + 16;
7704 if (!TARGET_PACKED_STACK)
7705 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
7707 /* Decrement stack pointer. */
7709 if (cfun_frame_layout.frame_size > 0)
7711 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
7713 if (s390_stack_size)
7715 HOST_WIDE_INT stack_guard;
7717 if (s390_stack_guard)
7718 stack_guard = s390_stack_guard;
7719 else
7721 /* If no value for the stack guard is provided, the smallest power of 2
7722 larger than the current frame size is chosen. */
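/* For example, a frame size of 0x2500 bytes results in a stack
   guard of 0x4000.  */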
7723 stack_guard = 1;
7724 while (stack_guard < cfun_frame_layout.frame_size)
7725 stack_guard <<= 1;
7728 if (cfun_frame_layout.frame_size >= s390_stack_size)
7730 warning (0, "frame size of function %qs is "
7731 HOST_WIDE_INT_PRINT_DEC
7732 " bytes exceeding user provided stack limit of "
7733 HOST_WIDE_INT_PRINT_DEC " bytes. "
7734 "An unconditional trap is added.",
7735 current_function_name(), cfun_frame_layout.frame_size,
7736 s390_stack_size);
7737 emit_insn (gen_trap ());
7739 else
7741 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
7742 & ~(stack_guard - 1));
7743 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
7744 GEN_INT (stack_check_mask));
7745 if (TARGET_64BIT)
7746 gen_cmpdi (t, const0_rtx);
7747 else
7748 gen_cmpsi (t, const0_rtx);
7750 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
7751 gen_rtx_REG (CCmode,
7752 CC_REGNUM),
7753 const0_rtx),
7754 const0_rtx));
7758 if (s390_warn_framesize > 0
7759 && cfun_frame_layout.frame_size >= s390_warn_framesize)
7760 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
7761 current_function_name (), cfun_frame_layout.frame_size);
7763 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
7764 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
7766 /* Save incoming stack pointer into temp reg. */
7767 if (TARGET_BACKCHAIN || next_fpr)
7768 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
7770 /* Subtract frame size from stack pointer. */
7772 if (DISP_IN_RANGE (INTVAL (frame_off)))
7774 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7775 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7776 frame_off));
7777 insn = emit_insn (insn);
7779 else
7781 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7782 frame_off = force_const_mem (Pmode, frame_off);
7784 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
7785 annotate_constant_pool_refs (&PATTERN (insn));
7788 RTX_FRAME_RELATED_P (insn) = 1;
7789 REG_NOTES (insn) =
7790 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7791 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7792 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7793 GEN_INT (-cfun_frame_layout.frame_size))),
7794 REG_NOTES (insn));
7796 /* Set backchain. */
7798 if (TARGET_BACKCHAIN)
7800 if (cfun_frame_layout.backchain_offset)
7801 addr = gen_rtx_MEM (Pmode,
7802 plus_constant (stack_pointer_rtx,
7803 cfun_frame_layout.backchain_offset));
7804 else
7805 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
7806 set_mem_alias_set (addr, get_frame_alias_set ());
7807 insn = emit_insn (gen_move_insn (addr, temp_reg));
7810 /* If we support asynchronous exceptions (e.g. for Java),
7811 we need to make sure the backchain pointer is set up
7812 before any possibly trapping memory access. */
7814 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
7816 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
7817 emit_clobber (addr);
7821 /* Save fprs 8 - 15 (64 bit ABI). */
7823 if (cfun_save_high_fprs_p && next_fpr)
7825 /* If the stack might be accessed through a different register
7826 we have to make sure that the stack pointer decrement is not
7827 moved below the use of the stack slots. */
7828 s390_emit_stack_tie ();
7830 insn = emit_insn (gen_add2_insn (temp_reg,
7831 GEN_INT (cfun_frame_layout.f8_offset)));
7833 offset = 0;
7835 for (i = 24; i <= next_fpr; i++)
7836 if (cfun_fpr_bit_p (i - 16))
7838 rtx addr = plus_constant (stack_pointer_rtx,
7839 cfun_frame_layout.frame_size
7840 + cfun_frame_layout.f8_offset
7841 + offset);
7843 insn = save_fpr (temp_reg, offset, i);
7844 offset += 8;
7845 RTX_FRAME_RELATED_P (insn) = 1;
7846 REG_NOTES (insn) =
7847 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7848 gen_rtx_SET (VOIDmode,
7849 gen_rtx_MEM (DFmode, addr),
7850 gen_rtx_REG (DFmode, i)),
7851 REG_NOTES (insn));
7855 /* Set frame pointer, if needed. */
7857 if (frame_pointer_needed)
7859 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
7860 RTX_FRAME_RELATED_P (insn) = 1;
7863 /* Set up got pointer, if needed. */
7865 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
7867 rtx insns = s390_load_got ();
7869 for (insn = insns; insn; insn = NEXT_INSN (insn))
7870 annotate_constant_pool_refs (&PATTERN (insn));
7872 emit_insn (insns);
7875 if (TARGET_TPF_PROFILING)
7877 /* Generate a BAS instruction to serve as a function
7878 entry intercept to facilitate the use of tracing
7879 algorithms located at the branch target. */
7880 emit_insn (gen_prologue_tpf ());
7882 /* Emit a blockage here so that all code
7883 lies between the profiling mechanisms. */
7884 emit_insn (gen_blockage ());
7888 /* Expand the epilogue into a bunch of separate insns. */
7890 void
7891 s390_emit_epilogue (bool sibcall)
7893 rtx frame_pointer, return_reg;
7894 int area_bottom, area_top, offset = 0;
7895 int next_offset;
7896 rtvec p;
7897 int i;
7899 if (TARGET_TPF_PROFILING)
7902 /* Generate a BAS instruction to serve as a function
7903 entry intercept to facilitate the use of tracing
7904 algorithms located at the branch target. */
7906 /* Emit a blockage here so that all code
7907 lies between the profiling mechanisms. */
7908 emit_insn (gen_blockage ());
7910 emit_insn (gen_epilogue_tpf ());
7913 /* Check whether to use frame or stack pointer for restore. */
7915 frame_pointer = (frame_pointer_needed
7916 ? hard_frame_pointer_rtx : stack_pointer_rtx);
7918 s390_frame_area (&area_bottom, &area_top);
7920 /* Check whether we can access the register save area.
7921 If not, increment the frame pointer as required. */
7923 if (area_top <= area_bottom)
7925 /* Nothing to restore. */
7927 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
7928 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
7930 /* Area is in range. */
7931 offset = cfun_frame_layout.frame_size;
7933 else
7935 rtx insn, frame_off;
7937 offset = area_bottom < 0 ? -area_bottom : 0;
7938 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
7940 if (DISP_IN_RANGE (INTVAL (frame_off)))
7942 insn = gen_rtx_SET (VOIDmode, frame_pointer,
7943 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7944 insn = emit_insn (insn);
7946 else
7948 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7949 frame_off = force_const_mem (Pmode, frame_off);
7951 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
7952 annotate_constant_pool_refs (&PATTERN (insn));
7956 /* Restore call saved fprs. */
7958 if (TARGET_64BIT)
7960 if (cfun_save_high_fprs_p)
7962 next_offset = cfun_frame_layout.f8_offset;
7963 for (i = 24; i < 32; i++)
7965 if (cfun_fpr_bit_p (i - 16))
7967 restore_fpr (frame_pointer,
7968 offset + next_offset, i);
7969 next_offset += 8;
7975 else
7977 next_offset = cfun_frame_layout.f4_offset;
7978 for (i = 18; i < 20; i++)
7980 if (cfun_fpr_bit_p (i - 16))
7982 restore_fpr (frame_pointer,
7983 offset + next_offset, i);
7984 next_offset += 8;
7986 else if (!TARGET_PACKED_STACK)
7987 next_offset += 8;
7992 /* Return register. */
7994 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7996 /* Restore call saved gprs. */
7998 if (cfun_frame_layout.first_restore_gpr != -1)
8000 rtx insn, addr;
8001 int i;
8003 /* Check for global registers and save them
8004 to the stack location from where they get restored. */
8006 for (i = cfun_frame_layout.first_restore_gpr;
8007 i <= cfun_frame_layout.last_restore_gpr;
8008 i++)
8010 /* These registers are special and need to be
8011 restored in any case. */
8012 if (i == STACK_POINTER_REGNUM
8013 || i == RETURN_REGNUM
8014 || i == BASE_REGNUM
8015 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
8016 continue;
8018 if (global_regs[i])
8020 addr = plus_constant (frame_pointer,
8021 offset + cfun_frame_layout.gprs_offset
8022 + (i - cfun_frame_layout.first_save_gpr_slot)
8023 * UNITS_PER_WORD);
8024 addr = gen_rtx_MEM (Pmode, addr);
8025 set_mem_alias_set (addr, get_frame_alias_set ());
8026 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
8030 if (! sibcall)
8032 /* Fetch return address from stack before load multiple;
8033 this is beneficial for scheduling. */
8035 if (cfun_frame_layout.save_return_addr_p
8036 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
8037 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
8039 int return_regnum = find_unused_clobbered_reg();
8040 if (!return_regnum)
8041 return_regnum = 4;
8042 return_reg = gen_rtx_REG (Pmode, return_regnum);
8044 addr = plus_constant (frame_pointer,
8045 offset + cfun_frame_layout.gprs_offset
8046 + (RETURN_REGNUM
8047 - cfun_frame_layout.first_save_gpr_slot)
8048 * UNITS_PER_WORD);
8049 addr = gen_rtx_MEM (Pmode, addr);
8050 set_mem_alias_set (addr, get_frame_alias_set ());
8051 emit_move_insn (return_reg, addr);
8055 insn = restore_gprs (frame_pointer,
8056 offset + cfun_frame_layout.gprs_offset
8057 + (cfun_frame_layout.first_restore_gpr
8058 - cfun_frame_layout.first_save_gpr_slot)
8059 * UNITS_PER_WORD,
8060 cfun_frame_layout.first_restore_gpr,
8061 cfun_frame_layout.last_restore_gpr);
8062 emit_insn (insn);
8065 if (! sibcall)
8068 /* Return to caller. */
8070 p = rtvec_alloc (2);
8072 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
8073 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
8074 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
8079 /* Return the size in bytes of a function argument of
8080 type TYPE and/or mode MODE. At least one of TYPE or
8081 MODE must be specified. */
8083 static int
8084 s390_function_arg_size (enum machine_mode mode, const_tree type)
8086 if (type)
8087 return int_size_in_bytes (type);
8089 /* No type info available for some library calls ... */
8090 if (mode != BLKmode)
8091 return GET_MODE_SIZE (mode);
8093 /* If we have neither type nor mode, abort */
8094 gcc_unreachable ();
8097 /* Return true if a function argument of type TYPE and mode MODE
8098 is to be passed in a floating-point register, if available. */
8100 static bool
8101 s390_function_arg_float (enum machine_mode mode, tree type)
8103 int size = s390_function_arg_size (mode, type);
8104 if (size > 8)
8105 return false;
8107 /* Soft-float changes the ABI: no floating-point registers are used. */
8108 if (TARGET_SOFT_FLOAT)
8109 return false;
8111 /* No type info available for some library calls ... */
8112 if (!type)
8113 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
8115 /* The ABI says that record types with a single member are treated
8116 just like that member would be. */
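/* E.g. `struct { double d; }' is passed like a plain double (in an
   FPR if one is available), whereas `struct { double d; int i; }'
   is not.  */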
8117 while (TREE_CODE (type) == RECORD_TYPE)
8119 tree field, single = NULL_TREE;
8121 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
8123 if (TREE_CODE (field) != FIELD_DECL)
8124 continue;
8126 if (single == NULL_TREE)
8127 single = TREE_TYPE (field);
8128 else
8129 return false;
8132 if (single == NULL_TREE)
8133 return false;
8134 else
8135 type = single;
8138 return TREE_CODE (type) == REAL_TYPE;
8141 /* Return true if a function argument of type TYPE and mode MODE
8142 is to be passed in an integer register, or a pair of integer
8143 registers, if available. */
8145 static bool
8146 s390_function_arg_integer (enum machine_mode mode, tree type)
8148 int size = s390_function_arg_size (mode, type);
8149 if (size > 8)
8150 return false;
8152 /* No type info available for some library calls ... */
8153 if (!type)
8154 return GET_MODE_CLASS (mode) == MODE_INT
8155 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
8157 /* We accept small integral (and similar) types. */
8158 if (INTEGRAL_TYPE_P (type)
8159 || POINTER_TYPE_P (type)
8160 || TREE_CODE (type) == OFFSET_TYPE
8161 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
8162 return true;
8164 /* We also accept structs of size 1, 2, 4, 8 that are not
8165 passed in floating-point registers. */
8166 if (AGGREGATE_TYPE_P (type)
8167 && exact_log2 (size) >= 0
8168 && !s390_function_arg_float (mode, type))
8169 return true;
8171 return false;
8174 /* Return 1 if a function argument of type TYPE and mode MODE
8175 is to be passed by reference. The ABI specifies that only
8176 structures of size 1, 2, 4, or 8 bytes are passed by value,
8177 all other structures (and complex numbers) are passed by
8178 reference. */
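/* E.g. an 8-byte struct is passed by value, while a 12-byte struct,
   a variable-sized aggregate, or any _Complex value is passed by
   reference.  */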
8180 static bool
8181 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
8182 enum machine_mode mode, const_tree type,
8183 bool named ATTRIBUTE_UNUSED)
8185 int size = s390_function_arg_size (mode, type);
8186 if (size > 8)
8187 return true;
8189 if (type)
8191 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
8192 return 1;
8194 if (TREE_CODE (type) == COMPLEX_TYPE
8195 || TREE_CODE (type) == VECTOR_TYPE)
8196 return 1;
8199 return 0;
8202 /* Update the data in CUM to advance over an argument of mode MODE and
8203 data type TYPE. (TYPE is null for libcalls where that information
8204 may not be available.). The boolean NAMED specifies whether the
8205 argument is a named argument (as opposed to an unnamed argument
8206 matching an ellipsis). */
8208 void
8209 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
8210 tree type, int named ATTRIBUTE_UNUSED)
8212 if (s390_function_arg_float (mode, type))
8214 cum->fprs += 1;
8216 else if (s390_function_arg_integer (mode, type))
8218 int size = s390_function_arg_size (mode, type);
8219 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
8221 else
8222 gcc_unreachable ();
8225 /* Define where to put the arguments to a function.
8226 Value is zero to push the argument on the stack,
8227 or a hard register in which to store the argument.
8229 MODE is the argument's machine mode.
8230 TYPE is the data type of the argument (as a tree).
8231 This is null for libcalls where that information may
8232 not be available.
8233 CUM is a variable of type CUMULATIVE_ARGS which gives info about
8234 the preceding args and about the function being called.
8235 NAMED is nonzero if this argument is a named parameter
8236 (otherwise it is an extra parameter matching an ellipsis).
8238 On S/390, we use general purpose registers 2 through 6 to
8239 pass integer, pointer, and certain structure arguments, and
8240 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
8241 to pass floating point arguments. All remaining arguments
8242 are pushed to the stack. */
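/* For example, on 31 bit `void f (int a, double d, long long l)'
   receives A in %r2, D in %f0, and L in the register pair %r3/%r4.  */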
8244 rtx
8245 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
8246 int named ATTRIBUTE_UNUSED)
8248 if (s390_function_arg_float (mode, type))
8250 if (cum->fprs + 1 > FP_ARG_NUM_REG)
8251 return 0;
8252 else
8253 return gen_rtx_REG (mode, cum->fprs + 16);
8255 else if (s390_function_arg_integer (mode, type))
8257 int size = s390_function_arg_size (mode, type);
8258 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
8260 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
8261 return 0;
8262 else
8263 return gen_rtx_REG (mode, cum->gprs + 2);
8266 /* After the real arguments, expand_call calls us once again
8267 with a void_type_node type. Whatever we return here is
8268 passed as operand 2 to the call expanders.
8270 We don't need this feature ... */
8271 else if (type == void_type_node)
8272 return const0_rtx;
8274 gcc_unreachable ();
8277 /* Return true if return values of type TYPE should be returned
8278 in a memory buffer whose address is passed by the caller as
8279 hidden first argument. */
8281 static bool
8282 s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
8284 /* We accept small integral (and similar) types. */
8285 if (INTEGRAL_TYPE_P (type)
8286 || POINTER_TYPE_P (type)
8287 || TREE_CODE (type) == OFFSET_TYPE
8288 || TREE_CODE (type) == REAL_TYPE)
8289 return int_size_in_bytes (type) > 8;
8291 /* Aggregates and similar constructs are always returned
8292 in memory. */
8293 if (AGGREGATE_TYPE_P (type)
8294 || TREE_CODE (type) == COMPLEX_TYPE
8295 || TREE_CODE (type) == VECTOR_TYPE)
8296 return true;
8298 /* ??? We get called on all sorts of random stuff from
8299 aggregate_value_p. We can't abort, but it's not clear
8300 what's safe to return. Pretend it's a struct I guess. */
8301 return true;
8304 /* Define where to return a (scalar) value of type TYPE.
8305 If TYPE is null, define where to return a (scalar)
8306 value of mode MODE from a libcall. */
8308 rtx
8309 s390_function_value (const_tree type, enum machine_mode mode)
8311 if (type)
8313 int unsignedp = TYPE_UNSIGNED (type);
8314 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
8317 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
8318 gcc_assert (GET_MODE_SIZE (mode) <= 8);
8320 if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
8321 return gen_rtx_REG (mode, 16);
8322 else
8323 return gen_rtx_REG (mode, 2);
8327 /* Create and return the va_list datatype.
8329 On S/390, va_list is an array type equivalent to
8331 typedef struct __va_list_tag
8333 long __gpr;
8334 long __fpr;
8335 void *__overflow_arg_area;
8336 void *__reg_save_area;
8337 } va_list[1];
8339 where __gpr and __fpr hold the number of general purpose
8340 or floating point arguments used up to now, respectively,
8341 __overflow_arg_area points to the stack location of the
8342 next argument passed on the stack, and __reg_save_area
8343 always points to the start of the register area in the
8344 call frame of the current function. The function prologue
8345 saves all registers used for argument passing into this
8346 area if the function uses variable arguments. */
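/* The resulting structure occupies 16 bytes on 31 bit and 32 bytes on
   64 bit (two longs plus two pointers).  */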
8348 static tree
8349 s390_build_builtin_va_list (void)
8351 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
8353 record = lang_hooks.types.make_type (RECORD_TYPE);
8355 type_decl =
8356 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
8358 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
8359 long_integer_type_node);
8360 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
8361 long_integer_type_node);
8362 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
8363 ptr_type_node);
8364 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
8365 ptr_type_node);
8367 va_list_gpr_counter_field = f_gpr;
8368 va_list_fpr_counter_field = f_fpr;
8370 DECL_FIELD_CONTEXT (f_gpr) = record;
8371 DECL_FIELD_CONTEXT (f_fpr) = record;
8372 DECL_FIELD_CONTEXT (f_ovf) = record;
8373 DECL_FIELD_CONTEXT (f_sav) = record;
8375 TREE_CHAIN (record) = type_decl;
8376 TYPE_NAME (record) = type_decl;
8377 TYPE_FIELDS (record) = f_gpr;
8378 TREE_CHAIN (f_gpr) = f_fpr;
8379 TREE_CHAIN (f_fpr) = f_ovf;
8380 TREE_CHAIN (f_ovf) = f_sav;
8382 layout_type (record);
8384 /* The correct type is an array type of one element. */
8385 return build_array_type (record, build_index_type (size_zero_node));
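/* Illustrative sketch (standalone, hypothetical names): a plain C mirror
   of the record built above, just to make the layout of the builtin
   va_list visible.  The field types follow the FIELD_DECLs created in
   s390_build_builtin_va_list; the printed offsets are whatever the host
   ABI assigns and are shown only to illustrate the shape.  */

#include <stddef.h>
#include <stdio.h>

struct example_va_list_tag
{
  long  __gpr;                  /* gprs consumed by named arguments */
  long  __fpr;                  /* fprs consumed by named arguments */
  void *__overflow_arg_area;    /* next argument passed on the stack */
  void *__reg_save_area;        /* start of the register save area */
};

typedef struct example_va_list_tag example_va_list[1];  /* array of one element */

int
main (void)
{
  printf ("gpr %zu fpr %zu ovf %zu sav %zu size %zu\n",
          offsetof (struct example_va_list_tag, __gpr),
          offsetof (struct example_va_list_tag, __fpr),
          offsetof (struct example_va_list_tag, __overflow_arg_area),
          offsetof (struct example_va_list_tag, __reg_save_area),
          sizeof (example_va_list));
  return 0;
}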
8388 /* Implement va_start by filling the va_list structure VALIST.
8389 STDARG_P is always true, and ignored.
8390 NEXTARG points to the first anonymous stack argument.
8392 The following global variables are used to initialize
8393 the va_list structure:
8395 crtl->args.info:
8396 holds number of gprs and fprs used for named arguments.
8397 crtl->args.arg_offset_rtx:
8398 holds the offset of the first anonymous stack argument
8399 (relative to the virtual arg pointer). */
8401 static void
8402 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
8404 HOST_WIDE_INT n_gpr, n_fpr;
8405 int off;
8406 tree f_gpr, f_fpr, f_ovf, f_sav;
8407 tree gpr, fpr, ovf, sav, t;
8409 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8410 f_fpr = TREE_CHAIN (f_gpr);
8411 f_ovf = TREE_CHAIN (f_fpr);
8412 f_sav = TREE_CHAIN (f_ovf);
8414 valist = build_va_arg_indirect_ref (valist);
8415 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8416 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8417 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8418 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8420 /* Count number of gp and fp argument registers used. */
8422 n_gpr = crtl->args.info.gprs;
8423 n_fpr = crtl->args.info.fprs;
8425 if (cfun->va_list_gpr_size)
8427 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
8428 build_int_cst (NULL_TREE, n_gpr));
8429 TREE_SIDE_EFFECTS (t) = 1;
8430 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8433 if (cfun->va_list_fpr_size)
8435 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
8436 build_int_cst (NULL_TREE, n_fpr));
8437 TREE_SIDE_EFFECTS (t) = 1;
8438 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8441 /* Find the overflow area. */
8442 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
8443 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
8445 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
8447 off = INTVAL (crtl->args.arg_offset_rtx);
8448 off = off < 0 ? 0 : off;
8449 if (TARGET_DEBUG_ARG)
8450 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
8451 (int)n_gpr, (int)n_fpr, off);
8453 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
8455 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
8456 TREE_SIDE_EFFECTS (t) = 1;
8457 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8460 /* Find the register save area. */
8461 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
8462 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
8464 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
8465 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
8466 size_int (-RETURN_REGNUM * UNITS_PER_WORD));
8468 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
8469 TREE_SIDE_EFFECTS (t) = 1;
8470 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8474 /* Implement va_arg by updating the va_list structure
8475 VALIST as required to retrieve an argument of type
8476 TYPE, and returning that argument.
8478 Generates code equivalent to:
8480 if (integral value) {
8481 if (size <= 4 && args.gpr < 5 ||
8482 size > 4 && args.gpr < 4 )
8483 ret = args.reg_save_area[args.gpr+8]
8484 else
8485 ret = *args.overflow_arg_area++;
8486 } else if (float value) {
8487 if (args.fpr < 2)
8488 ret = args.reg_save_area[args.fpr+64]
8489 else
8490 ret = *args.overflow_arg_area++;
8491 } else if (aggregate value) {
8492 if (args.gpr < 5)
8493 ret = *args.reg_save_area[args.gpr]
8494 else
8495 ret = **args.overflow_arg_area++;
8496 } */
8498 static tree
8499 s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
8500 gimple_seq *post_p ATTRIBUTE_UNUSED)
8502 tree f_gpr, f_fpr, f_ovf, f_sav;
8503 tree gpr, fpr, ovf, sav, reg, t, u;
8504 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
8505 tree lab_false, lab_over, addr;
8507 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8508 f_fpr = TREE_CHAIN (f_gpr);
8509 f_ovf = TREE_CHAIN (f_fpr);
8510 f_sav = TREE_CHAIN (f_ovf);
8512 valist = build_va_arg_indirect_ref (valist);
8513 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8514 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8515 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8517 /* The tree for args* cannot be shared between gpr/fpr and ovf since
8518 both appear on a lhs. */
8519 valist = unshare_expr (valist);
8520 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8522 size = int_size_in_bytes (type);
8524 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
8526 if (TARGET_DEBUG_ARG)
8528 fprintf (stderr, "va_arg: aggregate type");
8529 debug_tree (type);
8532 /* Aggregates are passed by reference. */
8533 indirect_p = 1;
8534 reg = gpr;
8535 n_reg = 1;
8537 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8538 will be added by s390_frame_info because for va_args an even number
8539 of gprs always has to be saved (r15-r2 = 14 regs). */
8540 sav_ofs = 2 * UNITS_PER_WORD;
8541 sav_scale = UNITS_PER_WORD;
8542 size = UNITS_PER_WORD;
8543 max_reg = GP_ARG_NUM_REG - n_reg;
8545 else if (s390_function_arg_float (TYPE_MODE (type), type))
8547 if (TARGET_DEBUG_ARG)
8549 fprintf (stderr, "va_arg: float type");
8550 debug_tree (type);
8553 /* FP args go in FP registers, if present. */
8554 indirect_p = 0;
8555 reg = fpr;
8556 n_reg = 1;
8557 sav_ofs = 16 * UNITS_PER_WORD;
8558 sav_scale = 8;
8559 max_reg = FP_ARG_NUM_REG - n_reg;
8561 else
8563 if (TARGET_DEBUG_ARG)
8565 fprintf (stderr, "va_arg: other type");
8566 debug_tree (type);
8569 /* Otherwise into GP registers. */
8570 indirect_p = 0;
8571 reg = gpr;
8572 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
8574 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8575 will be added by s390_frame_info because for va_args an even number
8576 of gprs always has to be saved (r15-r2 = 14 regs). */
8577 sav_ofs = 2 * UNITS_PER_WORD;
8579 if (size < UNITS_PER_WORD)
8580 sav_ofs += UNITS_PER_WORD - size;
8582 sav_scale = UNITS_PER_WORD;
8583 max_reg = GP_ARG_NUM_REG - n_reg;
8586 /* Pull the value out of the saved registers ... */
8588 lab_false = create_artificial_label ();
8589 lab_over = create_artificial_label ();
8590 addr = create_tmp_var (ptr_type_node, "addr");
8591 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
8593 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
8594 t = build2 (GT_EXPR, boolean_type_node, reg, t);
8595 u = build1 (GOTO_EXPR, void_type_node, lab_false);
8596 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
8597 gimplify_and_add (t, pre_p);
8599 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
8600 size_int (sav_ofs));
8601 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
8602 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
8603 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
8605 gimplify_assign (addr, t, pre_p);
8607 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
8609 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
8612 /* ... Otherwise out of the overflow area. */
8614 t = ovf;
8615 if (size < UNITS_PER_WORD)
8616 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8617 size_int (UNITS_PER_WORD - size));
8619 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
8621 gimplify_assign (addr, t, pre_p);
8623 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8624 size_int (size));
8625 gimplify_assign (ovf, t, pre_p);
8627 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
8630 /* Increment register save count. */
8632 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
8633 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
8634 gimplify_and_add (u, pre_p);
8636 if (indirect_p)
8638 t = build_pointer_type (build_pointer_type (type));
8639 addr = fold_convert (t, addr);
8640 addr = build_va_arg_indirect_ref (addr);
8642 else
8644 t = build_pointer_type (type);
8645 addr = fold_convert (t, addr);
8648 return build_va_arg_indirect_ref (addr);
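/* Illustrative sketch (standalone, hypothetical names): a variadic
   function as the user writes it.  Each va_arg below is what
   s390_gimplify_va_arg expands into the register-save-area versus
   overflow-area selection shown in the pseudocode further up.  */

#include <stdarg.h>

double
example_sum (int count, ...)
{
  va_list ap;
  double total = 0.0;
  int i;

  va_start (ap, count);
  for (i = 0; i < count; i++)
    total += va_arg (ap, int);      /* taken from the gpr save area or the stack */
  for (i = 0; i < count; i++)
    total += va_arg (ap, double);   /* taken from the fpr save area or the stack */
  va_end (ap);
  return total;
}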
8652 /* Builtins. */
8654 enum s390_builtin
8656 S390_BUILTIN_THREAD_POINTER,
8657 S390_BUILTIN_SET_THREAD_POINTER,
8659 S390_BUILTIN_max
8662 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
8663 CODE_FOR_get_tp_64,
8664 CODE_FOR_set_tp_64
8667 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
8668 CODE_FOR_get_tp_31,
8669 CODE_FOR_set_tp_31
8672 static void
8673 s390_init_builtins (void)
8675 tree ftype;
8677 ftype = build_function_type (ptr_type_node, void_list_node);
8678 add_builtin_function ("__builtin_thread_pointer", ftype,
8679 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
8680 NULL, NULL_TREE);
8682 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
8683 add_builtin_function ("__builtin_set_thread_pointer", ftype,
8684 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
8685 NULL, NULL_TREE);
8688 /* Expand an expression EXP that calls a built-in function,
8689 with result going to TARGET if that's convenient
8690 (and in mode MODE if that's convenient).
8691 SUBTARGET may be used as the target for computing one of EXP's operands.
8692 IGNORE is nonzero if the value is to be ignored. */
8694 static rtx
8695 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8696 enum machine_mode mode ATTRIBUTE_UNUSED,
8697 int ignore ATTRIBUTE_UNUSED)
8699 #define MAX_ARGS 2
8701 unsigned int const *code_for_builtin =
8702 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
8704 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8705 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8706 enum insn_code icode;
8707 rtx op[MAX_ARGS], pat;
8708 int arity;
8709 bool nonvoid;
8710 tree arg;
8711 call_expr_arg_iterator iter;
8713 if (fcode >= S390_BUILTIN_max)
8714 internal_error ("bad builtin fcode");
8715 icode = code_for_builtin[fcode];
8716 if (icode == 0)
8717 internal_error ("bad builtin fcode");
8719 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
8721 arity = 0;
8722 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8724 const struct insn_operand_data *insn_op;
8726 if (arg == error_mark_node)
8727 return NULL_RTX;
8728 if (arity >= MAX_ARGS)
8729 return NULL_RTX;
8731 insn_op = &insn_data[icode].operand[arity + nonvoid];
8733 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
8735 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
8736 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
8737 arity++;
8740 if (nonvoid)
8742 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8743 if (!target
8744 || GET_MODE (target) != tmode
8745 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
8746 target = gen_reg_rtx (tmode);
8749 switch (arity)
8751 case 0:
8752 pat = GEN_FCN (icode) (target);
8753 break;
8754 case 1:
8755 if (nonvoid)
8756 pat = GEN_FCN (icode) (target, op[0]);
8757 else
8758 pat = GEN_FCN (icode) (op[0]);
8759 break;
8760 case 2:
8761 pat = GEN_FCN (icode) (target, op[0], op[1]);
8762 break;
8763 default:
8764 gcc_unreachable ();
8766 if (!pat)
8767 return NULL_RTX;
8768 emit_insn (pat);
8770 if (nonvoid)
8771 return target;
8772 else
8773 return const0_rtx;
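/* Illustrative sketch (standalone, only meaningful when compiling for
   s390/s390x with GCC, hypothetical function names): user-level view of
   the two target builtins registered in s390_init_builtins.  */

void *
example_get_tp (void)
{
  return __builtin_thread_pointer ();    /* expands via CODE_FOR_get_tp_{31,64} */
}

void
example_set_tp (void *tp)
{
  __builtin_set_thread_pointer (tp);     /* expands via CODE_FOR_set_tp_{31,64} */
}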
8777 /* Output assembly code for the trampoline template to
8778 stdio stream FILE.
8780 On S/390, we use gpr 1 internally in the trampoline code;
8781 gpr 0 is used to hold the static chain. */
8783 void
8784 s390_trampoline_template (FILE *file)
8786 rtx op[2];
8787 op[0] = gen_rtx_REG (Pmode, 0);
8788 op[1] = gen_rtx_REG (Pmode, 1);
8790 if (TARGET_64BIT)
8792 output_asm_insn ("basr\t%1,0", op);
8793 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
8794 output_asm_insn ("br\t%1", op);
8795 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
8797 else
8799 output_asm_insn ("basr\t%1,0", op);
8800 output_asm_insn ("lm\t%0,%1,6(%1)", op);
8801 output_asm_insn ("br\t%1", op);
8802 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
8806 /* Emit RTL insns to initialize the variable parts of a trampoline.
8807 FNADDR is an RTX for the address of the function's pure code.
8808 CXT is an RTX for the static chain value for the function. */
8810 void
8811 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
8813 emit_move_insn (gen_rtx_MEM (Pmode,
8814 memory_address (Pmode,
8815 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
8816 emit_move_insn (gen_rtx_MEM (Pmode,
8817 memory_address (Pmode,
8818 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
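/* Illustrative sketch (standalone, hypothetical names, uses the GNU C
   nested-function extension): taking the address of a nested function
   makes GCC build a trampoline on the stack; s390_initialize_trampoline
   fills in the static chain value and the function address at run time,
   and the template above loads them through gpr 1 before branching.  */

extern void example_apply (int (*fn) (int));

void
example_trampoline_user (int bias)
{
  int example_add_bias (int x) { return x + bias; } /* needs the static chain (gpr 0) */

  example_apply (example_add_bias);     /* escaping address => trampoline */
}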
8821 /* Output assembler code to FILE to increment profiler label # LABELNO
8822 for profiling a function entry. */
8824 void
8825 s390_function_profiler (FILE *file, int labelno)
8827 rtx op[7];
8829 char label[128];
8830 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
8832 fprintf (file, "# function profiler \n");
8834 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
8835 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8836 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
8838 op[2] = gen_rtx_REG (Pmode, 1);
8839 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
8840 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
8842 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
8843 if (flag_pic)
8845 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
8846 op[4] = gen_rtx_CONST (Pmode, op[4]);
8849 if (TARGET_64BIT)
8851 output_asm_insn ("stg\t%0,%1", op);
8852 output_asm_insn ("larl\t%2,%3", op);
8853 output_asm_insn ("brasl\t%0,%4", op);
8854 output_asm_insn ("lg\t%0,%1", op);
8856 else if (!flag_pic)
8858 op[6] = gen_label_rtx ();
8860 output_asm_insn ("st\t%0,%1", op);
8861 output_asm_insn ("bras\t%2,%l6", op);
8862 output_asm_insn (".long\t%4", op);
8863 output_asm_insn (".long\t%3", op);
8864 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8865 output_asm_insn ("l\t%0,0(%2)", op);
8866 output_asm_insn ("l\t%2,4(%2)", op);
8867 output_asm_insn ("basr\t%0,%0", op);
8868 output_asm_insn ("l\t%0,%1", op);
8870 else
8872 op[5] = gen_label_rtx ();
8873 op[6] = gen_label_rtx ();
8875 output_asm_insn ("st\t%0,%1", op);
8876 output_asm_insn ("bras\t%2,%l6", op);
8877 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
8878 output_asm_insn (".long\t%4-%l5", op);
8879 output_asm_insn (".long\t%3-%l5", op);
8880 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8881 output_asm_insn ("lr\t%0,%2", op);
8882 output_asm_insn ("a\t%0,0(%2)", op);
8883 output_asm_insn ("a\t%2,4(%2)", op);
8884 output_asm_insn ("basr\t%0,%0", op);
8885 output_asm_insn ("l\t%0,%1", op);
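/* Illustrative note (standalone, hypothetical name): no source changes
   are needed for profiling; compiling a translation unit with -pg makes
   every function entry, including the one below, emit the _mcount call
   sequence produced above.  */

int
example_profiled (int x)
{
  return x * 2;
}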
8889 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
8890 into its SYMBOL_REF_FLAGS. */
8892 static void
8893 s390_encode_section_info (tree decl, rtx rtl, int first)
8895 default_encode_section_info (decl, rtl, first);
8897 if (TREE_CODE (decl) == VAR_DECL)
8899 /* If a variable has a forced alignment to < 2 bytes, mark it
8900 with SYMBOL_FLAG_ALIGN1 to prevent it from being used as a LARL
8901 operand. */
8902 if (DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
8903 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
8904 if (!DECL_SIZE (decl)
8905 || !DECL_ALIGN (decl)
8906 || !host_integerp (DECL_SIZE (decl), 0)
8907 || (DECL_ALIGN (decl) <= 64
8908 && DECL_ALIGN (decl) != tree_low_cst (DECL_SIZE (decl), 0)))
8909 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
8912 /* Literal pool references don't have a decl so they are handled
8913 differently here. We rely on the information in the MEM_ALIGN
8914 entry to decide upon natural alignment. */
8915 if (MEM_P (rtl)
8916 && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
8917 && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (rtl, 0))
8918 && (MEM_ALIGN (rtl) == 0
8919 || MEM_ALIGN (rtl) < GET_MODE_BITSIZE (GET_MODE (rtl))))
8920 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
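/* Illustrative sketch (standalone, hypothetical names) of declarations
   affected by the checks above: a user-forced alignment below 2 bytes
   yields SYMBOL_FLAG_ALIGN1 (the symbol must not become a LARL operand),
   and an object whose size differs from its alignment is tagged
   SYMBOL_FLAG_NOT_NATURALLY_ALIGNED.  */

char example_byte __attribute__ ((aligned (1)));  /* user align < 2 bytes */
int  example_word;                                /* size == alignment: naturally aligned */
char example_bytes[3];                            /* size != alignment: not naturally aligned */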
8923 /* Output thunk to FILE that implements a C++ virtual function call (with
8924 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
8925 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
8926 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
8927 relative to the resulting this pointer. */
8929 static void
8930 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
8931 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8932 tree function)
8934 rtx op[10];
8935 int nonlocal = 0;
8937 /* Operand 0 is the target function. */
8938 op[0] = XEXP (DECL_RTL (function), 0);
8939 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
8941 nonlocal = 1;
8942 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
8943 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
8944 op[0] = gen_rtx_CONST (Pmode, op[0]);
8947 /* Operand 1 is the 'this' pointer. */
8948 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8949 op[1] = gen_rtx_REG (Pmode, 3);
8950 else
8951 op[1] = gen_rtx_REG (Pmode, 2);
8953 /* Operand 2 is the delta. */
8954 op[2] = GEN_INT (delta);
8956 /* Operand 3 is the vcall_offset. */
8957 op[3] = GEN_INT (vcall_offset);
8959 /* Operand 4 is the temporary register. */
8960 op[4] = gen_rtx_REG (Pmode, 1);
8962 /* Operands 5 to 8 can be used as labels. */
8963 op[5] = NULL_RTX;
8964 op[6] = NULL_RTX;
8965 op[7] = NULL_RTX;
8966 op[8] = NULL_RTX;
8968 /* Operand 9 can be used for temporary register. */
8969 op[9] = NULL_RTX;
8971 /* Generate code. */
8972 if (TARGET_64BIT)
8974 /* Set up the literal pool pointer if required. */
8975 if ((!DISP_IN_RANGE (delta)
8976 && !CONST_OK_FOR_K (delta)
8977 && !CONST_OK_FOR_Os (delta))
8978 || (!DISP_IN_RANGE (vcall_offset)
8979 && !CONST_OK_FOR_K (vcall_offset)
8980 && !CONST_OK_FOR_Os (vcall_offset)))
8982 op[5] = gen_label_rtx ();
8983 output_asm_insn ("larl\t%4,%5", op);
8986 /* Add DELTA to this pointer. */
8987 if (delta)
8989 if (CONST_OK_FOR_J (delta))
8990 output_asm_insn ("la\t%1,%2(%1)", op);
8991 else if (DISP_IN_RANGE (delta))
8992 output_asm_insn ("lay\t%1,%2(%1)", op);
8993 else if (CONST_OK_FOR_K (delta))
8994 output_asm_insn ("aghi\t%1,%2", op);
8995 else if (CONST_OK_FOR_Os (delta))
8996 output_asm_insn ("agfi\t%1,%2", op);
8997 else
8999 op[6] = gen_label_rtx ();
9000 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
9004 /* Perform vcall adjustment. */
9005 if (vcall_offset)
9007 if (DISP_IN_RANGE (vcall_offset))
9009 output_asm_insn ("lg\t%4,0(%1)", op);
9010 output_asm_insn ("ag\t%1,%3(%4)", op);
9012 else if (CONST_OK_FOR_K (vcall_offset))
9014 output_asm_insn ("lghi\t%4,%3", op);
9015 output_asm_insn ("ag\t%4,0(%1)", op);
9016 output_asm_insn ("ag\t%1,0(%4)", op);
9018 else if (CONST_OK_FOR_Os (vcall_offset))
9020 output_asm_insn ("lgfi\t%4,%3", op);
9021 output_asm_insn ("ag\t%4,0(%1)", op);
9022 output_asm_insn ("ag\t%1,0(%4)", op);
9024 else
9026 op[7] = gen_label_rtx ();
9027 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
9028 output_asm_insn ("ag\t%4,0(%1)", op);
9029 output_asm_insn ("ag\t%1,0(%4)", op);
9033 /* Jump to target. */
9034 output_asm_insn ("jg\t%0", op);
9036 /* Output literal pool if required. */
9037 if (op[5])
9039 output_asm_insn (".align\t4", op);
9040 targetm.asm_out.internal_label (file, "L",
9041 CODE_LABEL_NUMBER (op[5]));
9043 if (op[6])
9045 targetm.asm_out.internal_label (file, "L",
9046 CODE_LABEL_NUMBER (op[6]));
9047 output_asm_insn (".long\t%2", op);
9049 if (op[7])
9051 targetm.asm_out.internal_label (file, "L",
9052 CODE_LABEL_NUMBER (op[7]));
9053 output_asm_insn (".long\t%3", op);
9056 else
9058 /* Set up the base pointer if required. */
9059 if (!vcall_offset
9060 || (!DISP_IN_RANGE (delta)
9061 && !CONST_OK_FOR_K (delta)
9062 && !CONST_OK_FOR_Os (delta))
9063 || (!DISP_IN_RANGE (vcall_offset)
9064 && !CONST_OK_FOR_K (vcall_offset)
9065 && !CONST_OK_FOR_Os (vcall_offset)))
9067 op[5] = gen_label_rtx ();
9068 output_asm_insn ("basr\t%4,0", op);
9069 targetm.asm_out.internal_label (file, "L",
9070 CODE_LABEL_NUMBER (op[5]));
9073 /* Add DELTA to this pointer. */
9074 if (delta)
9076 if (CONST_OK_FOR_J (delta))
9077 output_asm_insn ("la\t%1,%2(%1)", op);
9078 else if (DISP_IN_RANGE (delta))
9079 output_asm_insn ("lay\t%1,%2(%1)", op);
9080 else if (CONST_OK_FOR_K (delta))
9081 output_asm_insn ("ahi\t%1,%2", op);
9082 else if (CONST_OK_FOR_Os (delta))
9083 output_asm_insn ("afi\t%1,%2", op);
9084 else
9086 op[6] = gen_label_rtx ();
9087 output_asm_insn ("a\t%1,%6-%5(%4)", op);
9091 /* Perform vcall adjustment. */
9092 if (vcall_offset)
9094 if (CONST_OK_FOR_J (vcall_offset))
9096 output_asm_insn ("l\t%4,0(%1)", op);
9097 output_asm_insn ("a\t%1,%3(%4)", op);
9099 else if (DISP_IN_RANGE (vcall_offset))
9101 output_asm_insn ("l\t%4,0(%1)", op);
9102 output_asm_insn ("ay\t%1,%3(%4)", op);
9104 else if (CONST_OK_FOR_K (vcall_offset))
9106 output_asm_insn ("lhi\t%4,%3", op);
9107 output_asm_insn ("a\t%4,0(%1)", op);
9108 output_asm_insn ("a\t%1,0(%4)", op);
9110 else if (CONST_OK_FOR_Os (vcall_offset))
9112 output_asm_insn ("iilf\t%4,%3", op);
9113 output_asm_insn ("a\t%4,0(%1)", op);
9114 output_asm_insn ("a\t%1,0(%4)", op);
9116 else
9118 op[7] = gen_label_rtx ();
9119 output_asm_insn ("l\t%4,%7-%5(%4)", op);
9120 output_asm_insn ("a\t%4,0(%1)", op);
9121 output_asm_insn ("a\t%1,0(%4)", op);
9124 /* We had to clobber the base pointer register.
9125 Set up the base pointer again (with a different base). */
9126 op[5] = gen_label_rtx ();
9127 output_asm_insn ("basr\t%4,0", op);
9128 targetm.asm_out.internal_label (file, "L",
9129 CODE_LABEL_NUMBER (op[5]));
9132 /* Jump to target. */
9133 op[8] = gen_label_rtx ();
9135 if (!flag_pic)
9136 output_asm_insn ("l\t%4,%8-%5(%4)", op);
9137 else if (!nonlocal)
9138 output_asm_insn ("a\t%4,%8-%5(%4)", op);
9139 /* We cannot call through .plt, since .plt requires %r12 loaded. */
9140 else if (flag_pic == 1)
9142 output_asm_insn ("a\t%4,%8-%5(%4)", op);
9143 output_asm_insn ("l\t%4,%0(%4)", op);
9145 else if (flag_pic == 2)
9147 op[9] = gen_rtx_REG (Pmode, 0);
9148 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
9149 output_asm_insn ("a\t%4,%8-%5(%4)", op);
9150 output_asm_insn ("ar\t%4,%9", op);
9151 output_asm_insn ("l\t%4,0(%4)", op);
9154 output_asm_insn ("br\t%4", op);
9156 /* Output literal pool. */
9157 output_asm_insn (".align\t4", op);
9159 if (nonlocal && flag_pic == 2)
9160 output_asm_insn (".long\t%0", op);
9161 if (nonlocal)
9163 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
9164 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
9167 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
9168 if (!flag_pic)
9169 output_asm_insn (".long\t%0", op);
9170 else
9171 output_asm_insn (".long\t%0-%5", op);
9173 if (op[6])
9175 targetm.asm_out.internal_label (file, "L",
9176 CODE_LABEL_NUMBER (op[6]));
9177 output_asm_insn (".long\t%2", op);
9179 if (op[7])
9181 targetm.asm_out.internal_label (file, "L",
9182 CODE_LABEL_NUMBER (op[7]));
9183 output_asm_insn (".long\t%3", op);
9188 static bool
9189 s390_valid_pointer_mode (enum machine_mode mode)
9191 return (mode == SImode || (TARGET_64BIT && mode == DImode));
9194 /* Checks whether the given CALL_EXPR would use a caller
9195 saved register. This is used to decide whether sibling call
9196 optimization could be performed on the respective function
9197 call. */
9199 static bool
9200 s390_call_saved_register_used (tree call_expr)
9202 CUMULATIVE_ARGS cum;
9203 tree parameter;
9204 enum machine_mode mode;
9205 tree type;
9206 rtx parm_rtx;
9207 int reg, i;
9209 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
9211 for (i = 0; i < call_expr_nargs (call_expr); i++)
9213 parameter = CALL_EXPR_ARG (call_expr, i);
9214 gcc_assert (parameter);
9216 /* For an undeclared variable passed as parameter we will get
9217 an ERROR_MARK node here. */
9218 if (TREE_CODE (parameter) == ERROR_MARK)
9219 return true;
9221 type = TREE_TYPE (parameter);
9222 gcc_assert (type);
9224 mode = TYPE_MODE (type);
9225 gcc_assert (mode);
9227 if (pass_by_reference (&cum, mode, type, true))
9229 mode = Pmode;
9230 type = build_pointer_type (type);
9233 parm_rtx = s390_function_arg (&cum, mode, type, 0);
9235 s390_function_arg_advance (&cum, mode, type, 0);
9237 if (parm_rtx && REG_P (parm_rtx))
9239 for (reg = 0;
9240 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
9241 reg++)
9242 if (! call_used_regs[reg + REGNO (parm_rtx)])
9243 return true;
9246 return false;
9249 /* Return true if the given call expression can be
9250 turned into a sibling call.
9251 DECL holds the declaration of the function to be called whereas
9252 EXP is the call expression itself. */
9254 static bool
9255 s390_function_ok_for_sibcall (tree decl, tree exp)
9257 /* The TPF epilogue uses register 1. */
9258 if (TARGET_TPF_PROFILING)
9259 return false;
9261 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
9262 which would have to be restored before the sibcall. */
9263 if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
9264 return false;
9266 /* Register 6 on s390 is available as an argument register but is
9267 call-saved (the callee must preserve it). This makes functions needing
9268 this register for arguments not suitable for sibcalls. */
9269 return !s390_call_saved_register_used (exp);
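/* Illustrative sketch (standalone, hypothetical names): with gprs 2-6
   available for arguments and gpr 6 being call-saved, a fifth word-sized
   argument is what typically blocks the optimization.  */

extern int example_four (int, int, int, int);
extern int example_five (int, int, int, int, int);

int
example_tail_ok (int a, int b, int c, int d)
{
  return example_four (a, b, c, d);      /* args in gprs 2-5: sibcall possible */
}

int
example_tail_blocked (int a, int b, int c, int d, int e)
{
  return example_five (a, b, c, d, e);   /* fifth arg needs gpr 6 (call-saved):
                                            s390_call_saved_register_used returns
                                            true, so no sibcall */
}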
9272 /* Return the fixed registers used for condition codes. */
9274 static bool
9275 s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
9277 *p1 = CC_REGNUM;
9278 *p2 = INVALID_REGNUM;
9280 return true;
9283 /* This function is used by the call expanders of the machine description.
9284 It emits the call insn itself together with the necessary operations
9285 to adjust the target address and returns the emitted insn.
9286 ADDR_LOCATION is the target address rtx
9287 TLS_CALL the location of the thread-local symbol
9288 RESULT_REG the register where the result of the call should be stored
9289 RETADDR_REG the register where the return address should be stored
9290 If this parameter is NULL_RTX the call is considered
9291 to be a sibling call. */
9294 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
9295 rtx retaddr_reg)
9297 bool plt_call = false;
9298 rtx insn;
9299 rtx call;
9300 rtx clobber;
9301 rtvec vec;
9303 /* Direct function calls need special treatment. */
9304 if (GET_CODE (addr_location) == SYMBOL_REF)
9306 /* When calling a global routine in PIC mode, we must
9307 replace the symbol itself with the PLT stub. */
9308 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
9310 addr_location = gen_rtx_UNSPEC (Pmode,
9311 gen_rtvec (1, addr_location),
9312 UNSPEC_PLT);
9313 addr_location = gen_rtx_CONST (Pmode, addr_location);
9314 plt_call = true;
9317 /* Unless we can use the bras(l) insn, force the
9318 routine address into a register. */
9319 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
9321 if (flag_pic)
9322 addr_location = legitimize_pic_address (addr_location, 0);
9323 else
9324 addr_location = force_reg (Pmode, addr_location);
9328 /* If it is already an indirect call or the code above moved the
9329 SYMBOL_REF to somewhere else, make sure the address can be found in
9330 register 1. */
9331 if (retaddr_reg == NULL_RTX
9332 && GET_CODE (addr_location) != SYMBOL_REF
9333 && !plt_call)
9335 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
9336 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
9339 addr_location = gen_rtx_MEM (QImode, addr_location);
9340 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
9342 if (result_reg != NULL_RTX)
9343 call = gen_rtx_SET (VOIDmode, result_reg, call);
9345 if (retaddr_reg != NULL_RTX)
9347 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
9349 if (tls_call != NULL_RTX)
9350 vec = gen_rtvec (3, call, clobber,
9351 gen_rtx_USE (VOIDmode, tls_call));
9352 else
9353 vec = gen_rtvec (2, call, clobber);
9355 call = gen_rtx_PARALLEL (VOIDmode, vec);
9358 insn = emit_call_insn (call);
9360 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
9361 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
9363 /* s390_function_ok_for_sibcall should
9364 have denied sibcalls in this case. */
9365 gcc_assert (retaddr_reg != NULL_RTX);
9367 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
9369 return insn;
9372 /* Implement CONDITIONAL_REGISTER_USAGE. */
9374 void
9375 s390_conditional_register_usage (void)
9377 int i;
9379 if (flag_pic)
9381 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
9382 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
9384 if (TARGET_CPU_ZARCH)
9386 fixed_regs[BASE_REGNUM] = 0;
9387 call_used_regs[BASE_REGNUM] = 0;
9388 fixed_regs[RETURN_REGNUM] = 0;
9389 call_used_regs[RETURN_REGNUM] = 0;
9391 if (TARGET_64BIT)
9393 for (i = 24; i < 32; i++)
9394 call_used_regs[i] = call_really_used_regs[i] = 0;
9396 else
9398 for (i = 18; i < 20; i++)
9399 call_used_regs[i] = call_really_used_regs[i] = 0;
9402 if (TARGET_SOFT_FLOAT)
9404 for (i = 16; i < 32; i++)
9405 call_used_regs[i] = fixed_regs[i] = 1;
9409 /* Corresponding function to eh_return expander. */
9411 static GTY(()) rtx s390_tpf_eh_return_symbol;
9412 void
9413 s390_emit_tpf_eh_return (rtx target)
9415 rtx insn, reg;
9417 if (!s390_tpf_eh_return_symbol)
9418 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
9420 reg = gen_rtx_REG (Pmode, 2);
9422 emit_move_insn (reg, target);
9423 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
9424 gen_rtx_REG (Pmode, RETURN_REGNUM));
9425 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
9427 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
9430 /* Rework the prologue/epilogue to avoid saving/restoring
9431 registers unnecessarily. */
9433 static void
9434 s390_optimize_prologue (void)
9436 rtx insn, new_insn, next_insn;
9438 /* Do a final recompute of the frame-related data. */
9440 s390_update_frame_layout ();
9442 /* If all special registers are in fact used, there's nothing we
9443 can do, so no point in walking the insn list. */
9445 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
9446 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
9447 && (TARGET_CPU_ZARCH
9448 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
9449 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
9450 return;
9452 /* Search for prologue/epilogue insns and replace them. */
9454 for (insn = get_insns (); insn; insn = next_insn)
9456 int first, last, off;
9457 rtx set, base, offset;
9459 next_insn = NEXT_INSN (insn);
9461 if (GET_CODE (insn) != INSN)
9462 continue;
9464 if (GET_CODE (PATTERN (insn)) == PARALLEL
9465 && store_multiple_operation (PATTERN (insn), VOIDmode))
9467 set = XVECEXP (PATTERN (insn), 0, 0);
9468 first = REGNO (SET_SRC (set));
9469 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9470 offset = const0_rtx;
9471 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9472 off = INTVAL (offset);
9474 if (GET_CODE (base) != REG || off < 0)
9475 continue;
9476 if (cfun_frame_layout.first_save_gpr != -1
9477 && (cfun_frame_layout.first_save_gpr < first
9478 || cfun_frame_layout.last_save_gpr > last))
9479 continue;
9480 if (REGNO (base) != STACK_POINTER_REGNUM
9481 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9482 continue;
9483 if (first > BASE_REGNUM || last < BASE_REGNUM)
9484 continue;
9486 if (cfun_frame_layout.first_save_gpr != -1)
9488 new_insn = save_gprs (base,
9489 off + (cfun_frame_layout.first_save_gpr
9490 - first) * UNITS_PER_WORD,
9491 cfun_frame_layout.first_save_gpr,
9492 cfun_frame_layout.last_save_gpr);
9493 new_insn = emit_insn_before (new_insn, insn);
9494 INSN_ADDRESSES_NEW (new_insn, -1);
9497 remove_insn (insn);
9498 continue;
9501 if (cfun_frame_layout.first_save_gpr == -1
9502 && GET_CODE (PATTERN (insn)) == SET
9503 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
9504 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
9505 || (!TARGET_CPU_ZARCH
9506 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
9507 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
9509 set = PATTERN (insn);
9510 first = REGNO (SET_SRC (set));
9511 offset = const0_rtx;
9512 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9513 off = INTVAL (offset);
9515 if (GET_CODE (base) != REG || off < 0)
9516 continue;
9517 if (REGNO (base) != STACK_POINTER_REGNUM
9518 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9519 continue;
9521 remove_insn (insn);
9522 continue;
9525 if (GET_CODE (PATTERN (insn)) == PARALLEL
9526 && load_multiple_operation (PATTERN (insn), VOIDmode))
9528 set = XVECEXP (PATTERN (insn), 0, 0);
9529 first = REGNO (SET_DEST (set));
9530 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9531 offset = const0_rtx;
9532 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9533 off = INTVAL (offset);
9535 if (GET_CODE (base) != REG || off < 0)
9536 continue;
9537 if (cfun_frame_layout.first_restore_gpr != -1
9538 && (cfun_frame_layout.first_restore_gpr < first
9539 || cfun_frame_layout.last_restore_gpr > last))
9540 continue;
9541 if (REGNO (base) != STACK_POINTER_REGNUM
9542 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9543 continue;
9544 if (first > BASE_REGNUM || last < BASE_REGNUM)
9545 continue;
9547 if (cfun_frame_layout.first_restore_gpr != -1)
9549 new_insn = restore_gprs (base,
9550 off + (cfun_frame_layout.first_restore_gpr
9551 - first) * UNITS_PER_WORD,
9552 cfun_frame_layout.first_restore_gpr,
9553 cfun_frame_layout.last_restore_gpr);
9554 new_insn = emit_insn_before (new_insn, insn);
9555 INSN_ADDRESSES_NEW (new_insn, -1);
9558 remove_insn (insn);
9559 continue;
9562 if (cfun_frame_layout.first_restore_gpr == -1
9563 && GET_CODE (PATTERN (insn)) == SET
9564 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
9565 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
9566 || (!TARGET_CPU_ZARCH
9567 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
9568 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
9570 set = PATTERN (insn);
9571 first = REGNO (SET_DEST (set));
9572 offset = const0_rtx;
9573 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9574 off = INTVAL (offset);
9576 if (GET_CODE (base) != REG || off < 0)
9577 continue;
9578 if (REGNO (base) != STACK_POINTER_REGNUM
9579 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9580 continue;
9582 remove_insn (insn);
9583 continue;
9588 /* Perform machine-dependent processing. */
9590 static void
9591 s390_reorg (void)
9593 bool pool_overflow = false;
9595 /* Make sure all splits have been performed; splits after
9596 machine_dependent_reorg might confuse insn length counts. */
9597 split_all_insns_noflow ();
9599 /* From here on decomposed literal pool addresses must be accepted. */
9600 cfun->machine->decomposed_literal_pool_addresses_ok_p = true;
9602 /* Install the main literal pool and the associated base
9603 register load insns.
9605 In addition, there are two problematic situations we need
9606 to correct:
9608 - the literal pool might be > 4096 bytes in size, so that
9609 some of its elements cannot be directly accessed
9611 - a branch target might be > 64K away from the branch, so that
9612 it is not possible to use a PC-relative instruction.
9614 To fix those, we split the single literal pool into multiple
9615 pool chunks, reloading the pool base register at various
9616 points throughout the function to ensure it always points to
9617 the pool chunk the following code expects, and / or replace
9618 PC-relative branches by absolute branches.
9620 However, the two problems are interdependent: splitting the
9621 literal pool can move a branch further away from its target,
9622 causing the 64K limit to overflow, and on the other hand,
9623 replacing a PC-relative branch by an absolute branch means
9624 we need to put the branch target address into the literal
9625 pool, possibly causing it to overflow.
9627 So, we loop trying to fix up both problems until we manage
9628 to satisfy both conditions at the same time. Note that the
9629 loop is guaranteed to terminate as every pass of the loop
9630 strictly decreases the total number of PC-relative branches
9631 in the function. (This is not completely true as there
9632 might be branch-over-pool insns introduced by chunkify_start.
9633 Those never need to be split however.) */
9635 for (;;)
9637 struct constant_pool *pool = NULL;
9639 /* Collect the literal pool. */
9640 if (!pool_overflow)
9642 pool = s390_mainpool_start ();
9643 if (!pool)
9644 pool_overflow = true;
9647 /* If literal pool overflowed, start to chunkify it. */
9648 if (pool_overflow)
9649 pool = s390_chunkify_start ();
9651 /* Split out-of-range branches. If this has created new
9652 literal pool entries, cancel current chunk list and
9653 recompute it. zSeries machines have large branch
9654 instructions, so we never need to split a branch. */
9655 if (!TARGET_CPU_ZARCH && s390_split_branches ())
9657 if (pool_overflow)
9658 s390_chunkify_cancel (pool);
9659 else
9660 s390_mainpool_cancel (pool);
9662 continue;
9665 /* If we made it up to here, both conditions are satisfied.
9666 Finish up literal pool related changes. */
9667 if (pool_overflow)
9668 s390_chunkify_finish (pool);
9669 else
9670 s390_mainpool_finish (pool);
9672 /* We're done splitting branches. */
9673 cfun->machine->split_branches_pending_p = false;
9674 break;
9677 /* Generate out-of-pool execute target insns. */
9678 if (TARGET_CPU_ZARCH)
9680 rtx insn, label, target;
9682 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9684 label = s390_execute_label (insn);
9685 if (!label)
9686 continue;
9688 gcc_assert (label != const0_rtx);
9690 target = emit_label (XEXP (label, 0));
9691 INSN_ADDRESSES_NEW (target, -1);
9693 target = emit_insn (s390_execute_target (insn));
9694 INSN_ADDRESSES_NEW (target, -1);
9698 /* Try to optimize prologue and epilogue further. */
9699 s390_optimize_prologue ();
9703 /* Initialize GCC target structure. */
9705 #undef TARGET_ASM_ALIGNED_HI_OP
9706 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
9707 #undef TARGET_ASM_ALIGNED_DI_OP
9708 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
9709 #undef TARGET_ASM_INTEGER
9710 #define TARGET_ASM_INTEGER s390_assemble_integer
9712 #undef TARGET_ASM_OPEN_PAREN
9713 #define TARGET_ASM_OPEN_PAREN ""
9715 #undef TARGET_ASM_CLOSE_PAREN
9716 #define TARGET_ASM_CLOSE_PAREN ""
9718 #undef TARGET_DEFAULT_TARGET_FLAGS
9719 #define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
9720 #undef TARGET_HANDLE_OPTION
9721 #define TARGET_HANDLE_OPTION s390_handle_option
9723 #undef TARGET_ENCODE_SECTION_INFO
9724 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
9726 #ifdef HAVE_AS_TLS
9727 #undef TARGET_HAVE_TLS
9728 #define TARGET_HAVE_TLS true
9729 #endif
9730 #undef TARGET_CANNOT_FORCE_CONST_MEM
9731 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
9733 #undef TARGET_DELEGITIMIZE_ADDRESS
9734 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
9736 #undef TARGET_RETURN_IN_MEMORY
9737 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
9739 #undef TARGET_INIT_BUILTINS
9740 #define TARGET_INIT_BUILTINS s390_init_builtins
9741 #undef TARGET_EXPAND_BUILTIN
9742 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
9744 #undef TARGET_ASM_OUTPUT_MI_THUNK
9745 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
9746 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
9747 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
9749 #undef TARGET_SCHED_ADJUST_PRIORITY
9750 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
9751 #undef TARGET_SCHED_ISSUE_RATE
9752 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
9753 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
9754 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
9756 #undef TARGET_CANNOT_COPY_INSN_P
9757 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
9758 #undef TARGET_RTX_COSTS
9759 #define TARGET_RTX_COSTS s390_rtx_costs
9760 #undef TARGET_ADDRESS_COST
9761 #define TARGET_ADDRESS_COST s390_address_cost
9763 #undef TARGET_MACHINE_DEPENDENT_REORG
9764 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
9766 #undef TARGET_VALID_POINTER_MODE
9767 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
9769 #undef TARGET_BUILD_BUILTIN_VA_LIST
9770 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
9771 #undef TARGET_EXPAND_BUILTIN_VA_START
9772 #define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
9773 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
9774 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
9776 #undef TARGET_PROMOTE_FUNCTION_ARGS
9777 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
9778 #undef TARGET_PROMOTE_FUNCTION_RETURN
9779 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
9780 #undef TARGET_PASS_BY_REFERENCE
9781 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
9783 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
9784 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
9786 #undef TARGET_FIXED_CONDITION_CODE_REGS
9787 #define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
9789 #undef TARGET_CC_MODES_COMPATIBLE
9790 #define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible
9792 #undef TARGET_INVALID_WITHIN_DOLOOP
9793 #define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_null
9795 #ifdef HAVE_AS_TLS
9796 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
9797 #define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
9798 #endif
9800 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
9801 #undef TARGET_MANGLE_TYPE
9802 #define TARGET_MANGLE_TYPE s390_mangle_type
9803 #endif
9805 #undef TARGET_SCALAR_MODE_SUPPORTED_P
9806 #define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p
9808 #undef TARGET_SECONDARY_RELOAD
9809 #define TARGET_SECONDARY_RELOAD s390_secondary_reload
9811 #undef TARGET_LIBGCC_CMP_RETURN_MODE
9812 #define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode
9814 #undef TARGET_LIBGCC_SHIFT_COUNT_MODE
9815 #define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode
9817 struct gcc_target targetm = TARGET_INITIALIZER;
9819 #include "gt-s390.h"