/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com) and
                  Andreas Krebbel (Andreas.Krebbel@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "recog.h"
#include "expr.h"
#include "reload.h"
#include "toplev.h"
#include "basic-block.h"
#include "integrate.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "optabs.h"
#include "gimple.h"
#include "df.h"
/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;
  const int ddbr;
  const int debr;
  const int dlgr;
  const int dlr;
  const int dr;
  const int dsgfr;
  const int dsgr;
};

const struct processor_costs *s390_cost;

static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),     /* M      */
  COSTS_N_INSNS (10),    /* MGHI   */
  COSTS_N_INSNS (5),     /* MH     */
  COSTS_N_INSNS (4),     /* MHI    */
  COSTS_N_INSNS (5),     /* ML     */
  COSTS_N_INSNS (5),     /* MR     */
  COSTS_N_INSNS (4),     /* MS     */
  COSTS_N_INSNS (15),    /* MSG    */
  COSTS_N_INSNS (7),     /* MSGF   */
  COSTS_N_INSNS (7),     /* MSGFR  */
  COSTS_N_INSNS (10),    /* MSGR   */
  COSTS_N_INSNS (4),     /* MSR    */
  COSTS_N_INSNS (7),     /* multiplication in DFmode */
  COSTS_N_INSNS (13),    /* MXBR   */
  COSTS_N_INSNS (136),   /* SQXBR  */
  COSTS_N_INSNS (44),    /* SQDBR  */
  COSTS_N_INSNS (35),    /* SQEBR  */
  COSTS_N_INSNS (18),    /* MADBR  */
  COSTS_N_INSNS (13),    /* MAEBR  */
  COSTS_N_INSNS (134),   /* DXBR   */
  COSTS_N_INSNS (30),    /* DDBR   */
  COSTS_N_INSNS (27),    /* DEBR   */
  COSTS_N_INSNS (220),   /* DLGR   */
  COSTS_N_INSNS (34),    /* DLR    */
  COSTS_N_INSNS (34),    /* DR     */
  COSTS_N_INSNS (32),    /* DSGFR  */
  COSTS_N_INSNS (32),    /* DSGR   */
};

static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),     /* M      */
  COSTS_N_INSNS (2),     /* MGHI   */
  COSTS_N_INSNS (2),     /* MH     */
  COSTS_N_INSNS (2),     /* MHI    */
  COSTS_N_INSNS (4),     /* ML     */
  COSTS_N_INSNS (4),     /* MR     */
  COSTS_N_INSNS (5),     /* MS     */
  COSTS_N_INSNS (6),     /* MSG    */
  COSTS_N_INSNS (4),     /* MSGF   */
  COSTS_N_INSNS (4),     /* MSGFR  */
  COSTS_N_INSNS (4),     /* MSGR   */
  COSTS_N_INSNS (4),     /* MSR    */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR   */
  COSTS_N_INSNS (130),   /* SQXBR  */
  COSTS_N_INSNS (66),    /* SQDBR  */
  COSTS_N_INSNS (38),    /* SQEBR  */
  COSTS_N_INSNS (1),     /* MADBR  */
  COSTS_N_INSNS (1),     /* MAEBR  */
  COSTS_N_INSNS (60),    /* DXBR   */
  COSTS_N_INSNS (40),    /* DDBR   */
  COSTS_N_INSNS (26),    /* DEBR   */
  COSTS_N_INSNS (176),   /* DLGR   */
  COSTS_N_INSNS (31),    /* DLR    */
  COSTS_N_INSNS (31),    /* DR     */
  COSTS_N_INSNS (31),    /* DSGFR  */
  COSTS_N_INSNS (31),    /* DSGR   */
};

static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),     /* M      */
  COSTS_N_INSNS (2),     /* MGHI   */
  COSTS_N_INSNS (2),     /* MH     */
  COSTS_N_INSNS (2),     /* MHI    */
  COSTS_N_INSNS (4),     /* ML     */
  COSTS_N_INSNS (4),     /* MR     */
  COSTS_N_INSNS (5),     /* MS     */
  COSTS_N_INSNS (6),     /* MSG    */
  COSTS_N_INSNS (4),     /* MSGF   */
  COSTS_N_INSNS (4),     /* MSGFR  */
  COSTS_N_INSNS (4),     /* MSGR   */
  COSTS_N_INSNS (4),     /* MSR    */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR   */
  COSTS_N_INSNS (130),   /* SQXBR  */
  COSTS_N_INSNS (66),    /* SQDBR  */
  COSTS_N_INSNS (38),    /* SQEBR  */
  COSTS_N_INSNS (1),     /* MADBR  */
  COSTS_N_INSNS (1),     /* MAEBR  */
  COSTS_N_INSNS (60),    /* DXBR   */
  COSTS_N_INSNS (40),    /* DDBR   */
  COSTS_N_INSNS (26),    /* DEBR   */
  COSTS_N_INSNS (30),    /* DLGR   */
  COSTS_N_INSNS (23),    /* DLR    */
  COSTS_N_INSNS (23),    /* DR     */
  COSTS_N_INSNS (24),    /* DSGFR  */
  COSTS_N_INSNS (24),    /* DSGR   */
};

static const
struct processor_costs z10_cost =
{
  COSTS_N_INSNS (10),    /* M      */
  COSTS_N_INSNS (10),    /* MGHI   */
  COSTS_N_INSNS (10),    /* MH     */
  COSTS_N_INSNS (10),    /* MHI    */
  COSTS_N_INSNS (10),    /* ML     */
  COSTS_N_INSNS (10),    /* MR     */
  COSTS_N_INSNS (10),    /* MS     */
  COSTS_N_INSNS (10),    /* MSG    */
  COSTS_N_INSNS (10),    /* MSGF   */
  COSTS_N_INSNS (10),    /* MSGFR  */
  COSTS_N_INSNS (10),    /* MSGR   */
  COSTS_N_INSNS (10),    /* MSR    */
  COSTS_N_INSNS (10),    /* multiplication in DFmode */
  COSTS_N_INSNS (50),    /* MXBR   */
  COSTS_N_INSNS (120),   /* SQXBR  */
  COSTS_N_INSNS (52),    /* SQDBR  */
  COSTS_N_INSNS (38),    /* SQEBR  */
  COSTS_N_INSNS (10),    /* MADBR  */
  COSTS_N_INSNS (10),    /* MAEBR  */
  COSTS_N_INSNS (111),   /* DXBR   */
  COSTS_N_INSNS (39),    /* DDBR   */
  COSTS_N_INSNS (32),    /* DEBR   */
  COSTS_N_INSNS (160),   /* DLGR   */
  COSTS_N_INSNS (71),    /* DLR    */
  COSTS_N_INSNS (71),    /* DR     */
  COSTS_N_INSNS (71),    /* DSGFR  */
  COSTS_N_INSNS (71),    /* DSGR   */
};
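/* Illustrative sketch (not part of the original file): once
   override_options below has pointed s390_cost at the table matching
   -mtune, a cost computation would typically read individual entries.
   The function below is hypothetical and only shows the access pattern.  */
#if 0
static int
example_mult_cost (enum machine_mode mode)
{
  /* MSGR is the 64-bit register-register multiply, MSR the 32-bit one.  */
  return mode == DImode ? s390_cost->msgr : s390_cost->msr;
}
#endif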
extern int reload_completed;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;

/* Save the result of a compare_and_swap until the branch or scc is
   emitted.  */
rtx s390_compare_emitted = NULL_RTX;

/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
	base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};
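/* Illustrative example (not part of the original file): the operand
   "8(%r3,%r2)" -- displacement 8 with two address registers -- corresponds
   to the RTX (plus (plus (reg 2) (reg 3)) (const_int 8)).  Fed to
   s390_decompose_address further below, the two registers end up in the
   base and indx fields, (const_int 8) in disp, and the pointer and
   literal_pool flags stay false unless one of the registers is known to
   hold a pointer or the literal pool base.  */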
/* Which cpu are we tuning for.  */
enum processor_type s390_tune = PROCESSOR_max;
enum processor_flags s390_tune_flags;
/* Which instruction set architecture to use.  */
enum processor_type s390_arch;
enum processor_flags s390_arch_flags;

HOST_WIDE_INT s390_warn_framesize = 0;
HOST_WIDE_INT s390_stack_size = 0;
HOST_WIDE_INT s390_stack_guard = 0;

/* The following structure is embedded in the machine
   specific part of struct function.  */

struct s390_frame_layout GTY (())
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Number of first and last gpr where slots in the register
     save area are reserved for.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};

/* Define the structure for the machine field in struct function.  */

struct machine_function GTY(())
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* True during final stage of literal pool processing.  */
  bool decomposed_literal_pool_addresses_ok_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;

  bool has_landing_pad_p;
};

/* Few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_WORD)
#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
  (1 << (BITNUM)))
#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
  (1 << (BITNUM))))
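/* Illustrative example (not part of the original file): following the bit
   layout documented in s390_frame_layout, cfun_set_fpr_bit (0) records
   that f0 (hard reg 16) has to be saved by setting the rightmost bit of
   fpr_bitmap, and cfun_fpr_bit_p (0) subsequently yields true.  */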
/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT? 4 : 2)

/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
	CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")

#define REGNO_PAIR_OK(REGNO, MODE) \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))

static enum machine_mode
s390_libgcc_cmp_return_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static enum machine_mode
s390_libgcc_shift_count_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

/* Return true if the back end supports mode MODE.  */
static bool
s390_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Set the has_landing_pad_p flag in struct machine_function to VALUE.  */

void
s390_set_has_landing_pad_p (bool value)
{
  cfun->machine->has_landing_pad_p = value;
}

/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static enum machine_mode
s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
          || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
        return m2;
      return VOIDmode;

    case CCSmode:
    case CCUmode:
    case CCTmode:
    case CCSRmode:
    case CCURmode:
    case CCZ1mode:
      if (m2 == CCZmode)
        return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}
/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static bool
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  gcc_assert (GET_CODE (set) == SET);

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCL3mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      gcc_unreachable ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}

/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

bool
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return false;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return false;
      }

  return true;
}

/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.
     e.g.: int a; if ((a & (16 + 128)) == 0) */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.
     e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
     int a;
     if ((a & (16 + 128)) == 16)  -> CCT1
     if ((a & (16 + 128)) == 128) -> CCT2  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}
/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      /* The only overflow condition of NEG and ABS happens when
         -INT_MAX is used as parameter, which stays negative. So
         we have an overflow from a positive value to a negative.
         Using CCAP mode the resulting cc can be used for comparisons.  */
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;

      /* If constants are involved in an add instruction it is possible to use
         the resulting cc for comparisons with zero. Knowing the sign of the
         constant the overflow behavior gets predictable. e.g.:
           int a, b; if ((b = a + c) > 0)
         with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP  */
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        {
          if (INTVAL (XEXP((op0), 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      gcc_unreachable ();
    }
}
/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
   that we can implement more efficiently.  */

void
s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
{
  /* Convert ZERO_EXTRACT back to AND to enable TM patterns.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == ZERO_EXTRACT
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && GET_CODE (XEXP (*op0, 2)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
      HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));

      if (len > 0 && len < modesize
          && pos >= 0 && pos + len <= modesize
          && modesize <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT block;
          block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          block <<= modesize - pos - len;

          *op0 = gen_rtx_AND (GET_MODE (inner), inner,
                              gen_int_mode (block, GET_MODE (inner)));
        }
    }

  /* Narrow AND of memory against immediate to enable TM.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == AND
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      rtx mask = XEXP (*op0, 1);

      /* Ignore paradoxical SUBREGs if all extra bits are masked out.  */
      if (GET_CODE (inner) == SUBREG
          && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
          && (GET_MODE_SIZE (GET_MODE (inner))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
          && ((INTVAL (mask)
               & GET_MODE_MASK (GET_MODE (inner))
               & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
              == 0))
        inner = SUBREG_REG (inner);

      /* Do not change volatile MEMs.  */
      if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
        {
          int part = s390_single_part (XEXP (*op0, 1),
                                       GET_MODE (inner), QImode, 0);
          if (part >= 0)
            {
              mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
              inner = adjust_address_nv (inner, QImode, part);
              *op0 = gen_rtx_AND (QImode, inner, mask);
            }
        }
    }

  /* Narrow comparisons against 0xffff to HImode if possible.  */
  if ((*code == EQ || *code == NE)
      && GET_CODE (*op1) == CONST_INT
      && INTVAL (*op1) == 0xffff
      && SCALAR_INT_MODE_P (GET_MODE (*op0))
      && (nonzero_bits (*op0, GET_MODE (*op0))
          & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
    {
      *op0 = gen_lowpart (HImode, *op0);
      *op1 = constm1_rtx;
    }

  /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        case LT: new_code = GTU; break;
        case GT: new_code = LTU; break;
        case LE: new_code = GEU; break;
        case GE: new_code = LEU; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Simplify cascaded EQ, NE with const0_rtx.  */
  if ((*code == NE || *code == EQ)
      && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
      && GET_MODE (*op0) == SImode
      && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
      && REG_P (XEXP (*op0, 0))
      && XEXP (*op0, 1) == const0_rtx
      && *op1 == const0_rtx)
    {
      if ((*code == EQ && GET_CODE (*op0) == NE)
          || (*code == NE && GET_CODE (*op0) == EQ))
        *code = EQ;
      else
        *code = NE;
      *op0 = XEXP (*op0, 0);
    }

  /* Prefer register over memory as first operand.  */
  if (MEM_P (*op0) && REG_P (*op1))
    {
      rtx tem = *op0; *op0 = *op1; *op1 = tem;
      *code = swap_condition (*code);
    }
}
/* Emit a compare instruction suitable to implement the comparison
   OP0 CODE OP1.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */

rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = s390_select_ccmode (code, op0, op1);
  rtx ret = NULL_RTX;

  /* Do not output a redundant compare instruction if a compare_and_swap
     pattern already computed the result and the machine modes are compatible.  */
  if (s390_compare_emitted
      && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode)
          == GET_MODE (s390_compare_emitted)))
    ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
  else
    {
      rtx cc = gen_rtx_REG (mode, CC_REGNUM);

      emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
      ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
    }
  s390_compare_emitted = NULL_RTX;
  return ret;
}

/* Emit a SImode compare and swap instruction setting MEM to NEW_RTX if OLD
   matches CMP.
   Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
   conditional branch testing the result.  */

static rtx
s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new_rtx)
{
  rtx ret;

  emit_insn (gen_sync_compare_and_swap_ccsi (old, mem, cmp, new_rtx));
  ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);

  s390_compare_emitted = NULL_RTX;

  return ret;
}

/* Emit a jump instruction to TARGET.  If COND is NULL_RTX, emit an
   unconditional jump, else a conditional jump under condition COND.  */

void
s390_emit_jump (rtx target, rtx cond)
{
  rtx insn;

  target = gen_rtx_LABEL_REF (VOIDmode, target);
  if (cond)
    target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);

  insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
  emit_jump_insn (insn);
}
/* Return branch condition mask to implement a branch
   specified by CODE.  Return -1 for invalid comparisons.  */

static int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
  gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
  gcc_assert (XEXP (code, 1) == const0_rtx);

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
    case CCZ1mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        default:	return -1;
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC1;
        case NE:	return CC0 | CC2 | CC3;
        default:	return -1;
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC2;
        case NE:	return CC0 | CC1 | CC3;
        default:	return -1;
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC3;
        case NE:	return CC0 | CC1 | CC2;
        default:	return -1;
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0 | CC2;
        case NE:	return CC1 | CC3;
        default:	return -1;
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU:	return CC2 | CC3;  /* carry */
        case GEU:	return CC0 | CC1;  /* no carry */
        default:	return -1;
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU:	return CC0 | CC1;  /* borrow */
        case LEU:	return CC2 | CC3;  /* no borrow */
        default:	return -1;
        }
      break;

    case CCL3mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0 | CC2;
        case NE:	return CC1 | CC3;
        case LTU:	return CC1;
        case GTU:	return CC3;
        case LEU:	return CC1 | CC2;
        case GEU:	return CC2 | CC3;
        default:	return -1;
        }

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LTU:	return CC1;
        case GTU:	return CC2;
        case LEU:	return CC0 | CC1;
        case GEU:	return CC0 | CC2;
        default:	return -1;
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC2 | CC1 | CC3;
        case LTU:	return CC2;
        case GTU:	return CC1;
        case LEU:	return CC0 | CC2;
        case GEU:	return CC0 | CC1;
        default:	return -1;
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1 | CC3;
        case GT:	return CC2;
        case LE:	return CC0 | CC1 | CC3;
        case GE:	return CC0 | CC2;
        default:	return -1;
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1;
        case GT:	return CC2 | CC3;
        case LE:	return CC0 | CC1;
        case GE:	return CC0 | CC2 | CC3;
        default:	return -1;
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1;
        case GT:	return CC2;
        case LE:	return CC0 | CC1;
        case GE:	return CC0 | CC2;
        case UNORDERED:	return CC3;
        case ORDERED:	return CC0 | CC1 | CC2;
        case UNEQ:	return CC0 | CC3;
        case UNLT:	return CC1 | CC3;
        case UNGT:	return CC2 | CC3;
        case UNLE:	return CC0 | CC1 | CC3;
        case UNGE:	return CC0 | CC2 | CC3;
        case LTGT:	return CC1 | CC2;
        default:	return -1;
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC2 | CC1 | CC3;
        case LT:	return CC2;
        case GT:	return CC1;
        case LE:	return CC0 | CC2;
        case GE:	return CC0 | CC1;
        case UNORDERED:	return CC3;
        case ORDERED:	return CC0 | CC2 | CC1;
        case UNEQ:	return CC0 | CC3;
        case UNLT:	return CC2 | CC3;
        case UNGT:	return CC1 | CC3;
        case UNLE:	return CC0 | CC2 | CC3;
        case UNGE:	return CC0 | CC1 | CC3;
        case LTGT:	return CC2 | CC1;
        default:	return -1;
        }
      break;

    default:
      return -1;
    }
}
/* Return branch condition mask to implement a compare and branch
   specified by CODE.  Return -1 for invalid comparisons.  */

static int
s390_compare_and_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;

  switch (GET_CODE (code))
    {
    case EQ:
      return CC0;
    case NE:
      return CC1 | CC2;
    case LT:
    case LTU:
      return CC1;
    case GT:
    case GTU:
      return CC2;
    case LE:
    case LEU:
      return CC0 | CC1;
    case GE:
    case GEU:
      return CC0 | CC2;
    default:
      gcc_unreachable ();
    }
  return -1;
}

/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  int mask;

  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  if (GET_CODE (XEXP (code, 0)) == REG
      && REGNO (XEXP (code, 0)) == CC_REGNUM
      && XEXP (code, 1) == const0_rtx)
    mask = s390_branch_condition_mask (code);
  else
    mask = s390_compare_and_branch_condition_mask (code);

  gcc_assert (mask >= 0);

  if (inv)
    mask ^= 15;

  gcc_assert (mask >= 1 && mask <= 14);

  return mnemonic[mask];
}
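/* Illustrative example (not part of the original file): for an EQ test of
   the CC register in CCZmode, s390_branch_condition_mask returns CC0,
   i.e. the mask 1 << 3 == 8, and mnemonic[8] is "e", the condition suffix
   used when printing the branch.  With INV set the mask becomes
   8 ^ 15 == 7 and mnemonic[7] is "ne", the inverted condition.  */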
/* Return the part of op which has a value different from def.
   The size of the part is determined by mode.
   Use this function only if you already know that op really
   contains such a part.  */

unsigned HOST_WIDE_INT
s390_extract_part (rtx op, enum machine_mode mode, int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
  int part_bits = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
  int i;

  for (i = 0; i < max_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= part_bits;

      if ((value & part_mask) != (def & part_mask))
        return value & part_mask;
    }

  gcc_unreachable ();
}

/* If OP is an integer constant of mode MODE with exactly one
   part of mode PART_MODE unequal to DEF, return the number of that
   part.  Otherwise, return -1.  */

int
s390_single_part (rtx op,
                  enum machine_mode mode,
                  enum machine_mode part_mode,
                  int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
  int i, part = -1;

  if (GET_CODE (op) != CONST_INT)
    return -1;

  for (i = 0; i < n_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= GET_MODE_BITSIZE (part_mode);

      if ((value & part_mask) != (def & part_mask))
        {
          if (part != -1)
            return -1;
          else
            part = i;
        }
    }
  return part == -1 ? -1 : n_parts - 1 - part;
}
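/* Illustrative example (not part of the original file): for the DImode
   constant 0x0000ffff00000000 with PART_MODE == HImode and DEF == 0, only
   the HImode part covering bits 32..47 differs from DEF.  The loop above
   finds it at i == 2 counting from the least significant part, and the
   function returns n_parts - 1 - part == 4 - 1 - 2 == 1, i.e. parts are
   numbered starting from the most significant one.  */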
/* Return true if IN contains a contiguous bitfield in the lower SIZE
   bits and no other bits are set in IN.  POS and LENGTH can be used
   to obtain the start position and the length of the bitfield.

   POS gives the position of the first bit of the bitfield counting
   from the lowest order bit starting with zero.  In order to use this
   value for S/390 instructions this has to be converted to "bits big
   endian" style.  */

bool
s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT in, int size,
                           int *pos, int *length)
{
  int tmp_pos = 0;
  int tmp_length = 0;
  int i;
  unsigned HOST_WIDE_INT mask = 1ULL;
  bool contiguous = false;

  for (i = 0; i < size; mask <<= 1, i++)
    {
      if (contiguous)
        {
          if (mask & in)
            tmp_length++;
          else
            break;
        }
      else
        {
          if (mask & in)
            {
              contiguous = true;
              tmp_length++;
            }
          else
            tmp_pos++;
        }
    }

  if (!tmp_length)
    return false;

  /* Calculate a mask for all bits beyond the contiguous bits.  */
  mask = (-1LL & ~(((1ULL << (tmp_length + tmp_pos - 1)) << 1) - 1));

  if (mask & in)
    return false;

  if (tmp_length + tmp_pos - 1 > size)
    return false;

  if (length)
    *length = tmp_length;

  if (pos)
    *pos = tmp_pos;

  return true;
}
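/* Illustrative example (not part of the original file):
   s390_contiguous_bitmask_p (0x00f0, 16, &pos, &len) returns true with
   pos == 4 and len == 4, since bits 4..7 form a single contiguous run and
   no other bit is set.  0x00f1 returns false because bit 0 and bits 4..7
   do not form one contiguous run.  */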
/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}

/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
   and [MEM2, MEM2 + SIZE] do overlap and false
   otherwise.  */

bool
s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
{
  rtx addr1, addr2, addr_delta;
  HOST_WIDE_INT delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return true;

  if (size == 0)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);

  /* This overlapping check is used by peepholes merging memory block operations.
     Overlapping operations would otherwise be recognized by the S/390 hardware
     and would fall back to a slower implementation. Allowing overlapping
     operations would lead to slow code but not to wrong code. Therefore we are
     somewhat optimistic if we cannot prove that the memory blocks are
     overlapping.
     That's why we return false here although this may accept operations on
     overlapping memory areas.  */
  if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
    return false;

  delta = INTVAL (addr_delta);

  if (delta == 0
      || (delta > 0 && delta < size)
      || (delta < 0 && -delta < size))
    return true;

  return false;
}

/* Check whether the address of memory reference MEM2 equals exactly
   the address of memory reference MEM1 plus DELTA.  Return true if
   we can prove this to be the case, false otherwise.  */

bool
s390_offset_p (rtx mem1, rtx mem2, rtx delta)
{
  rtx addr1, addr2, addr_delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
  if (!addr_delta || !rtx_equal_p (addr_delta, delta))
    return false;

  return true;
}
/* Expand logical operator CODE in mode MODE with operands OPERANDS.  */

void
s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
                              rtx *operands)
{
  enum machine_mode wmode = mode;
  rtx dst = operands[0];
  rtx src1 = operands[1];
  rtx src2 = operands[2];
  rtx op, clob, tem;

  /* If we cannot handle the operation directly, use a temp register.  */
  if (!s390_logical_operator_ok_p (operands))
    dst = gen_reg_rtx (mode);

  /* QImode and HImode patterns make sense only if we have a destination
     in memory.  Otherwise perform the operation in SImode.  */
  if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
    wmode = SImode;

  /* Widen operands if required.  */
  if (mode != wmode)
    {
      if (GET_CODE (dst) == SUBREG
          && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
        dst = tem;
      else if (REG_P (dst))
        dst = gen_rtx_SUBREG (wmode, dst, 0);
      else
        dst = gen_reg_rtx (wmode);

      if (GET_CODE (src1) == SUBREG
          && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
        src1 = tem;
      else if (GET_MODE (src1) != VOIDmode)
        src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);

      if (GET_CODE (src2) == SUBREG
          && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
        src2 = tem;
      else if (GET_MODE (src2) != VOIDmode)
        src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
    }

  /* Emit the instruction.  */
  op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
  clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));

  /* Fix up the destination if needed.  */
  if (dst != operands[0])
    emit_move_insn (operands[0], gen_lowpart (mode, dst));
}

/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR).  */

bool
s390_logical_operator_ok_p (rtx *operands)
{
  /* If the destination operand is in memory, it needs to coincide
     with one of the source operands.  After reload, it has to be
     the first source operand.  */
  if (GET_CODE (operands[0]) == MEM)
    return rtx_equal_p (operands[0], operands[1])
           || (!reload_completed && rtx_equal_p (operands[0], operands[2]));

  return true;
}

/* Narrow logical operation CODE of memory operand MEMOP with immediate
   operand IMMOP to switch from SS to SI type instructions.  */

void
s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
{
  int def = code == AND ? -1 : 0;
  HOST_WIDE_INT mask;
  int part;

  gcc_assert (GET_CODE (*memop) == MEM);
  gcc_assert (!MEM_VOLATILE_P (*memop));

  mask = s390_extract_part (*immop, QImode, def);
  part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
  gcc_assert (part >= 0);

  *memop = adjust_address (*memop, QImode, part);
  *immop = gen_int_mode (mask, QImode);
}
/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  return GGC_CNEW (struct machine_function);
}

/* Change optimizations to be performed, depending on the
   optimization level.

   LEVEL is the optimization level specified; 2 if `-O2' is
   specified, 1 if `-O' is specified, and 0 if neither is specified.

   SIZE is nonzero if `-Os' is specified and zero otherwise.  */

void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  /* ??? There are apparently still problems with -fcaller-saves.  */
  flag_caller_saves = 0;

  /* By default, always emit DWARF-2 unwind info.  This allows debugging
     without maintaining a stack frame back-chain.  */
  flag_asynchronous_unwind_tables = 1;

  /* Use MVCLE instructions to decrease code size if requested.  */
  if (size != 0)
    target_flags |= MASK_MVCLE;
}

/* Return true if ARG is the name of a processor.  Set *TYPE and *FLAGS
   to the associated processor_type and processor_flags if so.  */

static bool
s390_handle_arch_option (const char *arg,
                         enum processor_type *type,
                         enum processor_flags *flags)
{
  static struct pta
    {
      const char *const name;		/* processor name or nickname.  */
      const enum processor_type processor;
      const enum processor_flags flags;
    }
  const processor_alias_table[] =
    {
      {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
      {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
      {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
      {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
				    | PF_LONG_DISPLACEMENT},
      {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
				       | PF_LONG_DISPLACEMENT | PF_EXTIMM},
      {"z9-ec", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
                                       | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP },
      {"z10", PROCESSOR_2097_Z10, PF_IEEE_FLOAT | PF_ZARCH
                                  | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP | PF_Z10},
    };
  size_t i;

  for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
    if (strcmp (arg, processor_alias_table[i].name) == 0)
      {
        *type = processor_alias_table[i].processor;
        *flags = processor_alias_table[i].flags;
        return true;
      }
  return false;
}
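/* Illustrative example (not part of the original file): for -march=z9-ec
   the option handler below calls
   s390_handle_arch_option ("z9-ec", &s390_arch, &s390_arch_flags), which
   sets s390_arch to PROCESSOR_2094_Z9_109 and turns on the IEEE float,
   z/Architecture, long displacement, extended immediate and DFP flags
   from the alias table above.  */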
/* Implement TARGET_HANDLE_OPTION.  */

static bool
s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case OPT_march_:
      return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);

    case OPT_mstack_guard_:
      if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
        return false;
      if (exact_log2 (s390_stack_guard) == -1)
        error ("stack guard value must be an exact power of 2");
      return true;

    case OPT_mstack_size_:
      if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
        return false;
      if (exact_log2 (s390_stack_size) == -1)
        error ("stack size must be an exact power of 2");
      return true;

    case OPT_mtune_:
      return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);

    case OPT_mwarn_framesize_:
      return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;

    default:
      return true;
    }
}
void
override_options (void)
{
  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
        target_flags |= MASK_ZARCH;
      else
        target_flags &= ~MASK_ZARCH;
    }

  /* Determine processor architectural level.  */
  if (!s390_arch_string)
    {
      s390_arch_string = TARGET_ZARCH? "z900" : "g5";
      s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
    }

  /* Determine processor to tune for.  */
  if (s390_tune == PROCESSOR_max)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
    error ("z/Architecture mode not supported on %s", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode");

  if (TARGET_HARD_DFP && !TARGET_DFP)
    {
      if (target_flags_explicit & MASK_HARD_DFP)
        {
          if (!TARGET_CPU_DFP)
            error ("Hardware decimal floating point instructions"
                   " not available on %s", s390_arch_string);
          if (!TARGET_ZARCH)
            error ("Hardware decimal floating point instructions"
                   " not available in ESA/390 mode");
        }
      else
        target_flags &= ~MASK_HARD_DFP;
    }

  if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
    {
      if ((target_flags_explicit & MASK_HARD_DFP) && TARGET_HARD_DFP)
        error ("-mhard-dfp can't be used in conjunction with -msoft-float");

      target_flags &= ~MASK_HARD_DFP;
    }

  /* Set processor cost function.  */
  switch (s390_tune)
    {
    case PROCESSOR_2084_Z990:
      s390_cost = &z990_cost;
      break;
    case PROCESSOR_2094_Z9_109:
      s390_cost = &z9_109_cost;
      break;
    case PROCESSOR_2097_Z10:
      s390_cost = &z10_cost;
      break;
    default:
      s390_cost = &z900_cost;
    }

  if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
    error ("-mbackchain -mpacked-stack -mhard-float are not supported "
           "in combination");

  if (s390_stack_size)
    {
      if (s390_stack_guard >= s390_stack_size)
        error ("stack size must be greater than the stack guard value");
      else if (s390_stack_size > 1 << 16)
        error ("stack size must not be greater than 64k");
    }
  else if (s390_stack_guard)
    error ("-mstack-guard implies use of -mstack-size");

#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
  if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
    target_flags |= MASK_LONG_DOUBLE_128;
#endif
}
/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    CC_REGS,   ADDR_REGS, ADDR_REGS,
  ACCESS_REGS,  ACCESS_REGS
};

/* Return attribute type of insn.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}

/* Return true if DISP is a valid short displacement.  */

static bool
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return true;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
          || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
    return false;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return true;

  return false;
}
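/* Illustrative example (not part of the original file): a (const_int 4095)
   displacement is accepted as a short displacement since it fits the
   unsigned 12-bit field, while (const_int 4096) or any negative constant
   is not and needs a long-displacement instruction or a reload.  */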
/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns false if ADDR is not a valid memory address, true
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */

static int
s390_decompose_address (rtx addr, struct s390_address *out)
{
  HOST_WIDE_INT offset = 0;
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  rtx orig_disp;
  bool pointer = false;
  bool base_ptr = false;
  bool indx_ptr = false;
  bool literal_pool = false;

  /* We may need to substitute the literal pool base register into the address
     below.  However, at this point we do not know which register is going to
     be used as base, so we substitute the arg pointer register.  This is going
     to be treated as holding a pointer below -- it shouldn't be used for any
     other purpose.  */
  rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
        {
          if (code1 == REG || code1 == UNSPEC)
            {
              indx = op0;	/* index + base */
              base = op1;
            }

          else
            {
              base = op0;	/* base + displacement */
              disp = op1;
            }
        }

      else if (code0 == PLUS)
        {
          indx = XEXP (op0, 0);	/* index + base + disp */
          base = XEXP (op0, 1);
          disp = op1;
        }

      else
        {
          return false;
        }
    }

  else
    disp = addr;		/* displacement */

  /* Extract integer part of displacement.  */
  orig_disp = disp;
  if (disp)
    {
      if (GET_CODE (disp) == CONST_INT)
        {
          offset = INTVAL (disp);
          disp = NULL_RTX;
        }
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
        {
          offset = INTVAL (XEXP (XEXP (disp, 0), 1));
          disp = XEXP (XEXP (disp, 0), 0);
        }
    }

  /* Strip off CONST here to avoid special case tests later.  */
  if (disp && GET_CODE (disp) == CONST)
    disp = XEXP (disp, 0);

  /* We can convert literal pool addresses to
     displacements by basing them off the base register.  */
  if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
    {
      /* Either base or index must be free to hold the base register.  */
      if (!base)
        base = fake_pool_base, literal_pool = true;
      else if (!indx)
        indx = fake_pool_base, literal_pool = true;
      else
        return false;

      /* Mark up the displacement.  */
      disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
                             UNSPEC_LTREL_OFFSET);
    }
  /* Validate base register.  */
  if (base)
    {
      if (GET_CODE (base) == UNSPEC)
        switch (XINT (base, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (base, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            base = XVECEXP (base, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (base, 0) == 1)
              base = fake_pool_base, literal_pool = true;
            else
              base = XVECEXP (base, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (base)
          || (GET_MODE (base) != SImode
              && GET_MODE (base) != Pmode))
        return false;

      if (REGNO (base) == STACK_POINTER_REGNUM
          || REGNO (base) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (base) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
        pointer = base_ptr = true;

      if ((reload_completed || reload_in_progress)
          && base == cfun->machine->base_reg)
        pointer = base_ptr = literal_pool = true;
    }

  /* Validate index register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        switch (XINT (indx, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (indx, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            indx = XVECEXP (indx, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (indx, 0) == 1)
              indx = fake_pool_base, literal_pool = true;
            else
              indx = XVECEXP (indx, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (indx)
          || (GET_MODE (indx) != SImode
              && GET_MODE (indx) != Pmode))
        return false;

      if (REGNO (indx) == STACK_POINTER_REGNUM
          || REGNO (indx) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (indx) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
        pointer = indx_ptr = true;

      if ((reload_completed || reload_in_progress)
          && indx == cfun->machine->base_reg)
        pointer = indx_ptr = literal_pool = true;
    }
  /* Prefer to use pointer as base, not index.  */
  if (base && indx && !base_ptr
      && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate displacement.  */
  if (!disp)
    {
      /* If virtual registers are involved, the displacement will change later
         anyway as the virtual registers get eliminated.  This could make a
         valid displacement invalid, but it is more likely to make an invalid
         displacement valid, because we sometimes access the register save area
         via negative offsets to one of those registers.
         Thus we don't check the displacement for validity here.  If after
         elimination the displacement turns out to be invalid after all,
         this is fixed up by reload in any case.  */
      if (base != arg_pointer_rtx
          && indx != arg_pointer_rtx
          && base != return_address_pointer_rtx
          && indx != return_address_pointer_rtx
          && base != frame_pointer_rtx
          && indx != frame_pointer_rtx
          && base != virtual_stack_vars_rtx
          && indx != virtual_stack_vars_rtx)
        if (!DISP_IN_RANGE (offset))
          return false;
    }
  else
    {
      /* All the special cases are pointers.  */
      pointer = true;

      /* In the small-PIC case, the linker converts @GOT
         and @GOTNTPOFF offsets to possible displacements.  */
      if (GET_CODE (disp) == UNSPEC
          && (XINT (disp, 1) == UNSPEC_GOT
              || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
          && flag_pic == 1)
        {
          ;
        }

      /* Accept chunkified literal pool symbol references.  */
      else if (cfun && cfun->machine
               && cfun->machine->decomposed_literal_pool_addresses_ok_p
               && GET_CODE (disp) == MINUS
               && GET_CODE (XEXP (disp, 0)) == LABEL_REF
               && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
        {
          ;
        }

      /* Accept literal pool references.  */
      else if (GET_CODE (disp) == UNSPEC
               && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
        {
          orig_disp = gen_rtx_CONST (Pmode, disp);
          if (offset)
            {
              /* If we have an offset, make sure it does not
                 exceed the size of the constant pool entry.  */
              rtx sym = XVECEXP (disp, 0, 0);
              if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
                return false;

              orig_disp = plus_constant (orig_disp, offset);
            }
        }

      else
        return false;
    }

  if (!base && !indx)
    pointer = true;

  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = orig_disp;
      out->pointer = pointer;
      out->literal_pool = literal_pool;
    }

  return true;
}
/* Decompose a RTL expression OP for a shift count into its components,
   and return the base register in BASE and the offset in OFFSET.

   Return true if OP is a valid shift count, false if not.  */

bool
s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT off = 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  */
  if (GET_CODE (op) == CONST_INT)
    {
      off = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (op && GET_CODE (op) != REG)
    return false;

  if (offset)
    *offset = off;
  if (base)
    *base = op;

  return true;
}
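/* Illustrative example (not part of the original file): for the shift
   count operand (plus (reg 3) (const_int 7)),
   s390_decompose_shift_count stores (reg 3) in *BASE and 7 in *OFFSET;
   a plain (const_int 63) yields a NULL base and offset 63, and anything
   other than a register, a constant, or their sum is rejected.  */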
/* Return true if CODE is a valid address without index.  */

bool
s390_legitimate_address_without_index_p (rtx op)
{
  struct s390_address addr;

  if (!s390_decompose_address (XEXP (op, 0), &addr))
    return false;
  if (addr.indx)
    return false;

  return true;
}
/* Evaluates constraint strings described by the regular expression
   ([A|B](Q|R|S|T))|U|W and returns 1 if OP is a valid operand for the
   constraint given in STR, or 0 else.  */

int
s390_mem_constraint (const char *str, rtx op)
{
  struct s390_address addr;
  char c = str[0];

  /* Check for offsettable variants of memory constraints.  */
  if (c == 'A')
    {
      /* Only accept non-volatile MEMs.  */
      if (!MEM_P (op) || MEM_VOLATILE_P (op))
        return 0;

      if ((reload_completed || reload_in_progress)
          ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
        return 0;

      c = str[1];
    }

  /* Check for non-literal-pool variants of memory constraints.  */
  else if (c == 'B')
    {
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.literal_pool)
        return 0;

      c = str[1];
    }

  switch (c)
    {
    case 'Q':
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.indx)
        return 0;

      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'R':
      if (GET_CODE (op) != MEM)
        return 0;

      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_decompose_address (XEXP (op, 0), &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'S':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'T':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'U':
      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_decompose_address (op, &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'W':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (!s390_decompose_address (op, &addr))
        return 0;
      if (s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'Y':
      /* Simply check for the basic form of a shift count.  Reload will
         take care of making sure we have a proper base register.  */
      if (!s390_decompose_shift_count (op, NULL, NULL))
        return 0;
      break;

    default:
      return 0;
    }

  return 1;
}
2183 /* Evaluates constraint strings starting with letter O. Input
2184 parameter C is the letter following the "O" in the constraint
2185 string. Returns 1 if VALUE meets the respective constraint and 0
2186 otherwise. */
2189 s390_O_constraint_str (const char c, HOST_WIDE_INT value)
2191 if (!TARGET_EXTIMM)
2192 return 0;
2194 switch (c)
2196 case 's':
2197 return trunc_int_for_mode (value, SImode) == value;
2199 case 'p':
2200 return value == 0
2201 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
2203 case 'n':
2204 return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;
2206 default:
2207 gcc_unreachable ();
2212 /* Evaluates constraint strings starting with letter N. Parameter STR
2213 contains the letters following letter "N" in the constraint string.
2214 Returns true if VALUE matches the constraint. */
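/* For example (editor's gloss derived from the code below): the string
   "xQD0" accepts a DImode constant in which exactly one QImode byte
   differs from zero while all remaining bytes are zero; an "F" in the
   last position requires the remaining parts to be all ones instead,
   and a leading digit instead of "x" pins down which part may differ.  */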
2217 s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
2219 enum machine_mode mode, part_mode;
2220 int def;
2221 int part, part_goal;
2224 if (str[0] == 'x')
2225 part_goal = -1;
2226 else
2227 part_goal = str[0] - '0';
2229 switch (str[1])
2231 case 'Q':
2232 part_mode = QImode;
2233 break;
2234 case 'H':
2235 part_mode = HImode;
2236 break;
2237 case 'S':
2238 part_mode = SImode;
2239 break;
2240 default:
2241 return 0;
2244 switch (str[2])
2246 case 'H':
2247 mode = HImode;
2248 break;
2249 case 'S':
2250 mode = SImode;
2251 break;
2252 case 'D':
2253 mode = DImode;
2254 break;
2255 default:
2256 return 0;
2259 switch (str[3])
2261 case '0':
2262 def = 0;
2263 break;
2264 case 'F':
2265 def = -1;
2266 break;
2267 default:
2268 return 0;
2271 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
2272 return 0;
2274 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
2275 if (part < 0)
2276 return 0;
2277 if (part_goal != -1 && part_goal != part)
2278 return 0;
2280 return 1;
2284 /* Returns true if the input parameter VALUE is a float zero. */
2287 s390_float_const_zero_p (rtx value)
2289 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
2290 && value == CONST0_RTX (GET_MODE (value)));
2294 /* Compute a (partial) cost for rtx X. Return true if the complete
2295 cost has been computed, and false if subexpressions should be
2296 scanned. In either case, *TOTAL contains the cost result.
2297 CODE contains GET_CODE (x), OUTER_CODE contains the code
2298 of the superexpression of x. */
2300 static bool
2301 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
2303 switch (code)
2305 case CONST:
2306 case CONST_INT:
2307 case LABEL_REF:
2308 case SYMBOL_REF:
2309 case CONST_DOUBLE:
2310 case MEM:
2311 *total = 0;
2312 return true;
2314 case ASHIFT:
2315 case ASHIFTRT:
2316 case LSHIFTRT:
2317 case ROTATE:
2318 case ROTATERT:
2319 case AND:
2320 case IOR:
2321 case XOR:
2322 case NEG:
2323 case NOT:
2324 *total = COSTS_N_INSNS (1);
2325 return false;
2327 case PLUS:
2328 case MINUS:
2329 /* Check for multiply and add. */
2330 if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
2331 && GET_CODE (XEXP (x, 0)) == MULT
2332 && TARGET_HARD_FLOAT && TARGET_FUSED_MADD)
2334 /* This is the multiply and add case. */
2335 if (GET_MODE (x) == DFmode)
2336 *total = s390_cost->madbr;
2337 else
2338 *total = s390_cost->maebr;
2339 *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
2340 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT)
2341 + rtx_cost (XEXP (x, 1), code);
2342 return true; /* Do not do an additional recursive descent. */
2344 *total = COSTS_N_INSNS (1);
2345 return false;
2347 case MULT:
2348 switch (GET_MODE (x))
2350 case SImode:
2352 rtx left = XEXP (x, 0);
2353 rtx right = XEXP (x, 1);
2354 if (GET_CODE (right) == CONST_INT
2355 && CONST_OK_FOR_K (INTVAL (right)))
2356 *total = s390_cost->mhi;
2357 else if (GET_CODE (left) == SIGN_EXTEND)
2358 *total = s390_cost->mh;
2359 else
2360 *total = s390_cost->ms; /* msr, ms, msy */
2361 break;
2363 case DImode:
2365 rtx left = XEXP (x, 0);
2366 rtx right = XEXP (x, 1);
2367 if (TARGET_64BIT)
2369 if (GET_CODE (right) == CONST_INT
2370 && CONST_OK_FOR_K (INTVAL (right)))
2371 *total = s390_cost->mghi;
2372 else if (GET_CODE (left) == SIGN_EXTEND)
2373 *total = s390_cost->msgf;
2374 else
2375 *total = s390_cost->msg; /* msgr, msg */
2377 else /* TARGET_31BIT */
2379 if (GET_CODE (left) == SIGN_EXTEND
2380 && GET_CODE (right) == SIGN_EXTEND)
2381 /* mulsidi case: mr, m */
2382 *total = s390_cost->m;
2383 else if (GET_CODE (left) == ZERO_EXTEND
2384 && GET_CODE (right) == ZERO_EXTEND
2385 && TARGET_CPU_ZARCH)
2386 /* umulsidi case: ml, mlr */
2387 *total = s390_cost->ml;
2388 else
2389 /* Complex calculation is required. */
2390 *total = COSTS_N_INSNS (40);
2392 break;
2394 case SFmode:
2395 case DFmode:
2396 *total = s390_cost->mult_df;
2397 break;
2398 case TFmode:
2399 *total = s390_cost->mxbr;
2400 break;
2401 default:
2402 return false;
2404 return false;
2406 case UDIV:
2407 case UMOD:
2408 if (GET_MODE (x) == TImode) /* 128 bit division */
2409 *total = s390_cost->dlgr;
2410 else if (GET_MODE (x) == DImode)
2412 rtx right = XEXP (x, 1);
2413 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2414 *total = s390_cost->dlr;
2415 else /* 64 by 64 bit division */
2416 *total = s390_cost->dlgr;
2418 else if (GET_MODE (x) == SImode) /* 32 bit division */
2419 *total = s390_cost->dlr;
2420 return false;
2422 case DIV:
2423 case MOD:
2424 if (GET_MODE (x) == DImode)
2426 rtx right = XEXP (x, 1);
2427 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2428 if (TARGET_64BIT)
2429 *total = s390_cost->dsgfr;
2430 else
2431 *total = s390_cost->dr;
2432 else /* 64 by 64 bit division */
2433 *total = s390_cost->dsgr;
2435 else if (GET_MODE (x) == SImode) /* 32 bit division */
2436 *total = s390_cost->dlr;
2437 else if (GET_MODE (x) == SFmode)
2439 *total = s390_cost->debr;
2441 else if (GET_MODE (x) == DFmode)
2443 *total = s390_cost->ddbr;
2445 else if (GET_MODE (x) == TFmode)
2447 *total = s390_cost->dxbr;
2449 return false;
2451 case SQRT:
2452 if (GET_MODE (x) == SFmode)
2453 *total = s390_cost->sqebr;
2454 else if (GET_MODE (x) == DFmode)
2455 *total = s390_cost->sqdbr;
2456 else /* TFmode */
2457 *total = s390_cost->sqxbr;
2458 return false;
2460 case SIGN_EXTEND:
2461 case ZERO_EXTEND:
2462 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2463 || outer_code == PLUS || outer_code == MINUS
2464 || outer_code == COMPARE)
2465 *total = 0;
2466 return false;
2468 case COMPARE:
2469 *total = COSTS_N_INSNS (1);
2470 if (GET_CODE (XEXP (x, 0)) == AND
2471 && GET_CODE (XEXP (x, 1)) == CONST_INT
2472 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2474 rtx op0 = XEXP (XEXP (x, 0), 0);
2475 rtx op1 = XEXP (XEXP (x, 0), 1);
2476 rtx op2 = XEXP (x, 1);
2478 if (memory_operand (op0, GET_MODE (op0))
2479 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2480 return true;
2481 if (register_operand (op0, GET_MODE (op0))
2482 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2483 return true;
2485 return false;
2487 default:
2488 return false;
2492 /* Return the cost of an address rtx ADDR. */
2494 static int
2495 s390_address_cost (rtx addr)
2497 struct s390_address ad;
2498 if (!s390_decompose_address (addr, &ad))
2499 return 1000;
2501 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
2504 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2505 otherwise return 0. */
2508 tls_symbolic_operand (rtx op)
2510 if (GET_CODE (op) != SYMBOL_REF)
2511 return 0;
2512 return SYMBOL_REF_TLS_MODEL (op);
2515 /* Split DImode access register reference REG (on 64-bit) into its constituent
2516 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2517 gen_highpart cannot be used as they assume all registers are word-sized,
2518 while our access registers have only half that size. */
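/* For instance (editor's illustration): for the DImode view of the
   %a0/%a1 access-register pair, *HI receives %a0 (the even-numbered
   register of the pair) and *LO receives %a1, each as an SImode REG.  */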
2520 void
2521 s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2523 gcc_assert (TARGET_64BIT);
2524 gcc_assert (ACCESS_REG_P (reg));
2525 gcc_assert (GET_MODE (reg) == DImode);
2526 gcc_assert (!(REGNO (reg) & 1));
2528 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2529 *hi = gen_rtx_REG (SImode, REGNO (reg));
2532 /* Return true if OP contains a symbol reference.  */
2534 bool
2535 symbolic_reference_mentioned_p (rtx op)
2537 const char *fmt;
2538 int i;
2540 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2541 return 1;
2543 fmt = GET_RTX_FORMAT (GET_CODE (op));
2544 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2546 if (fmt[i] == 'E')
2548 int j;
2550 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2551 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2552 return 1;
2555 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2556 return 1;
2559 return 0;
2562 /* Return true if OP contains a reference to a thread-local symbol. */
2564 bool
2565 tls_symbolic_reference_mentioned_p (rtx op)
2567 const char *fmt;
2568 int i;
2570 if (GET_CODE (op) == SYMBOL_REF)
2571 return tls_symbolic_operand (op);
2573 fmt = GET_RTX_FORMAT (GET_CODE (op));
2574 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2576 if (fmt[i] == 'E')
2578 int j;
2580 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2581 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2582 return true;
2585 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2586 return true;
2589 return false;
2593 /* Return true if OP is a legitimate general operand when
2594 generating PIC code. It is given that flag_pic is on
2595 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2598 legitimate_pic_operand_p (rtx op)
2600 /* Accept all non-symbolic constants. */
2601 if (!SYMBOLIC_CONST (op))
2602 return 1;
2604 /* Reject everything else; must be handled
2605 via emit_symbolic_move. */
2606 return 0;
2609 /* Returns true if the constant value OP is a legitimate general operand.
2610 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2613 legitimate_constant_p (rtx op)
2615 /* Accept all non-symbolic constants. */
2616 if (!SYMBOLIC_CONST (op))
2617 return 1;
2619 /* Accept immediate LARL operands. */
2620 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2621 return 1;
2623 /* Thread-local symbols are never legal constants. This is
2624 so that emit_call knows that computing such addresses
2625 might require a function call. */
2626 if (TLS_SYMBOLIC_CONST (op))
2627 return 0;
2629 /* In the PIC case, symbolic constants must *not* be
2630 forced into the literal pool. We accept them here,
2631 so that they will be handled by emit_symbolic_move. */
2632 if (flag_pic)
2633 return 1;
2635 /* All remaining non-PIC symbolic constants are
2636 forced into the literal pool. */
2637 return 0;
2640 /* Determine if it's legal to put X into the constant pool. This
2641 is not possible if X contains the address of a symbol that is
2642 not constant (TLS) or not known at final link time (PIC). */
2644 static bool
2645 s390_cannot_force_const_mem (rtx x)
2647 switch (GET_CODE (x))
2649 case CONST_INT:
2650 case CONST_DOUBLE:
2651 /* Accept all non-symbolic constants. */
2652 return false;
2654 case LABEL_REF:
2655 /* Labels are OK iff we are non-PIC. */
2656 return flag_pic != 0;
2658 case SYMBOL_REF:
2659 /* 'Naked' TLS symbol references are never OK,
2660 non-TLS symbols are OK iff we are non-PIC. */
2661 if (tls_symbolic_operand (x))
2662 return true;
2663 else
2664 return flag_pic != 0;
2666 case CONST:
2667 return s390_cannot_force_const_mem (XEXP (x, 0));
2668 case PLUS:
2669 case MINUS:
2670 return s390_cannot_force_const_mem (XEXP (x, 0))
2671 || s390_cannot_force_const_mem (XEXP (x, 1));
2673 case UNSPEC:
2674 switch (XINT (x, 1))
2676 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2677 case UNSPEC_LTREL_OFFSET:
2678 case UNSPEC_GOT:
2679 case UNSPEC_GOTOFF:
2680 case UNSPEC_PLTOFF:
2681 case UNSPEC_TLSGD:
2682 case UNSPEC_TLSLDM:
2683 case UNSPEC_NTPOFF:
2684 case UNSPEC_DTPOFF:
2685 case UNSPEC_GOTNTPOFF:
2686 case UNSPEC_INDNTPOFF:
2687 return false;
2689 /* If the literal pool shares the code section, execute template
2690 placeholders may be put into the pool as well. */
2691 case UNSPEC_INSN:
2692 return TARGET_CPU_ZARCH;
2694 default:
2695 return true;
2697 break;
2699 default:
2700 gcc_unreachable ();
2704 /* Returns true if the constant value OP is a legitimate general
2705 operand during and after reload. The difference from
2706 legitimate_constant_p is that this function will not accept
2707 a constant that would need to be forced into the literal pool
2708 before it can be used as an operand. */
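/* Some examples (editor's gloss): 4095 is accepted as an la/lay
   displacement, -30000 via lhi/lghi, on z/Architecture targets a value
   with a single nonzero 16-bit part via an lliXX instruction, and with
   TARGET_EXTIMM a value with a single nonzero 32-bit part as well;
   larl-addressable symbols, floating-point zero constants and
   splittable double-word values are also accepted further down.  */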
2710 bool
2711 legitimate_reload_constant_p (rtx op)
2713 /* Accept la(y) operands. */
2714 if (GET_CODE (op) == CONST_INT
2715 && DISP_IN_RANGE (INTVAL (op)))
2716 return true;
2718 /* Accept l(g)hi/l(g)fi operands. */
2719 if (GET_CODE (op) == CONST_INT
2720 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
2721 return true;
2723 /* Accept lliXX operands. */
2724 if (TARGET_ZARCH
2725 && GET_CODE (op) == CONST_INT
2726 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2727 && s390_single_part (op, word_mode, HImode, 0) >= 0)
2728 return true;
2730 if (TARGET_EXTIMM
2731 && GET_CODE (op) == CONST_INT
2732 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2733 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2734 return true;
2736 /* Accept larl operands. */
2737 if (TARGET_CPU_ZARCH
2738 && larl_operand (op, VOIDmode))
2739 return true;
2741 /* Accept lzXX operands. */
2742 if (GET_CODE (op) == CONST_DOUBLE
2743 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2744 return true;
2746 /* Accept double-word operands that can be split. */
2747 if (GET_CODE (op) == CONST_INT
2748 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2750 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2751 rtx hi = operand_subword (op, 0, 0, dword_mode);
2752 rtx lo = operand_subword (op, 1, 0, dword_mode);
2753 return legitimate_reload_constant_p (hi)
2754 && legitimate_reload_constant_p (lo);
2757 /* Everything else cannot be handled without reload. */
2758 return false;
2761 /* Given an rtx OP being reloaded into a reg required to be in class RCLASS,
2762 return the class of reg to actually use. */
2764 enum reg_class
2765 s390_preferred_reload_class (rtx op, enum reg_class rclass)
2767 switch (GET_CODE (op))
2769 /* Constants we cannot reload must be forced into the
2770 literal pool. */
2772 case CONST_DOUBLE:
2773 case CONST_INT:
2774 if (legitimate_reload_constant_p (op))
2775 return rclass;
2776 else
2777 return NO_REGS;
2779 /* If a symbolic constant or a PLUS is reloaded,
2780 it is most likely being used as an address, so
2781 prefer ADDR_REGS. If RCLASS is not a superset
2782 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2783 case PLUS:
2784 case LABEL_REF:
2785 case SYMBOL_REF:
2786 case CONST:
2787 if (reg_class_subset_p (ADDR_REGS, rclass))
2788 return ADDR_REGS;
2789 else
2790 return NO_REGS;
2792 default:
2793 break;
2796 return rclass;
2799 /* Return true if ADDR is of kind symbol_ref or symbol_ref + const_int
2800 and return these parts in SYMREF and ADDEND. You can pass NULL in
2801 SYMREF and/or ADDEND if you are not interested in these values. */
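/* For example (editor's illustration):
   (const (plus (symbol_ref "foo") (const_int 8))) yields
   *SYMREF = (symbol_ref "foo") and *ADDEND = 8, while a bare
   SYMBOL_REF yields an addend of 0.  */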
2803 static bool
2804 s390_symref_operand_p (rtx addr, rtx *symref, HOST_WIDE_INT *addend)
2806 HOST_WIDE_INT tmpaddend = 0;
2808 if (GET_CODE (addr) == CONST)
2809 addr = XEXP (addr, 0);
2811 if (GET_CODE (addr) == PLUS)
2813 if (GET_CODE (XEXP (addr, 0)) == SYMBOL_REF
2814 && CONST_INT_P (XEXP (addr, 1)))
2816 tmpaddend = INTVAL (XEXP (addr, 1));
2817 addr = XEXP (addr, 0);
2819 else
2820 return false;
2822 else
2823 if (GET_CODE (addr) != SYMBOL_REF)
2824 return false;
2826 if (symref)
2827 *symref = addr;
2828 if (addend)
2829 *addend = tmpaddend;
2831 return true;
2834 /* Return true if ADDR is SYMBOL_REF + addend with addend being a
2835 multiple of ALIGNMENT and the SYMBOL_REF being naturally
2836 aligned. */
2838 bool
2839 s390_check_symref_alignment (rtx addr, HOST_WIDE_INT alignment)
2841 HOST_WIDE_INT addend;
2842 rtx symref;
2844 if (!s390_symref_operand_p (addr, &symref, &addend))
2845 return false;
2847 return (!SYMBOL_REF_NOT_NATURALLY_ALIGNED_P (symref)
2848 && !(addend & (alignment - 1)));
2851 /* ADDR is moved into REG using larl.  If ADDR isn't a valid larl
2852 operand, SCRATCH is used to load the even part of the address;
2853 one is then added using la. */
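/* Sketch of the generated code (editor's illustration, assuming an odd
   addend): for  sym + 5  this emits roughly
       larl    %scratch, sym+4
       la      %reg, 1(%scratch)
   so that the larl operand itself is even again.  */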
2855 void
2856 s390_reload_larl_operand (rtx reg, rtx addr, rtx scratch)
2858 HOST_WIDE_INT addend;
2859 rtx symref;
2861 if (!s390_symref_operand_p (addr, &symref, &addend))
2862 gcc_unreachable ();
2864 if (!(addend & 1))
2865 /* Easy case. The addend is even so larl will do fine. */
2866 emit_move_insn (reg, addr);
2867 else
2869 /* We can leave the scratch register untouched if the target
2870 register is a valid base register. */
2871 if (REGNO (reg) < FIRST_PSEUDO_REGISTER
2872 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS)
2873 scratch = reg;
2875 gcc_assert (REGNO (scratch) < FIRST_PSEUDO_REGISTER);
2876 gcc_assert (REGNO_REG_CLASS (REGNO (scratch)) == ADDR_REGS);
2878 if (addend != 1)
2879 emit_move_insn (scratch,
2880 gen_rtx_CONST (Pmode,
2881 gen_rtx_PLUS (Pmode, symref,
2882 GEN_INT (addend - 1))));
2883 else
2884 emit_move_insn (scratch, symref);
2886 /* Increment the address using la in order to avoid clobbering cc. */
2887 emit_move_insn (reg, gen_rtx_PLUS (Pmode, scratch, const1_rtx));
2891 /* Generate what is necessary to move between REG and MEM using
2892 SCRATCH. The direction is given by TOMEM. */
2894 void
2895 s390_reload_symref_address (rtx reg, rtx mem, rtx scratch, bool tomem)
2897 /* Reload might have pulled a constant out of the literal pool.
2898 Force it back in. */
2899 if (CONST_INT_P (mem) || GET_CODE (mem) == CONST_DOUBLE
2900 || GET_CODE (mem) == CONST)
2901 mem = force_const_mem (GET_MODE (reg), mem);
2903 gcc_assert (MEM_P (mem));
2905 /* For a load from memory we can leave the scratch register
2906 untouched if the target register is a valid base register. */
2907 if (!tomem
2908 && REGNO (reg) < FIRST_PSEUDO_REGISTER
2909 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS
2910 && GET_MODE (reg) == GET_MODE (scratch))
2911 scratch = reg;
2913 /* Load address into scratch register. Since we can't have a
2914 secondary reload for a secondary reload we have to cover the case
2915 where larl would need a secondary reload here as well. */
2916 s390_reload_larl_operand (scratch, XEXP (mem, 0), scratch);
2918 /* Now we can use a standard load/store to do the move. */
2919 if (tomem)
2920 emit_move_insn (replace_equiv_address (mem, scratch), reg);
2921 else
2922 emit_move_insn (reg, replace_equiv_address (mem, scratch));
2925 /* Inform reload about cases where moving X with a mode MODE to a register in
2926 RCLASS requires an extra scratch or immediate register. Return the class
2927 needed for the immediate register. */
2929 static enum reg_class
2930 s390_secondary_reload (bool in_p, rtx x, enum reg_class rclass,
2931 enum machine_mode mode, secondary_reload_info *sri)
2933 /* Intermediate register needed. */
2934 if (reg_classes_intersect_p (CC_REGS, rclass))
2935 return GENERAL_REGS;
2937 if (TARGET_Z10)
2939 /* On z10 several optimizer steps may generate larl operands with
2940 an odd addend. */
2941 if (in_p
2942 && s390_symref_operand_p (x, NULL, NULL)
2943 && mode == Pmode
2944 && !s390_check_symref_alignment (x, 2))
2945 sri->icode = ((mode == DImode) ? CODE_FOR_reloaddi_larl_odd_addend_z10
2946 : CODE_FOR_reloadsi_larl_odd_addend_z10);
2948 /* On z10 we need a scratch register when moving QI, TI or floating
2949 point mode values from or to a memory location with a SYMBOL_REF,
2950 when moving DImode values on 31 bit, or if the symref addend of an
2951 HI, SI or DI move is not aligned to the width of the access. */
2952 if (MEM_P (x)
2953 && s390_symref_operand_p (XEXP (x, 0), NULL, NULL)
2954 && (mode == QImode || mode == TImode || FLOAT_MODE_P (mode)
2955 || (!TARGET_64BIT && mode == DImode)
2956 || ((mode == HImode || mode == SImode || mode == DImode)
2957 && (!s390_check_symref_alignment (XEXP (x, 0),
2958 GET_MODE_SIZE (mode))))))
2960 #define __SECONDARY_RELOAD_CASE(M,m) \
2961 case M##mode: \
2962 if (TARGET_64BIT) \
2963 sri->icode = in_p ? CODE_FOR_reload##m##di_toreg_z10 : \
2964 CODE_FOR_reload##m##di_tomem_z10; \
2965 else \
2966 sri->icode = in_p ? CODE_FOR_reload##m##si_toreg_z10 : \
2967 CODE_FOR_reload##m##si_tomem_z10; \
2968 break;
2970 switch (GET_MODE (x))
2972 __SECONDARY_RELOAD_CASE (QI, qi);
2973 __SECONDARY_RELOAD_CASE (HI, hi);
2974 __SECONDARY_RELOAD_CASE (SI, si);
2975 __SECONDARY_RELOAD_CASE (DI, di);
2976 __SECONDARY_RELOAD_CASE (TI, ti);
2977 __SECONDARY_RELOAD_CASE (SF, sf);
2978 __SECONDARY_RELOAD_CASE (DF, df);
2979 __SECONDARY_RELOAD_CASE (TF, tf);
2980 __SECONDARY_RELOAD_CASE (SD, sd);
2981 __SECONDARY_RELOAD_CASE (DD, dd);
2982 __SECONDARY_RELOAD_CASE (TD, td);
2984 default:
2985 gcc_unreachable ();
2987 #undef __SECONDARY_RELOAD_CASE
2991 /* We need a scratch register when loading a PLUS expression which
2992 is not a legitimate operand of the LOAD ADDRESS instruction. */
2993 if (in_p && s390_plus_operand (x, mode))
2994 sri->icode = (TARGET_64BIT ?
2995 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
2997 /* When performing a multiword move from or to memory, we have to make
2998 sure the second chunk in memory is addressable without causing a
2999 displacement overflow.  If it is not, we calculate the address in
3000 a scratch register. */
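/* Example (editor's illustration): a 16-byte TFmode value at base + 4088
   would have its last byte at offset 4103, beyond the 4095-byte limit of
   a short displacement, so the address must be computed in a scratch
   register first.  */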
3001 if (MEM_P (x)
3002 && GET_CODE (XEXP (x, 0)) == PLUS
3003 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3004 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
3005 + GET_MODE_SIZE (mode) - 1))
3007 /* For GENERAL_REGS a displacement overflow is no problem if it occurs
3008 in an s_operand address, since we may fall back to lm/stm. So we only
3009 have to care about overflows in the b+i+d case. */
3010 if ((reg_classes_intersect_p (GENERAL_REGS, rclass)
3011 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
3012 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
3013 /* For FP_REGS no lm/stm is available so this check is triggered
3014 for displacement overflows in b+i+d and b+d like addresses. */
3015 || (reg_classes_intersect_p (FP_REGS, rclass)
3016 && s390_class_max_nregs (FP_REGS, mode) > 1))
3018 if (in_p)
3019 sri->icode = (TARGET_64BIT ?
3020 CODE_FOR_reloaddi_nonoffmem_in :
3021 CODE_FOR_reloadsi_nonoffmem_in);
3022 else
3023 sri->icode = (TARGET_64BIT ?
3024 CODE_FOR_reloaddi_nonoffmem_out :
3025 CODE_FOR_reloadsi_nonoffmem_out);
3029 /* A scratch address register is needed when a symbolic constant is
3030 copied to r0 when compiling with -fPIC. In other cases the target
3031 register might be used as temporary (see legitimize_pic_address). */
3032 if (in_p && SYMBOLIC_CONST (x) && flag_pic == 2 && rclass != ADDR_REGS)
3033 sri->icode = (TARGET_64BIT ?
3034 CODE_FOR_reloaddi_PIC_addr :
3035 CODE_FOR_reloadsi_PIC_addr);
3037 /* Either scratch or no register needed. */
3038 return NO_REGS;
3041 /* Generate code to load SRC, which is a PLUS that is not a
3042 legitimate operand for the LA instruction, into TARGET.
3043 SCRATCH may be used as a scratch register. */
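/* Typical case (editor's illustration): reload presents
   (plus (reg A) (reg B)) where one of the registers ended up in a
   floating-point register; its value is first copied into SCRATCH so
   that the final LA pattern only sees address registers.  */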
3045 void
3046 s390_expand_plus_operand (rtx target, rtx src,
3047 rtx scratch)
3049 rtx sum1, sum2;
3050 struct s390_address ad;
3052 /* src must be a PLUS; get its two operands. */
3053 gcc_assert (GET_CODE (src) == PLUS);
3054 gcc_assert (GET_MODE (src) == Pmode);
3056 /* Check if either of the two operands is already scheduled
3057 for replacement by reload. This can happen e.g. when
3058 float registers occur in an address. */
3059 sum1 = find_replacement (&XEXP (src, 0));
3060 sum2 = find_replacement (&XEXP (src, 1));
3061 src = gen_rtx_PLUS (Pmode, sum1, sum2);
3063 /* If the address is already strictly valid, there's nothing to do. */
3064 if (!s390_decompose_address (src, &ad)
3065 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3066 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
3068 /* Otherwise, one of the operands cannot be an address register;
3069 we reload its value into the scratch register. */
3070 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
3072 emit_move_insn (scratch, sum1);
3073 sum1 = scratch;
3075 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
3077 emit_move_insn (scratch, sum2);
3078 sum2 = scratch;
3081 /* According to the way these invalid addresses are generated
3082 in reload.c, it should never happen (at least on s390) that
3083 *neither* of the PLUS components, after find_replacements
3084 was applied, is an address register. */
3085 if (sum1 == scratch && sum2 == scratch)
3087 debug_rtx (src);
3088 gcc_unreachable ();
3091 src = gen_rtx_PLUS (Pmode, sum1, sum2);
3094 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
3095 is only ever performed on addresses, so we can mark the
3096 sum as legitimate for LA in any case. */
3097 s390_load_address (target, src);
3101 /* Return true if ADDR is a valid memory address.
3102 STRICT specifies whether strict register checking applies. */
3104 bool
3105 legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
3107 struct s390_address ad;
3109 if (TARGET_Z10
3110 && larl_operand (addr, VOIDmode)
3111 && (mode == VOIDmode
3112 || s390_check_symref_alignment (addr, GET_MODE_SIZE (mode))))
3113 return true;
3115 if (!s390_decompose_address (addr, &ad))
3116 return false;
3118 if (strict)
3120 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3121 return false;
3123 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
3124 return false;
3126 else
3128 if (ad.base
3129 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
3130 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
3131 return false;
3133 if (ad.indx
3134 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
3135 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
3136 return false;
3138 return true;
3141 /* Return true if OP is a valid operand for the LA instruction.
3142 In 31-bit, we need to prove that the result is used as an
3143 address, as LA performs only a 31-bit addition. */
3145 bool
3146 legitimate_la_operand_p (rtx op)
3148 struct s390_address addr;
3149 if (!s390_decompose_address (op, &addr))
3150 return false;
3152 return (TARGET_64BIT || addr.pointer);
3155 /* Return true if it is valid *and* preferable to use LA to
3156 compute the sum of OP1 and OP2. */
3158 bool
3159 preferred_la_operand_p (rtx op1, rtx op2)
3161 struct s390_address addr;
3163 if (op2 != const0_rtx)
3164 op1 = gen_rtx_PLUS (Pmode, op1, op2);
3166 if (!s390_decompose_address (op1, &addr))
3167 return false;
3168 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
3169 return false;
3170 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
3171 return false;
3173 if (!TARGET_64BIT && !addr.pointer)
3174 return false;
3176 if (addr.pointer)
3177 return true;
3179 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
3180 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
3181 return true;
3183 return false;
3186 /* Emit a forced load-address operation to load SRC into DST.
3187 This will use the LOAD ADDRESS instruction even in situations
3188 where legitimate_la_operand_p (SRC) returns false. */
3190 void
3191 s390_load_address (rtx dst, rtx src)
3193 if (TARGET_64BIT)
3194 emit_move_insn (dst, src);
3195 else
3196 emit_insn (gen_force_la_31 (dst, src));
3199 /* Return a legitimate reference for ORIG (an address) using the
3200 register REG. If REG is 0, a new pseudo is generated.
3202 There are two types of references that must be handled:
3204 1. Global data references must load the address from the GOT, via
3205 the PIC reg. An insn is emitted to do this load, and the reg is
3206 returned.
3208 2. Static data references, constant pool addresses, and code labels
3209 compute the address as an offset from the GOT, whose base is in
3210 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
3211 differentiate them from global data objects. The returned
3212 address is the PIC reg + an unspec constant.
3214 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
3215 reg also appears in the address. */
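/* Rough sketch (editor's gloss): with flag_pic == 1 a global symbol is
   loaded through its GOT slot,
       reg = mem (pic_reg + const (unspec [sym] UNSPEC_GOT)),
   whereas on larl-capable CPUs with flag_pic == 2 the GOT entry address
   is obtained PC-relative via @GOTENT; local symbols are addressed as
   pic_reg + sym@GOTOFF, or directly via larl where possible.  */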
3218 legitimize_pic_address (rtx orig, rtx reg)
3220 rtx addr = orig;
3221 rtx new_rtx = orig;
3222 rtx base;
3224 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
3226 if (GET_CODE (addr) == LABEL_REF
3227 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
3229 /* This is a local symbol. */
3230 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
3232 /* Access local symbols PC-relative via LARL.
3233 This is the same as in the non-PIC case, so it is
3234 handled automatically ... */
3236 else
3238 /* Access local symbols relative to the GOT. */
3240 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3242 if (reload_in_progress || reload_completed)
3243 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3245 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
3246 addr = gen_rtx_CONST (Pmode, addr);
3247 addr = force_const_mem (Pmode, addr);
3248 emit_move_insn (temp, addr);
3250 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3251 if (reg != 0)
3253 s390_load_address (reg, new_rtx);
3254 new_rtx = reg;
3258 else if (GET_CODE (addr) == SYMBOL_REF)
3260 if (reg == 0)
3261 reg = gen_reg_rtx (Pmode);
3263 if (flag_pic == 1)
3265 /* Assume GOT offset < 4k. This is handled the same way
3266 in both 31- and 64-bit code (@GOT). */
3268 if (reload_in_progress || reload_completed)
3269 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3271 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
3272 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3273 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
3274 new_rtx = gen_const_mem (Pmode, new_rtx);
3275 emit_move_insn (reg, new_rtx);
3276 new_rtx = reg;
3278 else if (TARGET_CPU_ZARCH)
3280 /* If the GOT offset might be >= 4k, we determine the position
3281 of the GOT entry via a PC-relative LARL (@GOTENT). */
3283 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3285 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
3286 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
3288 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
3289 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3290 emit_move_insn (temp, new_rtx);
3292 new_rtx = gen_const_mem (Pmode, temp);
3293 emit_move_insn (reg, new_rtx);
3294 new_rtx = reg;
3296 else
3298 /* If the GOT offset might be >= 4k, we have to load it
3299 from the literal pool (@GOT). */
3301 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3303 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
3304 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
3306 if (reload_in_progress || reload_completed)
3307 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3309 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
3310 addr = gen_rtx_CONST (Pmode, addr);
3311 addr = force_const_mem (Pmode, addr);
3312 emit_move_insn (temp, addr);
3314 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3315 new_rtx = gen_const_mem (Pmode, new_rtx);
3316 emit_move_insn (reg, new_rtx);
3317 new_rtx = reg;
3320 else
3322 if (GET_CODE (addr) == CONST)
3324 addr = XEXP (addr, 0);
3325 if (GET_CODE (addr) == UNSPEC)
3327 gcc_assert (XVECLEN (addr, 0) == 1);
3328 switch (XINT (addr, 1))
3330 /* If someone moved a GOT-relative UNSPEC
3331 out of the literal pool, force them back in. */
3332 case UNSPEC_GOTOFF:
3333 case UNSPEC_PLTOFF:
3334 new_rtx = force_const_mem (Pmode, orig);
3335 break;
3337 /* @GOT is OK as is if small. */
3338 case UNSPEC_GOT:
3339 if (flag_pic == 2)
3340 new_rtx = force_const_mem (Pmode, orig);
3341 break;
3343 /* @GOTENT is OK as is. */
3344 case UNSPEC_GOTENT:
3345 break;
3347 /* @PLT is OK as is on 64-bit, must be converted to
3348 GOT-relative @PLTOFF on 31-bit. */
3349 case UNSPEC_PLT:
3350 if (!TARGET_CPU_ZARCH)
3352 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3354 if (reload_in_progress || reload_completed)
3355 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3357 addr = XVECEXP (addr, 0, 0);
3358 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
3359 UNSPEC_PLTOFF);
3360 addr = gen_rtx_CONST (Pmode, addr);
3361 addr = force_const_mem (Pmode, addr);
3362 emit_move_insn (temp, addr);
3364 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3365 if (reg != 0)
3367 s390_load_address (reg, new_rtx);
3368 new_rtx = reg;
3371 break;
3373 /* Everything else cannot happen. */
3374 default:
3375 gcc_unreachable ();
3378 else
3379 gcc_assert (GET_CODE (addr) == PLUS);
3381 if (GET_CODE (addr) == PLUS)
3383 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
3385 gcc_assert (!TLS_SYMBOLIC_CONST (op0));
3386 gcc_assert (!TLS_SYMBOLIC_CONST (op1));
3388 /* Check first to see if this is a constant offset
3389 from a local symbol reference. */
3390 if ((GET_CODE (op0) == LABEL_REF
3391 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
3392 && GET_CODE (op1) == CONST_INT)
3394 if (TARGET_CPU_ZARCH
3395 && larl_operand (op0, VOIDmode)
3396 && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
3397 && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
3399 if (INTVAL (op1) & 1)
3401 /* LARL can't handle odd offsets, so emit a
3402 pair of LARL and LA. */
3403 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3405 if (!DISP_IN_RANGE (INTVAL (op1)))
3407 HOST_WIDE_INT even = INTVAL (op1) - 1;
3408 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
3409 op0 = gen_rtx_CONST (Pmode, op0);
3410 op1 = const1_rtx;
3413 emit_move_insn (temp, op0);
3414 new_rtx = gen_rtx_PLUS (Pmode, temp, op1);
3416 if (reg != 0)
3418 s390_load_address (reg, new_rtx);
3419 new_rtx = reg;
3422 else
3424 /* If the offset is even, we can just use LARL.
3425 This will happen automatically. */
3428 else
3430 /* Access local symbols relative to the GOT. */
3432 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3434 if (reload_in_progress || reload_completed)
3435 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3437 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
3438 UNSPEC_GOTOFF);
3439 addr = gen_rtx_PLUS (Pmode, addr, op1);
3440 addr = gen_rtx_CONST (Pmode, addr);
3441 addr = force_const_mem (Pmode, addr);
3442 emit_move_insn (temp, addr);
3444 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3445 if (reg != 0)
3447 s390_load_address (reg, new_rtx);
3448 new_rtx = reg;
3453 /* Now, check whether it is a GOT relative symbol plus offset
3454 that was pulled out of the literal pool. Force it back in. */
3456 else if (GET_CODE (op0) == UNSPEC
3457 && GET_CODE (op1) == CONST_INT
3458 && XINT (op0, 1) == UNSPEC_GOTOFF)
3460 gcc_assert (XVECLEN (op0, 0) == 1);
3462 new_rtx = force_const_mem (Pmode, orig);
3465 /* Otherwise, compute the sum. */
3466 else
3468 base = legitimize_pic_address (XEXP (addr, 0), reg);
3469 new_rtx = legitimize_pic_address (XEXP (addr, 1),
3470 base == reg ? NULL_RTX : reg);
3471 if (GET_CODE (new_rtx) == CONST_INT)
3472 new_rtx = plus_constant (base, INTVAL (new_rtx));
3473 else
3475 if (GET_CODE (new_rtx) == PLUS && CONSTANT_P (XEXP (new_rtx, 1)))
3477 base = gen_rtx_PLUS (Pmode, base, XEXP (new_rtx, 0));
3478 new_rtx = XEXP (new_rtx, 1);
3480 new_rtx = gen_rtx_PLUS (Pmode, base, new_rtx);
3483 if (GET_CODE (new_rtx) == CONST)
3484 new_rtx = XEXP (new_rtx, 0);
3485 new_rtx = force_operand (new_rtx, 0);
3489 return new_rtx;
3492 /* Load the thread pointer into a register. */
3495 s390_get_thread_pointer (void)
3497 rtx tp = gen_reg_rtx (Pmode);
3499 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
3500 mark_reg_pointer (tp, BITS_PER_WORD);
3502 return tp;
3505 /* Emit a TLS call insn.  The call target is the SYMBOL_REF stored
3506 in s390_tls_symbol, which always refers to __tls_get_offset.
3507 The returned offset is written to RESULT_REG and a USE rtx is
3508 generated for TLS_CALL. */
3510 static GTY(()) rtx s390_tls_symbol;
3512 static void
3513 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3515 rtx insn;
3517 gcc_assert (flag_pic);
3519 if (!s390_tls_symbol)
3520 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3522 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3523 gen_rtx_REG (Pmode, RETURN_REGNUM));
3525 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3526 RTL_CONST_CALL_P (insn) = 1;
3529 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3530 this (thread-local) address. REG may be used as a temporary. */
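/* Rough sketch for the global-dynamic model (editor's gloss): r2 is
   loaded with the sym@TLSGD constant from the literal pool,
   __tls_get_offset is called, and the returned offset is added to the
   thread pointer to form the final address; the initial-exec and
   local-exec models avoid the call by reading the offset from the GOT
   or the literal pool instead.  */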
3532 static rtx
3533 legitimize_tls_address (rtx addr, rtx reg)
3535 rtx new_rtx, tls_call, temp, base, r2, insn;
3537 if (GET_CODE (addr) == SYMBOL_REF)
3538 switch (tls_symbolic_operand (addr))
3540 case TLS_MODEL_GLOBAL_DYNAMIC:
3541 start_sequence ();
3542 r2 = gen_rtx_REG (Pmode, 2);
3543 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3544 new_rtx = gen_rtx_CONST (Pmode, tls_call);
3545 new_rtx = force_const_mem (Pmode, new_rtx);
3546 emit_move_insn (r2, new_rtx);
3547 s390_emit_tls_call_insn (r2, tls_call);
3548 insn = get_insns ();
3549 end_sequence ();
3551 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3552 temp = gen_reg_rtx (Pmode);
3553 emit_libcall_block (insn, temp, r2, new_rtx);
3555 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3556 if (reg != 0)
3558 s390_load_address (reg, new_rtx);
3559 new_rtx = reg;
3561 break;
3563 case TLS_MODEL_LOCAL_DYNAMIC:
3564 start_sequence ();
3565 r2 = gen_rtx_REG (Pmode, 2);
3566 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3567 new_rtx = gen_rtx_CONST (Pmode, tls_call);
3568 new_rtx = force_const_mem (Pmode, new_rtx);
3569 emit_move_insn (r2, new_rtx);
3570 s390_emit_tls_call_insn (r2, tls_call);
3571 insn = get_insns ();
3572 end_sequence ();
3574 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3575 temp = gen_reg_rtx (Pmode);
3576 emit_libcall_block (insn, temp, r2, new_rtx);
3578 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3579 base = gen_reg_rtx (Pmode);
3580 s390_load_address (base, new_rtx);
3582 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3583 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3584 new_rtx = force_const_mem (Pmode, new_rtx);
3585 temp = gen_reg_rtx (Pmode);
3586 emit_move_insn (temp, new_rtx);
3588 new_rtx = gen_rtx_PLUS (Pmode, base, temp);
3589 if (reg != 0)
3591 s390_load_address (reg, new_rtx);
3592 new_rtx = reg;
3594 break;
3596 case TLS_MODEL_INITIAL_EXEC:
3597 if (flag_pic == 1)
3599 /* Assume GOT offset < 4k. This is handled the same way
3600 in both 31- and 64-bit code. */
3602 if (reload_in_progress || reload_completed)
3603 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3605 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3606 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3607 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
3608 new_rtx = gen_const_mem (Pmode, new_rtx);
3609 temp = gen_reg_rtx (Pmode);
3610 emit_move_insn (temp, new_rtx);
3612 else if (TARGET_CPU_ZARCH)
3614 /* If the GOT offset might be >= 4k, we determine the position
3615 of the GOT entry via a PC-relative LARL. */
3617 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3618 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3619 temp = gen_reg_rtx (Pmode);
3620 emit_move_insn (temp, new_rtx);
3622 new_rtx = gen_const_mem (Pmode, temp);
3623 temp = gen_reg_rtx (Pmode);
3624 emit_move_insn (temp, new_rtx);
3626 else if (flag_pic)
3628 /* If the GOT offset might be >= 4k, we have to load it
3629 from the literal pool. */
3631 if (reload_in_progress || reload_completed)
3632 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3634 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3635 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3636 new_rtx = force_const_mem (Pmode, new_rtx);
3637 temp = gen_reg_rtx (Pmode);
3638 emit_move_insn (temp, new_rtx);
3640 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3641 new_rtx = gen_const_mem (Pmode, new_rtx);
3643 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
3644 temp = gen_reg_rtx (Pmode);
3645 emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
3647 else
3649 /* In position-dependent code, load the absolute address of
3650 the GOT entry from the literal pool. */
3652 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3653 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3654 new_rtx = force_const_mem (Pmode, new_rtx);
3655 temp = gen_reg_rtx (Pmode);
3656 emit_move_insn (temp, new_rtx);
3658 new_rtx = temp;
3659 new_rtx = gen_const_mem (Pmode, new_rtx);
3660 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
3661 temp = gen_reg_rtx (Pmode);
3662 emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
3665 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3666 if (reg != 0)
3668 s390_load_address (reg, new_rtx);
3669 new_rtx = reg;
3671 break;
3673 case TLS_MODEL_LOCAL_EXEC:
3674 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3675 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3676 new_rtx = force_const_mem (Pmode, new_rtx);
3677 temp = gen_reg_rtx (Pmode);
3678 emit_move_insn (temp, new_rtx);
3680 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3681 if (reg != 0)
3683 s390_load_address (reg, new_rtx);
3684 new_rtx = reg;
3686 break;
3688 default:
3689 gcc_unreachable ();
3692 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3694 switch (XINT (XEXP (addr, 0), 1))
3696 case UNSPEC_INDNTPOFF:
3697 gcc_assert (TARGET_CPU_ZARCH);
3698 new_rtx = addr;
3699 break;
3701 default:
3702 gcc_unreachable ();
3706 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3707 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3709 new_rtx = XEXP (XEXP (addr, 0), 0);
3710 if (GET_CODE (new_rtx) != SYMBOL_REF)
3711 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3713 new_rtx = legitimize_tls_address (new_rtx, reg);
3714 new_rtx = plus_constant (new_rtx, INTVAL (XEXP (XEXP (addr, 0), 1)));
3715 new_rtx = force_operand (new_rtx, 0);
3718 else
3719 gcc_unreachable (); /* for now ... */
3721 return new_rtx;
3724 /* Emit insns making the address in operands[1] valid for a standard
3725 move to operands[0]. operands[1] is replaced by an address which
3726 should be used instead of the former RTX to emit the move
3727 pattern. */
3729 void
3730 emit_symbolic_move (rtx *operands)
3732 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
3734 if (GET_CODE (operands[0]) == MEM)
3735 operands[1] = force_reg (Pmode, operands[1]);
3736 else if (TLS_SYMBOLIC_CONST (operands[1]))
3737 operands[1] = legitimize_tls_address (operands[1], temp);
3738 else if (flag_pic)
3739 operands[1] = legitimize_pic_address (operands[1], temp);
3742 /* Try machine-dependent ways of modifying an illegitimate address X
3743 to be legitimate. If we find one, return the new, valid address.
3745 OLDX is the address as it was before break_out_memory_refs was called.
3746 In some cases it is useful to look at this to decide what needs to be done.
3748 MODE is the mode of the operand pointed to by X.
3750 When -fpic is used, special handling is needed for symbolic references.
3751 See comments by legitimize_pic_address for details. */
3754 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3755 enum machine_mode mode ATTRIBUTE_UNUSED)
3757 rtx constant_term = const0_rtx;
3759 if (TLS_SYMBOLIC_CONST (x))
3761 x = legitimize_tls_address (x, 0);
3763 if (legitimate_address_p (mode, x, FALSE))
3764 return x;
3766 else if (GET_CODE (x) == PLUS
3767 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
3768 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
3770 return x;
3772 else if (flag_pic)
3774 if (SYMBOLIC_CONST (x)
3775 || (GET_CODE (x) == PLUS
3776 && (SYMBOLIC_CONST (XEXP (x, 0))
3777 || SYMBOLIC_CONST (XEXP (x, 1)))))
3778 x = legitimize_pic_address (x, 0);
3780 if (legitimate_address_p (mode, x, FALSE))
3781 return x;
3784 x = eliminate_constant_term (x, &constant_term);
3786 /* Optimize loading of large displacements by splitting them
3787 into a multiple of 4K and the remainder; this allows the
3788 former to be CSE'd if possible.
3790 Don't do this if the displacement is added to a register
3791 pointing into the stack frame, as the offsets will
3792 change later anyway. */
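/* Example (editor's illustration): a displacement of 0x12345 is split
   into upper = 0x12000 (loaded into a register and available for CSE)
   and lower = 0x345, which fits the 12-bit displacement field.  */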
3794 if (GET_CODE (constant_term) == CONST_INT
3795 && !TARGET_LONG_DISPLACEMENT
3796 && !DISP_IN_RANGE (INTVAL (constant_term))
3797 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3799 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3800 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3802 rtx temp = gen_reg_rtx (Pmode);
3803 rtx val = force_operand (GEN_INT (upper), temp);
3804 if (val != temp)
3805 emit_move_insn (temp, val);
3807 x = gen_rtx_PLUS (Pmode, x, temp);
3808 constant_term = GEN_INT (lower);
3811 if (GET_CODE (x) == PLUS)
3813 if (GET_CODE (XEXP (x, 0)) == REG)
3815 rtx temp = gen_reg_rtx (Pmode);
3816 rtx val = force_operand (XEXP (x, 1), temp);
3817 if (val != temp)
3818 emit_move_insn (temp, val);
3820 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3823 else if (GET_CODE (XEXP (x, 1)) == REG)
3825 rtx temp = gen_reg_rtx (Pmode);
3826 rtx val = force_operand (XEXP (x, 0), temp);
3827 if (val != temp)
3828 emit_move_insn (temp, val);
3830 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3834 if (constant_term != const0_rtx)
3835 x = gen_rtx_PLUS (Pmode, x, constant_term);
3837 return x;
3840 /* Try a machine-dependent way of reloading an illegitimate address AD
3841 operand. If we find one, push the reload and return the new address.
3843 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3844 and TYPE is the reload type of the current reload. */
3846 rtx
3847 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3848 int opnum, int type)
3850 if (!optimize || TARGET_LONG_DISPLACEMENT)
3851 return NULL_RTX;
3853 if (GET_CODE (ad) == PLUS)
3855 rtx tem = simplify_binary_operation (PLUS, Pmode,
3856 XEXP (ad, 0), XEXP (ad, 1));
3857 if (tem)
3858 ad = tem;
3861 if (GET_CODE (ad) == PLUS
3862 && GET_CODE (XEXP (ad, 0)) == REG
3863 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3864 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3866 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3867 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3868 rtx cst, tem, new_rtx;
3870 cst = GEN_INT (upper);
3871 if (!legitimate_reload_constant_p (cst))
3872 cst = force_const_mem (Pmode, cst);
3874 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3875 new_rtx = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3877 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3878 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3879 opnum, (enum reload_type) type);
3880 return new_rtx;
3883 return NULL_RTX;
3886 /* Emit code to move LEN bytes from SRC to DST. */
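/* Roughly (editor's gloss): a constant LEN of at most 256 bytes is
   handled by a single MVC, targets with MVCLE use one MVCLE, and
   otherwise a loop copying 256-byte blocks is emitted, with an executed
   MVC covering the remainder.  */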
3888 void
3889 s390_expand_movmem (rtx dst, rtx src, rtx len)
3891 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3893 if (INTVAL (len) > 0)
3894 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3897 else if (TARGET_MVCLE)
3899 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3902 else
3904 rtx dst_addr, src_addr, count, blocks, temp;
3905 rtx loop_start_label = gen_label_rtx ();
3906 rtx loop_end_label = gen_label_rtx ();
3907 rtx end_label = gen_label_rtx ();
3908 enum machine_mode mode;
3910 mode = GET_MODE (len);
3911 if (mode == VOIDmode)
3912 mode = Pmode;
3914 dst_addr = gen_reg_rtx (Pmode);
3915 src_addr = gen_reg_rtx (Pmode);
3916 count = gen_reg_rtx (mode);
3917 blocks = gen_reg_rtx (mode);
3919 convert_move (count, len, 1);
3920 emit_cmp_and_jump_insns (count, const0_rtx,
3921 EQ, NULL_RTX, mode, 1, end_label);
3923 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3924 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3925 dst = change_address (dst, VOIDmode, dst_addr);
3926 src = change_address (src, VOIDmode, src_addr);
3928 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3929 if (temp != count)
3930 emit_move_insn (count, temp);
3932 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3933 if (temp != blocks)
3934 emit_move_insn (blocks, temp);
3936 emit_cmp_and_jump_insns (blocks, const0_rtx,
3937 EQ, NULL_RTX, mode, 1, loop_end_label);
3939 emit_label (loop_start_label);
3941 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3942 s390_load_address (dst_addr,
3943 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3944 s390_load_address (src_addr,
3945 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3947 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3948 if (temp != blocks)
3949 emit_move_insn (blocks, temp);
3951 emit_cmp_and_jump_insns (blocks, const0_rtx,
3952 EQ, NULL_RTX, mode, 1, loop_end_label);
3954 emit_jump (loop_start_label);
3955 emit_label (loop_end_label);
3957 emit_insn (gen_movmem_short (dst, src,
3958 convert_to_mode (Pmode, count, 1)));
3959 emit_label (end_label);
3963 /* Emit code to set LEN bytes at DST to VAL.
3964 Make use of clrmem if VAL is zero. */
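/* Illustration of the non-zero case used below (editor's gloss): the
   first byte is stored explicitly and an overlapping move of the form
       mvc 1(len-1,%dst),0(%dst)
   then propagates that byte through the rest of the block; for VAL == 0
   an XC of the destination with itself is used instead.  */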
3966 void
3967 s390_expand_setmem (rtx dst, rtx len, rtx val)
3969 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
3970 return;
3972 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
3974 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
3976 if (val == const0_rtx && INTVAL (len) <= 256)
3977 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3978 else
3980 /* Initialize memory by storing the first byte. */
3981 emit_move_insn (adjust_address (dst, QImode, 0), val);
3983 if (INTVAL (len) > 1)
3985 /* Initiate a 1-byte overlapping move.
3986 The first byte of DST is propagated through DSTP1.
3987 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
3988 DST is set to size 1 so the rest of the memory location
3989 does not count as a source operand. */
3990 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
3991 set_mem_size (dst, const1_rtx);
3993 emit_insn (gen_movmem_short (dstp1, dst,
3994 GEN_INT (INTVAL (len) - 2)));
3999 else if (TARGET_MVCLE)
4001 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
4002 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
4005 else
4007 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
4008 rtx loop_start_label = gen_label_rtx ();
4009 rtx loop_end_label = gen_label_rtx ();
4010 rtx end_label = gen_label_rtx ();
4011 enum machine_mode mode;
4013 mode = GET_MODE (len);
4014 if (mode == VOIDmode)
4015 mode = Pmode;
4017 dst_addr = gen_reg_rtx (Pmode);
4018 src_addr = gen_reg_rtx (Pmode);
4019 count = gen_reg_rtx (mode);
4020 blocks = gen_reg_rtx (mode);
4022 convert_move (count, len, 1);
4023 emit_cmp_and_jump_insns (count, const0_rtx,
4024 EQ, NULL_RTX, mode, 1, end_label);
4026 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
4027 dst = change_address (dst, VOIDmode, dst_addr);
4029 if (val == const0_rtx)
4030 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
4031 else
4033 dstp1 = adjust_address (dst, VOIDmode, 1);
4034 set_mem_size (dst, const1_rtx);
4036 /* Initialize memory by storing the first byte. */
4037 emit_move_insn (adjust_address (dst, QImode, 0), val);
4039 /* If count is 1 we are done. */
4040 emit_cmp_and_jump_insns (count, const1_rtx,
4041 EQ, NULL_RTX, mode, 1, end_label);
4043 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0);
4045 if (temp != count)
4046 emit_move_insn (count, temp);
4048 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
4049 if (temp != blocks)
4050 emit_move_insn (blocks, temp);
4052 emit_cmp_and_jump_insns (blocks, const0_rtx,
4053 EQ, NULL_RTX, mode, 1, loop_end_label);
4055 emit_label (loop_start_label);
4057 if (val == const0_rtx)
4058 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
4059 else
4060 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
4061 s390_load_address (dst_addr,
4062 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
4064 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
4065 if (temp != blocks)
4066 emit_move_insn (blocks, temp);
4068 emit_cmp_and_jump_insns (blocks, const0_rtx,
4069 EQ, NULL_RTX, mode, 1, loop_end_label);
4071 emit_jump (loop_start_label);
4072 emit_label (loop_end_label);
4074 if (val == const0_rtx)
4075 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
4076 else
4077 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
4078 emit_label (end_label);
4082 /* Emit code to compare LEN bytes at OP0 with those at OP1,
4083 and return the result in TARGET. */
4085 void
4086 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
4088 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
4089 rtx tmp;
4091 /* As the result of CMPINT is inverted compared to what we need,
4092 we have to swap the operands. */
4093 tmp = op0; op0 = op1; op1 = tmp;
4095 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
4097 if (INTVAL (len) > 0)
4099 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
4100 emit_insn (gen_cmpint (target, ccreg));
4102 else
4103 emit_move_insn (target, const0_rtx);
4105 else if (TARGET_MVCLE)
4107 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
4108 emit_insn (gen_cmpint (target, ccreg));
4110 else
4112 rtx addr0, addr1, count, blocks, temp;
4113 rtx loop_start_label = gen_label_rtx ();
4114 rtx loop_end_label = gen_label_rtx ();
4115 rtx end_label = gen_label_rtx ();
4116 enum machine_mode mode;
4118 mode = GET_MODE (len);
4119 if (mode == VOIDmode)
4120 mode = Pmode;
4122 addr0 = gen_reg_rtx (Pmode);
4123 addr1 = gen_reg_rtx (Pmode);
4124 count = gen_reg_rtx (mode);
4125 blocks = gen_reg_rtx (mode);
4127 convert_move (count, len, 1);
4128 emit_cmp_and_jump_insns (count, const0_rtx,
4129 EQ, NULL_RTX, mode, 1, end_label);
4131 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
4132 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
4133 op0 = change_address (op0, VOIDmode, addr0);
4134 op1 = change_address (op1, VOIDmode, addr1);
4136 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
4137 if (temp != count)
4138 emit_move_insn (count, temp);
4140 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
4141 if (temp != blocks)
4142 emit_move_insn (blocks, temp);
4144 emit_cmp_and_jump_insns (blocks, const0_rtx,
4145 EQ, NULL_RTX, mode, 1, loop_end_label);
4147 emit_label (loop_start_label);
4149 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
4150 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
4151 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
4152 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
4153 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
4154 emit_jump_insn (temp);
4156 s390_load_address (addr0,
4157 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
4158 s390_load_address (addr1,
4159 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
4161 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
4162 if (temp != blocks)
4163 emit_move_insn (blocks, temp);
4165 emit_cmp_and_jump_insns (blocks, const0_rtx,
4166 EQ, NULL_RTX, mode, 1, loop_end_label);
4168 emit_jump (loop_start_label);
4169 emit_label (loop_end_label);
4171 emit_insn (gen_cmpmem_short (op0, op1,
4172 convert_to_mode (Pmode, count, 1)));
4173 emit_label (end_label);
4175 emit_insn (gen_cmpint (target, ccreg));
4180 /* Expand conditional increment or decrement using alc/slb instructions.
4181 Should generate code setting DST to either SRC or SRC + INCREMENT,
4182 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
4183 Returns true if successful, false otherwise.
4185 That makes it possible to implement some if-constructs without jumps e.g.:
4186 (borrow = CC0 | CC1 and carry = CC2 | CC3)
4187 unsigned int a, b, c;
4188 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
4189 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
4190 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
4191 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
4193 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
4194 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
4195 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
4196 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
4197 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
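/* As an illustrative sketch (not the exact RTL, and assuming SImode
   operands), "if (a < b) c++;" is expanded along the lines of

     (set (reg:CCU CC_REGNUM) (compare:CCU (reg b) (reg a)))
     (parallel
       [(set (reg c)
             (plus:SI (plus:SI (gtu:SI (reg:CCU CC_REGNUM) (const_int 0))
                               (reg c))
                      (const_int 0)))
        (clobber (reg:CC CC_REGNUM))])

   which the add-logical-with-carry insn patterns can match.  */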
4199 bool
4200 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
4201 rtx dst, rtx src, rtx increment)
4203 enum machine_mode cmp_mode;
4204 enum machine_mode cc_mode;
4205 rtx op_res;
4206 rtx insn;
4207 rtvec p;
4208 int ret;
4210 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
4211 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
4212 cmp_mode = SImode;
4213 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
4214 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
4215 cmp_mode = DImode;
4216 else
4217 return false;
4219 /* Try ADD LOGICAL WITH CARRY. */
4220 if (increment == const1_rtx)
4222 /* Determine CC mode to use. */
4223 if (cmp_code == EQ || cmp_code == NE)
4225 if (cmp_op1 != const0_rtx)
4227 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
4228 NULL_RTX, 0, OPTAB_WIDEN);
4229 cmp_op1 = const0_rtx;
4232 cmp_code = cmp_code == EQ ? LEU : GTU;
4235 if (cmp_code == LTU || cmp_code == LEU)
4237 rtx tem = cmp_op0;
4238 cmp_op0 = cmp_op1;
4239 cmp_op1 = tem;
4240 cmp_code = swap_condition (cmp_code);
4243 switch (cmp_code)
4245 case GTU:
4246 cc_mode = CCUmode;
4247 break;
4249 case GEU:
4250 cc_mode = CCL3mode;
4251 break;
4253 default:
4254 return false;
4257 /* Emit comparison instruction pattern. */
4258 if (!register_operand (cmp_op0, cmp_mode))
4259 cmp_op0 = force_reg (cmp_mode, cmp_op0);
4261 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
4262 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
4263 /* We use insn_invalid_p here to add clobbers if required. */
4264 ret = insn_invalid_p (emit_insn (insn));
4265 gcc_assert (!ret);
4267 /* Emit ALC instruction pattern. */
4268 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4269 gen_rtx_REG (cc_mode, CC_REGNUM),
4270 const0_rtx);
4272 if (src != const0_rtx)
4274 if (!register_operand (src, GET_MODE (dst)))
4275 src = force_reg (GET_MODE (dst), src);
4277 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
4278 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
4281 p = rtvec_alloc (2);
4282 RTVEC_ELT (p, 0) =
4283 gen_rtx_SET (VOIDmode, dst, op_res);
4284 RTVEC_ELT (p, 1) =
4285 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4286 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4288 return true;
4291 /* Try SUBTRACT LOGICAL WITH BORROW. */
4292 if (increment == constm1_rtx)
4294 /* Determine CC mode to use. */
4295 if (cmp_code == EQ || cmp_code == NE)
4297 if (cmp_op1 != const0_rtx)
4299 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
4300 NULL_RTX, 0, OPTAB_WIDEN);
4301 cmp_op1 = const0_rtx;
4304 cmp_code = cmp_code == EQ ? LEU : GTU;
4307 if (cmp_code == GTU || cmp_code == GEU)
4309 rtx tem = cmp_op0;
4310 cmp_op0 = cmp_op1;
4311 cmp_op1 = tem;
4312 cmp_code = swap_condition (cmp_code);
4315 switch (cmp_code)
4317 case LEU:
4318 cc_mode = CCUmode;
4319 break;
4321 case LTU:
4322 cc_mode = CCL3mode;
4323 break;
4325 default:
4326 return false;
4329 /* Emit comparison instruction pattern. */
4330 if (!register_operand (cmp_op0, cmp_mode))
4331 cmp_op0 = force_reg (cmp_mode, cmp_op0);
4333 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
4334 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
4335 /* We use insn_invalid_p here to add clobbers if required. */
4336 ret = insn_invalid_p (emit_insn (insn));
4337 gcc_assert (!ret);
4339 /* Emit SLB instruction pattern. */
4340 if (!register_operand (src, GET_MODE (dst)))
4341 src = force_reg (GET_MODE (dst), src);
4343 op_res = gen_rtx_MINUS (GET_MODE (dst),
4344 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
4345 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4346 gen_rtx_REG (cc_mode, CC_REGNUM),
4347 const0_rtx));
4348 p = rtvec_alloc (2);
4349 RTVEC_ELT (p, 0) =
4350 gen_rtx_SET (VOIDmode, dst, op_res);
4351 RTVEC_ELT (p, 1) =
4352 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4353 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4355 return true;
4358 return false;
4361 /* Expand code for the insv template. Return true if successful. */
4363 bool
4364 s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
4366 int bitsize = INTVAL (op1);
4367 int bitpos = INTVAL (op2);
4369 /* On z10 we can use the risbg instruction to implement insv. */
4370 if (TARGET_Z10
4371 && ((GET_MODE (dest) == DImode && GET_MODE (src) == DImode)
4372 || (GET_MODE (dest) == SImode && GET_MODE (src) == SImode)))
4374 rtx op;
4375 rtx clobber;
4377 op = gen_rtx_SET (GET_MODE(src),
4378 gen_rtx_ZERO_EXTRACT (GET_MODE (dest), dest, op1, op2),
4379 src);
4380 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4381 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));
4383 return true;
4386 /* We need byte alignment. */
4387 if (bitsize % BITS_PER_UNIT)
4388 return false;
4390 if (bitpos == 0
4391 && memory_operand (dest, VOIDmode)
4392 && (register_operand (src, word_mode)
4393 || const_int_operand (src, VOIDmode)))
4395 /* Emit standard pattern if possible. */
4396 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
4397 if (GET_MODE_BITSIZE (mode) == bitsize)
4398 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
4400 /* (set (ze (mem)) (const_int)). */
4401 else if (const_int_operand (src, VOIDmode))
4403 int size = bitsize / BITS_PER_UNIT;
4404 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
4405 GET_MODE_SIZE (word_mode) - size);
4407 dest = adjust_address (dest, BLKmode, 0);
4408 set_mem_size (dest, GEN_INT (size));
4409 s390_expand_movmem (dest, src_mem, GEN_INT (size));
4412 /* (set (ze (mem)) (reg)). */
4413 else if (register_operand (src, word_mode))
4415 if (bitsize <= GET_MODE_BITSIZE (SImode))
4416 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
4417 const0_rtx), src);
4418 else
4420 /* Emit st,stcmh sequence. */
4421 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
4422 int size = stcmh_width / BITS_PER_UNIT;
4424 emit_move_insn (adjust_address (dest, SImode, size),
4425 gen_lowpart (SImode, src));
4426 set_mem_size (dest, GEN_INT (size));
4427 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
4428 (stcmh_width), const0_rtx),
4429 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
4430 (GET_MODE_BITSIZE (SImode))));
4433 else
4434 return false;
4436 return true;
4439 /* (set (ze (reg)) (const_int)). */
4440 if (TARGET_ZARCH
4441 && register_operand (dest, word_mode)
4442 && (bitpos % 16) == 0
4443 && (bitsize % 16) == 0
4444 && const_int_operand (src, VOIDmode))
4446 HOST_WIDE_INT val = INTVAL (src);
4447 int regpos = bitpos + bitsize;
4449 while (regpos > bitpos)
4451 enum machine_mode putmode;
4452 int putsize;
4454 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
4455 putmode = SImode;
4456 else
4457 putmode = HImode;
4459 putsize = GET_MODE_BITSIZE (putmode);
4460 regpos -= putsize;
4461 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
4462 GEN_INT (putsize),
4463 GEN_INT (regpos)),
4464 gen_int_mode (val, putmode));
4465 val >>= putsize;
4467 gcc_assert (regpos == bitpos);
4468 return true;
4471 return false;
4474 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
4475 register that holds VAL of mode MODE shifted by COUNT bits. */
4477 static inline rtx
4478 s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
4480 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
4481 NULL_RTX, 1, OPTAB_DIRECT);
4482 return expand_simple_binop (SImode, ASHIFT, val, count,
4483 NULL_RTX, 1, OPTAB_DIRECT);
4486 /* Structure to hold the initial parameters for a compare_and_swap operation
4487 in HImode and QImode. */
4489 struct alignment_context
4491 rtx memsi; /* SI aligned memory location. */
4492 rtx shift; /* Bit offset with regard to lsb. */
4493 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
4494 rtx modemaski; /* ~modemask */
4495 bool aligned; /* True if memory is aligned, false otherwise. */
4498 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
4499 structure AC for transparent simplification, if the memory alignment is known
4500 to be at least 32 bits. MEM is the memory location for the actual operation
4501 and MODE its mode. */
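/* For illustration: for a QImode MEM whose address is congruent to 2
   modulo 4 and whose alignment is unknown, MEMSI becomes the aligned
   word containing the byte, SHIFT ends up as (3 - 2) * BITS_PER_UNIT = 8,
   and MODEMASK becomes 0xff << 8, i.e. the mask selecting that byte
   within the big-endian word.  */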
4503 static void
4504 init_alignment_context (struct alignment_context *ac, rtx mem,
4505 enum machine_mode mode)
4507 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
4508 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
4510 if (ac->aligned)
4511 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
4512 else
4514 /* Alignment is unknown. */
4515 rtx byteoffset, addr, align;
4517 /* Force the address into a register. */
4518 addr = force_reg (Pmode, XEXP (mem, 0));
4520 /* Align it to SImode. */
4521 align = expand_simple_binop (Pmode, AND, addr,
4522 GEN_INT (-GET_MODE_SIZE (SImode)),
4523 NULL_RTX, 1, OPTAB_DIRECT);
4524 /* Generate MEM. */
4525 ac->memsi = gen_rtx_MEM (SImode, align);
4526 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
4527 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
4528 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
4530 /* Calculate shiftcount. */
4531 byteoffset = expand_simple_binop (Pmode, AND, addr,
4532 GEN_INT (GET_MODE_SIZE (SImode) - 1),
4533 NULL_RTX, 1, OPTAB_DIRECT);
4534 /* As we already have some offset, evaluate the remaining distance. */
4535 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
4536 NULL_RTX, 1, OPTAB_DIRECT);
4539 /* Shift is the byte count, but we need the bitcount. */
4540 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT),
4541 NULL_RTX, 1, OPTAB_DIRECT);
4542 /* Calculate masks. */
4543 ac->modemask = expand_simple_binop (SImode, ASHIFT,
4544 GEN_INT (GET_MODE_MASK (mode)), ac->shift,
4545 NULL_RTX, 1, OPTAB_DIRECT);
4546 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
4549 /* Expand an atomic compare and swap operation for HImode and QImode. MEM is
4550 the memory location, CMP the old value to compare MEM with and NEW_RTX the value
4551 to set if CMP == MEM.
4552 CMP is never in memory for compare_and_swap_cc because
4553 expand_bool_compare_and_swap puts it into a register for later compare. */
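/* A rough sketch of the emitted sequence in C-like pseudo code (the
   real expansion differs in detail):

     val = *memsi & ~modemask;
   csloop:
     cmpv = val | (cmp << shift);
     newv = val | (new << shift);
     res  = compare_and_swap (memsi, cmpv, newv);
     if (swap succeeded)
       goto csend;
     if ((res & ~modemask) != val)
       { val = res & ~modemask;  goto csloop; }
   csend:
     target = res >> shift;  (truncated to MODE)  */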
4555 void
4556 s390_expand_cs_hqi (enum machine_mode mode, rtx target, rtx mem, rtx cmp, rtx new_rtx)
4558 struct alignment_context ac;
4559 rtx cmpv, newv, val, resv, cc;
4560 rtx res = gen_reg_rtx (SImode);
4561 rtx csloop = gen_label_rtx ();
4562 rtx csend = gen_label_rtx ();
4564 gcc_assert (register_operand (target, VOIDmode));
4565 gcc_assert (MEM_P (mem));
4567 init_alignment_context (&ac, mem, mode);
4569 /* Shift the values to the correct bit positions. */
4570 if (!(ac.aligned && MEM_P (cmp)))
4571 cmp = s390_expand_mask_and_shift (cmp, mode, ac.shift);
4572 if (!(ac.aligned && MEM_P (new_rtx)))
4573 new_rtx = s390_expand_mask_and_shift (new_rtx, mode, ac.shift);
4575 /* Load full word. Subsequent loads are performed by CS. */
4576 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
4577 NULL_RTX, 1, OPTAB_DIRECT);
4579 /* Start CS loop. */
4580 emit_label (csloop);
4581 /* val = "<mem>00..0<mem>"
4582 * cmp = "00..0<cmp>00..0"
4583 * new = "00..0<new>00..0"
4586 /* Patch cmp and new with val at correct position. */
4587 if (ac.aligned && MEM_P (cmp))
4589 cmpv = force_reg (SImode, val);
4590 store_bit_field (cmpv, GET_MODE_BITSIZE (mode), 0, SImode, cmp);
4592 else
4593 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
4594 NULL_RTX, 1, OPTAB_DIRECT));
4595 if (ac.aligned && MEM_P (new_rtx))
4597 newv = force_reg (SImode, val);
4598 store_bit_field (newv, GET_MODE_BITSIZE (mode), 0, SImode, new_rtx);
4600 else
4601 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new_rtx, val,
4602 NULL_RTX, 1, OPTAB_DIRECT));
4604 /* Jump to end if we're done (likely?). */
4605 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi,
4606 cmpv, newv));
4608 /* Check for changes outside mode. */
4609 resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
4610 NULL_RTX, 1, OPTAB_DIRECT);
4611 cc = s390_emit_compare (NE, resv, val);
4612 emit_move_insn (val, resv);
4613 /* If so, loop again. */
4614 s390_emit_jump (csloop, cc);
4616 emit_label (csend);
4618 /* Return the correct part of the bitfield. */
4619 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
4620 NULL_RTX, 1, OPTAB_DIRECT), 1);
4623 /* Expand an atomic operation CODE of mode MODE. MEM is the memory location
4624 and VAL the value to play with. If AFTER is true then store the value
4625 MEM holds after the operation, if AFTER is false then store the value MEM
4626 holds before the operation. If TARGET is zero then discard that value, else
4627 store it to TARGET. */
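/* In builtin terms (describing the semantics only): AFTER set matches
   the __sync_<op>_and_fetch flavour (value after the operation), AFTER
   clear the __sync_fetch_and_<op> flavour (value before).  */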
4629 void
4630 s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
4631 rtx target, rtx mem, rtx val, bool after)
4633 struct alignment_context ac;
4634 rtx cmp;
4635 rtx new_rtx = gen_reg_rtx (SImode);
4636 rtx orig = gen_reg_rtx (SImode);
4637 rtx csloop = gen_label_rtx ();
4639 gcc_assert (!target || register_operand (target, VOIDmode));
4640 gcc_assert (MEM_P (mem));
4642 init_alignment_context (&ac, mem, mode);
4644 /* Shift val to the correct bit positions.
4645 Preserve "icm", but prevent "ex icm". */
4646 if (!(ac.aligned && code == SET && MEM_P (val)))
4647 val = s390_expand_mask_and_shift (val, mode, ac.shift);
4649 /* Further preparation insns. */
4650 if (code == PLUS || code == MINUS)
4651 emit_move_insn (orig, val);
4652 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
4653 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
4654 NULL_RTX, 1, OPTAB_DIRECT);
4656 /* Load full word. Subsequent loads are performed by CS. */
4657 cmp = force_reg (SImode, ac.memsi);
4659 /* Start CS loop. */
4660 emit_label (csloop);
4661 emit_move_insn (new_rtx, cmp);
4663 /* Patch new with val at correct position. */
4664 switch (code)
4666 case PLUS:
4667 case MINUS:
4668 val = expand_simple_binop (SImode, code, new_rtx, orig,
4669 NULL_RTX, 1, OPTAB_DIRECT);
4670 val = expand_simple_binop (SImode, AND, val, ac.modemask,
4671 NULL_RTX, 1, OPTAB_DIRECT);
4672 /* FALLTHRU */
4673 case SET:
4674 if (ac.aligned && MEM_P (val))
4675 store_bit_field (new_rtx, GET_MODE_BITSIZE (mode), 0, SImode, val);
4676 else
4678 new_rtx = expand_simple_binop (SImode, AND, new_rtx, ac.modemaski,
4679 NULL_RTX, 1, OPTAB_DIRECT);
4680 new_rtx = expand_simple_binop (SImode, IOR, new_rtx, val,
4681 NULL_RTX, 1, OPTAB_DIRECT);
4683 break;
4684 case AND:
4685 case IOR:
4686 case XOR:
4687 new_rtx = expand_simple_binop (SImode, code, new_rtx, val,
4688 NULL_RTX, 1, OPTAB_DIRECT);
4689 break;
4690 case MULT: /* NAND */
4691 new_rtx = expand_simple_binop (SImode, XOR, new_rtx, ac.modemask,
4692 NULL_RTX, 1, OPTAB_DIRECT);
4693 new_rtx = expand_simple_binop (SImode, AND, new_rtx, val,
4694 NULL_RTX, 1, OPTAB_DIRECT);
4695 break;
4696 default:
4697 gcc_unreachable ();
4700 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
4701 ac.memsi, cmp, new_rtx));
4703 /* Return the correct part of the bitfield. */
4704 if (target)
4705 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
4706 after ? new_rtx : cmp, ac.shift,
4707 NULL_RTX, 1, OPTAB_DIRECT), 1);
4710 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
4711 We need to emit DTP-relative relocations. */
4713 static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
4715 static void
4716 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
4718 switch (size)
4720 case 4:
4721 fputs ("\t.long\t", file);
4722 break;
4723 case 8:
4724 fputs ("\t.quad\t", file);
4725 break;
4726 default:
4727 gcc_unreachable ();
4729 output_addr_const (file, x);
4730 fputs ("@DTPOFF", file);
4733 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
4734 /* Implement TARGET_MANGLE_TYPE. */
4736 static const char *
4737 s390_mangle_type (const_tree type)
4739 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
4740 && TARGET_LONG_DOUBLE_128)
4741 return "g";
4743 /* For all other types, use normal C++ mangling. */
4744 return NULL;
4746 #endif
4748 /* In the name of slightly smaller debug output, and to cater to
4749 general assembler lossage, recognize various UNSPEC sequences
4750 and turn them back into a direct symbol reference. */
4752 static rtx
4753 s390_delegitimize_address (rtx orig_x)
4755 rtx x = orig_x, y;
4757 if (GET_CODE (x) != MEM)
4758 return orig_x;
4760 x = XEXP (x, 0);
4761 if (GET_CODE (x) == PLUS
4762 && GET_CODE (XEXP (x, 1)) == CONST
4763 && GET_CODE (XEXP (x, 0)) == REG
4764 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4766 y = XEXP (XEXP (x, 1), 0);
4767 if (GET_CODE (y) == UNSPEC
4768 && XINT (y, 1) == UNSPEC_GOT)
4769 return XVECEXP (y, 0, 0);
4770 return orig_x;
4773 if (GET_CODE (x) == CONST)
4775 y = XEXP (x, 0);
4776 if (GET_CODE (y) == UNSPEC
4777 && XINT (y, 1) == UNSPEC_GOTENT)
4778 return XVECEXP (y, 0, 0);
4779 return orig_x;
4782 return orig_x;
4785 /* Output operand OP to stdio stream FILE.
4786 OP is an address (register + offset) which is not used to address data;
4787 instead the rightmost bits are interpreted as the value. */
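/* For example, an operand of the form (plus (reg 1) (const_int 46))
   is printed as "46(%r1)", and a plain (const_int 46) as "46"; only
   the low twelve bits of the offset are printed, as noted below.  */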
4789 static void
4790 print_shift_count_operand (FILE *file, rtx op)
4792 HOST_WIDE_INT offset;
4793 rtx base;
4795 /* Extract base register and offset. */
4796 if (!s390_decompose_shift_count (op, &base, &offset))
4797 gcc_unreachable ();
4799 /* Sanity check. */
4800 if (base)
4802 gcc_assert (GET_CODE (base) == REG);
4803 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4804 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
4807 /* Offsets are restricted to twelve bits. */
4808 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
4809 if (base)
4810 fprintf (file, "(%s)", reg_names[REGNO (base)]);
4813 /* See 'get_some_local_dynamic_name'. */
4815 static int
4816 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
4818 rtx x = *px;
4820 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4822 x = get_pool_constant (x);
4823 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4826 if (GET_CODE (x) == SYMBOL_REF
4827 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4829 cfun->machine->some_ld_name = XSTR (x, 0);
4830 return 1;
4833 return 0;
4836 /* Locate some local-dynamic symbol still in use by this function
4837 so that we can print its name in local-dynamic base patterns. */
4839 static const char *
4840 get_some_local_dynamic_name (void)
4842 rtx insn;
4844 if (cfun->machine->some_ld_name)
4845 return cfun->machine->some_ld_name;
4847 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4848 if (INSN_P (insn)
4849 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4850 return cfun->machine->some_ld_name;
4852 gcc_unreachable ();
4855 /* Output machine-dependent UNSPECs occurring in address constant X
4856 in assembler syntax to stdio stream FILE. Returns true if the
4857 constant X could be recognized, false otherwise. */
4859 bool
4860 s390_output_addr_const_extra (FILE *file, rtx x)
4862 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4863 switch (XINT (x, 1))
4865 case UNSPEC_GOTENT:
4866 output_addr_const (file, XVECEXP (x, 0, 0));
4867 fprintf (file, "@GOTENT");
4868 return true;
4869 case UNSPEC_GOT:
4870 output_addr_const (file, XVECEXP (x, 0, 0));
4871 fprintf (file, "@GOT");
4872 return true;
4873 case UNSPEC_GOTOFF:
4874 output_addr_const (file, XVECEXP (x, 0, 0));
4875 fprintf (file, "@GOTOFF");
4876 return true;
4877 case UNSPEC_PLT:
4878 output_addr_const (file, XVECEXP (x, 0, 0));
4879 fprintf (file, "@PLT");
4880 return true;
4881 case UNSPEC_PLTOFF:
4882 output_addr_const (file, XVECEXP (x, 0, 0));
4883 fprintf (file, "@PLTOFF");
4884 return true;
4885 case UNSPEC_TLSGD:
4886 output_addr_const (file, XVECEXP (x, 0, 0));
4887 fprintf (file, "@TLSGD");
4888 return true;
4889 case UNSPEC_TLSLDM:
4890 assemble_name (file, get_some_local_dynamic_name ());
4891 fprintf (file, "@TLSLDM");
4892 return true;
4893 case UNSPEC_DTPOFF:
4894 output_addr_const (file, XVECEXP (x, 0, 0));
4895 fprintf (file, "@DTPOFF");
4896 return true;
4897 case UNSPEC_NTPOFF:
4898 output_addr_const (file, XVECEXP (x, 0, 0));
4899 fprintf (file, "@NTPOFF");
4900 return true;
4901 case UNSPEC_GOTNTPOFF:
4902 output_addr_const (file, XVECEXP (x, 0, 0));
4903 fprintf (file, "@GOTNTPOFF");
4904 return true;
4905 case UNSPEC_INDNTPOFF:
4906 output_addr_const (file, XVECEXP (x, 0, 0));
4907 fprintf (file, "@INDNTPOFF");
4908 return true;
4911 return false;
4914 /* Output address operand ADDR in assembler syntax to
4915 stdio stream FILE. */
4917 void
4918 print_operand_address (FILE *file, rtx addr)
4920 struct s390_address ad;
4922 if (s390_symref_operand_p (addr, NULL, NULL))
4924 gcc_assert (TARGET_Z10);
4925 output_addr_const (file, addr);
4926 return;
4929 if (!s390_decompose_address (addr, &ad)
4930 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4931 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
4932 output_operand_lossage ("cannot decompose address");
4934 if (ad.disp)
4935 output_addr_const (file, ad.disp);
4936 else
4937 fprintf (file, "0");
4939 if (ad.base && ad.indx)
4940 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4941 reg_names[REGNO (ad.base)]);
4942 else if (ad.base)
4943 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4946 /* Output operand X in assembler syntax to stdio stream FILE.
4947 CODE specified the format flag. The following format flags
4948 are recognized:
4950 'C': print opcode suffix for branch condition.
4951 'D': print opcode suffix for inverse branch condition.
4952 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4953 'G': print the size of the operand in bytes.
4954 'O': print only the displacement of a memory reference.
4955 'R': print only the base register of a memory reference.
4956 'S': print S-type memory reference (base+displacement).
4957 'N': print the second word of a DImode operand.
4958 'M': print the second word of a TImode operand.
4959 'Y': print shift count operand.
4961 'b': print integer X as if it's an unsigned byte.
4962 'c': print integer X as if it's a signed byte.
4963 'x': print integer X as if it's an unsigned halfword.
4964 'h': print integer X as if it's a signed halfword.
4965 'i': print the first nonzero HImode part of X.
4966 'j': print the first HImode part unequal to -1 of X.
4967 'k': print the first nonzero SImode part of X.
4968 'm': print the first SImode part unequal to -1 of X.
4969 'o': print integer X as if it's an unsigned 32-bit word. */
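/* For illustration: with a DImode operand held in the register pair
   %r2/%r3, '%N' prints "%r3"; with the operand in memory at "0(%r2)",
   it prints "4(%r2)".  */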
4971 void
4972 print_operand (FILE *file, rtx x, int code)
4974 switch (code)
4976 case 'C':
4977 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4978 return;
4980 case 'D':
4981 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4982 return;
4984 case 'J':
4985 if (GET_CODE (x) == SYMBOL_REF)
4987 fprintf (file, "%s", ":tls_load:");
4988 output_addr_const (file, x);
4990 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4992 fprintf (file, "%s", ":tls_gdcall:");
4993 output_addr_const (file, XVECEXP (x, 0, 0));
4995 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4997 fprintf (file, "%s", ":tls_ldcall:");
4998 assemble_name (file, get_some_local_dynamic_name ());
5000 else
5001 gcc_unreachable ();
5002 return;
5004 case 'G':
5005 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
5006 return;
5008 case 'O':
5010 struct s390_address ad;
5011 int ret;
5013 gcc_assert (GET_CODE (x) == MEM);
5014 ret = s390_decompose_address (XEXP (x, 0), &ad);
5015 gcc_assert (ret);
5016 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5017 gcc_assert (!ad.indx);
5019 if (ad.disp)
5020 output_addr_const (file, ad.disp);
5021 else
5022 fprintf (file, "0");
5024 return;
5026 case 'R':
5028 struct s390_address ad;
5029 int ret;
5031 gcc_assert (GET_CODE (x) == MEM);
5032 ret = s390_decompose_address (XEXP (x, 0), &ad);
5033 gcc_assert (ret);
5034 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5035 gcc_assert (!ad.indx);
5037 if (ad.base)
5038 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
5039 else
5040 fprintf (file, "0");
5042 return;
5044 case 'S':
5046 struct s390_address ad;
5047 int ret;
5049 gcc_assert (GET_CODE (x) == MEM);
5050 ret = s390_decompose_address (XEXP (x, 0), &ad);
5051 gcc_assert (ret);
5052 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5053 gcc_assert (!ad.indx);
5055 if (ad.disp)
5056 output_addr_const (file, ad.disp);
5057 else
5058 fprintf (file, "0");
5060 if (ad.base)
5061 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
5063 return;
5065 case 'N':
5066 if (GET_CODE (x) == REG)
5067 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
5068 else if (GET_CODE (x) == MEM)
5069 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
5070 else
5071 gcc_unreachable ();
5072 break;
5074 case 'M':
5075 if (GET_CODE (x) == REG)
5076 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
5077 else if (GET_CODE (x) == MEM)
5078 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
5079 else
5080 gcc_unreachable ();
5081 break;
5083 case 'Y':
5084 print_shift_count_operand (file, x);
5085 return;
5088 switch (GET_CODE (x))
5090 case REG:
5091 fprintf (file, "%s", reg_names[REGNO (x)]);
5092 break;
5094 case MEM:
5095 output_address (XEXP (x, 0));
5096 break;
5098 case CONST:
5099 case CODE_LABEL:
5100 case LABEL_REF:
5101 case SYMBOL_REF:
5102 output_addr_const (file, x);
5103 break;
5105 case CONST_INT:
5106 if (code == 'b')
5107 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
5108 else if (code == 'c')
5109 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xff) ^ 0x80) - 0x80);
5110 else if (code == 'x')
5111 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
5112 else if (code == 'h')
5113 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
5114 else if (code == 'i')
5115 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5116 s390_extract_part (x, HImode, 0));
5117 else if (code == 'j')
5118 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5119 s390_extract_part (x, HImode, -1));
5120 else if (code == 'k')
5121 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5122 s390_extract_part (x, SImode, 0));
5123 else if (code == 'm')
5124 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5125 s390_extract_part (x, SImode, -1));
5126 else if (code == 'o')
5127 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
5128 else
5129 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
5130 break;
5132 case CONST_DOUBLE:
5133 gcc_assert (GET_MODE (x) == VOIDmode);
5134 if (code == 'b')
5135 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
5136 else if (code == 'x')
5137 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
5138 else if (code == 'h')
5139 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
5140 else
5141 gcc_unreachable ();
5142 break;
5144 default:
5145 fatal_insn ("UNKNOWN in print_operand !?", x);
5146 break;
5150 /* Target hook for assembling integer objects. We need to define it
5151 here to work around a bug in some versions of GAS, which couldn't
5152 handle values smaller than INT_MIN when printed in decimal. */
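/* For example, an aligned 8 byte integer with the value -0x80000001
   (just below INT_MIN) is emitted as ".quad" followed by the
   hexadecimal 0xffffffff7fffffff rather than by the decimal value.  */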
5154 static bool
5155 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
5157 if (size == 8 && aligned_p
5158 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
5160 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
5161 INTVAL (x));
5162 return true;
5164 return default_assemble_integer (x, size, aligned_p);
5167 /* Returns true if register REGNO is used for forming
5168 a memory address in expression X. */
5170 static bool
5171 reg_used_in_mem_p (int regno, rtx x)
5173 enum rtx_code code = GET_CODE (x);
5174 int i, j;
5175 const char *fmt;
5177 if (code == MEM)
5179 if (refers_to_regno_p (regno, regno+1,
5180 XEXP (x, 0), 0))
5181 return true;
5183 else if (code == SET
5184 && GET_CODE (SET_DEST (x)) == PC)
5186 if (refers_to_regno_p (regno, regno+1,
5187 SET_SRC (x), 0))
5188 return true;
5191 fmt = GET_RTX_FORMAT (code);
5192 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5194 if (fmt[i] == 'e'
5195 && reg_used_in_mem_p (regno, XEXP (x, i)))
5196 return true;
5198 else if (fmt[i] == 'E')
5199 for (j = 0; j < XVECLEN (x, i); j++)
5200 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
5201 return true;
5203 return false;
5206 /* Returns true if expression DEP_RTX sets an address register
5207 used by instruction INSN to address memory. */
5209 static bool
5210 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
5212 rtx target, pat;
5214 if (GET_CODE (dep_rtx) == INSN)
5215 dep_rtx = PATTERN (dep_rtx);
5217 if (GET_CODE (dep_rtx) == SET)
5219 target = SET_DEST (dep_rtx);
5220 if (GET_CODE (target) == STRICT_LOW_PART)
5221 target = XEXP (target, 0);
5222 while (GET_CODE (target) == SUBREG)
5223 target = SUBREG_REG (target);
5225 if (GET_CODE (target) == REG)
5227 int regno = REGNO (target);
5229 if (s390_safe_attr_type (insn) == TYPE_LA)
5231 pat = PATTERN (insn);
5232 if (GET_CODE (pat) == PARALLEL)
5234 gcc_assert (XVECLEN (pat, 0) == 2);
5235 pat = XVECEXP (pat, 0, 0);
5237 gcc_assert (GET_CODE (pat) == SET);
5238 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
5240 else if (get_attr_atype (insn) == ATYPE_AGEN)
5241 return reg_used_in_mem_p (regno, PATTERN (insn));
5244 return false;
5247 /* Return 1 if dep_insn sets a register used by insn in the agen unit. */
5250 s390_agen_dep_p (rtx dep_insn, rtx insn)
5252 rtx dep_rtx = PATTERN (dep_insn);
5253 int i;
5255 if (GET_CODE (dep_rtx) == SET
5256 && addr_generation_dependency_p (dep_rtx, insn))
5257 return 1;
5258 else if (GET_CODE (dep_rtx) == PARALLEL)
5260 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
5262 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
5263 return 1;
5266 return 0;
5270 /* A C statement (sans semicolon) to update the integer scheduling priority
5271 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
5272 reduce the priority to execute INSN later. Do not define this macro if
5273 you do not need to adjust the scheduling priorities of insns.
5275 A STD instruction should be scheduled earlier,
5276 in order to use the bypass. */
5278 static int
5279 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
5281 if (! INSN_P (insn))
5282 return priority;
5284 if (s390_tune != PROCESSOR_2084_Z990
5285 && s390_tune != PROCESSOR_2094_Z9_109)
5286 return priority;
5288 switch (s390_safe_attr_type (insn))
5290 case TYPE_FSTOREDF:
5291 case TYPE_FSTORESF:
5292 priority = priority << 3;
5293 break;
5294 case TYPE_STORE:
5295 case TYPE_STM:
5296 priority = priority << 1;
5297 break;
5298 default:
5299 break;
5301 return priority;
5304 /* The number of instructions that can be issued per cycle. */
5306 static int
5307 s390_issue_rate (void)
5309 switch (s390_tune)
5311 case PROCESSOR_2084_Z990:
5312 case PROCESSOR_2094_Z9_109:
5313 return 3;
5314 case PROCESSOR_2097_Z10:
5315 return 2;
5316 default:
5317 return 1;
5321 static int
5322 s390_first_cycle_multipass_dfa_lookahead (void)
5324 return 4;
5328 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
5329 Fix up MEMs as required. */
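/* For illustration: a reference such as (mem (symbol_ref)) where the
   symbol addresses a literal pool entry is rewritten to
   (mem (unspec [(symbol_ref) (base_reg)] UNSPEC_LTREF)), and the
   symbol+offset and load-address forms handled below are rewritten
   analogously, making the dependence on the pool base register
   explicit.  */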
5331 static void
5332 annotate_constant_pool_refs (rtx *x)
5334 int i, j;
5335 const char *fmt;
5337 gcc_assert (GET_CODE (*x) != SYMBOL_REF
5338 || !CONSTANT_POOL_ADDRESS_P (*x));
5340 /* Literal pool references can only occur inside a MEM ... */
5341 if (GET_CODE (*x) == MEM)
5343 rtx memref = XEXP (*x, 0);
5345 if (GET_CODE (memref) == SYMBOL_REF
5346 && CONSTANT_POOL_ADDRESS_P (memref))
5348 rtx base = cfun->machine->base_reg;
5349 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
5350 UNSPEC_LTREF);
5352 *x = replace_equiv_address (*x, addr);
5353 return;
5356 if (GET_CODE (memref) == CONST
5357 && GET_CODE (XEXP (memref, 0)) == PLUS
5358 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
5359 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
5360 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
5362 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
5363 rtx sym = XEXP (XEXP (memref, 0), 0);
5364 rtx base = cfun->machine->base_reg;
5365 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5366 UNSPEC_LTREF);
5368 *x = replace_equiv_address (*x, plus_constant (addr, off));
5369 return;
5373 /* ... or a load-address type pattern. */
5374 if (GET_CODE (*x) == SET)
5376 rtx addrref = SET_SRC (*x);
5378 if (GET_CODE (addrref) == SYMBOL_REF
5379 && CONSTANT_POOL_ADDRESS_P (addrref))
5381 rtx base = cfun->machine->base_reg;
5382 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
5383 UNSPEC_LTREF);
5385 SET_SRC (*x) = addr;
5386 return;
5389 if (GET_CODE (addrref) == CONST
5390 && GET_CODE (XEXP (addrref, 0)) == PLUS
5391 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
5392 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
5393 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
5395 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
5396 rtx sym = XEXP (XEXP (addrref, 0), 0);
5397 rtx base = cfun->machine->base_reg;
5398 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5399 UNSPEC_LTREF);
5401 SET_SRC (*x) = plus_constant (addr, off);
5402 return;
5406 /* Annotate LTREL_BASE as well. */
5407 if (GET_CODE (*x) == UNSPEC
5408 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5410 rtx base = cfun->machine->base_reg;
5411 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
5412 UNSPEC_LTREL_BASE);
5413 return;
5416 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5417 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5419 if (fmt[i] == 'e')
5421 annotate_constant_pool_refs (&XEXP (*x, i));
5423 else if (fmt[i] == 'E')
5425 for (j = 0; j < XVECLEN (*x, i); j++)
5426 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
5431 /* Split all branches that exceed the maximum distance.
5432 Returns true if this created a new literal pool entry. */
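/* Illustration: a conditional branch whose target is out of range of
   the 16 bit relative branch instructions (the 64KB checked below) is
   rewritten so that the target address is first loaded into the return
   register (from the literal pool, or via an LTREL_OFFSET/LTREL_BASE
   pair when generating PIC code) and the branch then goes through that
   register.  */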
5434 static int
5435 s390_split_branches (void)
5437 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5438 int new_literal = 0, ret;
5439 rtx insn, pat, tmp, target;
5440 rtx *label;
5442 /* We need correct insn addresses. */
5444 shorten_branches (get_insns ());
5446 /* Find all branches that exceed 64KB, and split them. */
5448 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5450 if (GET_CODE (insn) != JUMP_INSN)
5451 continue;
5453 pat = PATTERN (insn);
5454 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5455 pat = XVECEXP (pat, 0, 0);
5456 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
5457 continue;
5459 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
5461 label = &SET_SRC (pat);
5463 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
5465 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
5466 label = &XEXP (SET_SRC (pat), 1);
5467 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
5468 label = &XEXP (SET_SRC (pat), 2);
5469 else
5470 continue;
5472 else
5473 continue;
5475 if (get_attr_length (insn) <= 4)
5476 continue;
5478 /* We are going to use the return register as scratch register,
5479 make sure it will be saved/restored by the prologue/epilogue. */
5480 cfun_frame_layout.save_return_addr_p = 1;
5482 if (!flag_pic)
5484 new_literal = 1;
5485 tmp = force_const_mem (Pmode, *label);
5486 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
5487 INSN_ADDRESSES_NEW (tmp, -1);
5488 annotate_constant_pool_refs (&PATTERN (tmp));
5490 target = temp_reg;
5492 else
5494 new_literal = 1;
5495 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
5496 UNSPEC_LTREL_OFFSET);
5497 target = gen_rtx_CONST (Pmode, target);
5498 target = force_const_mem (Pmode, target);
5499 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
5500 INSN_ADDRESSES_NEW (tmp, -1);
5501 annotate_constant_pool_refs (&PATTERN (tmp));
5503 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
5504 cfun->machine->base_reg),
5505 UNSPEC_LTREL_BASE);
5506 target = gen_rtx_PLUS (Pmode, temp_reg, target);
5509 ret = validate_change (insn, label, target, 0);
5510 gcc_assert (ret);
5513 return new_literal;
5517 /* Find an annotated literal pool symbol referenced in RTX X,
5518 and store it at REF. Will abort if X contains references to
5519 more than one such pool symbol; multiple references to the same
5520 symbol are allowed, however.
5522 The rtx pointed to by REF must be initialized to NULL_RTX
5523 by the caller before calling this routine. */
5525 static void
5526 find_constant_pool_ref (rtx x, rtx *ref)
5528 int i, j;
5529 const char *fmt;
5531 /* Ignore LTREL_BASE references. */
5532 if (GET_CODE (x) == UNSPEC
5533 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5534 return;
5535 /* Likewise POOL_ENTRY insns. */
5536 if (GET_CODE (x) == UNSPEC_VOLATILE
5537 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
5538 return;
5540 gcc_assert (GET_CODE (x) != SYMBOL_REF
5541 || !CONSTANT_POOL_ADDRESS_P (x));
5543 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
5545 rtx sym = XVECEXP (x, 0, 0);
5546 gcc_assert (GET_CODE (sym) == SYMBOL_REF
5547 && CONSTANT_POOL_ADDRESS_P (sym));
5549 if (*ref == NULL_RTX)
5550 *ref = sym;
5551 else
5552 gcc_assert (*ref == sym);
5554 return;
5557 fmt = GET_RTX_FORMAT (GET_CODE (x));
5558 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5560 if (fmt[i] == 'e')
5562 find_constant_pool_ref (XEXP (x, i), ref);
5564 else if (fmt[i] == 'E')
5566 for (j = 0; j < XVECLEN (x, i); j++)
5567 find_constant_pool_ref (XVECEXP (x, i, j), ref);
5572 /* Replace every reference to the annotated literal pool
5573 symbol REF in X by its base plus OFFSET. */
5575 static void
5576 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
5578 int i, j;
5579 const char *fmt;
5581 gcc_assert (*x != ref);
5583 if (GET_CODE (*x) == UNSPEC
5584 && XINT (*x, 1) == UNSPEC_LTREF
5585 && XVECEXP (*x, 0, 0) == ref)
5587 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
5588 return;
5591 if (GET_CODE (*x) == PLUS
5592 && GET_CODE (XEXP (*x, 1)) == CONST_INT
5593 && GET_CODE (XEXP (*x, 0)) == UNSPEC
5594 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
5595 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
5597 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
5598 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
5599 return;
5602 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5603 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5605 if (fmt[i] == 'e')
5607 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
5609 else if (fmt[i] == 'E')
5611 for (j = 0; j < XVECLEN (*x, i); j++)
5612 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
5617 /* Check whether X contains an UNSPEC_LTREL_BASE.
5618 Return its constant pool symbol if found, NULL_RTX otherwise. */
5620 static rtx
5621 find_ltrel_base (rtx x)
5623 int i, j;
5624 const char *fmt;
5626 if (GET_CODE (x) == UNSPEC
5627 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5628 return XVECEXP (x, 0, 0);
5630 fmt = GET_RTX_FORMAT (GET_CODE (x));
5631 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5633 if (fmt[i] == 'e')
5635 rtx fnd = find_ltrel_base (XEXP (x, i));
5636 if (fnd)
5637 return fnd;
5639 else if (fmt[i] == 'E')
5641 for (j = 0; j < XVECLEN (x, i); j++)
5643 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
5644 if (fnd)
5645 return fnd;
5650 return NULL_RTX;
5653 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
5655 static void
5656 replace_ltrel_base (rtx *x)
5658 int i, j;
5659 const char *fmt;
5661 if (GET_CODE (*x) == UNSPEC
5662 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5664 *x = XVECEXP (*x, 0, 1);
5665 return;
5668 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5669 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5671 if (fmt[i] == 'e')
5673 replace_ltrel_base (&XEXP (*x, i));
5675 else if (fmt[i] == 'E')
5677 for (j = 0; j < XVECLEN (*x, i); j++)
5678 replace_ltrel_base (&XVECEXP (*x, i, j));
5684 /* We keep a list of constants which we have to add to internal
5685 constant tables in the middle of large functions. */
5687 #define NR_C_MODES 11
5688 enum machine_mode constant_modes[NR_C_MODES] =
5690 TFmode, TImode, TDmode,
5691 DFmode, DImode, DDmode,
5692 SFmode, SImode, SDmode,
5693 HImode,
5694 QImode
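/* The array above is ordered by decreasing mode size and hence by
   decreasing alignment requirement; s390_dump_pool emits the pool
   entries in this order so that each of them stays naturally
   aligned.  */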
5697 struct constant
5699 struct constant *next;
5700 rtx value;
5701 rtx label;
5704 struct constant_pool
5706 struct constant_pool *next;
5707 rtx first_insn;
5708 rtx pool_insn;
5709 bitmap insns;
5710 rtx emit_pool_after;
5712 struct constant *constants[NR_C_MODES];
5713 struct constant *execute;
5714 rtx label;
5715 int size;
5718 /* Allocate new constant_pool structure. */
5720 static struct constant_pool *
5721 s390_alloc_pool (void)
5723 struct constant_pool *pool;
5724 int i;
5726 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5727 pool->next = NULL;
5728 for (i = 0; i < NR_C_MODES; i++)
5729 pool->constants[i] = NULL;
5731 pool->execute = NULL;
5732 pool->label = gen_label_rtx ();
5733 pool->first_insn = NULL_RTX;
5734 pool->pool_insn = NULL_RTX;
5735 pool->insns = BITMAP_ALLOC (NULL);
5736 pool->size = 0;
5737 pool->emit_pool_after = NULL_RTX;
5739 return pool;
5742 /* Create new constant pool covering instructions starting at INSN
5743 and chain it to the end of POOL_LIST. */
5745 static struct constant_pool *
5746 s390_start_pool (struct constant_pool **pool_list, rtx insn)
5748 struct constant_pool *pool, **prev;
5750 pool = s390_alloc_pool ();
5751 pool->first_insn = insn;
5753 for (prev = pool_list; *prev; prev = &(*prev)->next)
5755 *prev = pool;
5757 return pool;
5760 /* End range of instructions covered by POOL at INSN and emit
5761 placeholder insn representing the pool. */
5763 static void
5764 s390_end_pool (struct constant_pool *pool, rtx insn)
5766 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
5768 if (!insn)
5769 insn = get_last_insn ();
5771 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
5772 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5775 /* Add INSN to the list of insns covered by POOL. */
5777 static void
5778 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
5780 bitmap_set_bit (pool->insns, INSN_UID (insn));
5783 /* Return pool out of POOL_LIST that covers INSN. */
5785 static struct constant_pool *
5786 s390_find_pool (struct constant_pool *pool_list, rtx insn)
5788 struct constant_pool *pool;
5790 for (pool = pool_list; pool; pool = pool->next)
5791 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
5792 break;
5794 return pool;
5797 /* Add constant VAL of mode MODE to the constant pool POOL. */
5799 static void
5800 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
5802 struct constant *c;
5803 int i;
5805 for (i = 0; i < NR_C_MODES; i++)
5806 if (constant_modes[i] == mode)
5807 break;
5808 gcc_assert (i != NR_C_MODES);
5810 for (c = pool->constants[i]; c != NULL; c = c->next)
5811 if (rtx_equal_p (val, c->value))
5812 break;
5814 if (c == NULL)
5816 c = (struct constant *) xmalloc (sizeof *c);
5817 c->value = val;
5818 c->label = gen_label_rtx ();
5819 c->next = pool->constants[i];
5820 pool->constants[i] = c;
5821 pool->size += GET_MODE_SIZE (mode);
5825 /* Find constant VAL of mode MODE in the constant pool POOL.
5826 Return an RTX describing the distance from the start of
5827 the pool to the location of the constant. */
5829 static rtx
5830 s390_find_constant (struct constant_pool *pool, rtx val,
5831 enum machine_mode mode)
5833 struct constant *c;
5834 rtx offset;
5835 int i;
5837 for (i = 0; i < NR_C_MODES; i++)
5838 if (constant_modes[i] == mode)
5839 break;
5840 gcc_assert (i != NR_C_MODES);
5842 for (c = pool->constants[i]; c != NULL; c = c->next)
5843 if (rtx_equal_p (val, c->value))
5844 break;
5846 gcc_assert (c);
5848 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5849 gen_rtx_LABEL_REF (Pmode, pool->label));
5850 offset = gen_rtx_CONST (Pmode, offset);
5851 return offset;
5854 /* Check whether INSN is an execute. Return the label_ref to its
5855 execute target template if so, NULL_RTX otherwise. */
5857 static rtx
5858 s390_execute_label (rtx insn)
5860 if (GET_CODE (insn) == INSN
5861 && GET_CODE (PATTERN (insn)) == PARALLEL
5862 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5863 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5864 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5866 return NULL_RTX;
5869 /* Add execute target for INSN to the constant pool POOL. */
5871 static void
5872 s390_add_execute (struct constant_pool *pool, rtx insn)
5874 struct constant *c;
5876 for (c = pool->execute; c != NULL; c = c->next)
5877 if (INSN_UID (insn) == INSN_UID (c->value))
5878 break;
5880 if (c == NULL)
5882 c = (struct constant *) xmalloc (sizeof *c);
5883 c->value = insn;
5884 c->label = gen_label_rtx ();
5885 c->next = pool->execute;
5886 pool->execute = c;
5887 pool->size += 6;
5891 /* Find execute target for INSN in the constant pool POOL.
5892 Return an RTX describing the distance from the start of
5893 the pool to the location of the execute target. */
5895 static rtx
5896 s390_find_execute (struct constant_pool *pool, rtx insn)
5898 struct constant *c;
5899 rtx offset;
5901 for (c = pool->execute; c != NULL; c = c->next)
5902 if (INSN_UID (insn) == INSN_UID (c->value))
5903 break;
5905 gcc_assert (c);
5907 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5908 gen_rtx_LABEL_REF (Pmode, pool->label));
5909 offset = gen_rtx_CONST (Pmode, offset);
5910 return offset;
5913 /* For an execute INSN, extract the execute target template. */
5915 static rtx
5916 s390_execute_target (rtx insn)
5918 rtx pattern = PATTERN (insn);
5919 gcc_assert (s390_execute_label (insn));
5921 if (XVECLEN (pattern, 0) == 2)
5923 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5925 else
5927 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5928 int i;
5930 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5931 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5933 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5936 return pattern;
5939 /* Indicate that INSN cannot be duplicated. This is the case for
5940 execute insns that carry a unique label. */
5942 static bool
5943 s390_cannot_copy_insn_p (rtx insn)
5945 rtx label = s390_execute_label (insn);
5946 return label && label != const0_rtx;
5949 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5950 do not emit the pool base label. */
5952 static void
5953 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5955 struct constant *c;
5956 rtx insn = pool->pool_insn;
5957 int i;
5959 /* Switch to rodata section. */
5960 if (TARGET_CPU_ZARCH)
5962 insn = emit_insn_after (gen_pool_section_start (), insn);
5963 INSN_ADDRESSES_NEW (insn, -1);
5966 /* Ensure minimum pool alignment. */
5967 if (TARGET_CPU_ZARCH)
5968 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5969 else
5970 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5971 INSN_ADDRESSES_NEW (insn, -1);
5973 /* Emit pool base label. */
5974 if (!remote_label)
5976 insn = emit_label_after (pool->label, insn);
5977 INSN_ADDRESSES_NEW (insn, -1);
5980 /* Dump constants in descending alignment requirement order,
5981 ensuring proper alignment for every constant. */
5982 for (i = 0; i < NR_C_MODES; i++)
5983 for (c = pool->constants[i]; c; c = c->next)
5985 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5986 rtx value = copy_rtx (c->value);
5987 if (GET_CODE (value) == CONST
5988 && GET_CODE (XEXP (value, 0)) == UNSPEC
5989 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5990 && XVECLEN (XEXP (value, 0), 0) == 1)
5992 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5993 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5994 value = gen_rtx_CONST (VOIDmode, value);
5997 insn = emit_label_after (c->label, insn);
5998 INSN_ADDRESSES_NEW (insn, -1);
6000 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
6001 gen_rtvec (1, value),
6002 UNSPECV_POOL_ENTRY);
6003 insn = emit_insn_after (value, insn);
6004 INSN_ADDRESSES_NEW (insn, -1);
6007 /* Ensure minimum alignment for instructions. */
6008 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
6009 INSN_ADDRESSES_NEW (insn, -1);
6011 /* Output in-pool execute template insns. */
6012 for (c = pool->execute; c; c = c->next)
6014 insn = emit_label_after (c->label, insn);
6015 INSN_ADDRESSES_NEW (insn, -1);
6017 insn = emit_insn_after (s390_execute_target (c->value), insn);
6018 INSN_ADDRESSES_NEW (insn, -1);
6021 /* Switch back to previous section. */
6022 if (TARGET_CPU_ZARCH)
6024 insn = emit_insn_after (gen_pool_section_end (), insn);
6025 INSN_ADDRESSES_NEW (insn, -1);
6028 insn = emit_barrier_after (insn);
6029 INSN_ADDRESSES_NEW (insn, -1);
6031 /* Remove placeholder insn. */
6032 remove_insn (pool->pool_insn);
6035 /* Free all memory used by POOL. */
6037 static void
6038 s390_free_pool (struct constant_pool *pool)
6040 struct constant *c, *next;
6041 int i;
6043 for (i = 0; i < NR_C_MODES; i++)
6044 for (c = pool->constants[i]; c; c = next)
6046 next = c->next;
6047 free (c);
6050 for (c = pool->execute; c; c = next)
6052 next = c->next;
6053 free (c);
6056 BITMAP_FREE (pool->insns);
6057 free (pool);
6061 /* Collect main literal pool. Return NULL on overflow. */
6063 static struct constant_pool *
6064 s390_mainpool_start (void)
6066 struct constant_pool *pool;
6067 rtx insn;
6069 pool = s390_alloc_pool ();
6071 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6073 if (GET_CODE (insn) == INSN
6074 && GET_CODE (PATTERN (insn)) == SET
6075 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
6076 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
6078 gcc_assert (!pool->pool_insn);
6079 pool->pool_insn = insn;
6082 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
6084 s390_add_execute (pool, insn);
6086 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6088 rtx pool_ref = NULL_RTX;
6089 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6090 if (pool_ref)
6092 rtx constant = get_pool_constant (pool_ref);
6093 enum machine_mode mode = get_pool_mode (pool_ref);
6094 s390_add_constant (pool, constant, mode);
6098 /* If hot/cold partitioning is enabled we have to make sure that
6099 the literal pool is emitted in the same section where the
6100 initialization of the literal pool base pointer takes place.
6101 emit_pool_after is only used in the non-overflow case on non-Z
6102 CPUs where we can emit the literal pool at the end of the
6103 function body within the text section. */
6104 if (NOTE_P (insn)
6105 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS
6106 && !pool->emit_pool_after)
6107 pool->emit_pool_after = PREV_INSN (insn);
6110 gcc_assert (pool->pool_insn || pool->size == 0);
6112 if (pool->size >= 4096)
6114 /* We're going to chunkify the pool, so remove the main
6115 pool placeholder insn. */
6116 remove_insn (pool->pool_insn);
6118 s390_free_pool (pool);
6119 pool = NULL;
6122 /* If the function ends with the section where the literal pool
6123 should be emitted, set the marker to its end. */
6124 if (pool && !pool->emit_pool_after)
6125 pool->emit_pool_after = get_last_insn ();
6127 return pool;
6130 /* POOL holds the main literal pool as collected by s390_mainpool_start.
6131 Modify the current function to output the pool constants as well as
6132 the pool register setup instruction. */
6134 static void
6135 s390_mainpool_finish (struct constant_pool *pool)
6137 rtx base_reg = cfun->machine->base_reg;
6138 rtx insn;
6140 /* If the pool is empty, we're done. */
6141 if (pool->size == 0)
6143 /* We don't actually need a base register after all. */
6144 cfun->machine->base_reg = NULL_RTX;
6146 if (pool->pool_insn)
6147 remove_insn (pool->pool_insn);
6148 s390_free_pool (pool);
6149 return;
6152 /* We need correct insn addresses. */
6153 shorten_branches (get_insns ());
6155 /* On zSeries, we use a LARL to load the pool register. The pool is
6156 located in the .rodata section, so we emit it after the function. */
6157 if (TARGET_CPU_ZARCH)
6159 insn = gen_main_base_64 (base_reg, pool->label);
6160 insn = emit_insn_after (insn, pool->pool_insn);
6161 INSN_ADDRESSES_NEW (insn, -1);
6162 remove_insn (pool->pool_insn);
6164 insn = get_last_insn ();
6165 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6166 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6168 s390_dump_pool (pool, 0);
6171 /* On S/390, if the total size of the function's code plus literal pool
6172 does not exceed 4096 bytes, we use BASR to set up a function base
6173 pointer, and emit the literal pool at the end of the function. */
6174 else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
6175 + pool->size + 8 /* alignment slop */ < 4096)
6177 insn = gen_main_base_31_small (base_reg, pool->label);
6178 insn = emit_insn_after (insn, pool->pool_insn);
6179 INSN_ADDRESSES_NEW (insn, -1);
6180 remove_insn (pool->pool_insn);
6182 insn = emit_label_after (pool->label, insn);
6183 INSN_ADDRESSES_NEW (insn, -1);
6185 /* emit_pool_after will be set by s390_mainpool_start to the
6186 last insn of the section where the literal pool should be
6187 emitted. */
6188 insn = pool->emit_pool_after;
6190 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6191 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6193 s390_dump_pool (pool, 1);
6196 /* Otherwise, we emit an inline literal pool and use BASR to branch
6197 over it, setting up the pool register at the same time. */
6198 else
6200 rtx pool_end = gen_label_rtx ();
6202 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
6203 insn = emit_insn_after (insn, pool->pool_insn);
6204 INSN_ADDRESSES_NEW (insn, -1);
6205 remove_insn (pool->pool_insn);
6207 insn = emit_label_after (pool->label, insn);
6208 INSN_ADDRESSES_NEW (insn, -1);
6210 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6211 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6213 insn = emit_label_after (pool_end, pool->pool_insn);
6214 INSN_ADDRESSES_NEW (insn, -1);
6216 s390_dump_pool (pool, 1);
6220 /* Replace all literal pool references. */
6222 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6224 if (INSN_P (insn))
6225 replace_ltrel_base (&PATTERN (insn));
6227 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6229 rtx addr, pool_ref = NULL_RTX;
6230 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6231 if (pool_ref)
6233 if (s390_execute_label (insn))
6234 addr = s390_find_execute (pool, insn);
6235 else
6236 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
6237 get_pool_mode (pool_ref));
6239 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6240 INSN_CODE (insn) = -1;
6246 /* Free the pool. */
6247 s390_free_pool (pool);
6250 /* POOL holds the main literal pool as collected by s390_mainpool_start.
6251 We have decided we cannot use this pool, so revert all changes
6252 to the current function that were done by s390_mainpool_start. */
6253 static void
6254 s390_mainpool_cancel (struct constant_pool *pool)
6256 /* We didn't actually change the instruction stream, so simply
6257 free the pool memory. */
6258 s390_free_pool (pool);
6262 /* Chunkify the literal pool. */
6264 #define S390_POOL_CHUNK_MIN 0xc00
6265 #define S390_POOL_CHUNK_MAX 0xe00
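/* Illustrative note (not part of the original source): both chunk limits
   sit below the 4096-byte range of a 12-bit unsigned displacement, so
   every constant in a chunk stays addressable from that chunk's base
   label:

     S390_POOL_CHUNK_MIN = 0xc00 = 3072 bytes
     S390_POOL_CHUNK_MAX = 0xe00 = 3584 bytes

   The remaining 512 bytes up to 4096 are presumably headroom for
   alignment padding and for the base-reload and branch insns that the
   chunkify code inserts below.  */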
6267 static struct constant_pool *
6268 s390_chunkify_start (void)
6270 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
6271 int extra_size = 0;
6272 bitmap far_labels;
6273 rtx pending_ltrel = NULL_RTX;
6274 rtx insn;
6276 rtx (*gen_reload_base) (rtx, rtx) =
6277 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
6280 /* We need correct insn addresses. */
6282 shorten_branches (get_insns ());
6284 /* Scan all insns and move literals to pool chunks. */
6286 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6288 bool section_switch_p = false;
6290 /* Check for pending LTREL_BASE. */
6291 if (INSN_P (insn))
6293 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
6294 if (ltrel_base)
6296 gcc_assert (ltrel_base == pending_ltrel);
6297 pending_ltrel = NULL_RTX;
6301 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
6303 if (!curr_pool)
6304 curr_pool = s390_start_pool (&pool_list, insn);
6306 s390_add_execute (curr_pool, insn);
6307 s390_add_pool_insn (curr_pool, insn);
6309 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6311 rtx pool_ref = NULL_RTX;
6312 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6313 if (pool_ref)
6315 rtx constant = get_pool_constant (pool_ref);
6316 enum machine_mode mode = get_pool_mode (pool_ref);
6318 if (!curr_pool)
6319 curr_pool = s390_start_pool (&pool_list, insn);
6321 s390_add_constant (curr_pool, constant, mode);
6322 s390_add_pool_insn (curr_pool, insn);
6324 /* Don't split the pool chunk between a LTREL_OFFSET load
6325 and the corresponding LTREL_BASE. */
6326 if (GET_CODE (constant) == CONST
6327 && GET_CODE (XEXP (constant, 0)) == UNSPEC
6328 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
6330 gcc_assert (!pending_ltrel);
6331 pending_ltrel = pool_ref;
6336 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
6338 if (curr_pool)
6339 s390_add_pool_insn (curr_pool, insn);
6340 /* An LTREL_BASE must follow within the same basic block. */
6341 gcc_assert (!pending_ltrel);
6344 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
6345 section_switch_p = true;
6347 if (!curr_pool
6348 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
6349 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
6350 continue;
6352 if (TARGET_CPU_ZARCH)
6354 if (curr_pool->size < S390_POOL_CHUNK_MAX)
6355 continue;
6357 s390_end_pool (curr_pool, NULL_RTX);
6358 curr_pool = NULL;
6360 else
6362 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
6363 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
6364 + extra_size;
6366 /* We will later have to insert base register reload insns.
6367 Those will have an effect on code size, which we need to
6368 consider here. This calculation makes rather pessimistic
6369 worst-case assumptions. */
6370 if (GET_CODE (insn) == CODE_LABEL)
6371 extra_size += 6;
6373 if (chunk_size < S390_POOL_CHUNK_MIN
6374 && curr_pool->size < S390_POOL_CHUNK_MIN
6375 && !section_switch_p)
6376 continue;
6378 /* Pool chunks can only be inserted after BARRIERs ... */
6379 if (GET_CODE (insn) == BARRIER)
6381 s390_end_pool (curr_pool, insn);
6382 curr_pool = NULL;
6383 extra_size = 0;
6386 /* ... so if we don't find one in time, create one. */
6387 else if (chunk_size > S390_POOL_CHUNK_MAX
6388 || curr_pool->size > S390_POOL_CHUNK_MAX
6389 || section_switch_p)
6391 rtx label, jump, barrier;
6393 if (!section_switch_p)
6395 /* We can insert the barrier only after a 'real' insn. */
6396 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
6397 continue;
6398 if (get_attr_length (insn) == 0)
6399 continue;
6400 /* Don't separate LTREL_BASE from the corresponding
6401 LTREL_OFFSET load. */
6402 if (pending_ltrel)
6403 continue;
6405 else
6407 gcc_assert (!pending_ltrel);
6409 /* The old pool has to end before the section switch
6410 note in order to make it part of the current
6411 section. */
6412 insn = PREV_INSN (insn);
6415 label = gen_label_rtx ();
6416 jump = emit_jump_insn_after (gen_jump (label), insn);
6417 barrier = emit_barrier_after (jump);
6418 insn = emit_label_after (label, barrier);
6419 JUMP_LABEL (jump) = label;
6420 LABEL_NUSES (label) = 1;
6422 INSN_ADDRESSES_NEW (jump, -1);
6423 INSN_ADDRESSES_NEW (barrier, -1);
6424 INSN_ADDRESSES_NEW (insn, -1);
6426 s390_end_pool (curr_pool, barrier);
6427 curr_pool = NULL;
6428 extra_size = 0;
6433 if (curr_pool)
6434 s390_end_pool (curr_pool, NULL_RTX);
6435 gcc_assert (!pending_ltrel);
6437 /* Find all labels that are branched into
6438 from an insn belonging to a different chunk. */
6440 far_labels = BITMAP_ALLOC (NULL);
6442 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6444 /* Labels marked with LABEL_PRESERVE_P can be the target
6445 of non-local jumps, so we have to mark them.
6446 The same holds for named labels.
6448 Don't do that, however, if it is the label before
6449 a jump table. */
6451 if (GET_CODE (insn) == CODE_LABEL
6452 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
6454 rtx vec_insn = next_real_insn (insn);
6455 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6456 PATTERN (vec_insn) : NULL_RTX;
6457 if (!vec_pat
6458 || !(GET_CODE (vec_pat) == ADDR_VEC
6459 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6460 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
6463 /* If we have a direct jump (conditional or unconditional)
6464 or a casesi jump, check all potential targets. */
6465 else if (GET_CODE (insn) == JUMP_INSN)
6467 rtx pat = PATTERN (insn);
6468 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
6469 pat = XVECEXP (pat, 0, 0);
6471 if (GET_CODE (pat) == SET)
6473 rtx label = JUMP_LABEL (insn);
6474 if (label)
6476 if (s390_find_pool (pool_list, label)
6477 != s390_find_pool (pool_list, insn))
6478 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6481 else if (GET_CODE (pat) == PARALLEL
6482 && XVECLEN (pat, 0) == 2
6483 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
6484 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
6485 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
6487 /* Find the jump table used by this casesi jump. */
6488 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
6489 rtx vec_insn = next_real_insn (vec_label);
6490 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6491 PATTERN (vec_insn) : NULL_RTX;
6492 if (vec_pat
6493 && (GET_CODE (vec_pat) == ADDR_VEC
6494 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6496 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
6498 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
6500 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
6502 if (s390_find_pool (pool_list, label)
6503 != s390_find_pool (pool_list, insn))
6504 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6511 /* Insert base register reload insns before every pool. */
6513 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6515 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6516 curr_pool->label);
6517 rtx insn = curr_pool->first_insn;
6518 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
6521 /* Insert base register reload insns at every far label. */
6523 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6524 if (GET_CODE (insn) == CODE_LABEL
6525 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
6527 struct constant_pool *pool = s390_find_pool (pool_list, insn);
6528 if (pool)
6530 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6531 pool->label);
6532 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
6537 BITMAP_FREE (far_labels);
6540 /* Recompute insn addresses. */
6542 init_insn_lengths ();
6543 shorten_branches (get_insns ());
6545 return pool_list;
6548 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6549 After we have decided to use this list, finish implementing
6550 all changes to the current function as required. */
6552 static void
6553 s390_chunkify_finish (struct constant_pool *pool_list)
6555 struct constant_pool *curr_pool = NULL;
6556 rtx insn;
6559 /* Replace all literal pool references. */
6561 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6563 if (INSN_P (insn))
6564 replace_ltrel_base (&PATTERN (insn));
6566 curr_pool = s390_find_pool (pool_list, insn);
6567 if (!curr_pool)
6568 continue;
6570 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6572 rtx addr, pool_ref = NULL_RTX;
6573 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6574 if (pool_ref)
6576 if (s390_execute_label (insn))
6577 addr = s390_find_execute (curr_pool, insn);
6578 else
6579 addr = s390_find_constant (curr_pool,
6580 get_pool_constant (pool_ref),
6581 get_pool_mode (pool_ref));
6583 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6584 INSN_CODE (insn) = -1;
6589 /* Dump out all literal pools. */
6591 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6592 s390_dump_pool (curr_pool, 0);
6594 /* Free pool list. */
6596 while (pool_list)
6598 struct constant_pool *next = pool_list->next;
6599 s390_free_pool (pool_list);
6600 pool_list = next;
6604 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6605 We have decided we cannot use this list, so revert all changes
6606 to the current function that were done by s390_chunkify_start. */
6608 static void
6609 s390_chunkify_cancel (struct constant_pool *pool_list)
6611 struct constant_pool *curr_pool = NULL;
6612 rtx insn;
6614 /* Remove all pool placeholder insns. */
6616 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6618 /* Did we insert an extra barrier? Remove it. */
6619 rtx barrier = PREV_INSN (curr_pool->pool_insn);
6620 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
6621 rtx label = NEXT_INSN (curr_pool->pool_insn);
6623 if (jump && GET_CODE (jump) == JUMP_INSN
6624 && barrier && GET_CODE (barrier) == BARRIER
6625 && label && GET_CODE (label) == CODE_LABEL
6626 && GET_CODE (PATTERN (jump)) == SET
6627 && SET_DEST (PATTERN (jump)) == pc_rtx
6628 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
6629 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
6631 remove_insn (jump);
6632 remove_insn (barrier);
6633 remove_insn (label);
6636 remove_insn (curr_pool->pool_insn);
6639 /* Remove all base register reload insns. */
6641 for (insn = get_insns (); insn; )
6643 rtx next_insn = NEXT_INSN (insn);
6645 if (GET_CODE (insn) == INSN
6646 && GET_CODE (PATTERN (insn)) == SET
6647 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
6648 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
6649 remove_insn (insn);
6651 insn = next_insn;
6654 /* Free pool list. */
6656 while (pool_list)
6658 struct constant_pool *next = pool_list->next;
6659 s390_free_pool (pool_list);
6660 pool_list = next;
6665 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
6667 void
6668 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
6670 REAL_VALUE_TYPE r;
6672 switch (GET_MODE_CLASS (mode))
6674 case MODE_FLOAT:
6675 case MODE_DECIMAL_FLOAT:
6676 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
6678 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
6679 assemble_real (r, mode, align);
6680 break;
6682 case MODE_INT:
6683 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
6684 break;
6686 default:
6687 gcc_unreachable ();
6692 /* Return an RTL expression representing the value of the return address
6693 for the frame COUNT steps up from the current frame. FRAME is the
6694 frame pointer of that frame. */
6697 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6699 int offset;
6700 rtx addr;
6702 /* Without backchain, we fail for all but the current frame. */
6704 if (!TARGET_BACKCHAIN && count > 0)
6705 return NULL_RTX;
6707 /* For the current frame, we need to make sure the initial
6708 value of RETURN_REGNUM is actually saved. */
6710 if (count == 0)
6712 /* On non-z architectures branch splitting could overwrite r14. */
6713 if (TARGET_CPU_ZARCH)
6714 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
6715 else
6717 cfun_frame_layout.save_return_addr_p = true;
6718 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6722 if (TARGET_PACKED_STACK)
6723 offset = -2 * UNITS_PER_WORD;
6724 else
6725 offset = RETURN_REGNUM * UNITS_PER_WORD;
6727 addr = plus_constant (frame, offset);
6728 addr = memory_address (Pmode, addr);
6729 return gen_rtx_MEM (Pmode, addr);
6732 /* Return an RTL expression representing the back chain stored in
6733 the current stack frame. */
6736 s390_back_chain_rtx (void)
6738 rtx chain;
6740 gcc_assert (TARGET_BACKCHAIN);
6742 if (TARGET_PACKED_STACK)
6743 chain = plus_constant (stack_pointer_rtx,
6744 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6745 else
6746 chain = stack_pointer_rtx;
6748 chain = gen_rtx_MEM (Pmode, chain);
6749 return chain;
6752 /* Find first call clobbered register unused in a function.
6753 This could be used as base register in a leaf function
6754 or for holding the return address before epilogue. */
6756 static int
6757 find_unused_clobbered_reg (void)
6759 int i;
6760 for (i = 0; i < 6; i++)
6761 if (!df_regs_ever_live_p (i))
6762 return i;
6763 return 0;
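/* Note that a return value of 0 is ambiguous here: it can mean either
   that r0 is unused or that no call-clobbered register is free at all.
   Callers therefore treat 0 as "none found"; s390_emit_epilogue, for
   instance, falls back to register 4 in that case.  */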
6767 /* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
6768 clobbered hard regs in SETREG. */
6770 static void
6771 s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
6773 int *regs_ever_clobbered = (int *)data;
6774 unsigned int i, regno;
6775 enum machine_mode mode = GET_MODE (setreg);
6777 if (GET_CODE (setreg) == SUBREG)
6779 rtx inner = SUBREG_REG (setreg);
6780 if (!GENERAL_REG_P (inner))
6781 return;
6782 regno = subreg_regno (setreg);
6784 else if (GENERAL_REG_P (setreg))
6785 regno = REGNO (setreg);
6786 else
6787 return;
6789 for (i = regno;
6790 i < regno + HARD_REGNO_NREGS (regno, mode);
6791 i++)
6792 regs_ever_clobbered[i] = 1;
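/* For example (a sketch, not in the original code): a store to a DImode
   value living in GPR 6 on a 31-bit target spans two hard registers
   (HARD_REGNO_NREGS == 2), so the loop above sets both
   regs_ever_clobbered[6] and regs_ever_clobbered[7].  */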
6795 /* Walks through all basic blocks of the current function looking
6796 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
6797 of the passed integer array REGS_EVER_CLOBBERED are set to one for
6798 each of those regs. */
6800 static void
6801 s390_regs_ever_clobbered (int *regs_ever_clobbered)
6803 basic_block cur_bb;
6804 rtx cur_insn;
6805 unsigned int i;
6807 memset (regs_ever_clobbered, 0, 16 * sizeof (int));
6809 /* For non-leaf functions we have to consider all call clobbered regs to be
6810 clobbered. */
6811 if (!current_function_is_leaf)
6813 for (i = 0; i < 16; i++)
6814 regs_ever_clobbered[i] = call_really_used_regs[i];
6817 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
6818 this work is done by liveness analysis (mark_regs_live_at_end).
6819 Special care is needed for functions containing landing pads. Landing pads
6820 may use the eh registers, but the code which sets these registers is not
6821 contained in that function. Hence s390_regs_ever_clobbered is not able to
6822 deal with this automatically. */
6823 if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p)
6824 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
6825 if (crtl->calls_eh_return
6826 || (cfun->machine->has_landing_pad_p
6827 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
6828 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
6830 /* For nonlocal gotos all call-saved registers have to be saved.
6831 This flag is also set for the unwinding code in libgcc.
6832 See expand_builtin_unwind_init. For regs_ever_live this is done by
6833 reload. */
6834 if (cfun->has_nonlocal_label)
6835 for (i = 0; i < 16; i++)
6836 if (!call_really_used_regs[i])
6837 regs_ever_clobbered[i] = 1;
6839 FOR_EACH_BB (cur_bb)
6841 FOR_BB_INSNS (cur_bb, cur_insn)
6843 if (INSN_P (cur_insn))
6844 note_stores (PATTERN (cur_insn),
6845 s390_reg_clobbered_rtx,
6846 regs_ever_clobbered);
6851 /* Determine the frame area which actually has to be accessed
6852 in the function epilogue. The values are stored at the
6853 given pointers AREA_BOTTOM (address of the lowest used stack
6854 address) and AREA_TOP (address of the first item which does
6855 not belong to the stack frame). */
6857 static void
6858 s390_frame_area (int *area_bottom, int *area_top)
6860 int b, t;
6861 int i;
6863 b = INT_MAX;
6864 t = INT_MIN;
6866 if (cfun_frame_layout.first_restore_gpr != -1)
6868 b = (cfun_frame_layout.gprs_offset
6869 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6870 t = b + (cfun_frame_layout.last_restore_gpr
6871 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6874 if (TARGET_64BIT && cfun_save_high_fprs_p)
6876 b = MIN (b, cfun_frame_layout.f8_offset);
6877 t = MAX (t, (cfun_frame_layout.f8_offset
6878 + cfun_frame_layout.high_fprs * 8));
6881 if (!TARGET_64BIT)
6882 for (i = 2; i < 4; i++)
6883 if (cfun_fpr_bit_p (i))
6885 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6886 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6889 *area_bottom = b;
6890 *area_top = t;
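/* Worked example (illustrative only): with first_restore_gpr == 6,
   last_restore_gpr == 15, gprs_offset == 48 and UNITS_PER_WORD == 8,
   the bottom becomes 48 + 6 * 8 = 96 and the top 96 + 10 * 8 = 176,
   i.e. the epilogue accesses the half-open byte range [96, 176).  */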
6893 /* Fill cfun->machine with info about register usage of current function.
6894 Return in CLOBBERED_REGS which GPRs are currently considered set. */
6896 static void
6897 s390_register_info (int clobbered_regs[])
6899 int i, j;
6901 /* fprs 8 - 15 are call-saved under the 64-bit ABI. */
6902 cfun_frame_layout.fpr_bitmap = 0;
6903 cfun_frame_layout.high_fprs = 0;
6904 if (TARGET_64BIT)
6905 for (i = 24; i < 32; i++)
6906 if (df_regs_ever_live_p (i) && !global_regs[i])
6908 cfun_set_fpr_bit (i - 16);
6909 cfun_frame_layout.high_fprs++;
6912 /* Find first and last gpr to be saved. We trust regs_ever_live
6913 data, except that we don't save and restore global registers.
6915 Also, all registers with special meaning to the compiler need
6916 extra handling. */
6918 s390_regs_ever_clobbered (clobbered_regs);
6920 for (i = 0; i < 16; i++)
6921 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];
6923 if (frame_pointer_needed)
6924 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;
6926 if (flag_pic)
6927 clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
6928 |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);
6930 clobbered_regs[BASE_REGNUM]
6931 |= (cfun->machine->base_reg
6932 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
6934 clobbered_regs[RETURN_REGNUM]
6935 |= (!current_function_is_leaf
6936 || TARGET_TPF_PROFILING
6937 || cfun->machine->split_branches_pending_p
6938 || cfun_frame_layout.save_return_addr_p
6939 || crtl->calls_eh_return
6940 || cfun->stdarg);
6942 clobbered_regs[STACK_POINTER_REGNUM]
6943 |= (!current_function_is_leaf
6944 || TARGET_TPF_PROFILING
6945 || cfun_save_high_fprs_p
6946 || get_frame_size () > 0
6947 || cfun->calls_alloca
6948 || cfun->stdarg);
6950 for (i = 6; i < 16; i++)
6951 if (df_regs_ever_live_p (i) || clobbered_regs[i])
6952 break;
6953 for (j = 15; j > i; j--)
6954 if (df_regs_ever_live_p (j) || clobbered_regs[j])
6955 break;
6957 if (i == 16)
6959 /* Nothing to save/restore. */
6960 cfun_frame_layout.first_save_gpr_slot = -1;
6961 cfun_frame_layout.last_save_gpr_slot = -1;
6962 cfun_frame_layout.first_save_gpr = -1;
6963 cfun_frame_layout.first_restore_gpr = -1;
6964 cfun_frame_layout.last_save_gpr = -1;
6965 cfun_frame_layout.last_restore_gpr = -1;
6967 else
6969 /* Save slots for gprs from i to j. */
6970 cfun_frame_layout.first_save_gpr_slot = i;
6971 cfun_frame_layout.last_save_gpr_slot = j;
6973 for (i = cfun_frame_layout.first_save_gpr_slot;
6974 i < cfun_frame_layout.last_save_gpr_slot + 1;
6975 i++)
6976 if (clobbered_regs[i])
6977 break;
6979 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
6980 if (clobbered_regs[j])
6981 break;
6983 if (i == cfun_frame_layout.last_save_gpr_slot + 1)
6985 /* Nothing to save/restore. */
6986 cfun_frame_layout.first_save_gpr = -1;
6987 cfun_frame_layout.first_restore_gpr = -1;
6988 cfun_frame_layout.last_save_gpr = -1;
6989 cfun_frame_layout.last_restore_gpr = -1;
6991 else
6993 /* Save / Restore from gpr i to j. */
6994 cfun_frame_layout.first_save_gpr = i;
6995 cfun_frame_layout.first_restore_gpr = i;
6996 cfun_frame_layout.last_save_gpr = j;
6997 cfun_frame_layout.last_restore_gpr = j;
7001 if (cfun->stdarg)
7003 /* Varargs functions need to save gprs 2 to 6. */
7004 if (cfun->va_list_gpr_size
7005 && crtl->args.info.gprs < GP_ARG_NUM_REG)
7007 int min_gpr = crtl->args.info.gprs;
7008 int max_gpr = min_gpr + cfun->va_list_gpr_size;
7009 if (max_gpr > GP_ARG_NUM_REG)
7010 max_gpr = GP_ARG_NUM_REG;
7012 if (cfun_frame_layout.first_save_gpr == -1
7013 || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
7015 cfun_frame_layout.first_save_gpr = 2 + min_gpr;
7016 cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
7019 if (cfun_frame_layout.last_save_gpr == -1
7020 || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
7022 cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
7023 cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
7027 /* Mark f0 and f2 (31 bit) or f0-f4 (64 bit) as needing to be saved. */
7028 if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
7029 && crtl->args.info.fprs < FP_ARG_NUM_REG)
7031 int min_fpr = crtl->args.info.fprs;
7032 int max_fpr = min_fpr + cfun->va_list_fpr_size;
7033 if (max_fpr > FP_ARG_NUM_REG)
7034 max_fpr = FP_ARG_NUM_REG;
7036 /* ??? This is currently required to ensure proper location
7037 of the fpr save slots within the va_list save area. */
7038 if (TARGET_PACKED_STACK)
7039 min_fpr = 0;
7041 for (i = min_fpr; i < max_fpr; i++)
7042 cfun_set_fpr_bit (i);
7046 if (!TARGET_64BIT)
7047 for (i = 2; i < 4; i++)
7048 if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
7049 cfun_set_fpr_bit (i);
7052 /* Fill cfun->machine with info about frame of current function. */
7054 static void
7055 s390_frame_info (void)
7057 int i;
7059 cfun_frame_layout.frame_size = get_frame_size ();
7060 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
7061 fatal_error ("total size of local variables exceeds architecture limit");
7063 if (!TARGET_PACKED_STACK)
7065 cfun_frame_layout.backchain_offset = 0;
7066 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
7067 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
7068 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
7069 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
7070 * UNITS_PER_WORD);
7072 else if (TARGET_BACKCHAIN) /* kernel stack layout */
7074 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
7075 - UNITS_PER_WORD);
7076 cfun_frame_layout.gprs_offset
7077 = (cfun_frame_layout.backchain_offset
7078 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
7079 * UNITS_PER_WORD);
7081 if (TARGET_64BIT)
7083 cfun_frame_layout.f4_offset
7084 = (cfun_frame_layout.gprs_offset
7085 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7087 cfun_frame_layout.f0_offset
7088 = (cfun_frame_layout.f4_offset
7089 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7091 else
7093 /* On 31 bit we have to take care of the alignment of the
7094 floating point regs to provide the fastest access. */
7095 cfun_frame_layout.f0_offset
7096 = ((cfun_frame_layout.gprs_offset
7097 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
7098 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7100 cfun_frame_layout.f4_offset
7101 = (cfun_frame_layout.f0_offset
7102 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7105 else /* no backchain */
7107 cfun_frame_layout.f4_offset
7108 = (STACK_POINTER_OFFSET
7109 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7111 cfun_frame_layout.f0_offset
7112 = (cfun_frame_layout.f4_offset
7113 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7115 cfun_frame_layout.gprs_offset
7116 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
7119 if (current_function_is_leaf
7120 && !TARGET_TPF_PROFILING
7121 && cfun_frame_layout.frame_size == 0
7122 && !cfun_save_high_fprs_p
7123 && !cfun->calls_alloca
7124 && !cfun->stdarg)
7125 return;
7127 if (!TARGET_PACKED_STACK)
7128 cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
7129 + crtl->outgoing_args_size
7130 + cfun_frame_layout.high_fprs * 8);
7131 else
7133 if (TARGET_BACKCHAIN)
7134 cfun_frame_layout.frame_size += UNITS_PER_WORD;
7136 /* No alignment trouble here because f8-f15 are only saved under
7137 64 bit. */
7138 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
7139 cfun_frame_layout.f4_offset),
7140 cfun_frame_layout.gprs_offset)
7141 - cfun_frame_layout.high_fprs * 8);
7143 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
7145 for (i = 0; i < 8; i++)
7146 if (cfun_fpr_bit_p (i))
7147 cfun_frame_layout.frame_size += 8;
7149 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
7151 /* If, under 31 bit, an odd number of gprs has to be saved, we have to adjust
7152 the frame size to maintain 8-byte alignment of stack frames. */
7153 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
7154 STACK_BOUNDARY / BITS_PER_UNIT - 1)
7155 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
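/* As a concrete example of the rounding above (illustration only): with
   STACK_BOUNDARY / BITS_PER_UNIT == 8 and a raw frame_size of 100 bytes,
   (100 + 7) & ~7 == 104, i.e. the size is rounded up to the next
   multiple of 8.  */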
7157 cfun_frame_layout.frame_size += crtl->outgoing_args_size;
7161 /* Generate frame layout. Fills in register and frame data for the current
7162 function in cfun->machine. This routine can be called multiple times;
7163 it will re-do the complete frame layout every time. */
7165 static void
7166 s390_init_frame_layout (void)
7168 HOST_WIDE_INT frame_size;
7169 int base_used;
7170 int clobbered_regs[16];
7172 /* On S/390 machines, we may need to perform branch splitting, which
7173 will require both the base and the return address register. We have
7174 no choice but to assume we're going to need them until right at the
7175 end of the machine-dependent reorg phase. */
7176 if (!TARGET_CPU_ZARCH)
7177 cfun->machine->split_branches_pending_p = true;
7181 frame_size = cfun_frame_layout.frame_size;
7183 /* Try to predict whether we'll need the base register. */
7184 base_used = cfun->machine->split_branches_pending_p
7185 || crtl->uses_const_pool
7186 || (!DISP_IN_RANGE (frame_size)
7187 && !CONST_OK_FOR_K (frame_size));
7189 /* Decide which register to use as literal pool base. In small
7190 leaf functions, try to use an unused call-clobbered register
7191 as base register to avoid save/restore overhead. */
7192 if (!base_used)
7193 cfun->machine->base_reg = NULL_RTX;
7194 else if (current_function_is_leaf && !df_regs_ever_live_p (5))
7195 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
7196 else
7197 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
7199 s390_register_info (clobbered_regs);
7200 s390_frame_info ();
7202 while (frame_size != cfun_frame_layout.frame_size);
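/* The loop above iterates to a fixed point: whether the base register is
   needed depends on the frame size (via DISP_IN_RANGE), while the frame
   size in turn depends on which registers -- including the base
   register -- have to be saved.  The iteration stops once a pass leaves
   cfun_frame_layout.frame_size unchanged.  */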
7205 /* Update frame layout. Recompute actual register save data based on
7206 current info and update regs_ever_live for the special registers.
7207 May be called multiple times, but may never cause *more* registers
7208 to be saved than s390_init_frame_layout allocated room for. */
7210 static void
7211 s390_update_frame_layout (void)
7213 int clobbered_regs[16];
7215 s390_register_info (clobbered_regs);
7217 df_set_regs_ever_live (BASE_REGNUM,
7218 clobbered_regs[BASE_REGNUM] ? true : false);
7219 df_set_regs_ever_live (RETURN_REGNUM,
7220 clobbered_regs[RETURN_REGNUM] ? true : false);
7221 df_set_regs_ever_live (STACK_POINTER_REGNUM,
7222 clobbered_regs[STACK_POINTER_REGNUM] ? true : false);
7224 if (cfun->machine->base_reg)
7225 df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
7228 /* Return true if it is legal to put a value with MODE into REGNO. */
7230 bool
7231 s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
7233 switch (REGNO_REG_CLASS (regno))
7235 case FP_REGS:
7236 if (REGNO_PAIR_OK (regno, mode))
7238 if (mode == SImode || mode == DImode)
7239 return true;
7241 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
7242 return true;
7244 break;
7245 case ADDR_REGS:
7246 if (FRAME_REGNO_P (regno) && mode == Pmode)
7247 return true;
7249 /* fallthrough */
7250 case GENERAL_REGS:
7251 if (REGNO_PAIR_OK (regno, mode))
7253 if (TARGET_64BIT
7254 || (mode != TFmode && mode != TCmode && mode != TDmode))
7255 return true;
7257 break;
7258 case CC_REGS:
7259 if (GET_MODE_CLASS (mode) == MODE_CC)
7260 return true;
7261 break;
7262 case ACCESS_REGS:
7263 if (REGNO_PAIR_OK (regno, mode))
7265 if (mode == SImode || mode == Pmode)
7266 return true;
7268 break;
7269 default:
7270 return false;
7273 return false;
7276 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7278 bool
7279 s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
7281 /* Once we've decided upon a register to use as base register, it must
7282 no longer be used for any other purpose. */
7283 if (cfun->machine->base_reg)
7284 if (REGNO (cfun->machine->base_reg) == old_reg
7285 || REGNO (cfun->machine->base_reg) == new_reg)
7286 return false;
7288 return true;
7291 /* Maximum number of registers to represent a value of mode MODE
7292 in a register of class RCLASS. */
7294 bool
7295 s390_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
7297 switch (rclass)
7299 case FP_REGS:
7300 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7301 return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
7302 else
7303 return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
7304 case ACCESS_REGS:
7305 return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
7306 default:
7307 break;
7309 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
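/* Examples of the computation above (illustrative): a TFmode value
   (16 bytes) in FP_REGS needs (16 + 7) / 8 == 2 registers, and a complex
   DFmode value (two 8-byte parts) likewise needs
   2 * ((16 / 2 + 7) / 8) == 2.  */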
7312 /* Return true if register FROM can be eliminated via register TO. */
7314 bool
7315 s390_can_eliminate (int from, int to)
7317 /* On zSeries machines, we have not marked the base register as fixed.
7318 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
7319 If a function requires the base register, we say here that this
7320 elimination cannot be performed. This will cause reload to free
7321 up the base register (as if it were fixed). On the other hand,
7322 if the current function does *not* require the base register, we
7323 say here the elimination succeeds, which in turn allows reload
7324 to allocate the base register for any other purpose. */
7325 if (from == BASE_REGNUM && to == BASE_REGNUM)
7327 if (TARGET_CPU_ZARCH)
7329 s390_init_frame_layout ();
7330 return cfun->machine->base_reg == NULL_RTX;
7333 return false;
7336 /* Everything else must point into the stack frame. */
7337 gcc_assert (to == STACK_POINTER_REGNUM
7338 || to == HARD_FRAME_POINTER_REGNUM);
7340 gcc_assert (from == FRAME_POINTER_REGNUM
7341 || from == ARG_POINTER_REGNUM
7342 || from == RETURN_ADDRESS_POINTER_REGNUM);
7344 /* Make sure we actually saved the return address. */
7345 if (from == RETURN_ADDRESS_POINTER_REGNUM)
7346 if (!crtl->calls_eh_return
7347 && !cfun->stdarg
7348 && !cfun_frame_layout.save_return_addr_p)
7349 return false;
7351 return true;
7354 /* Return the offset between registers FROM and TO initially after the prologue. */
7356 HOST_WIDE_INT
7357 s390_initial_elimination_offset (int from, int to)
7359 HOST_WIDE_INT offset;
7360 int index;
7362 /* ??? Why are we called for non-eliminable pairs? */
7363 if (!s390_can_eliminate (from, to))
7364 return 0;
7366 switch (from)
7368 case FRAME_POINTER_REGNUM:
7369 offset = (get_frame_size()
7370 + STACK_POINTER_OFFSET
7371 + crtl->outgoing_args_size);
7372 break;
7374 case ARG_POINTER_REGNUM:
7375 s390_init_frame_layout ();
7376 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
7377 break;
7379 case RETURN_ADDRESS_POINTER_REGNUM:
7380 s390_init_frame_layout ();
7381 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
7382 gcc_assert (index >= 0);
7383 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
7384 offset += index * UNITS_PER_WORD;
7385 break;
7387 case BASE_REGNUM:
7388 offset = 0;
7389 break;
7391 default:
7392 gcc_unreachable ();
7395 return offset;
7398 /* Emit insn to save fpr REGNUM at offset OFFSET relative
7399 to register BASE. Return generated insn. */
7401 static rtx
7402 save_fpr (rtx base, int offset, int regnum)
7404 rtx addr;
7405 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
7407 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
7408 set_mem_alias_set (addr, get_varargs_alias_set ());
7409 else
7410 set_mem_alias_set (addr, get_frame_alias_set ());
7412 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
7415 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
7416 to register BASE. Return generated insn. */
7418 static rtx
7419 restore_fpr (rtx base, int offset, int regnum)
7421 rtx addr;
7422 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
7423 set_mem_alias_set (addr, get_frame_alias_set ());
7425 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
7428 /* Generate insn to save registers FIRST to LAST into
7429 the register save area located at offset OFFSET
7430 relative to register BASE. */
7432 static rtx
7433 save_gprs (rtx base, int offset, int first, int last)
7435 rtx addr, insn, note;
7436 int i;
7438 addr = plus_constant (base, offset);
7439 addr = gen_rtx_MEM (Pmode, addr);
7441 set_mem_alias_set (addr, get_frame_alias_set ());
7443 /* Special-case single register. */
7444 if (first == last)
7446 if (TARGET_64BIT)
7447 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
7448 else
7449 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
7451 RTX_FRAME_RELATED_P (insn) = 1;
7452 return insn;
7456 insn = gen_store_multiple (addr,
7457 gen_rtx_REG (Pmode, first),
7458 GEN_INT (last - first + 1));
7460 if (first <= 6 && cfun->stdarg)
7461 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7463 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
7465 if (first + i <= 6)
7466 set_mem_alias_set (mem, get_varargs_alias_set ());
7469 /* We need to set the FRAME_RELATED flag on all SETs
7470 inside the store-multiple pattern.
7472 However, we must not emit DWARF records for registers 2..5
7473 if they are stored for use by variable arguments ...
7475 ??? Unfortunately, it is not enough to simply not set the
7476 FRAME_RELATED flags for those SETs, because the first SET
7477 of the PARALLEL is always treated as if it had the flag
7478 set, even if it does not. Therefore we emit a new pattern
7479 without those registers as REG_FRAME_RELATED_EXPR note. */
7481 if (first >= 6)
7483 rtx pat = PATTERN (insn);
7485 for (i = 0; i < XVECLEN (pat, 0); i++)
7486 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
7487 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
7489 RTX_FRAME_RELATED_P (insn) = 1;
7491 else if (last >= 6)
7493 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
7494 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
7495 gen_rtx_REG (Pmode, 6),
7496 GEN_INT (last - 6 + 1));
7497 note = PATTERN (note);
7499 REG_NOTES (insn) =
7500 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7501 note, REG_NOTES (insn));
7503 for (i = 0; i < XVECLEN (note, 0); i++)
7504 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
7505 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
7507 RTX_FRAME_RELATED_P (insn) = 1;
7510 return insn;
7513 /* Generate insn to restore registers FIRST to LAST from
7514 the register save area located at offset OFFSET
7515 relative to register BASE. */
7517 static rtx
7518 restore_gprs (rtx base, int offset, int first, int last)
7520 rtx addr, insn;
7522 addr = plus_constant (base, offset);
7523 addr = gen_rtx_MEM (Pmode, addr);
7524 set_mem_alias_set (addr, get_frame_alias_set ());
7526 /* Special-case single register. */
7527 if (first == last)
7529 if (TARGET_64BIT)
7530 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
7531 else
7532 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
7534 return insn;
7537 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
7538 addr,
7539 GEN_INT (last - first + 1));
7540 return insn;
7543 /* Return insn sequence to load the GOT register. */
7545 static GTY(()) rtx got_symbol;
7547 s390_load_got (void)
7549 rtx insns;
7551 if (!got_symbol)
7553 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7554 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
7557 start_sequence ();
7559 if (TARGET_CPU_ZARCH)
7561 emit_move_insn (pic_offset_table_rtx, got_symbol);
7563 else
7565 rtx offset;
7567 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
7568 UNSPEC_LTREL_OFFSET);
7569 offset = gen_rtx_CONST (Pmode, offset);
7570 offset = force_const_mem (Pmode, offset);
7572 emit_move_insn (pic_offset_table_rtx, offset);
7574 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
7575 UNSPEC_LTREL_BASE);
7576 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
7578 emit_move_insn (pic_offset_table_rtx, offset);
7581 insns = get_insns ();
7582 end_sequence ();
7583 return insns;
7586 /* This ties together stack memory (MEM with an alias set of frame_alias_set)
7587 and the change to the stack pointer. */
7589 static void
7590 s390_emit_stack_tie (void)
7592 rtx mem = gen_frame_mem (BLKmode,
7593 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
7595 emit_insn (gen_stack_tie (mem));
7598 /* Expand the prologue into a bunch of separate insns. */
7600 void
7601 s390_emit_prologue (void)
7603 rtx insn, addr;
7604 rtx temp_reg;
7605 int i;
7606 int offset;
7607 int next_fpr = 0;
7609 /* Complete frame layout. */
7611 s390_update_frame_layout ();
7613 /* Annotate all constant pool references to let the scheduler know
7614 they implicitly use the base register. */
7616 push_topmost_sequence ();
7618 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7619 if (INSN_P (insn))
7621 annotate_constant_pool_refs (&PATTERN (insn));
7622 df_insn_rescan (insn);
7625 pop_topmost_sequence ();
7627 /* Choose the best register to use as a temporary within the prologue.
7628 See below for why TPF must use register 1. */
7630 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
7631 && !current_function_is_leaf
7632 && !TARGET_TPF_PROFILING)
7633 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7634 else
7635 temp_reg = gen_rtx_REG (Pmode, 1);
7637 /* Save call saved gprs. */
7638 if (cfun_frame_layout.first_save_gpr != -1)
7640 insn = save_gprs (stack_pointer_rtx,
7641 cfun_frame_layout.gprs_offset +
7642 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr
7643 - cfun_frame_layout.first_save_gpr_slot),
7644 cfun_frame_layout.first_save_gpr,
7645 cfun_frame_layout.last_save_gpr);
7646 emit_insn (insn);
7649 /* Dummy insn to mark literal pool slot. */
7651 if (cfun->machine->base_reg)
7652 emit_insn (gen_main_pool (cfun->machine->base_reg));
7654 offset = cfun_frame_layout.f0_offset;
7656 /* Save f0 and f2. */
7657 for (i = 0; i < 2; i++)
7659 if (cfun_fpr_bit_p (i))
7661 save_fpr (stack_pointer_rtx, offset, i + 16);
7662 offset += 8;
7664 else if (!TARGET_PACKED_STACK)
7665 offset += 8;
7668 /* Save f4 and f6. */
7669 offset = cfun_frame_layout.f4_offset;
7670 for (i = 2; i < 4; i++)
7672 if (cfun_fpr_bit_p (i))
7674 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7675 offset += 8;
7677 /* If f4 and f6 are call clobbered, they are saved only because of stdarg
7678 and are therefore not frame related. */
7679 if (!call_really_used_regs[i + 16])
7680 RTX_FRAME_RELATED_P (insn) = 1;
7682 else if (!TARGET_PACKED_STACK)
7683 offset += 8;
7686 if (TARGET_PACKED_STACK
7687 && cfun_save_high_fprs_p
7688 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
7690 offset = (cfun_frame_layout.f8_offset
7691 + (cfun_frame_layout.high_fprs - 1) * 8);
7693 for (i = 15; i > 7 && offset >= 0; i--)
7694 if (cfun_fpr_bit_p (i))
7696 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7698 RTX_FRAME_RELATED_P (insn) = 1;
7699 offset -= 8;
7701 if (offset >= cfun_frame_layout.f8_offset)
7702 next_fpr = i + 16;
7705 if (!TARGET_PACKED_STACK)
7706 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
7708 /* Decrement stack pointer. */
7710 if (cfun_frame_layout.frame_size > 0)
7712 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
7714 if (s390_stack_size)
7716 HOST_WIDE_INT stack_guard;
7718 if (s390_stack_guard)
7719 stack_guard = s390_stack_guard;
7720 else
7722 /* If no value for the stack guard is provided, the smallest power of 2
7723 larger than the current frame size is chosen. */
7724 stack_guard = 1;
7725 while (stack_guard < cfun_frame_layout.frame_size)
7726 stack_guard <<= 1;
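/* For illustration: with a frame size of, say, 0x4100 bytes and no
   -mstack-guard value given, the loop above doubles stack_guard
   1 -> 2 -> 4 -> ... until it reaches 0x8000, the smallest power of two
   larger than the frame size.  */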
7729 if (cfun_frame_layout.frame_size >= s390_stack_size)
7731 warning (0, "frame size of function %qs is "
7732 HOST_WIDE_INT_PRINT_DEC
7733 " bytes exceeding user provided stack limit of "
7734 HOST_WIDE_INT_PRINT_DEC " bytes. "
7735 "An unconditional trap is added.",
7736 current_function_name(), cfun_frame_layout.frame_size,
7737 s390_stack_size);
7738 emit_insn (gen_trap ());
7740 else
7742 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
7743 & ~(stack_guard - 1));
7744 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
7745 GEN_INT (stack_check_mask));
7746 if (TARGET_64BIT)
7747 gen_cmpdi (t, const0_rtx);
7748 else
7749 gen_cmpsi (t, const0_rtx);
7751 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
7752 gen_rtx_REG (CCmode,
7753 CC_REGNUM),
7754 const0_rtx),
7755 const0_rtx));
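/* A sketch of the mask computed above, assuming -mstack-size=65536 and
   -mstack-guard=4096 (hypothetical values): stack_check_mask is
   0xffff & ~0xfff == 0xf000, and the conditional trap fires when
   stack_pointer & 0xf000 == 0, i.e. when the stack pointer modulo the
   stack size has fallen below the guard value.  */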
7759 if (s390_warn_framesize > 0
7760 && cfun_frame_layout.frame_size >= s390_warn_framesize)
7761 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
7762 current_function_name (), cfun_frame_layout.frame_size);
7764 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
7765 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
7767 /* Save incoming stack pointer into temp reg. */
7768 if (TARGET_BACKCHAIN || next_fpr)
7769 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
7771 /* Subtract frame size from stack pointer. */
7773 if (DISP_IN_RANGE (INTVAL (frame_off)))
7775 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7776 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7777 frame_off));
7778 insn = emit_insn (insn);
7780 else
7782 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7783 frame_off = force_const_mem (Pmode, frame_off);
7785 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
7786 annotate_constant_pool_refs (&PATTERN (insn));
7789 RTX_FRAME_RELATED_P (insn) = 1;
7790 REG_NOTES (insn) =
7791 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7792 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7793 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7794 GEN_INT (-cfun_frame_layout.frame_size))),
7795 REG_NOTES (insn));
7797 /* Set backchain. */
7799 if (TARGET_BACKCHAIN)
7801 if (cfun_frame_layout.backchain_offset)
7802 addr = gen_rtx_MEM (Pmode,
7803 plus_constant (stack_pointer_rtx,
7804 cfun_frame_layout.backchain_offset));
7805 else
7806 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
7807 set_mem_alias_set (addr, get_frame_alias_set ());
7808 insn = emit_insn (gen_move_insn (addr, temp_reg));
7811 /* If we support asynchronous exceptions (e.g. for Java),
7812 we need to make sure the backchain pointer is set up
7813 before any possibly trapping memory access. */
7815 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
7817 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
7818 emit_clobber (addr);
7822 /* Save fprs 8 - 15 (64 bit ABI). */
7824 if (cfun_save_high_fprs_p && next_fpr)
7826 /* If the stack might be accessed through a different register,
7827 we have to make sure that the stack pointer decrement is not
7828 moved below the use of the stack slots. */
7829 s390_emit_stack_tie ();
7831 insn = emit_insn (gen_add2_insn (temp_reg,
7832 GEN_INT (cfun_frame_layout.f8_offset)));
7834 offset = 0;
7836 for (i = 24; i <= next_fpr; i++)
7837 if (cfun_fpr_bit_p (i - 16))
7839 rtx addr = plus_constant (stack_pointer_rtx,
7840 cfun_frame_layout.frame_size
7841 + cfun_frame_layout.f8_offset
7842 + offset);
7844 insn = save_fpr (temp_reg, offset, i);
7845 offset += 8;
7846 RTX_FRAME_RELATED_P (insn) = 1;
7847 REG_NOTES (insn) =
7848 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7849 gen_rtx_SET (VOIDmode,
7850 gen_rtx_MEM (DFmode, addr),
7851 gen_rtx_REG (DFmode, i)),
7852 REG_NOTES (insn));
7856 /* Set frame pointer, if needed. */
7858 if (frame_pointer_needed)
7860 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
7861 RTX_FRAME_RELATED_P (insn) = 1;
7864 /* Set up got pointer, if needed. */
7866 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
7868 rtx insns = s390_load_got ();
7870 for (insn = insns; insn; insn = NEXT_INSN (insn))
7871 annotate_constant_pool_refs (&PATTERN (insn));
7873 emit_insn (insns);
7876 if (TARGET_TPF_PROFILING)
7878 /* Generate a BAS instruction to serve as a function
7879 entry intercept to facilitate the use of tracing
7880 algorithms located at the branch target. */
7881 emit_insn (gen_prologue_tpf ());
7883 /* Emit a blockage here so that all code
7884 lies between the profiling mechanisms. */
7885 emit_insn (gen_blockage ());
7889 /* Expand the epilogue into a bunch of separate insns. */
7891 void
7892 s390_emit_epilogue (bool sibcall)
7894 rtx frame_pointer, return_reg;
7895 int area_bottom, area_top, offset = 0;
7896 int next_offset;
7897 rtvec p;
7898 int i;
7900 if (TARGET_TPF_PROFILING)
7903 /* Generate a BAS instruction to serve as a function
7904 exit intercept to facilitate the use of tracing
7905 algorithms located at the branch target. */
7907 /* Emit a blockage here so that all code
7908 lies between the profiling mechanisms. */
7909 emit_insn (gen_blockage ());
7911 emit_insn (gen_epilogue_tpf ());
7914 /* Check whether to use frame or stack pointer for restore. */
7916 frame_pointer = (frame_pointer_needed
7917 ? hard_frame_pointer_rtx : stack_pointer_rtx);
7919 s390_frame_area (&area_bottom, &area_top);
7921 /* Check whether we can access the register save area.
7922 If not, increment the frame pointer as required. */
7924 if (area_top <= area_bottom)
7926 /* Nothing to restore. */
7928 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
7929 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
7931 /* Area is in range. */
7932 offset = cfun_frame_layout.frame_size;
7934 else
7936 rtx insn, frame_off;
7938 offset = area_bottom < 0 ? -area_bottom : 0;
7939 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
7941 if (DISP_IN_RANGE (INTVAL (frame_off)))
7943 insn = gen_rtx_SET (VOIDmode, frame_pointer,
7944 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7945 insn = emit_insn (insn);
7947 else
7949 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7950 frame_off = force_const_mem (Pmode, frame_off);
7952 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
7953 annotate_constant_pool_refs (&PATTERN (insn));
7957 /* Restore call saved fprs. */
7959 if (TARGET_64BIT)
7961 if (cfun_save_high_fprs_p)
7963 next_offset = cfun_frame_layout.f8_offset;
7964 for (i = 24; i < 32; i++)
7966 if (cfun_fpr_bit_p (i - 16))
7968 restore_fpr (frame_pointer,
7969 offset + next_offset, i);
7970 next_offset += 8;
7976 else
7978 next_offset = cfun_frame_layout.f4_offset;
7979 for (i = 18; i < 20; i++)
7981 if (cfun_fpr_bit_p (i - 16))
7983 restore_fpr (frame_pointer,
7984 offset + next_offset, i);
7985 next_offset += 8;
7987 else if (!TARGET_PACKED_STACK)
7988 next_offset += 8;
7993 /* Return register. */
7995 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7997 /* Restore call saved gprs. */
7999 if (cfun_frame_layout.first_restore_gpr != -1)
8001 rtx insn, addr;
8002 int i;
8004 /* Check for global registers and save them
8005 to the stack locations from which they will be reloaded. */
8007 for (i = cfun_frame_layout.first_restore_gpr;
8008 i <= cfun_frame_layout.last_restore_gpr;
8009 i++)
8011 /* These registers are special and need to be
8012 restored in any case. */
8013 if (i == STACK_POINTER_REGNUM
8014 || i == RETURN_REGNUM
8015 || i == BASE_REGNUM
8016 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
8017 continue;
8019 if (global_regs[i])
8021 addr = plus_constant (frame_pointer,
8022 offset + cfun_frame_layout.gprs_offset
8023 + (i - cfun_frame_layout.first_save_gpr_slot)
8024 * UNITS_PER_WORD);
8025 addr = gen_rtx_MEM (Pmode, addr);
8026 set_mem_alias_set (addr, get_frame_alias_set ());
8027 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
8031 if (! sibcall)
8033 /* Fetch the return address from the stack before the load multiple;
8034 this helps scheduling. */
8036 if (cfun_frame_layout.save_return_addr_p
8037 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
8038 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
8040 int return_regnum = find_unused_clobbered_reg();
8041 if (!return_regnum)
8042 return_regnum = 4;
8043 return_reg = gen_rtx_REG (Pmode, return_regnum);
8045 addr = plus_constant (frame_pointer,
8046 offset + cfun_frame_layout.gprs_offset
8047 + (RETURN_REGNUM
8048 - cfun_frame_layout.first_save_gpr_slot)
8049 * UNITS_PER_WORD);
8050 addr = gen_rtx_MEM (Pmode, addr);
8051 set_mem_alias_set (addr, get_frame_alias_set ());
8052 emit_move_insn (return_reg, addr);
8056 insn = restore_gprs (frame_pointer,
8057 offset + cfun_frame_layout.gprs_offset
8058 + (cfun_frame_layout.first_restore_gpr
8059 - cfun_frame_layout.first_save_gpr_slot)
8060 * UNITS_PER_WORD,
8061 cfun_frame_layout.first_restore_gpr,
8062 cfun_frame_layout.last_restore_gpr);
8063 emit_insn (insn);
8066 if (! sibcall)
8069 /* Return to caller. */
8071 p = rtvec_alloc (2);
8073 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
8074 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
8075 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
8080 /* Return the size in bytes of a function argument of
8081 type TYPE and/or mode MODE. At least one of TYPE or
8082 MODE must be specified. */
8084 static int
8085 s390_function_arg_size (enum machine_mode mode, const_tree type)
8087 if (type)
8088 return int_size_in_bytes (type);
8090 /* No type info available for some library calls ... */
8091 if (mode != BLKmode)
8092 return GET_MODE_SIZE (mode);
8094 /* If we have neither type nor mode, abort. */
8095 gcc_unreachable ();
8098 /* Return true if a function argument of type TYPE and mode MODE
8099 is to be passed in a floating-point register, if available. */
8101 static bool
8102 s390_function_arg_float (enum machine_mode mode, tree type)
8104 int size = s390_function_arg_size (mode, type);
8105 if (size > 8)
8106 return false;
8108 /* Soft-float changes the ABI: no floating-point registers are used. */
8109 if (TARGET_SOFT_FLOAT)
8110 return false;
8112 /* No type info available for some library calls ... */
8113 if (!type)
8114 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
8116 /* The ABI says that record types with a single member are treated
8117 just like that member would be. */
8118 while (TREE_CODE (type) == RECORD_TYPE)
8120 tree field, single = NULL_TREE;
8122 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
8124 if (TREE_CODE (field) != FIELD_DECL)
8125 continue;
8127 if (single == NULL_TREE)
8128 single = TREE_TYPE (field);
8129 else
8130 return false;
8133 if (single == NULL_TREE)
8134 return false;
8135 else
8136 type = single;
8139 return TREE_CODE (type) == REAL_TYPE;
8142 /* Return true if a function argument of type TYPE and mode MODE
8143 is to be passed in an integer register, or a pair of integer
8144 registers, if available. */
8146 static bool
8147 s390_function_arg_integer (enum machine_mode mode, tree type)
8149 int size = s390_function_arg_size (mode, type);
8150 if (size > 8)
8151 return false;
8153 /* No type info available for some library calls ... */
8154 if (!type)
8155 return GET_MODE_CLASS (mode) == MODE_INT
8156 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
8158 /* We accept small integral (and similar) types. */
8159 if (INTEGRAL_TYPE_P (type)
8160 || POINTER_TYPE_P (type)
8161 || TREE_CODE (type) == OFFSET_TYPE
8162 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
8163 return true;
8165 /* We also accept structs of size 1, 2, 4, 8 that are not
8166 passed in floating-point registers. */
8167 if (AGGREGATE_TYPE_P (type)
8168 && exact_log2 (size) >= 0
8169 && !s390_function_arg_float (mode, type))
8170 return true;
8172 return false;
8175 /* Return 1 if a function argument of type TYPE and mode MODE
8176 is to be passed by reference. The ABI specifies that only
8177 structures of size 1, 2, 4, or 8 bytes are passed by value,
8178 all other structures (and complex numbers) are passed by
8179 reference. */
8181 static bool
8182 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
8183 enum machine_mode mode, const_tree type,
8184 bool named ATTRIBUTE_UNUSED)
8186 int size = s390_function_arg_size (mode, type);
8187 if (size > 8)
8188 return true;
8190 if (type)
8192 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
8193 return 1;
8195 if (TREE_CODE (type) == COMPLEX_TYPE
8196 || TREE_CODE (type) == VECTOR_TYPE)
8197 return 1;
8200 return 0;
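/* Two quick examples of the rule above (illustrative): a struct of
   3 bytes has no exact power-of-two size (exact_log2 (3) < 0) and is
   therefore passed by reference, while an 8-byte struct is passed by
   value in registers or on the stack.  */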
8203 /* Update the data in CUM to advance over an argument of mode MODE and
8204 data type TYPE. (TYPE is null for libcalls where that information
8205 may not be available.) The boolean NAMED specifies whether the
8206 argument is a named argument (as opposed to an unnamed argument
8207 matching an ellipsis). */
8209 void
8210 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
8211 tree type, int named ATTRIBUTE_UNUSED)
8213 if (s390_function_arg_float (mode, type))
8215 cum->fprs += 1;
8217 else if (s390_function_arg_integer (mode, type))
8219 int size = s390_function_arg_size (mode, type);
8220 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
8222 else
8223 gcc_unreachable ();
8226 /* Define where to put the arguments to a function.
8227 Value is zero to push the argument on the stack,
8228 or a hard register in which to store the argument.
8230 MODE is the argument's machine mode.
8231 TYPE is the data type of the argument (as a tree).
8232 This is null for libcalls where that information may
8233 not be available.
8234 CUM is a variable of type CUMULATIVE_ARGS which gives info about
8235 the preceding args and about the function being called.
8236 NAMED is nonzero if this argument is a named parameter
8237 (otherwise it is an extra parameter matching an ellipsis).
8239 On S/390, we use general purpose registers 2 through 6 to
8240 pass integer, pointer, and certain structure arguments, and
8241 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
8242 to pass floating point arguments. All remaining arguments
8243 are pushed to the stack. */
8246 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
8247 int named ATTRIBUTE_UNUSED)
8249 if (s390_function_arg_float (mode, type))
8251 if (cum->fprs + 1 > FP_ARG_NUM_REG)
8252 return 0;
8253 else
8254 return gen_rtx_REG (mode, cum->fprs + 16);
8256 else if (s390_function_arg_integer (mode, type))
8258 int size = s390_function_arg_size (mode, type);
8259 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
8261 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
8262 return 0;
8263 else
8264 return gen_rtx_REG (mode, cum->gprs + 2);
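/* For instance (a sketch): on a 31-bit target an 8-byte integer argument
   has n_gprs == 2, so with cum->gprs == 0 it is passed in the register
   pair r2/r3; if fewer than two of r2-r6 remain, it goes to the stack
   instead (return value 0).  */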
8267 /* After the real arguments, expand_call calls us once again
8268 with a void_type_node type. Whatever we return here is
8269 passed as operand 2 to the call expanders.
8271 We don't need this feature ... */
8272 else if (type == void_type_node)
8273 return const0_rtx;
8275 gcc_unreachable ();
8278 /* Return true if return values of type TYPE should be returned
8279 in a memory buffer whose address is passed by the caller as
8280 hidden first argument. */
8282 static bool
8283 s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
8285 /* We accept small integral (and similar) types. */
8286 if (INTEGRAL_TYPE_P (type)
8287 || POINTER_TYPE_P (type)
8288 || TREE_CODE (type) == OFFSET_TYPE
8289 || TREE_CODE (type) == REAL_TYPE)
8290 return int_size_in_bytes (type) > 8;
8292 /* Aggregates and similar constructs are always returned
8293 in memory. */
8294 if (AGGREGATE_TYPE_P (type)
8295 || TREE_CODE (type) == COMPLEX_TYPE
8296 || TREE_CODE (type) == VECTOR_TYPE)
8297 return true;
8299 /* ??? We get called on all sorts of random stuff from
8300 aggregate_value_p. We can't abort, but it's not clear
8301 what's safe to return. Pretend it's a struct, I guess. */
8302 return true;
8305 /* Define where to return a (scalar) value of type TYPE.
8306 If TYPE is null, define where to return a (scalar)
8307 value of mode MODE from a libcall. */
8310 s390_function_value (const_tree type, enum machine_mode mode)
8312 if (type)
8314 int unsignedp = TYPE_UNSIGNED (type);
8315 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
8318 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
8319 gcc_assert (GET_MODE_SIZE (mode) <= 8);
8321 if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
8322 return gen_rtx_REG (mode, 16);
8323 else
8324 return gen_rtx_REG (mode, 2);
8328 /* Create and return the va_list datatype.
8330 On S/390, va_list is an array type equivalent to
8332 typedef struct __va_list_tag
8334 long __gpr;
8335 long __fpr;
8336 void *__overflow_arg_area;
8337 void *__reg_save_area;
8338 } va_list[1];
8340 where __gpr and __fpr hold the number of general purpose
8341 or floating point arguments used up to now, respectively,
8342 __overflow_arg_area points to the stack location of the
8343 next argument passed on the stack, and __reg_save_area
8344 always points to the start of the register area in the
8345 call frame of the current function. The function prologue
8346 saves all registers used for argument passing into this
8347 area if the function uses variable arguments. */
8349 static tree
8350 s390_build_builtin_va_list (void)
8352 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
8354 record = lang_hooks.types.make_type (RECORD_TYPE);
8356 type_decl =
8357 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
8359 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
8360 long_integer_type_node);
8361 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
8362 long_integer_type_node);
8363 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
8364 ptr_type_node);
8365 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
8366 ptr_type_node);
8368 va_list_gpr_counter_field = f_gpr;
8369 va_list_fpr_counter_field = f_fpr;
8371 DECL_FIELD_CONTEXT (f_gpr) = record;
8372 DECL_FIELD_CONTEXT (f_fpr) = record;
8373 DECL_FIELD_CONTEXT (f_ovf) = record;
8374 DECL_FIELD_CONTEXT (f_sav) = record;
8376 TREE_CHAIN (record) = type_decl;
8377 TYPE_NAME (record) = type_decl;
8378 TYPE_FIELDS (record) = f_gpr;
8379 TREE_CHAIN (f_gpr) = f_fpr;
8380 TREE_CHAIN (f_fpr) = f_ovf;
8381 TREE_CHAIN (f_ovf) = f_sav;
8383 layout_type (record);
8385 /* The correct type is an array type of one element. */
8386 return build_array_type (record, build_index_type (size_zero_node));
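/* For illustration (a rough sketch, standard C behaviour of the type
   built above): because va_list is an array of one structure element,
   declaring "va_list ap" creates the structure itself, while passing
   "ap" to another function decays to a pointer to it, as the usual
   va_list conventions require:

     void vlog (const char *fmt, va_list ap);   receives a pointer
     void log (const char *fmt, ...)
     {
       va_list ap;                              the struct itself
       va_start (ap, fmt);
       vlog (fmt, ap);
       va_end (ap);
     }  */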
8389 /* Implement va_start by filling the va_list structure VALIST.
8390 STDARG_P is always true, and ignored.
8391 NEXTARG points to the first anonymous stack argument.
8393 The following global variables are used to initialize
8394 the va_list structure:
8396 crtl->args.info:
8397 holds number of gprs and fprs used for named arguments.
8398 crtl->args.arg_offset_rtx:
8399 holds the offset of the first anonymous stack argument
8400 (relative to the virtual arg pointer). */
8402 static void
8403 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
8405 HOST_WIDE_INT n_gpr, n_fpr;
8406 int off;
8407 tree f_gpr, f_fpr, f_ovf, f_sav;
8408 tree gpr, fpr, ovf, sav, t;
8410 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8411 f_fpr = TREE_CHAIN (f_gpr);
8412 f_ovf = TREE_CHAIN (f_fpr);
8413 f_sav = TREE_CHAIN (f_ovf);
8415 valist = build_va_arg_indirect_ref (valist);
8416 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8417 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8418 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8419 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8421 /* Count number of gp and fp argument registers used. */
8423 n_gpr = crtl->args.info.gprs;
8424 n_fpr = crtl->args.info.fprs;
8426 if (cfun->va_list_gpr_size)
8428 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
8429 build_int_cst (NULL_TREE, n_gpr));
8430 TREE_SIDE_EFFECTS (t) = 1;
8431 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8434 if (cfun->va_list_fpr_size)
8436 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
8437 build_int_cst (NULL_TREE, n_fpr));
8438 TREE_SIDE_EFFECTS (t) = 1;
8439 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8442 /* Find the overflow area. */
8443 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
8444 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
8446 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
8448 off = INTVAL (crtl->args.arg_offset_rtx);
8449 off = off < 0 ? 0 : off;
8450 if (TARGET_DEBUG_ARG)
8451 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
8452 (int)n_gpr, (int)n_fpr, off);
8454 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
8456 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
8457 TREE_SIDE_EFFECTS (t) = 1;
8458 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8461 /* Find the register save area. */
8462 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
8463 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
8465 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
8466 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
8467 size_int (-RETURN_REGNUM * UNITS_PER_WORD));
8469 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
8470 TREE_SIDE_EFFECTS (t) = 1;
8471 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
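/* For illustration (a rough sketch of the code emitted above, written
   as C over the __va_list_tag fields; RETURN_REGNUM is 14 and
   UNITS_PER_WORD is 4 or 8 depending on -m31/-m64):

     ap->__gpr = <gprs used by named args>;
     ap->__fpr = <fprs used by named args>;
     ap->__overflow_arg_area = incoming_args + <offset of first anon arg>;
     ap->__reg_save_area = return_address_pointer - 14 * UNITS_PER_WORD;  */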
8475 /* Implement va_arg by updating the va_list structure
8476 VALIST as required to retrieve an argument of type
8477 TYPE, and returning that argument.
8479 Generates code equivalent to:
8481 if (integral value) {
8482 if (size <= 4 && args.gpr < 5 ||
8483 size > 4 && args.gpr < 4 )
8484 ret = args.reg_save_area[args.gpr+8]
8485 else
8486 ret = *args.overflow_arg_area++;
8487 } else if (float value) {
8488 if (args.fpr < 2)
8489 ret = args.reg_save_area[args.fpr+64]
8490 else
8491 ret = *args.overflow_arg_area++;
8492 } else if (aggregate value) {
8493 if (args.gpr < 5)
8494 ret = *args.reg_save_area[args.gpr]
8495 else
8496 ret = **args.overflow_arg_area++;
8497 } */
8499 static tree
8500 s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
8501 gimple_seq *post_p ATTRIBUTE_UNUSED)
8503 tree f_gpr, f_fpr, f_ovf, f_sav;
8504 tree gpr, fpr, ovf, sav, reg, t, u;
8505 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
8506 tree lab_false, lab_over, addr;
8508 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8509 f_fpr = TREE_CHAIN (f_gpr);
8510 f_ovf = TREE_CHAIN (f_fpr);
8511 f_sav = TREE_CHAIN (f_ovf);
8513 valist = build_va_arg_indirect_ref (valist);
8514 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8515 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8516 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8518 /* The tree for args* cannot be shared between gpr/fpr and ovf since
8519 both appear on a lhs. */
8520 valist = unshare_expr (valist);
8521 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8523 size = int_size_in_bytes (type);
8525 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
8527 if (TARGET_DEBUG_ARG)
8529 fprintf (stderr, "va_arg: aggregate type");
8530 debug_tree (type);
8533 /* Aggregates are passed by reference. */
8534 indirect_p = 1;
8535 reg = gpr;
8536 n_reg = 1;
8538 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8539 will be added by s390_frame_info, because for va_args an even
8540 number of gprs always has to be saved (r15-r2 = 14 regs). */
8541 sav_ofs = 2 * UNITS_PER_WORD;
8542 sav_scale = UNITS_PER_WORD;
8543 size = UNITS_PER_WORD;
8544 max_reg = GP_ARG_NUM_REG - n_reg;
8546 else if (s390_function_arg_float (TYPE_MODE (type), type))
8548 if (TARGET_DEBUG_ARG)
8550 fprintf (stderr, "va_arg: float type");
8551 debug_tree (type);
8554 /* FP args go in FP registers, if present. */
8555 indirect_p = 0;
8556 reg = fpr;
8557 n_reg = 1;
8558 sav_ofs = 16 * UNITS_PER_WORD;
8559 sav_scale = 8;
8560 max_reg = FP_ARG_NUM_REG - n_reg;
8562 else
8564 if (TARGET_DEBUG_ARG)
8566 fprintf (stderr, "va_arg: other type");
8567 debug_tree (type);
8570 /* Otherwise into GP registers. */
8571 indirect_p = 0;
8572 reg = gpr;
8573 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
8575 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8576 will be added by s390_frame_info, because for va_args an even
8577 number of gprs always has to be saved (r15-r2 = 14 regs). */
8578 sav_ofs = 2 * UNITS_PER_WORD;
8580 if (size < UNITS_PER_WORD)
8581 sav_ofs += UNITS_PER_WORD - size;
8583 sav_scale = UNITS_PER_WORD;
8584 max_reg = GP_ARG_NUM_REG - n_reg;
8587 /* Pull the value out of the saved registers ... */
8589 lab_false = create_artificial_label ();
8590 lab_over = create_artificial_label ();
8591 addr = create_tmp_var (ptr_type_node, "addr");
8592 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
8594 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
8595 t = build2 (GT_EXPR, boolean_type_node, reg, t);
8596 u = build1 (GOTO_EXPR, void_type_node, lab_false);
8597 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
8598 gimplify_and_add (t, pre_p);
8600 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
8601 size_int (sav_ofs));
8602 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
8603 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
8604 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
8606 gimplify_assign (addr, t, pre_p);
8608 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
8610 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
8613 /* ... Otherwise out of the overflow area. */
8615 t = ovf;
8616 if (size < UNITS_PER_WORD)
8617 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8618 size_int (UNITS_PER_WORD - size));
8620 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
8622 gimplify_assign (addr, t, pre_p);
8624 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8625 size_int (size));
8626 gimplify_assign (ovf, t, pre_p);
8628 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
8631 /* Increment register save count. */
8633 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
8634 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
8635 gimplify_and_add (u, pre_p);
8637 if (indirect_p)
8639 t = build_pointer_type (build_pointer_type (type));
8640 addr = fold_convert (t, addr);
8641 addr = build_va_arg_indirect_ref (addr);
8643 else
8645 t = build_pointer_type (type);
8646 addr = fold_convert (t, addr);
8649 return build_va_arg_indirect_ref (addr);
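/* For illustration (a rough sketch): for an argument that still fits
   into the register save area, the address computed above is

     addr = __reg_save_area + sav_ofs + <reg counter> * sav_scale;

   e.g. on 64-bit the Nth integer argument is read from
   __reg_save_area + 16 + N*8 and the Nth FP argument from
   __reg_save_area + 128 + N*8; otherwise addr is taken from
   __overflow_arg_area, which is then bumped by the argument size.  */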
8653 /* Builtins. */
8655 enum s390_builtin
8657 S390_BUILTIN_THREAD_POINTER,
8658 S390_BUILTIN_SET_THREAD_POINTER,
8660 S390_BUILTIN_max
8663 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
8664 CODE_FOR_get_tp_64,
8665 CODE_FOR_set_tp_64
8668 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
8669 CODE_FOR_get_tp_31,
8670 CODE_FOR_set_tp_31
8673 static void
8674 s390_init_builtins (void)
8676 tree ftype;
8678 ftype = build_function_type (ptr_type_node, void_list_node);
8679 add_builtin_function ("__builtin_thread_pointer", ftype,
8680 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
8681 NULL, NULL_TREE);
8683 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
8684 add_builtin_function ("__builtin_set_thread_pointer", ftype,
8685 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
8686 NULL, NULL_TREE);
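/* For illustration (a rough sketch): the two machine-specific
   builtins registered above can be used directly from C, e.g.

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   they expand to the get_tp/set_tp patterns selected in
   s390_expand_builtin below.  */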
8689 /* Expand an expression EXP that calls a built-in function,
8690 with result going to TARGET if that's convenient
8691 (and in mode MODE if that's convenient).
8692 SUBTARGET may be used as the target for computing one of EXP's operands.
8693 IGNORE is nonzero if the value is to be ignored. */
8695 static rtx
8696 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8697 enum machine_mode mode ATTRIBUTE_UNUSED,
8698 int ignore ATTRIBUTE_UNUSED)
8700 #define MAX_ARGS 2
8702 unsigned int const *code_for_builtin =
8703 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
8705 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8706 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8707 enum insn_code icode;
8708 rtx op[MAX_ARGS], pat;
8709 int arity;
8710 bool nonvoid;
8711 tree arg;
8712 call_expr_arg_iterator iter;
8714 if (fcode >= S390_BUILTIN_max)
8715 internal_error ("bad builtin fcode");
8716 icode = code_for_builtin[fcode];
8717 if (icode == 0)
8718 internal_error ("bad builtin fcode");
8720 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
8722 arity = 0;
8723 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8725 const struct insn_operand_data *insn_op;
8727 if (arg == error_mark_node)
8728 return NULL_RTX;
8729 if (arity > MAX_ARGS)
8730 return NULL_RTX;
8732 insn_op = &insn_data[icode].operand[arity + nonvoid];
8734 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
8736 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
8737 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
8738 arity++;
8741 if (nonvoid)
8743 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8744 if (!target
8745 || GET_MODE (target) != tmode
8746 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
8747 target = gen_reg_rtx (tmode);
8750 switch (arity)
8752 case 0:
8753 pat = GEN_FCN (icode) (target);
8754 break;
8755 case 1:
8756 if (nonvoid)
8757 pat = GEN_FCN (icode) (target, op[0]);
8758 else
8759 pat = GEN_FCN (icode) (op[0]);
8760 break;
8761 case 2:
8762 pat = GEN_FCN (icode) (target, op[0], op[1]);
8763 break;
8764 default:
8765 gcc_unreachable ();
8767 if (!pat)
8768 return NULL_RTX;
8769 emit_insn (pat);
8771 if (nonvoid)
8772 return target;
8773 else
8774 return const0_rtx;
8778 /* Output assembly code for the trampoline template to
8779 stdio stream FILE.
8781 On S/390, we use gpr 1 internally in the trampoline code;
8782 gpr 0 is used to hold the static chain. */
8784 void
8785 s390_trampoline_template (FILE *file)
8787 rtx op[2];
8788 op[0] = gen_rtx_REG (Pmode, 0);
8789 op[1] = gen_rtx_REG (Pmode, 1);
8791 if (TARGET_64BIT)
8793 output_asm_insn ("basr\t%1,0", op);
8794 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
8795 output_asm_insn ("br\t%1", op);
8796 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
8798 else
8800 output_asm_insn ("basr\t%1,0", op);
8801 output_asm_insn ("lm\t%0,%1,6(%1)", op);
8802 output_asm_insn ("br\t%1", op);
8803 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
8807 /* Emit RTL insns to initialize the variable parts of a trampoline.
8808 FNADDR is an RTX for the address of the function's pure code.
8809 CXT is an RTX for the static chain value for the function. */
8811 void
8812 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
8814 emit_move_insn (gen_rtx_MEM (Pmode,
8815 memory_address (Pmode,
8816 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
8817 emit_move_insn (gen_rtx_MEM (Pmode,
8818 memory_address (Pmode,
8819 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
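/* For illustration (a rough sketch): together with the template above
   this produces a trampoline laid out as follows (64-bit case):

     offset  0: basr %r1,0              %r1 = address of next insn
     offset  2: lmg  %r0,%r1,14(%r1)    %r0 = static chain, %r1 = target
     offset  8: br   %r1
     offset 16: static chain value      (stored by the code above)
     offset 24: target function address (stored by the code above)

   the 31-bit variant uses lm/br and keeps the two words at offsets
   8 and 12 instead.  */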
8822 /* Output assembler code to FILE to increment profiler label # LABELNO
8823 for profiling a function entry. */
8825 void
8826 s390_function_profiler (FILE *file, int labelno)
8828 rtx op[7];
8830 char label[128];
8831 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
8833 fprintf (file, "# function profiler \n");
8835 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
8836 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8837 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
8839 op[2] = gen_rtx_REG (Pmode, 1);
8840 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
8841 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
8843 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
8844 if (flag_pic)
8846 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
8847 op[4] = gen_rtx_CONST (Pmode, op[4]);
8850 if (TARGET_64BIT)
8852 output_asm_insn ("stg\t%0,%1", op);
8853 output_asm_insn ("larl\t%2,%3", op);
8854 output_asm_insn ("brasl\t%0,%4", op);
8855 output_asm_insn ("lg\t%0,%1", op);
8857 else if (!flag_pic)
8859 op[6] = gen_label_rtx ();
8861 output_asm_insn ("st\t%0,%1", op);
8862 output_asm_insn ("bras\t%2,%l6", op);
8863 output_asm_insn (".long\t%4", op);
8864 output_asm_insn (".long\t%3", op);
8865 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8866 output_asm_insn ("l\t%0,0(%2)", op);
8867 output_asm_insn ("l\t%2,4(%2)", op);
8868 output_asm_insn ("basr\t%0,%0", op);
8869 output_asm_insn ("l\t%0,%1", op);
8871 else
8873 op[5] = gen_label_rtx ();
8874 op[6] = gen_label_rtx ();
8876 output_asm_insn ("st\t%0,%1", op);
8877 output_asm_insn ("bras\t%2,%l6", op);
8878 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
8879 output_asm_insn (".long\t%4-%l5", op);
8880 output_asm_insn (".long\t%3-%l5", op);
8881 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8882 output_asm_insn ("lr\t%0,%2", op);
8883 output_asm_insn ("a\t%0,0(%2)", op);
8884 output_asm_insn ("a\t%2,4(%2)", op);
8885 output_asm_insn ("basr\t%0,%0", op);
8886 output_asm_insn ("l\t%0,%1", op);
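/* For illustration (a rough sketch): on 64-bit the sequence emitted
   above comes out roughly as

     stg   %r14,8(%r15)        save return address
     larl  %r1,.LP0            address of the profile label
     brasl %r14,_mcount        or _mcount@PLT with -fPIC
     lg    %r14,8(%r15)        restore return address

   the 31-bit variants load the _mcount and label addresses from an
   inline literal pool instead of using larl/brasl.  */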
8890 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
8891 into its SYMBOL_REF_FLAGS. */
8893 static void
8894 s390_encode_section_info (tree decl, rtx rtl, int first)
8896 default_encode_section_info (decl, rtl, first);
8898 if (TREE_CODE (decl) == VAR_DECL)
8900 /* If a variable has a forced alignment of less than 2 bytes, mark it
8901 with SYMBOL_FLAG_ALIGN1 to prevent it from being used as a LARL
8902 operand. */
8903 if (DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
8904 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
8905 if (!DECL_SIZE (decl)
8906 || !DECL_ALIGN (decl)
8907 || !host_integerp (DECL_SIZE (decl), 0)
8908 || (DECL_ALIGN (decl) <= 64
8909 && DECL_ALIGN (decl) != tree_low_cst (DECL_SIZE (decl), 0)))
8910 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
8913 /* Literal pool references don't have a decl so they are handled
8914 differently here. We rely on the information in the MEM_ALIGN
8915 entry to decide upon natural alignment. */
8916 if (MEM_P (rtl)
8917 && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
8918 && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (rtl, 0))
8919 && (MEM_ALIGN (rtl) == 0
8920 || MEM_ALIGN (rtl) < GET_MODE_BITSIZE (GET_MODE (rtl))))
8921 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
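/* For illustration (a rough sketch): a declaration like

     char c __attribute__ ((aligned (1)));

   carries a user-forced alignment below 2 bytes and is therefore
   flagged SYMBOL_FLAG_ALIGN1: it may end up at an odd address, which
   LARL cannot form.  Variables whose alignment is smaller than their
   size are likewise marked SYMBOL_FLAG_NOT_NATURALLY_ALIGNED.  */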
8924 /* Output thunk to FILE that implements a C++ virtual function call (with
8925 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
8926 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
8927 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
8928 relative to the resulting this pointer. */
8930 static void
8931 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
8932 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8933 tree function)
8935 rtx op[10];
8936 int nonlocal = 0;
8938 /* Operand 0 is the target function. */
8939 op[0] = XEXP (DECL_RTL (function), 0);
8940 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
8942 nonlocal = 1;
8943 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
8944 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
8945 op[0] = gen_rtx_CONST (Pmode, op[0]);
8948 /* Operand 1 is the 'this' pointer. */
8949 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8950 op[1] = gen_rtx_REG (Pmode, 3);
8951 else
8952 op[1] = gen_rtx_REG (Pmode, 2);
8954 /* Operand 2 is the delta. */
8955 op[2] = GEN_INT (delta);
8957 /* Operand 3 is the vcall_offset. */
8958 op[3] = GEN_INT (vcall_offset);
8960 /* Operand 4 is the temporary register. */
8961 op[4] = gen_rtx_REG (Pmode, 1);
8963 /* Operands 5 to 8 can be used as labels. */
8964 op[5] = NULL_RTX;
8965 op[6] = NULL_RTX;
8966 op[7] = NULL_RTX;
8967 op[8] = NULL_RTX;
8969 /* Operand 9 can be used as a temporary register. */
8970 op[9] = NULL_RTX;
8972 /* Generate code. */
8973 if (TARGET_64BIT)
8975 /* Setup literal pool pointer if required. */
8976 if ((!DISP_IN_RANGE (delta)
8977 && !CONST_OK_FOR_K (delta)
8978 && !CONST_OK_FOR_Os (delta))
8979 || (!DISP_IN_RANGE (vcall_offset)
8980 && !CONST_OK_FOR_K (vcall_offset)
8981 && !CONST_OK_FOR_Os (vcall_offset)))
8983 op[5] = gen_label_rtx ();
8984 output_asm_insn ("larl\t%4,%5", op);
8987 /* Add DELTA to this pointer. */
8988 if (delta)
8990 if (CONST_OK_FOR_J (delta))
8991 output_asm_insn ("la\t%1,%2(%1)", op);
8992 else if (DISP_IN_RANGE (delta))
8993 output_asm_insn ("lay\t%1,%2(%1)", op);
8994 else if (CONST_OK_FOR_K (delta))
8995 output_asm_insn ("aghi\t%1,%2", op);
8996 else if (CONST_OK_FOR_Os (delta))
8997 output_asm_insn ("agfi\t%1,%2", op);
8998 else
9000 op[6] = gen_label_rtx ();
9001 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
9005 /* Perform vcall adjustment. */
9006 if (vcall_offset)
9008 if (DISP_IN_RANGE (vcall_offset))
9010 output_asm_insn ("lg\t%4,0(%1)", op);
9011 output_asm_insn ("ag\t%1,%3(%4)", op);
9013 else if (CONST_OK_FOR_K (vcall_offset))
9015 output_asm_insn ("lghi\t%4,%3", op);
9016 output_asm_insn ("ag\t%4,0(%1)", op);
9017 output_asm_insn ("ag\t%1,0(%4)", op);
9019 else if (CONST_OK_FOR_Os (vcall_offset))
9021 output_asm_insn ("lgfi\t%4,%3", op);
9022 output_asm_insn ("ag\t%4,0(%1)", op);
9023 output_asm_insn ("ag\t%1,0(%4)", op);
9025 else
9027 op[7] = gen_label_rtx ();
9028 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
9029 output_asm_insn ("ag\t%4,0(%1)", op);
9030 output_asm_insn ("ag\t%1,0(%4)", op);
9034 /* Jump to target. */
9035 output_asm_insn ("jg\t%0", op);
9037 /* Output literal pool if required. */
9038 if (op[5])
9040 output_asm_insn (".align\t4", op);
9041 targetm.asm_out.internal_label (file, "L",
9042 CODE_LABEL_NUMBER (op[5]));
9044 if (op[6])
9046 targetm.asm_out.internal_label (file, "L",
9047 CODE_LABEL_NUMBER (op[6]));
9048 output_asm_insn (".long\t%2", op);
9050 if (op[7])
9052 targetm.asm_out.internal_label (file, "L",
9053 CODE_LABEL_NUMBER (op[7]));
9054 output_asm_insn (".long\t%3", op);
9057 else
9059 /* Setup base pointer if required. */
9060 if (!vcall_offset
9061 || (!DISP_IN_RANGE (delta)
9062 && !CONST_OK_FOR_K (delta)
9063 && !CONST_OK_FOR_Os (delta))
9064 || (!DISP_IN_RANGE (vcall_offset)
9065 && !CONST_OK_FOR_K (vcall_offset)
9066 && !CONST_OK_FOR_Os (vcall_offset)))
9068 op[5] = gen_label_rtx ();
9069 output_asm_insn ("basr\t%4,0", op);
9070 targetm.asm_out.internal_label (file, "L",
9071 CODE_LABEL_NUMBER (op[5]));
9074 /* Add DELTA to this pointer. */
9075 if (delta)
9077 if (CONST_OK_FOR_J (delta))
9078 output_asm_insn ("la\t%1,%2(%1)", op);
9079 else if (DISP_IN_RANGE (delta))
9080 output_asm_insn ("lay\t%1,%2(%1)", op);
9081 else if (CONST_OK_FOR_K (delta))
9082 output_asm_insn ("ahi\t%1,%2", op);
9083 else if (CONST_OK_FOR_Os (delta))
9084 output_asm_insn ("afi\t%1,%2", op);
9085 else
9087 op[6] = gen_label_rtx ();
9088 output_asm_insn ("a\t%1,%6-%5(%4)", op);
9092 /* Perform vcall adjustment. */
9093 if (vcall_offset)
9095 if (CONST_OK_FOR_J (vcall_offset))
9097 output_asm_insn ("l\t%4,0(%1)", op);
9098 output_asm_insn ("a\t%1,%3(%4)", op);
9100 else if (DISP_IN_RANGE (vcall_offset))
9102 output_asm_insn ("l\t%4,0(%1)", op);
9103 output_asm_insn ("ay\t%1,%3(%4)", op);
9105 else if (CONST_OK_FOR_K (vcall_offset))
9107 output_asm_insn ("lhi\t%4,%3", op);
9108 output_asm_insn ("a\t%4,0(%1)", op);
9109 output_asm_insn ("a\t%1,0(%4)", op);
9111 else if (CONST_OK_FOR_Os (vcall_offset))
9113 output_asm_insn ("iilf\t%4,%3", op);
9114 output_asm_insn ("a\t%4,0(%1)", op);
9115 output_asm_insn ("a\t%1,0(%4)", op);
9117 else
9119 op[7] = gen_label_rtx ();
9120 output_asm_insn ("l\t%4,%7-%5(%4)", op);
9121 output_asm_insn ("a\t%4,0(%1)", op);
9122 output_asm_insn ("a\t%1,0(%4)", op);
9125 /* We had to clobber the base pointer register.
9126 Re-setup the base pointer (with a different base). */
9127 op[5] = gen_label_rtx ();
9128 output_asm_insn ("basr\t%4,0", op);
9129 targetm.asm_out.internal_label (file, "L",
9130 CODE_LABEL_NUMBER (op[5]));
9133 /* Jump to target. */
9134 op[8] = gen_label_rtx ();
9136 if (!flag_pic)
9137 output_asm_insn ("l\t%4,%8-%5(%4)", op);
9138 else if (!nonlocal)
9139 output_asm_insn ("a\t%4,%8-%5(%4)", op);
9140 /* We cannot call through .plt, since .plt requires %r12 loaded. */
9141 else if (flag_pic == 1)
9143 output_asm_insn ("a\t%4,%8-%5(%4)", op);
9144 output_asm_insn ("l\t%4,%0(%4)", op);
9146 else if (flag_pic == 2)
9148 op[9] = gen_rtx_REG (Pmode, 0);
9149 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
9150 output_asm_insn ("a\t%4,%8-%5(%4)", op);
9151 output_asm_insn ("ar\t%4,%9", op);
9152 output_asm_insn ("l\t%4,0(%4)", op);
9155 output_asm_insn ("br\t%4", op);
9157 /* Output literal pool. */
9158 output_asm_insn (".align\t4", op);
9160 if (nonlocal && flag_pic == 2)
9161 output_asm_insn (".long\t%0", op);
9162 if (nonlocal)
9164 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
9165 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
9168 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
9169 if (!flag_pic)
9170 output_asm_insn (".long\t%0", op);
9171 else
9172 output_asm_insn (".long\t%0-%5", op);
9174 if (op[6])
9176 targetm.asm_out.internal_label (file, "L",
9177 CODE_LABEL_NUMBER (op[6]));
9178 output_asm_insn (".long\t%2", op);
9180 if (op[7])
9182 targetm.asm_out.internal_label (file, "L",
9183 CODE_LABEL_NUMBER (op[7]));
9184 output_asm_insn (".long\t%3", op);
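/* For illustration (a rough sketch): independent of the instruction
   sequences chosen above, every thunk performs the usual C++
   adjust-and-jump, roughly

     this = (char *) this + delta;
     if (vcall_offset)
       this += *(long *) (*(char **) this + vcall_offset);
     goto *function;    a jump, not a call: the target returns
                        directly to the original caller  */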
9189 static bool
9190 s390_valid_pointer_mode (enum machine_mode mode)
9192 return (mode == SImode || (TARGET_64BIT && mode == DImode));
9195 /* Checks whether the given CALL_EXPR would use a caller
9196 saved register. This is used to decide whether sibling call
9197 optimization could be performed on the respective function
9198 call. */
9200 static bool
9201 s390_call_saved_register_used (tree call_expr)
9203 CUMULATIVE_ARGS cum;
9204 tree parameter;
9205 enum machine_mode mode;
9206 tree type;
9207 rtx parm_rtx;
9208 int reg, i;
9210 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
9212 for (i = 0; i < call_expr_nargs (call_expr); i++)
9214 parameter = CALL_EXPR_ARG (call_expr, i);
9215 gcc_assert (parameter);
9217 /* For an undeclared variable passed as parameter we will get
9218 an ERROR_MARK node here. */
9219 if (TREE_CODE (parameter) == ERROR_MARK)
9220 return true;
9222 type = TREE_TYPE (parameter);
9223 gcc_assert (type);
9225 mode = TYPE_MODE (type);
9226 gcc_assert (mode);
9228 if (pass_by_reference (&cum, mode, type, true))
9230 mode = Pmode;
9231 type = build_pointer_type (type);
9234 parm_rtx = s390_function_arg (&cum, mode, type, 0);
9236 s390_function_arg_advance (&cum, mode, type, 0);
9238 if (parm_rtx && REG_P (parm_rtx))
9240 for (reg = 0;
9241 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
9242 reg++)
9243 if (! call_used_regs[reg + REGNO (parm_rtx)])
9244 return true;
9247 return false;
9250 /* Return true if the given call expression can be
9251 turned into a sibling call.
9252 DECL holds the declaration of the function to be called whereas
9253 EXP is the call expression itself. */
9255 static bool
9256 s390_function_ok_for_sibcall (tree decl, tree exp)
9258 /* The TPF epilogue uses register 1. */
9259 if (TARGET_TPF_PROFILING)
9260 return false;
9262 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
9263 which would have to be restored before the sibcall. */
9264 if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
9265 return false;
9267 /* Register 6 on s390 is available as an argument register but is
9268 call-saved (callee-saved). This makes functions needing this register
9269 for arguments not suitable for sibcalls. */
9270 return !s390_call_saved_register_used (exp);
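/* For illustration (a rough sketch): on 64-bit a tail call like

     extern long g (long, long, long, long, long);
     long f (long a, long b, long c, long d, long e)
     { return g (a, b, c, d, e); }

   needs %r2-%r6 for the five arguments; since %r6 is not in
   call_used_regs, s390_call_saved_register_used returns true and the
   call is emitted as a normal call instead of a sibcall.  */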
9273 /* Return the fixed registers used for condition codes. */
9275 static bool
9276 s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
9278 *p1 = CC_REGNUM;
9279 *p2 = INVALID_REGNUM;
9281 return true;
9284 /* This function is used by the call expanders of the machine description.
9285 It emits the call insn itself together with the necessary operations
9286 to adjust the target address and returns the emitted insn.
9287 ADDR_LOCATION is the target address rtx
9288 TLS_CALL the location of the thread-local symbol
9289 RESULT_REG the register where the result of the call should be stored
9290 RETADDR_REG the register where the return address should be stored
9291 If this parameter is NULL_RTX the call is considered
9292 to be a sibling call. */
9295 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
9296 rtx retaddr_reg)
9298 bool plt_call = false;
9299 rtx insn;
9300 rtx call;
9301 rtx clobber;
9302 rtvec vec;
9304 /* Direct function calls need special treatment. */
9305 if (GET_CODE (addr_location) == SYMBOL_REF)
9307 /* When calling a global routine in PIC mode, we must
9308 replace the symbol itself with the PLT stub. */
9309 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
9311 addr_location = gen_rtx_UNSPEC (Pmode,
9312 gen_rtvec (1, addr_location),
9313 UNSPEC_PLT);
9314 addr_location = gen_rtx_CONST (Pmode, addr_location);
9315 plt_call = true;
9318 /* Unless we can use the bras(l) insn, force the
9319 routine address into a register. */
9320 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
9322 if (flag_pic)
9323 addr_location = legitimize_pic_address (addr_location, 0);
9324 else
9325 addr_location = force_reg (Pmode, addr_location);
9329 /* If it is already an indirect call, or the code above moved the
9330 SYMBOL_REF somewhere else, make sure the address can be found in
9331 register 1. */
9332 if (retaddr_reg == NULL_RTX
9333 && GET_CODE (addr_location) != SYMBOL_REF
9334 && !plt_call)
9336 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
9337 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
9340 addr_location = gen_rtx_MEM (QImode, addr_location);
9341 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
9343 if (result_reg != NULL_RTX)
9344 call = gen_rtx_SET (VOIDmode, result_reg, call);
9346 if (retaddr_reg != NULL_RTX)
9348 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
9350 if (tls_call != NULL_RTX)
9351 vec = gen_rtvec (3, call, clobber,
9352 gen_rtx_USE (VOIDmode, tls_call));
9353 else
9354 vec = gen_rtvec (2, call, clobber);
9356 call = gen_rtx_PARALLEL (VOIDmode, vec);
9359 insn = emit_call_insn (call);
9361 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
9362 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
9364 /* s390_function_ok_for_sibcall should
9365 have denied sibcalls in this case. */
9366 gcc_assert (retaddr_reg != NULL_RTX);
9368 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
9370 return insn;
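/* For illustration (a rough sketch): for a normal direct call the
   insn built above has roughly the shape

     (parallel [(set (reg 2) (call (mem:QI (symbol_ref "foo"))
                                   (const_int 0)))
                (clobber (reg 14))])

   with an extra (use ...) of the TLS symbol for TLS calls, and
   without the clobber of the return-address register for sibling
   calls.  */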
9373 /* Implement CONDITIONAL_REGISTER_USAGE. */
9375 void
9376 s390_conditional_register_usage (void)
9378 int i;
9380 if (flag_pic)
9382 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
9383 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
9385 if (TARGET_CPU_ZARCH)
9387 fixed_regs[BASE_REGNUM] = 0;
9388 call_used_regs[BASE_REGNUM] = 0;
9389 fixed_regs[RETURN_REGNUM] = 0;
9390 call_used_regs[RETURN_REGNUM] = 0;
9392 if (TARGET_64BIT)
9394 for (i = 24; i < 32; i++)
9395 call_used_regs[i] = call_really_used_regs[i] = 0;
9397 else
9399 for (i = 18; i < 20; i++)
9400 call_used_regs[i] = call_really_used_regs[i] = 0;
9403 if (TARGET_SOFT_FLOAT)
9405 for (i = 16; i < 32; i++)
9406 call_used_regs[i] = fixed_regs[i] = 1;
9410 /* Corresponding function to eh_return expander. */
9412 static GTY(()) rtx s390_tpf_eh_return_symbol;
9413 void
9414 s390_emit_tpf_eh_return (rtx target)
9416 rtx insn, reg;
9418 if (!s390_tpf_eh_return_symbol)
9419 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
9421 reg = gen_rtx_REG (Pmode, 2);
9423 emit_move_insn (reg, target);
9424 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
9425 gen_rtx_REG (Pmode, RETURN_REGNUM));
9426 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
9428 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
9431 /* Rework the prologue/epilogue to avoid saving/restoring
9432 registers unnecessarily. */
9434 static void
9435 s390_optimize_prologue (void)
9437 rtx insn, new_insn, next_insn;
9439 /* Do a final recompute of the frame-related data. */
9441 s390_update_frame_layout ();
9443 /* If all special registers are in fact used, there's nothing we
9444 can do, so no point in walking the insn list. */
9446 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
9447 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
9448 && (TARGET_CPU_ZARCH
9449 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
9450 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
9451 return;
9453 /* Search for prologue/epilogue insns and replace them. */
9455 for (insn = get_insns (); insn; insn = next_insn)
9457 int first, last, off;
9458 rtx set, base, offset;
9460 next_insn = NEXT_INSN (insn);
9462 if (GET_CODE (insn) != INSN)
9463 continue;
9465 if (GET_CODE (PATTERN (insn)) == PARALLEL
9466 && store_multiple_operation (PATTERN (insn), VOIDmode))
9468 set = XVECEXP (PATTERN (insn), 0, 0);
9469 first = REGNO (SET_SRC (set));
9470 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9471 offset = const0_rtx;
9472 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9473 off = INTVAL (offset);
9475 if (GET_CODE (base) != REG || off < 0)
9476 continue;
9477 if (cfun_frame_layout.first_save_gpr != -1
9478 && (cfun_frame_layout.first_save_gpr < first
9479 || cfun_frame_layout.last_save_gpr > last))
9480 continue;
9481 if (REGNO (base) != STACK_POINTER_REGNUM
9482 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9483 continue;
9484 if (first > BASE_REGNUM || last < BASE_REGNUM)
9485 continue;
9487 if (cfun_frame_layout.first_save_gpr != -1)
9489 new_insn = save_gprs (base,
9490 off + (cfun_frame_layout.first_save_gpr
9491 - first) * UNITS_PER_WORD,
9492 cfun_frame_layout.first_save_gpr,
9493 cfun_frame_layout.last_save_gpr);
9494 new_insn = emit_insn_before (new_insn, insn);
9495 INSN_ADDRESSES_NEW (new_insn, -1);
9498 remove_insn (insn);
9499 continue;
9502 if (cfun_frame_layout.first_save_gpr == -1
9503 && GET_CODE (PATTERN (insn)) == SET
9504 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
9505 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
9506 || (!TARGET_CPU_ZARCH
9507 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
9508 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
9510 set = PATTERN (insn);
9511 first = REGNO (SET_SRC (set));
9512 offset = const0_rtx;
9513 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9514 off = INTVAL (offset);
9516 if (GET_CODE (base) != REG || off < 0)
9517 continue;
9518 if (REGNO (base) != STACK_POINTER_REGNUM
9519 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9520 continue;
9522 remove_insn (insn);
9523 continue;
9526 if (GET_CODE (PATTERN (insn)) == PARALLEL
9527 && load_multiple_operation (PATTERN (insn), VOIDmode))
9529 set = XVECEXP (PATTERN (insn), 0, 0);
9530 first = REGNO (SET_DEST (set));
9531 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9532 offset = const0_rtx;
9533 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9534 off = INTVAL (offset);
9536 if (GET_CODE (base) != REG || off < 0)
9537 continue;
9538 if (cfun_frame_layout.first_restore_gpr != -1
9539 && (cfun_frame_layout.first_restore_gpr < first
9540 || cfun_frame_layout.last_restore_gpr > last))
9541 continue;
9542 if (REGNO (base) != STACK_POINTER_REGNUM
9543 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9544 continue;
9545 if (first > BASE_REGNUM || last < BASE_REGNUM)
9546 continue;
9548 if (cfun_frame_layout.first_restore_gpr != -1)
9550 new_insn = restore_gprs (base,
9551 off + (cfun_frame_layout.first_restore_gpr
9552 - first) * UNITS_PER_WORD,
9553 cfun_frame_layout.first_restore_gpr,
9554 cfun_frame_layout.last_restore_gpr);
9555 new_insn = emit_insn_before (new_insn, insn);
9556 INSN_ADDRESSES_NEW (new_insn, -1);
9559 remove_insn (insn);
9560 continue;
9563 if (cfun_frame_layout.first_restore_gpr == -1
9564 && GET_CODE (PATTERN (insn)) == SET
9565 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
9566 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
9567 || (!TARGET_CPU_ZARCH
9568 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
9569 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
9571 set = PATTERN (insn);
9572 first = REGNO (SET_DEST (set));
9573 offset = const0_rtx;
9574 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9575 off = INTVAL (offset);
9577 if (GET_CODE (base) != REG || off < 0)
9578 continue;
9579 if (REGNO (base) != STACK_POINTER_REGNUM
9580 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9581 continue;
9583 remove_insn (insn);
9584 continue;
9589 /* Perform machine-dependent processing. */
9591 static void
9592 s390_reorg (void)
9594 bool pool_overflow = false;
9596 /* Make sure all splits have been performed; splits after
9597 machine_dependent_reorg might confuse insn length counts. */
9598 split_all_insns_noflow ();
9600 /* From here on decomposed literal pool addresses must be accepted. */
9601 cfun->machine->decomposed_literal_pool_addresses_ok_p = true;
9603 /* Install the main literal pool and the associated base
9604 register load insns.
9606 In addition, there are two problematic situations we need
9607 to correct:
9609 - the literal pool might be > 4096 bytes in size, so that
9610 some of its elements cannot be directly accessed
9612 - a branch target might be > 64K away from the branch, so that
9613 it is not possible to use a PC-relative instruction.
9615 To fix those, we split the single literal pool into multiple
9616 pool chunks, reloading the pool base register at various
9617 points throughout the function to ensure it always points to
9618 the pool chunk the following code expects, and / or replace
9619 PC-relative branches by absolute branches.
9621 However, the two problems are interdependent: splitting the
9622 literal pool can move a branch further away from its target,
9623 causing the 64K limit to overflow, and on the other hand,
9624 replacing a PC-relative branch by an absolute branch means
9625 we need to put the branch target address into the literal
9626 pool, possibly causing it to overflow.
9628 So, we loop trying to fix up both problems until we manage
9629 to satisfy both conditions at the same time. Note that the
9630 loop is guaranteed to terminate as every pass of the loop
9631 strictly decreases the total number of PC-relative branches
9632 in the function. (This is not completely true as there
9633 might be branch-over-pool insns introduced by chunkify_start.
9634 Those never need to be split however.) */
9636 for (;;)
9638 struct constant_pool *pool = NULL;
9640 /* Collect the literal pool. */
9641 if (!pool_overflow)
9643 pool = s390_mainpool_start ();
9644 if (!pool)
9645 pool_overflow = true;
9648 /* If literal pool overflowed, start to chunkify it. */
9649 if (pool_overflow)
9650 pool = s390_chunkify_start ();
9652 /* Split out-of-range branches. If this has created new
9653 literal pool entries, cancel current chunk list and
9654 recompute it. zSeries machines have large branch
9655 instructions, so we never need to split a branch. */
9656 if (!TARGET_CPU_ZARCH && s390_split_branches ())
9658 if (pool_overflow)
9659 s390_chunkify_cancel (pool);
9660 else
9661 s390_mainpool_cancel (pool);
9663 continue;
9666 /* If we made it up to here, both conditions are satisfied.
9667 Finish up literal pool related changes. */
9668 if (pool_overflow)
9669 s390_chunkify_finish (pool);
9670 else
9671 s390_mainpool_finish (pool);
9673 /* We're done splitting branches. */
9674 cfun->machine->split_branches_pending_p = false;
9675 break;
9678 /* Generate out-of-pool execute target insns. */
9679 if (TARGET_CPU_ZARCH)
9681 rtx insn, label, target;
9683 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9685 label = s390_execute_label (insn);
9686 if (!label)
9687 continue;
9689 gcc_assert (label != const0_rtx);
9691 target = emit_label (XEXP (label, 0));
9692 INSN_ADDRESSES_NEW (target, -1);
9694 target = emit_insn (s390_execute_target (insn));
9695 INSN_ADDRESSES_NEW (target, -1);
9699 /* Try to optimize prologue and epilogue further. */
9700 s390_optimize_prologue ();
9704 /* Initialize GCC target structure. */
9706 #undef TARGET_ASM_ALIGNED_HI_OP
9707 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
9708 #undef TARGET_ASM_ALIGNED_DI_OP
9709 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
9710 #undef TARGET_ASM_INTEGER
9711 #define TARGET_ASM_INTEGER s390_assemble_integer
9713 #undef TARGET_ASM_OPEN_PAREN
9714 #define TARGET_ASM_OPEN_PAREN ""
9716 #undef TARGET_ASM_CLOSE_PAREN
9717 #define TARGET_ASM_CLOSE_PAREN ""
9719 #undef TARGET_DEFAULT_TARGET_FLAGS
9720 #define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
9721 #undef TARGET_HANDLE_OPTION
9722 #define TARGET_HANDLE_OPTION s390_handle_option
9724 #undef TARGET_ENCODE_SECTION_INFO
9725 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
9727 #ifdef HAVE_AS_TLS
9728 #undef TARGET_HAVE_TLS
9729 #define TARGET_HAVE_TLS true
9730 #endif
9731 #undef TARGET_CANNOT_FORCE_CONST_MEM
9732 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
9734 #undef TARGET_DELEGITIMIZE_ADDRESS
9735 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
9737 #undef TARGET_RETURN_IN_MEMORY
9738 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
9740 #undef TARGET_INIT_BUILTINS
9741 #define TARGET_INIT_BUILTINS s390_init_builtins
9742 #undef TARGET_EXPAND_BUILTIN
9743 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
9745 #undef TARGET_ASM_OUTPUT_MI_THUNK
9746 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
9747 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
9748 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
9750 #undef TARGET_SCHED_ADJUST_PRIORITY
9751 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
9752 #undef TARGET_SCHED_ISSUE_RATE
9753 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
9754 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
9755 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
9757 #undef TARGET_CANNOT_COPY_INSN_P
9758 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
9759 #undef TARGET_RTX_COSTS
9760 #define TARGET_RTX_COSTS s390_rtx_costs
9761 #undef TARGET_ADDRESS_COST
9762 #define TARGET_ADDRESS_COST s390_address_cost
9764 #undef TARGET_MACHINE_DEPENDENT_REORG
9765 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
9767 #undef TARGET_VALID_POINTER_MODE
9768 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
9770 #undef TARGET_BUILD_BUILTIN_VA_LIST
9771 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
9772 #undef TARGET_EXPAND_BUILTIN_VA_START
9773 #define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
9774 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
9775 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
9777 #undef TARGET_PROMOTE_FUNCTION_ARGS
9778 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
9779 #undef TARGET_PROMOTE_FUNCTION_RETURN
9780 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
9781 #undef TARGET_PASS_BY_REFERENCE
9782 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
9784 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
9785 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
9787 #undef TARGET_FIXED_CONDITION_CODE_REGS
9788 #define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
9790 #undef TARGET_CC_MODES_COMPATIBLE
9791 #define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible
9793 #undef TARGET_INVALID_WITHIN_DOLOOP
9794 #define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_null
9796 #ifdef HAVE_AS_TLS
9797 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
9798 #define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
9799 #endif
9801 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
9802 #undef TARGET_MANGLE_TYPE
9803 #define TARGET_MANGLE_TYPE s390_mangle_type
9804 #endif
9806 #undef TARGET_SCALAR_MODE_SUPPORTED_P
9807 #define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p
9809 #undef TARGET_SECONDARY_RELOAD
9810 #define TARGET_SECONDARY_RELOAD s390_secondary_reload
9812 #undef TARGET_LIBGCC_CMP_RETURN_MODE
9813 #define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode
9815 #undef TARGET_LIBGCC_SHIFT_COUNT_MODE
9816 #define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode
9818 struct gcc_target targetm = TARGET_INITIALIZER;
9820 #include "gt-s390.h"