1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com).
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
26 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
45 #include "basic-block.h"
46 #include "integrate.h"
49 #include "target-def.h"
51 #include "langhooks.h"
53 #include "tree-gimple.h"
55 /* Machine-specific symbol_ref flags.
      SYMBOL_FLAG_ALIGN1: the symbol is only guaranteed 1-byte aligned.
      larl_operand rejects symbols carrying this flag (and odd constant
      offsets), since LARL-style PC-relative addressing presumably needs
      even addresses -- NOTE(review): confirm against the full file.  */
56 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
59 static bool s390_assemble_integer (rtx
, unsigned int, int);
60 static void s390_select_rtx_section (enum machine_mode
, rtx
,
61 unsigned HOST_WIDE_INT
);
62 static void s390_encode_section_info (tree
, rtx
, int);
63 static bool s390_cannot_force_const_mem (rtx
);
64 static rtx
s390_delegitimize_address (rtx
);
65 static bool s390_return_in_memory (tree
, tree
);
66 static void s390_init_builtins (void);
67 static rtx
s390_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
68 static void s390_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
70 static enum attr_type
s390_safe_attr_type (rtx
);
72 static int s390_adjust_cost (rtx
, rtx
, rtx
, int);
73 static int s390_adjust_priority (rtx
, int);
74 static int s390_issue_rate (void);
75 static int s390_first_cycle_multipass_dfa_lookahead (void);
76 static bool s390_rtx_costs (rtx
, int, int, int *);
77 static int s390_address_cost (rtx
);
78 static void s390_reorg (void);
79 static bool s390_valid_pointer_mode (enum machine_mode
);
80 static tree
s390_build_builtin_va_list (void);
81 static tree
s390_gimplify_va_arg (tree
, tree
, tree
*, tree
*);
82 static bool s390_function_ok_for_sibcall (tree
, tree
);
83 static bool s390_call_saved_register_used (tree
);
85 #undef TARGET_ASM_ALIGNED_HI_OP
86 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
87 #undef TARGET_ASM_ALIGNED_DI_OP
88 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
89 #undef TARGET_ASM_INTEGER
90 #define TARGET_ASM_INTEGER s390_assemble_integer
92 #undef TARGET_ASM_OPEN_PAREN
93 #define TARGET_ASM_OPEN_PAREN ""
95 #undef TARGET_ASM_CLOSE_PAREN
96 #define TARGET_ASM_CLOSE_PAREN ""
98 #undef TARGET_ASM_SELECT_RTX_SECTION
99 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
101 #undef TARGET_ENCODE_SECTION_INFO
102 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
105 #undef TARGET_HAVE_TLS
106 #define TARGET_HAVE_TLS true
108 #undef TARGET_CANNOT_FORCE_CONST_MEM
109 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
111 #undef TARGET_DELEGITIMIZE_ADDRESS
112 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
114 #undef TARGET_RETURN_IN_MEMORY
115 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
117 #undef TARGET_INIT_BUILTINS
118 #define TARGET_INIT_BUILTINS s390_init_builtins
119 #undef TARGET_EXPAND_BUILTIN
120 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
122 #undef TARGET_ASM_OUTPUT_MI_THUNK
123 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
124 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
125 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
127 #undef TARGET_SCHED_ADJUST_COST
128 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
129 #undef TARGET_SCHED_ADJUST_PRIORITY
130 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
131 #undef TARGET_SCHED_ISSUE_RATE
132 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
133 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
134 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE hook_int_void_1
135 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
136 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
138 #undef TARGET_RTX_COSTS
139 #define TARGET_RTX_COSTS s390_rtx_costs
140 #undef TARGET_ADDRESS_COST
141 #define TARGET_ADDRESS_COST s390_address_cost
143 #undef TARGET_MACHINE_DEPENDENT_REORG
144 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
146 #undef TARGET_VALID_POINTER_MODE
147 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
149 #undef TARGET_BUILD_BUILTIN_VA_LIST
150 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
151 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
152 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
154 #undef TARGET_PROMOTE_FUNCTION_ARGS
155 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
156 #undef TARGET_PROMOTE_FUNCTION_RETURN
157 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
159 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
160 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
162 struct gcc_target targetm
= TARGET_INITIALIZER
;
164 extern int reload_completed
;
166 /* The alias set for prologue/epilogue register save/restore. */
167 static int s390_sr_alias_set
= 0;
169 /* Save information from a "cmpxx" operation until the branch or scc is
171 rtx s390_compare_op0
, s390_compare_op1
;
173 /* Structure used to hold the components of a S/390 memory
174 address. A legitimate address on S/390 is of the general
176 base + index + displacement
177 where any of the components is optional.
179 base and index are registers of the class ADDR_REGS,
180 displacement is an unsigned 12-bit immediate constant. */
190 /* Which cpu are we tuning for. */
191 enum processor_type s390_tune
;
192 enum processor_flags s390_tune_flags
;
193 /* Which instruction set architecture to use. */
194 enum processor_type s390_arch
;
195 enum processor_flags s390_arch_flags
;
197 /* Strings to hold which cpu and instruction set architecture to use. */
198 const char *s390_tune_string
; /* for -mtune=<xxx> */
199 const char *s390_arch_string
; /* for -march=<xxx> */
201 /* Define the structure for the machine field in struct function. */
203 struct machine_function
GTY(())
205 /* Set, if some of the fprs 8-15 need to be saved (64 bit abi). */
208 /* Set if return address needs to be saved. */
209 bool save_return_addr_p
;
211 /* Number of first and last gpr to be saved, restored. */
213 int first_restore_gpr
;
215 int last_restore_gpr
;
217 /* Size of stack frame. */
218 HOST_WIDE_INT frame_size
;
220 /* Literal pool base register. */
223 /* Some local-dynamic TLS symbol name. */
224 const char *some_ld_name
;
227 static int s390_match_ccmode_set (rtx
, enum machine_mode
);
228 static int s390_branch_condition_mask (rtx
);
229 static const char *s390_branch_condition_mnemonic (rtx
, int);
230 static int check_mode (rtx
, enum machine_mode
*);
231 static int general_s_operand (rtx
, enum machine_mode
, int);
232 static int s390_short_displacement (rtx
);
233 static int s390_decompose_address (rtx
, struct s390_address
*);
234 static rtx
get_thread_pointer (void);
235 static rtx
legitimize_tls_address (rtx
, rtx
);
236 static void print_shift_count_operand (FILE *, rtx
);
237 static const char *get_some_local_dynamic_name (void);
238 static int get_some_local_dynamic_name_1 (rtx
*, void *);
239 static int reg_used_in_mem_p (int, rtx
);
240 static int addr_generation_dependency_p (rtx
, rtx
);
241 static int s390_split_branches (void);
242 static void annotate_constant_pool_refs (rtx
*x
);
243 static void find_constant_pool_ref (rtx
, rtx
*);
244 static void replace_constant_pool_ref (rtx
*, rtx
, rtx
);
245 static rtx
find_ltrel_base (rtx
);
246 static void replace_ltrel_base (rtx
*);
247 static void s390_optimize_prolog (bool);
248 static int find_unused_clobbered_reg (void);
249 static void s390_frame_info (int, int);
250 static rtx
save_fpr (rtx
, int, int);
251 static rtx
restore_fpr (rtx
, int, int);
252 static rtx
save_gprs (rtx
, int, int, int);
253 static rtx
restore_gprs (rtx
, int, int, int);
254 static int s390_function_arg_size (enum machine_mode
, tree
);
255 static bool s390_function_arg_float (enum machine_mode
, tree
);
256 static struct machine_function
* s390_init_machine_status (void);
258 /* Check whether integer displacement D is in range for an address:
       a signed 20-bit value (-524288 .. 524287) when the long-displacement
       facility is available, an unsigned 12-bit value (0 .. 4095) otherwise.  */
259 #define DISP_IN_RANGE(d) \
260 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
261 : ((d) >= 0 && (d) <= 4095))
263 /* Return true if SET either doesn't set the CC register, or else
264 the source and destination have matching CC modes and that
265 CC mode is at least as constrained as REQ_MODE. */
268 s390_match_ccmode_set (rtx set
, enum machine_mode req_mode
)
270 enum machine_mode set_mode
;
272 if (GET_CODE (set
) != SET
)
275 if (GET_CODE (SET_DEST (set
)) != REG
|| !CC_REGNO_P (REGNO (SET_DEST (set
))))
278 set_mode
= GET_MODE (SET_DEST (set
));
292 if (req_mode
!= set_mode
)
297 if (req_mode
!= CCSmode
&& req_mode
!= CCUmode
&& req_mode
!= CCTmode
298 && req_mode
!= CCSRmode
&& req_mode
!= CCURmode
)
304 if (req_mode
!= CCAmode
)
312 return (GET_MODE (SET_SRC (set
)) == set_mode
);
315 /* Return true if every SET in INSN that sets the CC register
316 has source and destination with matching CC modes and that
317 CC mode is at least as constrained as REQ_MODE.
318 If REQ_MODE is VOIDmode, always return false. */
321 s390_match_ccmode (rtx insn
, enum machine_mode req_mode
)
325 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
326 if (req_mode
== VOIDmode
)
329 if (GET_CODE (PATTERN (insn
)) == SET
)
330 return s390_match_ccmode_set (PATTERN (insn
), req_mode
);
332 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
333 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
335 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
336 if (GET_CODE (set
) == SET
)
337 if (!s390_match_ccmode_set (set
, req_mode
))
344 /* If a test-under-mask instruction can be used to implement
345 (compare (and ... OP1) OP2), return the CC mode required
346 to do that. Otherwise, return VOIDmode.
347 MIXED is true if the instruction can distinguish between
348 CC1 and CC2 for mixed selected bits (TMxx), it is false
349 if the instruction cannot (TM). */
352 s390_tm_ccmode (rtx op1
, rtx op2
, int mixed
)
356 /* ??? Fixme: should work on CONST_DOUBLE as well. */
357 if (GET_CODE (op1
) != CONST_INT
|| GET_CODE (op2
) != CONST_INT
)
360 /* Selected bits all zero: CC0. */
361 if (INTVAL (op2
) == 0)
364 /* Selected bits all one: CC3. */
365 if (INTVAL (op2
) == INTVAL (op1
))
368 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
371 bit1
= exact_log2 (INTVAL (op2
));
372 bit0
= exact_log2 (INTVAL (op1
) ^ INTVAL (op2
));
373 if (bit0
!= -1 && bit1
!= -1)
374 return bit0
> bit1
? CCT1mode
: CCT2mode
;
380 /* Given a comparison code OP (EQ, NE, etc.) and the operands
381 OP0 and OP1 of a COMPARE, return the mode to be used for the
385 s390_select_ccmode (enum rtx_code code
, rtx op0
, rtx op1
)
391 if (GET_CODE (op0
) == PLUS
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
392 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0
, 1)), 'K', "K"))
394 if ((GET_CODE (op0
) == PLUS
|| GET_CODE (op0
) == MINUS
395 || GET_CODE (op1
) == NEG
)
396 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
399 if (GET_CODE (op0
) == AND
)
401 /* Check whether we can potentially do it via TM. */
402 enum machine_mode ccmode
;
403 ccmode
= s390_tm_ccmode (XEXP (op0
, 1), op1
, 1);
404 if (ccmode
!= VOIDmode
)
406 /* Relax CCTmode to CCZmode to allow fall-back to AND
407 if that turns out to be beneficial. */
408 return ccmode
== CCTmode
? CCZmode
: ccmode
;
412 if (register_operand (op0
, HImode
)
413 && GET_CODE (op1
) == CONST_INT
414 && (INTVAL (op1
) == -1 || INTVAL (op1
) == 65535))
416 if (register_operand (op0
, QImode
)
417 && GET_CODE (op1
) == CONST_INT
418 && (INTVAL (op1
) == -1 || INTVAL (op1
) == 255))
427 if (GET_CODE (op0
) == PLUS
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
428 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0
, 1)), 'K', "K"))
430 if (INTVAL (XEXP((op0
), 1)) < 0)
443 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
444 && GET_CODE (op1
) != CONST_INT
)
450 if (GET_CODE (op0
) == PLUS
451 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
454 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
455 && GET_CODE (op1
) != CONST_INT
)
461 if (GET_CODE (op0
) == MINUS
462 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
465 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
466 && GET_CODE (op1
) != CONST_INT
)
475 /* Emit a compare instruction suitable to implement the comparison
476 OP0 CODE OP1. Return the correct condition RTL to be placed in
477 the IF_THEN_ELSE of the conditional branch testing the result. */
480 s390_emit_compare (enum rtx_code code
, rtx op0
, rtx op1
)
482 enum machine_mode mode
= s390_select_ccmode (code
, op0
, op1
);
483 rtx cc
= gen_rtx_REG (mode
, CC_REGNUM
);
485 emit_insn (gen_rtx_SET (VOIDmode
, cc
, gen_rtx_COMPARE (mode
, op0
, op1
)));
486 return gen_rtx_fmt_ee (code
, VOIDmode
, cc
, const0_rtx
);
489 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
490 unconditional jump, else a conditional jump under condition COND. */
493 s390_emit_jump (rtx target
, rtx cond
)
497 target
= gen_rtx_LABEL_REF (VOIDmode
, target
);
499 target
= gen_rtx_IF_THEN_ELSE (VOIDmode
, cond
, target
, pc_rtx
);
501 insn
= gen_rtx_SET (VOIDmode
, pc_rtx
, target
);
502 emit_jump_insn (insn
);
505 /* Return nonzero if OP is a valid comparison operator
506 for an ALC condition in mode MODE. */
509 s390_alc_comparison (rtx op
, enum machine_mode mode
)
511 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
514 while (GET_CODE (op
) == ZERO_EXTEND
|| GET_CODE (op
) == SIGN_EXTEND
)
517 if (!COMPARISON_P (op
))
520 if (GET_CODE (XEXP (op
, 0)) != REG
521 || REGNO (XEXP (op
, 0)) != CC_REGNUM
522 || XEXP (op
, 1) != const0_rtx
)
525 switch (GET_MODE (XEXP (op
, 0)))
528 return GET_CODE (op
) == LTU
;
531 return GET_CODE (op
) == LEU
;
534 return GET_CODE (op
) == GEU
;
537 return GET_CODE (op
) == GTU
;
540 return GET_CODE (op
) == LTU
;
543 return GET_CODE (op
) == UNGT
;
546 return GET_CODE (op
) == UNLT
;
553 /* Return nonzero if OP is a valid comparison operator
554 for an SLB condition in mode MODE. */
557 s390_slb_comparison (rtx op
, enum machine_mode mode
)
559 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
562 while (GET_CODE (op
) == ZERO_EXTEND
|| GET_CODE (op
) == SIGN_EXTEND
)
565 if (!COMPARISON_P (op
))
568 if (GET_CODE (XEXP (op
, 0)) != REG
569 || REGNO (XEXP (op
, 0)) != CC_REGNUM
570 || XEXP (op
, 1) != const0_rtx
)
573 switch (GET_MODE (XEXP (op
, 0)))
576 return GET_CODE (op
) == GEU
;
579 return GET_CODE (op
) == GTU
;
582 return GET_CODE (op
) == LTU
;
585 return GET_CODE (op
) == LEU
;
588 return GET_CODE (op
) == GEU
;
591 return GET_CODE (op
) == LE
;
594 return GET_CODE (op
) == GE
;
601 /* Return branch condition mask to implement a branch
602 specified by CODE. */
605 s390_branch_condition_mask (rtx code
)
607 const int CC0
= 1 << 3;
608 const int CC1
= 1 << 2;
609 const int CC2
= 1 << 1;
610 const int CC3
= 1 << 0;
612 if (GET_CODE (XEXP (code
, 0)) != REG
613 || REGNO (XEXP (code
, 0)) != CC_REGNUM
614 || XEXP (code
, 1) != const0_rtx
)
617 switch (GET_MODE (XEXP (code
, 0)))
620 switch (GET_CODE (code
))
623 case NE
: return CC1
| CC2
| CC3
;
630 switch (GET_CODE (code
))
633 case NE
: return CC0
| CC2
| CC3
;
640 switch (GET_CODE (code
))
643 case NE
: return CC0
| CC1
| CC3
;
650 switch (GET_CODE (code
))
653 case NE
: return CC0
| CC1
| CC2
;
660 switch (GET_CODE (code
))
662 case EQ
: return CC0
| CC2
;
663 case NE
: return CC1
| CC3
;
670 switch (GET_CODE (code
))
672 case LTU
: return CC2
| CC3
; /* carry */
673 case GEU
: return CC0
| CC1
; /* no carry */
680 switch (GET_CODE (code
))
682 case GTU
: return CC0
| CC1
; /* borrow */
683 case LEU
: return CC2
| CC3
; /* no borrow */
690 switch (GET_CODE (code
))
692 case EQ
: return CC0
| CC2
;
693 case NE
: return CC1
| CC3
;
694 case LTU
: return CC1
;
695 case GTU
: return CC3
;
696 case LEU
: return CC1
| CC2
;
697 case GEU
: return CC2
| CC3
;
703 switch (GET_CODE (code
))
706 case NE
: return CC1
| CC2
| CC3
;
707 case LTU
: return CC1
;
708 case GTU
: return CC2
;
709 case LEU
: return CC0
| CC1
;
710 case GEU
: return CC0
| CC2
;
717 switch (GET_CODE (code
))
720 case NE
: return CC2
| CC1
| CC3
;
721 case LTU
: return CC2
;
722 case GTU
: return CC1
;
723 case LEU
: return CC0
| CC2
;
724 case GEU
: return CC0
| CC1
;
731 switch (GET_CODE (code
))
734 case NE
: return CC1
| CC2
| CC3
;
735 case LT
: return CC1
| CC3
;
737 case LE
: return CC0
| CC1
| CC3
;
738 case GE
: return CC0
| CC2
;
745 switch (GET_CODE (code
))
748 case NE
: return CC1
| CC2
| CC3
;
750 case GT
: return CC2
| CC3
;
751 case LE
: return CC0
| CC1
;
752 case GE
: return CC0
| CC2
| CC3
;
759 switch (GET_CODE (code
))
762 case NE
: return CC1
| CC2
| CC3
;
765 case LE
: return CC0
| CC1
;
766 case GE
: return CC0
| CC2
;
767 case UNORDERED
: return CC3
;
768 case ORDERED
: return CC0
| CC1
| CC2
;
769 case UNEQ
: return CC0
| CC3
;
770 case UNLT
: return CC1
| CC3
;
771 case UNGT
: return CC2
| CC3
;
772 case UNLE
: return CC0
| CC1
| CC3
;
773 case UNGE
: return CC0
| CC2
| CC3
;
774 case LTGT
: return CC1
| CC2
;
781 switch (GET_CODE (code
))
784 case NE
: return CC2
| CC1
| CC3
;
787 case LE
: return CC0
| CC2
;
788 case GE
: return CC0
| CC1
;
789 case UNORDERED
: return CC3
;
790 case ORDERED
: return CC0
| CC2
| CC1
;
791 case UNEQ
: return CC0
| CC3
;
792 case UNLT
: return CC2
| CC3
;
793 case UNGT
: return CC1
| CC3
;
794 case UNLE
: return CC0
| CC2
| CC3
;
795 case UNGE
: return CC0
| CC1
| CC3
;
796 case LTGT
: return CC2
| CC1
;
807 /* If INV is false, return assembler mnemonic string to implement
808 a branch specified by CODE. If INV is true, return mnemonic
809 for the corresponding inverted branch. */
812 s390_branch_condition_mnemonic (rtx code
, int inv
)
814 static const char *const mnemonic
[16] =
816 NULL
, "o", "h", "nle",
817 "l", "nhe", "lh", "ne",
818 "e", "nlh", "he", "nl",
819 "le", "nh", "no", NULL
822 int mask
= s390_branch_condition_mask (code
);
827 if (mask
< 1 || mask
> 14)
830 return mnemonic
[mask
];
833 /* Return the part of op which has a value different from def.
834 The size of the part is determined by mode.
835 Use this function only if you already know that op really
836 contains such a part. */
838 unsigned HOST_WIDE_INT
839 s390_extract_part (rtx op
, enum machine_mode mode
, int def
)
841 unsigned HOST_WIDE_INT value
= 0;
842 int max_parts
= HOST_BITS_PER_WIDE_INT
/ GET_MODE_BITSIZE (mode
);
843 int part_bits
= GET_MODE_BITSIZE (mode
);
844 unsigned HOST_WIDE_INT part_mask
= (1 << part_bits
) - 1;
847 for (i
= 0; i
< max_parts
; i
++)
850 value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
854 if ((value
& part_mask
) != (def
& part_mask
))
855 return value
& part_mask
;
861 /* If OP is an integer constant of mode MODE with exactly one
862 part of mode PART_MODE unequal to DEF, return the number of that
863 part. Otherwise, return -1. */
866 s390_single_part (rtx op
,
867 enum machine_mode mode
,
868 enum machine_mode part_mode
,
871 unsigned HOST_WIDE_INT value
= 0;
872 int n_parts
= GET_MODE_SIZE (mode
) / GET_MODE_SIZE (part_mode
);
873 unsigned HOST_WIDE_INT part_mask
= (1 << GET_MODE_BITSIZE (part_mode
)) - 1;
876 if (GET_CODE (op
) != CONST_INT
)
879 for (i
= 0; i
< n_parts
; i
++)
882 value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
884 value
>>= GET_MODE_BITSIZE (part_mode
);
886 if ((value
& part_mask
) != (def
& part_mask
))
894 return part
== -1 ? -1 : n_parts
- 1 - part
;
897 /* Check whether we can (and want to) split a double-word
898 move in mode MODE from SRC to DST into two single-word
899 moves, moving the subword FIRST_SUBWORD first. */
902 s390_split_ok_p (rtx dst
, rtx src
, enum machine_mode mode
, int first_subword
)
904 /* Floating point registers cannot be split. */
905 if (FP_REG_P (src
) || FP_REG_P (dst
))
908 /* We don't need to split if operands are directly accessible. */
909 if (s_operand (src
, mode
) || s_operand (dst
, mode
))
912 /* Non-offsettable memory references cannot be split. */
913 if ((GET_CODE (src
) == MEM
&& !offsettable_memref_p (src
))
914 || (GET_CODE (dst
) == MEM
&& !offsettable_memref_p (dst
)))
917 /* Moving the first subword must not clobber a register
918 needed to move the second subword. */
919 if (register_operand (dst
, mode
))
921 rtx subreg
= operand_subword (dst
, first_subword
, 0, mode
);
922 if (reg_overlap_mentioned_p (subreg
, src
))
930 /* Change optimizations to be performed, depending on the
933 LEVEL is the optimization level specified; 2 if `-O2' is
934 specified, 1 if `-O' is specified, and 0 if neither is specified.
936 SIZE is nonzero if `-Os' is specified and zero otherwise. */
939 optimization_options (int level ATTRIBUTE_UNUSED
, int size ATTRIBUTE_UNUSED
)
941 /* ??? There are apparently still problems with -fcaller-saves. */
942 flag_caller_saves
= 0;
944 /* By default, always emit DWARF-2 unwind info. This allows debugging
945 without maintaining a stack frame back-chain. */
946 flag_asynchronous_unwind_tables
= 1;
950 override_options (void)
955 const char *const name
; /* processor name or nickname. */
956 const enum processor_type processor
;
957 const enum processor_flags flags
;
959 const processor_alias_table
[] =
961 {"g5", PROCESSOR_9672_G5
, PF_IEEE_FLOAT
},
962 {"g6", PROCESSOR_9672_G6
, PF_IEEE_FLOAT
},
963 {"z900", PROCESSOR_2064_Z900
, PF_IEEE_FLOAT
| PF_ZARCH
},
964 {"z990", PROCESSOR_2084_Z990
, PF_IEEE_FLOAT
| PF_ZARCH
965 | PF_LONG_DISPLACEMENT
},
968 int const pta_size
= ARRAY_SIZE (processor_alias_table
);
970 /* Acquire a unique set number for our register saves and restores. */
971 s390_sr_alias_set
= new_alias_set ();
973 /* Set up function hooks. */
974 init_machine_status
= s390_init_machine_status
;
976 /* Architecture mode defaults according to ABI. */
977 if (!(target_flags_explicit
& MASK_ZARCH
))
980 target_flags
|= MASK_ZARCH
;
982 target_flags
&= ~MASK_ZARCH
;
985 /* Determine processor architectural level. */
986 if (!s390_arch_string
)
987 s390_arch_string
= TARGET_ZARCH
? "z900" : "g5";
989 for (i
= 0; i
< pta_size
; i
++)
990 if (! strcmp (s390_arch_string
, processor_alias_table
[i
].name
))
992 s390_arch
= processor_alias_table
[i
].processor
;
993 s390_arch_flags
= processor_alias_table
[i
].flags
;
997 error ("Unknown cpu used in -march=%s.", s390_arch_string
);
999 /* Determine processor to tune for. */
1000 if (!s390_tune_string
)
1002 s390_tune
= s390_arch
;
1003 s390_tune_flags
= s390_arch_flags
;
1004 s390_tune_string
= s390_arch_string
;
1008 for (i
= 0; i
< pta_size
; i
++)
1009 if (! strcmp (s390_tune_string
, processor_alias_table
[i
].name
))
1011 s390_tune
= processor_alias_table
[i
].processor
;
1012 s390_tune_flags
= processor_alias_table
[i
].flags
;
1016 error ("Unknown cpu used in -mtune=%s.", s390_tune_string
);
1019 /* Sanity checks. */
1020 if (TARGET_ZARCH
&& !(s390_arch_flags
& PF_ZARCH
))
1021 error ("z/Architecture mode not supported on %s.", s390_arch_string
);
1022 if (TARGET_64BIT
&& !TARGET_ZARCH
)
1023 error ("64-bit ABI not supported in ESA/390 mode.");
1026 /* Map for smallest class containing reg regno. */
1028 const enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
1029 { GENERAL_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1030 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1031 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1032 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1033 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1034 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1035 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1036 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1037 ADDR_REGS
, NO_REGS
, ADDR_REGS
1040 /* Return attribute type of insn. */
1042 static enum attr_type
1043 s390_safe_attr_type (rtx insn
)
1045 if (recog_memoized (insn
) >= 0)
1046 return get_attr_type (insn
);
1051 /* Return true if OP a (const_int 0) operand.
1052 OP is the current operation.
1053 MODE is the current operation mode. */
1056 const0_operand (register rtx op
, enum machine_mode mode
)
1058 return op
== CONST0_RTX (mode
);
1061 /* Return true if OP is constant.
1062 OP is the current operation.
1063 MODE is the current operation mode. */
1066 consttable_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1068 return CONSTANT_P (op
);
1071 /* Return true if the mode of operand OP matches MODE.
1072 If MODE is set to VOIDmode, set it to the mode of OP. */
1075 check_mode (register rtx op
, enum machine_mode
*mode
)
1077 if (*mode
== VOIDmode
)
1078 *mode
= GET_MODE (op
);
1081 if (GET_MODE (op
) != VOIDmode
&& GET_MODE (op
) != *mode
)
1087 /* Return true if OP a valid operand for the LARL instruction.
1088 OP is the current operation.
1089 MODE is the current operation mode. */
1092 larl_operand (register rtx op
, enum machine_mode mode
)
1094 if (! check_mode (op
, &mode
))
1097 /* Allow labels and local symbols. */
1098 if (GET_CODE (op
) == LABEL_REF
)
1100 if (GET_CODE (op
) == SYMBOL_REF
)
1101 return ((SYMBOL_REF_FLAGS (op
) & SYMBOL_FLAG_ALIGN1
) == 0
1102 && SYMBOL_REF_TLS_MODEL (op
) == 0
1103 && (!flag_pic
|| SYMBOL_REF_LOCAL_P (op
)));
1105 /* Everything else must have a CONST, so strip it. */
1106 if (GET_CODE (op
) != CONST
)
1110 /* Allow adding *even* in-range constants. */
1111 if (GET_CODE (op
) == PLUS
)
1113 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
1114 || (INTVAL (XEXP (op
, 1)) & 1) != 0)
1116 #if HOST_BITS_PER_WIDE_INT > 32
1117 if (INTVAL (XEXP (op
, 1)) >= (HOST_WIDE_INT
)1 << 32
1118 || INTVAL (XEXP (op
, 1)) < -((HOST_WIDE_INT
)1 << 32))
1124 /* Labels and local symbols allowed here as well. */
1125 if (GET_CODE (op
) == LABEL_REF
)
1127 if (GET_CODE (op
) == SYMBOL_REF
)
1128 return ((SYMBOL_REF_FLAGS (op
) & SYMBOL_FLAG_ALIGN1
) == 0
1129 && SYMBOL_REF_TLS_MODEL (op
) == 0
1130 && (!flag_pic
|| SYMBOL_REF_LOCAL_P (op
)));
1132 /* Now we must have a @GOTENT offset or @PLT stub
1133 or an @INDNTPOFF TLS offset. */
1134 if (GET_CODE (op
) == UNSPEC
1135 && XINT (op
, 1) == UNSPEC_GOTENT
)
1137 if (GET_CODE (op
) == UNSPEC
1138 && XINT (op
, 1) == UNSPEC_PLT
)
1140 if (GET_CODE (op
) == UNSPEC
1141 && XINT (op
, 1) == UNSPEC_INDNTPOFF
)
1147 /* Helper routine to implement s_operand and s_imm_operand.
1148 OP is the current operation.
1149 MODE is the current operation mode.
1150 ALLOW_IMMEDIATE specifies whether immediate operands should
1151 be accepted or not. */
1154 general_s_operand (register rtx op
, enum machine_mode mode
,
1155 int allow_immediate
)
1157 struct s390_address addr
;
1159 /* Call general_operand first, so that we don't have to
1160 check for many special cases. */
1161 if (!general_operand (op
, mode
))
1164 /* Just like memory_operand, allow (subreg (mem ...))
1166 if (reload_completed
1167 && GET_CODE (op
) == SUBREG
1168 && GET_CODE (SUBREG_REG (op
)) == MEM
)
1169 op
= SUBREG_REG (op
);
1171 switch (GET_CODE (op
))
1173 /* Constants are OK as s-operand if ALLOW_IMMEDIATE
1174 is true and we are still before reload. */
1177 if (!allow_immediate
|| reload_completed
)
1181 /* Memory operands are OK unless they already use an
1184 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1188 /* Do not allow literal pool references unless ALLOW_IMMEDIATE
1189 is true. This prevents compares between two literal pool
1190 entries from being accepted. */
1191 if (!allow_immediate
1192 && addr
.base
&& REGNO (addr
.base
) == BASE_REGISTER
)
1203 /* Return true if OP is a valid S-type operand.
1204 OP is the current operation.
1205 MODE is the current operation mode. */
1208 s_operand (register rtx op
, enum machine_mode mode
)
1210 return general_s_operand (op
, mode
, 0);
1213 /* Return true if OP is a valid S-type operand or an immediate
1214 operand that can be addressed as S-type operand by forcing
1215 it into the literal pool.
1216 OP is the current operation.
1217 MODE is the current operation mode. */
1220 s_imm_operand (register rtx op
, enum machine_mode mode
)
1222 return general_s_operand (op
, mode
, 1);
1225 /* Return true if OP a valid shift count operand.
1226 OP is the current operation.
1227 MODE is the current operation mode. */
1230 shift_count_operand (rtx op
, enum machine_mode mode
)
1232 HOST_WIDE_INT offset
= 0;
1234 if (! check_mode (op
, &mode
))
1237 /* We can have an integer constant, an address register,
1238 or a sum of the two. Note that reload already checks
1239 that any register present is an address register, so
1240 we just check for any register here. */
1241 if (GET_CODE (op
) == CONST_INT
)
1243 offset
= INTVAL (op
);
1246 if (op
&& GET_CODE (op
) == PLUS
&& GET_CODE (XEXP (op
, 1)) == CONST_INT
)
1248 offset
= INTVAL (XEXP (op
, 1));
1251 while (op
&& GET_CODE (op
) == SUBREG
)
1252 op
= SUBREG_REG (op
);
1253 if (op
&& GET_CODE (op
) != REG
)
1256 /* Unfortunately we have to reject constants that are invalid
1257 for an address, or else reload will get confused. */
1258 if (!DISP_IN_RANGE (offset
))
1264 /* Return true if DISP is a valid short displacement. */
1267 s390_short_displacement (rtx disp
)
1269 /* No displacement is OK. */
1273 /* Integer displacement in range. */
1274 if (GET_CODE (disp
) == CONST_INT
)
1275 return INTVAL (disp
) >= 0 && INTVAL (disp
) < 4096;
1277 /* GOT offset is not OK, the GOT can be large. */
1278 if (GET_CODE (disp
) == CONST
1279 && GET_CODE (XEXP (disp
, 0)) == UNSPEC
1280 && XINT (XEXP (disp
, 0), 1) == UNSPEC_GOT
)
1283 /* All other symbolic constants are literal pool references,
1284 which are OK as the literal pool must be small. */
1285 if (GET_CODE (disp
) == CONST
)
1291 /* Return true if OP is a valid operand for a C constraint. */
1294 s390_extra_constraint_str (rtx op
, int c
, const char * str
)
1296 struct s390_address addr
;
1304 if (GET_CODE (op
) != MEM
)
1306 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1311 if (TARGET_LONG_DISPLACEMENT
)
1313 if (!s390_short_displacement (addr
.disp
))
1319 if (GET_CODE (op
) != MEM
)
1322 if (TARGET_LONG_DISPLACEMENT
)
1324 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1326 if (!s390_short_displacement (addr
.disp
))
1332 if (!TARGET_LONG_DISPLACEMENT
)
1334 if (GET_CODE (op
) != MEM
)
1336 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1340 if (s390_short_displacement (addr
.disp
))
1345 if (!TARGET_LONG_DISPLACEMENT
)
1347 if (GET_CODE (op
) != MEM
)
1349 /* Any invalid address here will be fixed up by reload,
1350 so accept it for the most generic constraint. */
1351 if (s390_decompose_address (XEXP (op
, 0), &addr
)
1352 && s390_short_displacement (addr
.disp
))
1357 if (TARGET_LONG_DISPLACEMENT
)
1359 if (!s390_decompose_address (op
, &addr
))
1361 if (!s390_short_displacement (addr
.disp
))
1367 if (!TARGET_LONG_DISPLACEMENT
)
1369 /* Any invalid address here will be fixed up by reload,
1370 so accept it for the most generic constraint. */
1371 if (s390_decompose_address (op
, &addr
)
1372 && s390_short_displacement (addr
.disp
))
1377 return shift_count_operand (op
, VOIDmode
);
1386 /* Return true if VALUE matches the constraint STR. */
1389 s390_const_ok_for_constraint_p (HOST_WIDE_INT value
,
1393 enum machine_mode mode
, part_mode
;
1403 return (unsigned int)value
< 256;
1406 return (unsigned int)value
< 4096;
1409 return value
>= -32768 && value
< 32768;
1412 return (TARGET_LONG_DISPLACEMENT
?
1413 (value
>= -524288 && value
<= 524287)
1414 : (value
>= 0 && value
<= 4095));
1416 return value
== 2147483647;
1419 part
= str
[1] - '0';
1423 case 'H': part_mode
= HImode
; break;
1424 case 'Q': part_mode
= QImode
; break;
1430 case 'H': mode
= HImode
; break;
1431 case 'S': mode
= SImode
; break;
1432 case 'D': mode
= DImode
; break;
1438 case '0': def
= 0; break;
1439 case 'F': def
= -1; break;
1443 if (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (part_mode
))
1446 if (s390_single_part (GEN_INT (value
), mode
, part_mode
, def
) != part
)
1458 /* Compute a (partial) cost for rtx X. Return true if the complete
1459 cost has been computed, and false if subexpressions should be
1460 scanned. In either case, *TOTAL contains the cost result. */
1463 s390_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
1468 if (GET_CODE (XEXP (x
, 0)) == MINUS
1469 && GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
1476 /* Force_const_mem does not work out of reload, because the
1477 saveable_obstack is set to reload_obstack, which does not
1478 live long enough. Because of this we cannot use force_const_mem
1479 in addsi3. This leads to problems with gen_add2_insn with a
1480 constant greater than a short. Because of that we give an
1481 addition of greater constants a cost of 3 (reload1.c 10096). */
1482 /* ??? saveable_obstack no longer exists. */
1483 if (outer_code
== PLUS
1484 && (INTVAL (x
) > 32767 || INTVAL (x
) < -32768))
1485 *total
= COSTS_N_INSNS (3);
1506 *total
= COSTS_N_INSNS (1);
1510 if (GET_MODE (XEXP (x
, 0)) == DImode
)
1511 *total
= COSTS_N_INSNS (40);
1513 *total
= COSTS_N_INSNS (7);
1520 *total
= COSTS_N_INSNS (33);
1528 /* Return the cost of an address rtx ADDR. */
1531 s390_address_cost (rtx addr
)
1533 struct s390_address ad
;
1534 if (!s390_decompose_address (addr
, &ad
))
1537 return ad
.indx
? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1540 /* Return true if OP is a valid operand for the BRAS instruction.
1541 OP is the current operation.
1542 MODE is the current operation mode. */
1545 bras_sym_operand (register rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1547 register enum rtx_code code
= GET_CODE (op
);
1549 /* Allow SYMBOL_REFs. */
1550 if (code
== SYMBOL_REF
)
1553 /* Allow @PLT stubs. */
1555 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1556 && XINT (XEXP (op
, 0), 1) == UNSPEC_PLT
)
1561 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1562 otherwise return 0. */
1565 tls_symbolic_operand (register rtx op
)
1567 if (GET_CODE (op
) != SYMBOL_REF
)
1569 return SYMBOL_REF_TLS_MODEL (op
);
1572 /* Return true if OP is a load multiple operation. It is known to be a
1573 PARALLEL and the first section will be tested.
1574 OP is the current operation.
1575 MODE is the current operation mode. */
1578 load_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1580 enum machine_mode elt_mode
;
1581 int count
= XVECLEN (op
, 0);
1582 unsigned int dest_regno
;
1587 /* Perform a quick check so we don't blow up below. */
1589 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
1590 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
1591 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
1594 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
1595 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
1596 elt_mode
= GET_MODE (SET_DEST (XVECEXP (op
, 0, 0)));
1598 /* Check, is base, or base + displacement. */
1600 if (GET_CODE (src_addr
) == REG
)
1602 else if (GET_CODE (src_addr
) == PLUS
1603 && GET_CODE (XEXP (src_addr
, 0)) == REG
1604 && GET_CODE (XEXP (src_addr
, 1)) == CONST_INT
)
1606 off
= INTVAL (XEXP (src_addr
, 1));
1607 src_addr
= XEXP (src_addr
, 0);
1612 if (src_addr
== frame_pointer_rtx
|| src_addr
== arg_pointer_rtx
)
1615 for (i
= 1; i
< count
; i
++)
1617 rtx elt
= XVECEXP (op
, 0, i
);
1619 if (GET_CODE (elt
) != SET
1620 || GET_CODE (SET_DEST (elt
)) != REG
1621 || GET_MODE (SET_DEST (elt
)) != elt_mode
1622 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
1623 || GET_CODE (SET_SRC (elt
)) != MEM
1624 || GET_MODE (SET_SRC (elt
)) != elt_mode
1625 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
1626 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
1627 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
1628 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1))
1629 != off
+ i
* GET_MODE_SIZE (elt_mode
))
1636 /* Return true if OP is a store multiple operation. It is known to be a
1637 PARALLEL and the first section will be tested.
1638 OP is the current operation.
1639 MODE is the current operation mode. */
1642 store_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1644 enum machine_mode elt_mode
;
1645 int count
= XVECLEN (op
, 0);
1646 unsigned int src_regno
;
1650 /* Perform a quick check so we don't blow up below. */
1652 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
1653 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
1654 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
1657 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
1658 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
1659 elt_mode
= GET_MODE (SET_SRC (XVECEXP (op
, 0, 0)));
1661 /* Check, is base, or base + displacement. */
1663 if (GET_CODE (dest_addr
) == REG
)
1665 else if (GET_CODE (dest_addr
) == PLUS
1666 && GET_CODE (XEXP (dest_addr
, 0)) == REG
1667 && GET_CODE (XEXP (dest_addr
, 1)) == CONST_INT
)
1669 off
= INTVAL (XEXP (dest_addr
, 1));
1670 dest_addr
= XEXP (dest_addr
, 0);
1675 if (dest_addr
== frame_pointer_rtx
|| dest_addr
== arg_pointer_rtx
)
1678 for (i
= 1; i
< count
; i
++)
1680 rtx elt
= XVECEXP (op
, 0, i
);
1682 if (GET_CODE (elt
) != SET
1683 || GET_CODE (SET_SRC (elt
)) != REG
1684 || GET_MODE (SET_SRC (elt
)) != elt_mode
1685 || REGNO (SET_SRC (elt
)) != src_regno
+ i
1686 || GET_CODE (SET_DEST (elt
)) != MEM
1687 || GET_MODE (SET_DEST (elt
)) != elt_mode
1688 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
1689 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
1690 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
1691 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1))
1692 != off
+ i
* GET_MODE_SIZE (elt_mode
))
1699 /* Return true if OP contains a symbol reference */
1702 symbolic_reference_mentioned_p (rtx op
)
1704 register const char *fmt
;
1707 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1710 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1711 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1717 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1718 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1722 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1729 /* Return true if OP contains a reference to a thread-local symbol. */
1732 tls_symbolic_reference_mentioned_p (rtx op
)
1734 register const char *fmt
;
1737 if (GET_CODE (op
) == SYMBOL_REF
)
1738 return tls_symbolic_operand (op
);
1740 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1741 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1747 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1748 if (tls_symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1752 else if (fmt
[i
] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op
, i
)))
1760 /* Return true if OP is a legitimate general operand when
1761 generating PIC code. It is given that flag_pic is on
1762 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1765 legitimate_pic_operand_p (register rtx op
)
1767 /* Accept all non-symbolic constants. */
1768 if (!SYMBOLIC_CONST (op
))
1771 /* Reject everything else; must be handled
1772 via emit_symbolic_move. */
1776 /* Returns true if the constant value OP is a legitimate general operand.
1777 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1780 legitimate_constant_p (register rtx op
)
1782 /* Accept all non-symbolic constants. */
1783 if (!SYMBOLIC_CONST (op
))
1786 /* Accept immediate LARL operands. */
1787 if (TARGET_CPU_ZARCH
&& larl_operand (op
, VOIDmode
))
1790 /* Thread-local symbols are never legal constants. This is
1791 so that emit_call knows that computing such addresses
1792 might require a function call. */
1793 if (TLS_SYMBOLIC_CONST (op
))
1796 /* In the PIC case, symbolic constants must *not* be
1797 forced into the literal pool. We accept them here,
1798 so that they will be handled by emit_symbolic_move. */
1802 /* All remaining non-PIC symbolic constants are
1803 forced into the literal pool. */
1807 /* Determine if it's legal to put X into the constant pool. This
1808 is not possible if X contains the address of a symbol that is
1809 not constant (TLS) or not known at final link time (PIC). */
1812 s390_cannot_force_const_mem (rtx x
)
1814 switch (GET_CODE (x
))
1818 /* Accept all non-symbolic constants. */
1822 /* Labels are OK iff we are non-PIC. */
1823 return flag_pic
!= 0;
1826 /* 'Naked' TLS symbol references are never OK,
1827 non-TLS symbols are OK iff we are non-PIC. */
1828 if (tls_symbolic_operand (x
))
1831 return flag_pic
!= 0;
1834 return s390_cannot_force_const_mem (XEXP (x
, 0));
1837 return s390_cannot_force_const_mem (XEXP (x
, 0))
1838 || s390_cannot_force_const_mem (XEXP (x
, 1));
1841 switch (XINT (x
, 1))
1843 /* Only lt-relative or GOT-relative UNSPECs are OK. */
1844 case UNSPEC_LTREL_OFFSET
:
1852 case UNSPEC_GOTNTPOFF
:
1853 case UNSPEC_INDNTPOFF
:
1866 /* Returns true if the constant value OP is a legitimate general
1867 operand during and after reload. The difference to
1868 legitimate_constant_p is that this function will not accept
1869 a constant that would need to be forced to the literal pool
1870 before it can be used as operand. */
1873 legitimate_reload_constant_p (register rtx op
)
1875 /* Accept la(y) operands. */
1876 if (GET_CODE (op
) == CONST_INT
1877 && DISP_IN_RANGE (INTVAL (op
)))
1880 /* Accept l(g)hi operands. */
1881 if (GET_CODE (op
) == CONST_INT
1882 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'K', "K"))
1885 /* Accept lliXX operands. */
1887 && s390_single_part (op
, DImode
, HImode
, 0) >= 0)
1890 /* Accept larl operands. */
1891 if (TARGET_CPU_ZARCH
1892 && larl_operand (op
, VOIDmode
))
1895 /* Everything else cannot be handled without reload. */
1899 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1900 return the class of reg to actually use. */
1903 s390_preferred_reload_class (rtx op
, enum reg_class
class)
1905 /* This can happen if a floating point constant is being
1906 reloaded into an integer register. Leave well alone. */
1907 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
1908 && class != FP_REGS
)
1911 switch (GET_CODE (op
))
1913 /* Constants we cannot reload must be forced into the
1918 if (legitimate_reload_constant_p (op
))
1923 /* If a symbolic constant or a PLUS is reloaded,
1924 it is most likely being used as an address, so
1925 prefer ADDR_REGS. If 'class' is not a superset
1926 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1931 if (reg_class_subset_p (ADDR_REGS
, class))
1943 /* Return the register class of a scratch register needed to
1944 load IN into a register of class CLASS in MODE.
1946 We need a temporary when loading a PLUS expression which
1947 is not a legitimate operand of the LOAD ADDRESS instruction. */
1950 s390_secondary_input_reload_class (enum reg_class
class ATTRIBUTE_UNUSED
,
1951 enum machine_mode mode
, rtx in
)
1953 if (s390_plus_operand (in
, mode
))
1959 /* Return the register class of a scratch register needed to
1960 store a register of class CLASS in MODE into OUT:
1962 We need a temporary when storing a double-word to a
1963 non-offsettable memory address. */
1966 s390_secondary_output_reload_class (enum reg_class
class,
1967 enum machine_mode mode
, rtx out
)
1969 if ((TARGET_64BIT
? mode
== TImode
1970 : (mode
== DImode
|| mode
== DFmode
))
1971 && reg_classes_intersect_p (GENERAL_REGS
, class)
1972 && GET_CODE (out
) == MEM
1973 && !offsettable_memref_p (out
)
1974 && !s_operand (out
, VOIDmode
))
1980 /* Return true if OP is a PLUS that is not a legitimate
1981 operand for the LA instruction.
1982 OP is the current operation.
1983 MODE is the current operation mode. */
1986 s390_plus_operand (register rtx op
, enum machine_mode mode
)
1988 if (!check_mode (op
, &mode
) || mode
!= Pmode
)
1991 if (GET_CODE (op
) != PLUS
)
1994 if (legitimate_la_operand_p (op
))
2000 /* Generate code to load SRC, which is PLUS that is not a
2001 legitimate operand for the LA instruction, into TARGET.
2002 SCRATCH may be used as scratch register. */
2005 s390_expand_plus_operand (register rtx target
, register rtx src
,
2006 register rtx scratch
)
2009 struct s390_address ad
;
2011 /* src must be a PLUS; get its two operands. */
2012 if (GET_CODE (src
) != PLUS
|| GET_MODE (src
) != Pmode
)
2015 /* Check if any of the two operands is already scheduled
2016 for replacement by reload. This can happen e.g. when
2017 float registers occur in an address. */
2018 sum1
= find_replacement (&XEXP (src
, 0));
2019 sum2
= find_replacement (&XEXP (src
, 1));
2020 src
= gen_rtx_PLUS (Pmode
, sum1
, sum2
);
2022 /* If the address is already strictly valid, there's nothing to do. */
2023 if (!s390_decompose_address (src
, &ad
)
2024 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
2025 || (ad
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (ad
.indx
)))
2027 /* Otherwise, one of the operands cannot be an address register;
2028 we reload its value into the scratch register. */
2029 if (true_regnum (sum1
) < 1 || true_regnum (sum1
) > 15)
2031 emit_move_insn (scratch
, sum1
);
2034 if (true_regnum (sum2
) < 1 || true_regnum (sum2
) > 15)
2036 emit_move_insn (scratch
, sum2
);
2040 /* According to the way these invalid addresses are generated
2041 in reload.c, it should never happen (at least on s390) that
2042 *neither* of the PLUS components, after find_replacements
2043 was applied, is an address register. */
2044 if (sum1
== scratch
&& sum2
== scratch
)
2050 src
= gen_rtx_PLUS (Pmode
, sum1
, sum2
);
2053 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2054 is only ever performed on addresses, so we can mark the
2055 sum as legitimate for LA in any case. */
2056 s390_load_address (target
, src
);
2060 /* Decompose a RTL expression ADDR for a memory address into
2061 its components, returned in OUT.
2063 Returns 0 if ADDR is not a valid memory address, nonzero
2064 otherwise. If OUT is NULL, don't return the components,
2065 but check for validity only.
2067 Note: Only addresses in canonical form are recognized.
2068 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2069 canonical form so that they will be recognized. */
2072 s390_decompose_address (register rtx addr
, struct s390_address
*out
)
2074 HOST_WIDE_INT offset
= 0;
2075 rtx base
= NULL_RTX
;
2076 rtx indx
= NULL_RTX
;
2077 rtx disp
= NULL_RTX
;
2079 int pointer
= FALSE
;
2080 int base_ptr
= FALSE
;
2081 int indx_ptr
= FALSE
;
2083 /* Decompose address into base + index + displacement. */
2085 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == UNSPEC
)
2088 else if (GET_CODE (addr
) == PLUS
)
2090 rtx op0
= XEXP (addr
, 0);
2091 rtx op1
= XEXP (addr
, 1);
2092 enum rtx_code code0
= GET_CODE (op0
);
2093 enum rtx_code code1
= GET_CODE (op1
);
2095 if (code0
== REG
|| code0
== UNSPEC
)
2097 if (code1
== REG
|| code1
== UNSPEC
)
2099 indx
= op0
; /* index + base */
2105 base
= op0
; /* base + displacement */
2110 else if (code0
== PLUS
)
2112 indx
= XEXP (op0
, 0); /* index + base + disp */
2113 base
= XEXP (op0
, 1);
2124 disp
= addr
; /* displacement */
2126 /* Extract integer part of displacement. */
2130 if (GET_CODE (disp
) == CONST_INT
)
2132 offset
= INTVAL (disp
);
2135 else if (GET_CODE (disp
) == CONST
2136 && GET_CODE (XEXP (disp
, 0)) == PLUS
2137 && GET_CODE (XEXP (XEXP (disp
, 0), 1)) == CONST_INT
)
2139 offset
= INTVAL (XEXP (XEXP (disp
, 0), 1));
2140 disp
= XEXP (XEXP (disp
, 0), 0);
2144 /* Strip off CONST here to avoid special case tests later. */
2145 if (disp
&& GET_CODE (disp
) == CONST
)
2146 disp
= XEXP (disp
, 0);
2148 /* We can convert literal pool addresses to
2149 displacements by basing them off the base register. */
2150 if (disp
&& GET_CODE (disp
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (disp
))
2152 /* Either base or index must be free to hold the base register. */
2154 base
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2156 indx
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2160 /* Mark up the displacement. */
2161 disp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, disp
),
2162 UNSPEC_LTREL_OFFSET
);
2165 /* Validate base register. */
2168 if (GET_CODE (base
) == UNSPEC
)
2169 switch (XINT (base
, 1))
2173 disp
= gen_rtx_UNSPEC (Pmode
,
2174 gen_rtvec (1, XVECEXP (base
, 0, 0)),
2175 UNSPEC_LTREL_OFFSET
);
2179 base
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2182 case UNSPEC_LTREL_BASE
:
2183 base
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2190 if (GET_CODE (base
) != REG
|| GET_MODE (base
) != Pmode
)
2193 if (REGNO (base
) == BASE_REGISTER
2194 || REGNO (base
) == STACK_POINTER_REGNUM
2195 || REGNO (base
) == FRAME_POINTER_REGNUM
2196 || ((reload_completed
|| reload_in_progress
)
2197 && frame_pointer_needed
2198 && REGNO (base
) == HARD_FRAME_POINTER_REGNUM
)
2199 || REGNO (base
) == ARG_POINTER_REGNUM
2201 && REGNO (base
) == PIC_OFFSET_TABLE_REGNUM
))
2202 pointer
= base_ptr
= TRUE
;
2205 /* Validate index register. */
2208 if (GET_CODE (indx
) == UNSPEC
)
2209 switch (XINT (indx
, 1))
2213 disp
= gen_rtx_UNSPEC (Pmode
,
2214 gen_rtvec (1, XVECEXP (indx
, 0, 0)),
2215 UNSPEC_LTREL_OFFSET
);
2219 indx
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2222 case UNSPEC_LTREL_BASE
:
2223 indx
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2230 if (GET_CODE (indx
) != REG
|| GET_MODE (indx
) != Pmode
)
2233 if (REGNO (indx
) == BASE_REGISTER
2234 || REGNO (indx
) == STACK_POINTER_REGNUM
2235 || REGNO (indx
) == FRAME_POINTER_REGNUM
2236 || ((reload_completed
|| reload_in_progress
)
2237 && frame_pointer_needed
2238 && REGNO (indx
) == HARD_FRAME_POINTER_REGNUM
)
2239 || REGNO (indx
) == ARG_POINTER_REGNUM
2241 && REGNO (indx
) == PIC_OFFSET_TABLE_REGNUM
))
2242 pointer
= indx_ptr
= TRUE
;
2245 /* Prefer to use pointer as base, not index. */
2246 if (base
&& indx
&& !base_ptr
2247 && (indx_ptr
|| (!REG_POINTER (base
) && REG_POINTER (indx
))))
2254 /* Validate displacement. */
2257 /* If the argument pointer is involved, the displacement will change
2258 later anyway as the argument pointer gets eliminated. This could
2259 make a valid displacement invalid, but it is more likely to make
2260 an invalid displacement valid, because we sometimes access the
2261 register save area via negative offsets to the arg pointer.
2262 Thus we don't check the displacement for validity here. If after
2263 elimination the displacement turns out to be invalid after all,
2264 this is fixed up by reload in any case. */
2265 if (base
!= arg_pointer_rtx
&& indx
!= arg_pointer_rtx
)
2266 if (!DISP_IN_RANGE (offset
))
2271 /* All the special cases are pointers. */
2274 /* In the small-PIC case, the linker converts @GOT
2275 and @GOTNTPOFF offsets to possible displacements. */
2276 if (GET_CODE (disp
) == UNSPEC
2277 && (XINT (disp
, 1) == UNSPEC_GOT
2278 || XINT (disp
, 1) == UNSPEC_GOTNTPOFF
)
2285 /* Accept chunkified literal pool symbol references. */
2286 else if (GET_CODE (disp
) == MINUS
2287 && GET_CODE (XEXP (disp
, 0)) == LABEL_REF
2288 && GET_CODE (XEXP (disp
, 1)) == LABEL_REF
)
2293 /* Accept literal pool references. */
2294 else if (GET_CODE (disp
) == UNSPEC
2295 && XINT (disp
, 1) == UNSPEC_LTREL_OFFSET
)
2297 orig_disp
= gen_rtx_CONST (Pmode
, disp
);
2300 /* If we have an offset, make sure it does not
2301 exceed the size of the constant pool entry. */
2302 rtx sym
= XVECEXP (disp
, 0, 0);
2303 if (offset
>= GET_MODE_SIZE (get_pool_mode (sym
)))
2306 orig_disp
= plus_constant (orig_disp
, offset
);
2321 out
->disp
= orig_disp
;
2322 out
->pointer
= pointer
;
2328 /* Return nonzero if ADDR is a valid memory address.
2329 STRICT specifies whether strict register checking applies. */
2332 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
2333 register rtx addr
, int strict
)
2335 struct s390_address ad
;
2336 if (!s390_decompose_address (addr
, &ad
))
2341 if (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
2343 if (ad
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (ad
.indx
))
2348 if (ad
.base
&& !REG_OK_FOR_BASE_NONSTRICT_P (ad
.base
))
2350 if (ad
.indx
&& !REG_OK_FOR_INDEX_NONSTRICT_P (ad
.indx
))
2357 /* Return 1 if OP is a valid operand for the LA instruction.
2358 In 31-bit, we need to prove that the result is used as an
2359 address, as LA performs only a 31-bit addition. */
2362 legitimate_la_operand_p (register rtx op
)
2364 struct s390_address addr
;
2365 if (!s390_decompose_address (op
, &addr
))
2368 if (TARGET_64BIT
|| addr
.pointer
)
2374 /* Return 1 if OP is a valid operand for the LA instruction,
2375 and we prefer to use LA over addition to compute it. */
2378 preferred_la_operand_p (register rtx op
)
2380 struct s390_address addr
;
2381 if (!s390_decompose_address (op
, &addr
))
2384 if (!TARGET_64BIT
&& !addr
.pointer
)
2390 if ((addr
.base
&& REG_P (addr
.base
) && REG_POINTER (addr
.base
))
2391 || (addr
.indx
&& REG_P (addr
.indx
) && REG_POINTER (addr
.indx
)))
2397 /* Emit a forced load-address operation to load SRC into DST.
2398 This will use the LOAD ADDRESS instruction even in situations
2399 where legitimate_la_operand_p (SRC) returns false. */
2402 s390_load_address (rtx dst
, rtx src
)
2405 emit_move_insn (dst
, src
);
2407 emit_insn (gen_force_la_31 (dst
, src
));
2410 /* Return a legitimate reference for ORIG (an address) using the
2411 register REG. If REG is 0, a new pseudo is generated.
2413 There are two types of references that must be handled:
2415 1. Global data references must load the address from the GOT, via
2416 the PIC reg. An insn is emitted to do this load, and the reg is
2419 2. Static data references, constant pool addresses, and code labels
2420 compute the address as an offset from the GOT, whose base is in
2421 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2422 differentiate them from global data objects. The returned
2423 address is the PIC reg + an unspec constant.
2425 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2426 reg also appears in the address. */
2429 legitimize_pic_address (rtx orig
, rtx reg
)
2435 if (GET_CODE (addr
) == LABEL_REF
2436 || (GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_LOCAL_P (addr
)))
2438 /* This is a local symbol. */
2439 if (TARGET_CPU_ZARCH
&& larl_operand (addr
, VOIDmode
))
2441 /* Access local symbols PC-relative via LARL.
2442 This is the same as in the non-PIC case, so it is
2443 handled automatically ... */
2447 /* Access local symbols relative to the GOT. */
2449 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2451 if (reload_in_progress
|| reload_completed
)
2452 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2454 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTOFF
);
2455 addr
= gen_rtx_CONST (Pmode
, addr
);
2456 addr
= force_const_mem (Pmode
, addr
);
2457 emit_move_insn (temp
, addr
);
2459 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2462 emit_move_insn (reg
, new);
2467 else if (GET_CODE (addr
) == SYMBOL_REF
)
2470 reg
= gen_reg_rtx (Pmode
);
2474 /* Assume GOT offset < 4k. This is handled the same way
2475 in both 31- and 64-bit code (@GOT). */
2477 if (reload_in_progress
|| reload_completed
)
2478 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2480 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOT
);
2481 new = gen_rtx_CONST (Pmode
, new);
2482 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
2483 new = gen_rtx_MEM (Pmode
, new);
2484 RTX_UNCHANGING_P (new) = 1;
2485 emit_move_insn (reg
, new);
2488 else if (TARGET_CPU_ZARCH
)
2490 /* If the GOT offset might be >= 4k, we determine the position
2491 of the GOT entry via a PC-relative LARL (@GOTENT). */
2493 rtx temp
= gen_reg_rtx (Pmode
);
2495 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTENT
);
2496 new = gen_rtx_CONST (Pmode
, new);
2497 emit_move_insn (temp
, new);
2499 new = gen_rtx_MEM (Pmode
, temp
);
2500 RTX_UNCHANGING_P (new) = 1;
2501 emit_move_insn (reg
, new);
2506 /* If the GOT offset might be >= 4k, we have to load it
2507 from the literal pool (@GOT). */
2509 rtx temp
= gen_reg_rtx (Pmode
);
2511 if (reload_in_progress
|| reload_completed
)
2512 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2514 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOT
);
2515 addr
= gen_rtx_CONST (Pmode
, addr
);
2516 addr
= force_const_mem (Pmode
, addr
);
2517 emit_move_insn (temp
, addr
);
2519 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2520 new = gen_rtx_MEM (Pmode
, new);
2521 RTX_UNCHANGING_P (new) = 1;
2522 emit_move_insn (reg
, new);
2528 if (GET_CODE (addr
) == CONST
)
2530 addr
= XEXP (addr
, 0);
2531 if (GET_CODE (addr
) == UNSPEC
)
2533 if (XVECLEN (addr
, 0) != 1)
2535 switch (XINT (addr
, 1))
2537 /* If someone moved a GOT-relative UNSPEC
2538 out of the literal pool, force them back in. */
2541 new = force_const_mem (Pmode
, orig
);
2544 /* @GOT is OK as is if small. */
2547 new = force_const_mem (Pmode
, orig
);
2550 /* @GOTENT is OK as is. */
2554 /* @PLT is OK as is on 64-bit, must be converted to
2555 GOT-relative @PLTOFF on 31-bit. */
2557 if (!TARGET_CPU_ZARCH
)
2559 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2561 if (reload_in_progress
|| reload_completed
)
2562 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2564 addr
= XVECEXP (addr
, 0, 0);
2565 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
),
2567 addr
= gen_rtx_CONST (Pmode
, addr
);
2568 addr
= force_const_mem (Pmode
, addr
);
2569 emit_move_insn (temp
, addr
);
2571 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2574 emit_move_insn (reg
, new);
2580 /* Everything else cannot happen. */
2585 else if (GET_CODE (addr
) != PLUS
)
2588 if (GET_CODE (addr
) == PLUS
)
2590 rtx op0
= XEXP (addr
, 0), op1
= XEXP (addr
, 1);
2591 /* Check first to see if this is a constant offset
2592 from a local symbol reference. */
2593 if ((GET_CODE (op0
) == LABEL_REF
2594 || (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_LOCAL_P (op0
)))
2595 && GET_CODE (op1
) == CONST_INT
)
2597 if (TARGET_CPU_ZARCH
&& larl_operand (op0
, VOIDmode
))
2599 if (INTVAL (op1
) & 1)
2601 /* LARL can't handle odd offsets, so emit a
2602 pair of LARL and LA. */
2603 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2605 if (!DISP_IN_RANGE (INTVAL (op1
)))
2607 int even
= INTVAL (op1
) - 1;
2608 op0
= gen_rtx_PLUS (Pmode
, op0
, GEN_INT (even
));
2609 op0
= gen_rtx_CONST (Pmode
, op0
);
2613 emit_move_insn (temp
, op0
);
2614 new = gen_rtx_PLUS (Pmode
, temp
, op1
);
2618 emit_move_insn (reg
, new);
2624 /* If the offset is even, we can just use LARL.
2625 This will happen automatically. */
2630 /* Access local symbols relative to the GOT. */
2632 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2634 if (reload_in_progress
|| reload_completed
)
2635 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2637 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op0
),
2639 addr
= gen_rtx_PLUS (Pmode
, addr
, op1
);
2640 addr
= gen_rtx_CONST (Pmode
, addr
);
2641 addr
= force_const_mem (Pmode
, addr
);
2642 emit_move_insn (temp
, addr
);
2644 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2647 emit_move_insn (reg
, new);
2653 /* Now, check whether it is a GOT relative symbol plus offset
2654 that was pulled out of the literal pool. Force it back in. */
2656 else if (GET_CODE (op0
) == UNSPEC
2657 && GET_CODE (op1
) == CONST_INT
2658 && XINT (op0
, 1) == UNSPEC_GOTOFF
)
2660 if (XVECLEN (op0
, 0) != 1)
2663 new = force_const_mem (Pmode
, orig
);
2666 /* Otherwise, compute the sum. */
2669 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2670 new = legitimize_pic_address (XEXP (addr
, 1),
2671 base
== reg
? NULL_RTX
: reg
);
2672 if (GET_CODE (new) == CONST_INT
)
2673 new = plus_constant (base
, INTVAL (new));
2676 if (GET_CODE (new) == PLUS
&& CONSTANT_P (XEXP (new, 1)))
2678 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (new, 0));
2679 new = XEXP (new, 1);
2681 new = gen_rtx_PLUS (Pmode
, base
, new);
2684 if (GET_CODE (new) == CONST
)
2685 new = XEXP (new, 0);
2686 new = force_operand (new, 0);
2693 /* Load the thread pointer into a register. */
2696 get_thread_pointer (void)
2700 tp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
), UNSPEC_TP
);
2701 tp
= force_reg (Pmode
, tp
);
2702 mark_reg_pointer (tp
, BITS_PER_WORD
);
2707 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
2708 in s390_tls_symbol which always refers to __tls_get_offset.
2709 The returned offset is written to RESULT_REG and an USE rtx is
2710 generated for TLS_CALL. */
2712 static GTY(()) rtx s390_tls_symbol
;
2715 s390_emit_tls_call_insn (rtx result_reg
, rtx tls_call
)
2722 if (!s390_tls_symbol
)
2723 s390_tls_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "__tls_get_offset");
2725 insn
= s390_emit_call (s390_tls_symbol
, tls_call
, result_reg
,
2726 gen_rtx_REG (Pmode
, RETURN_REGNUM
));
2728 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), result_reg
);
2729 CONST_OR_PURE_CALL_P (insn
) = 1;
2732 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2733 this (thread-local) address. REG may be used as temporary. */
2736 legitimize_tls_address (rtx addr
, rtx reg
)
2738 rtx
new, tls_call
, temp
, base
, r2
, insn
;
2740 if (GET_CODE (addr
) == SYMBOL_REF
)
2741 switch (tls_symbolic_operand (addr
))
2743 case TLS_MODEL_GLOBAL_DYNAMIC
:
2745 r2
= gen_rtx_REG (Pmode
, 2);
2746 tls_call
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_TLSGD
);
2747 new = gen_rtx_CONST (Pmode
, tls_call
);
2748 new = force_const_mem (Pmode
, new);
2749 emit_move_insn (r2
, new);
2750 s390_emit_tls_call_insn (r2
, tls_call
);
2751 insn
= get_insns ();
2754 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_NTPOFF
);
2755 temp
= gen_reg_rtx (Pmode
);
2756 emit_libcall_block (insn
, temp
, r2
, new);
2758 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
2761 s390_load_address (reg
, new);
2766 case TLS_MODEL_LOCAL_DYNAMIC
:
2768 r2
= gen_rtx_REG (Pmode
, 2);
2769 tls_call
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
), UNSPEC_TLSLDM
);
2770 new = gen_rtx_CONST (Pmode
, tls_call
);
2771 new = force_const_mem (Pmode
, new);
2772 emit_move_insn (r2
, new);
2773 s390_emit_tls_call_insn (r2
, tls_call
);
2774 insn
= get_insns ();
2777 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
), UNSPEC_TLSLDM_NTPOFF
);
2778 temp
= gen_reg_rtx (Pmode
);
2779 emit_libcall_block (insn
, temp
, r2
, new);
2781 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
2782 base
= gen_reg_rtx (Pmode
);
2783 s390_load_address (base
, new);
2785 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_DTPOFF
);
2786 new = gen_rtx_CONST (Pmode
, new);
2787 new = force_const_mem (Pmode
, new);
2788 temp
= gen_reg_rtx (Pmode
);
2789 emit_move_insn (temp
, new);
2791 new = gen_rtx_PLUS (Pmode
, base
, temp
);
2794 s390_load_address (reg
, new);
2799 case TLS_MODEL_INITIAL_EXEC
:
2802 /* Assume GOT offset < 4k. This is handled the same way
2803 in both 31- and 64-bit code. */
2805 if (reload_in_progress
|| reload_completed
)
2806 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2808 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTNTPOFF
);
2809 new = gen_rtx_CONST (Pmode
, new);
2810 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
2811 new = gen_rtx_MEM (Pmode
, new);
2812 RTX_UNCHANGING_P (new) = 1;
2813 temp
= gen_reg_rtx (Pmode
);
2814 emit_move_insn (temp
, new);
2816 else if (TARGET_CPU_ZARCH
)
2818 /* If the GOT offset might be >= 4k, we determine the position
2819 of the GOT entry via a PC-relative LARL. */
2821 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_INDNTPOFF
);
2822 new = gen_rtx_CONST (Pmode
, new);
2823 temp
= gen_reg_rtx (Pmode
);
2824 emit_move_insn (temp
, new);
2826 new = gen_rtx_MEM (Pmode
, temp
);
2827 RTX_UNCHANGING_P (new) = 1;
2828 temp
= gen_reg_rtx (Pmode
);
2829 emit_move_insn (temp
, new);
2833 /* If the GOT offset might be >= 4k, we have to load it
2834 from the literal pool. */
2836 if (reload_in_progress
|| reload_completed
)
2837 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2839 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTNTPOFF
);
2840 new = gen_rtx_CONST (Pmode
, new);
2841 new = force_const_mem (Pmode
, new);
2842 temp
= gen_reg_rtx (Pmode
);
2843 emit_move_insn (temp
, new);
2845 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2846 new = gen_rtx_MEM (Pmode
, new);
2847 RTX_UNCHANGING_P (new) = 1;
2849 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, new, addr
), UNSPEC_TLS_LOAD
);
2850 temp
= gen_reg_rtx (Pmode
);
2851 emit_insn (gen_rtx_SET (Pmode
, temp
, new));
2855 /* In position-dependent code, load the absolute address of
2856 the GOT entry from the literal pool. */
2858 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_INDNTPOFF
);
2859 new = gen_rtx_CONST (Pmode
, new);
2860 new = force_const_mem (Pmode
, new);
2861 temp
= gen_reg_rtx (Pmode
);
2862 emit_move_insn (temp
, new);
2865 new = gen_rtx_MEM (Pmode
, new);
2866 RTX_UNCHANGING_P (new) = 1;
2868 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, new, addr
), UNSPEC_TLS_LOAD
);
2869 temp
= gen_reg_rtx (Pmode
);
2870 emit_insn (gen_rtx_SET (Pmode
, temp
, new));
2873 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
2876 s390_load_address (reg
, new);
2881 case TLS_MODEL_LOCAL_EXEC
:
2882 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_NTPOFF
);
2883 new = gen_rtx_CONST (Pmode
, new);
2884 new = force_const_mem (Pmode
, new);
2885 temp
= gen_reg_rtx (Pmode
);
2886 emit_move_insn (temp
, new);
2888 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
2891 s390_load_address (reg
, new);
2900 else if (GET_CODE (addr
) == CONST
&& GET_CODE (XEXP (addr
, 0)) == UNSPEC
)
2902 switch (XINT (XEXP (addr
, 0), 1))
2904 case UNSPEC_INDNTPOFF
:
2905 if (TARGET_CPU_ZARCH
)
2917 abort (); /* for now ... */
2922 /* Emit insns to move operands[1] into operands[0]. */
/* NOTE(review): corrupted extraction -- statements are split across lines,
   old line numbers are embedded, and at least one source line (the guard
   before the legitimize_pic_address call, presumably "else if (flag_pic)"
   -- TODO confirm against pristine sources) is missing.  Code kept
   byte-identical.  */
/* Move operands[1] into operands[0]: a MEM destination forces the source
   into a register; a TLS symbolic constant goes through
   legitimize_tls_address; otherwise legitimize_pic_address is used.  */
2925 emit_symbolic_move (rtx
*operands
)
2927 rtx temp
= no_new_pseudos
? operands
[0] : gen_reg_rtx (Pmode
);
2929 if (GET_CODE (operands
[0]) == MEM
)
2930 operands
[1] = force_reg (Pmode
, operands
[1]);
2931 else if (TLS_SYMBOLIC_CONST (operands
[1]))
2932 operands
[1] = legitimize_tls_address (operands
[1], temp
);
2934 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2937 /* Try machine-dependent ways of modifying an illegitimate address X
2938 to be legitimate. If we find one, return the new, valid address.
2940 OLDX is the address as it was before break_out_memory_refs was called.
2941 In some cases it is useful to look at this to decide what needs to be done.
2943 MODE is the mode of the operand pointed to by X.
2945 When -fpic is used, special handling is needed for symbolic references.
2946 See comments by legitimize_pic_address for details. */
/* NOTE(review): corrupted extraction -- statements are split across lines,
   old line numbers are embedded, and some lines (braces, returns) are
   missing.  Code kept byte-identical.  */
/* Legitimize address X: first handle TLS and symbolic (PIC) constants,
   then split out the constant term; on !TARGET_LONG_DISPLACEMENT targets
   an out-of-range displacement is split into a 4K multiple (kept for CSE)
   and a low 12-bit remainder.  A remaining reg+expr PLUS is forced so the
   non-register side lives in a fresh pseudo.  */
2949 legitimize_address (register rtx x
, register rtx oldx ATTRIBUTE_UNUSED
,
2950 enum machine_mode mode ATTRIBUTE_UNUSED
)
2952 rtx constant_term
= const0_rtx
;
2954 if (TLS_SYMBOLIC_CONST (x
))
2956 x
= legitimize_tls_address (x
, 0);
2958 if (legitimate_address_p (mode
, x
, FALSE
))
2963 if (SYMBOLIC_CONST (x
)
2964 || (GET_CODE (x
) == PLUS
2965 && (SYMBOLIC_CONST (XEXP (x
, 0))
2966 || SYMBOLIC_CONST (XEXP (x
, 1)))))
2967 x
= legitimize_pic_address (x
, 0);
2969 if (legitimate_address_p (mode
, x
, FALSE
))
2973 x
= eliminate_constant_term (x
, &constant_term
);
2975 /* Optimize loading of large displacements by splitting them
2976 into the multiple of 4K and the rest; this allows the
2977 former to be CSE'd if possible.
2979 Don't do this if the displacement is added to a register
2980 pointing into the stack frame, as the offsets will
2981 change later anyway. */
2983 if (GET_CODE (constant_term
) == CONST_INT
2984 && !TARGET_LONG_DISPLACEMENT
2985 && !DISP_IN_RANGE (INTVAL (constant_term
))
2986 && !(REG_P (x
) && REGNO_PTR_FRAME_P (REGNO (x
))))
2988 HOST_WIDE_INT lower
= INTVAL (constant_term
) & 0xfff;
2989 HOST_WIDE_INT upper
= INTVAL (constant_term
) ^ lower
;
2991 rtx temp
= gen_reg_rtx (Pmode
);
2992 rtx val
= force_operand (GEN_INT (upper
), temp
);
2994 emit_move_insn (temp
, val
);
2996 x
= gen_rtx_PLUS (Pmode
, x
, temp
);
2997 constant_term
= GEN_INT (lower
);
3000 if (GET_CODE (x
) == PLUS
)
3002 if (GET_CODE (XEXP (x
, 0)) == REG
)
3004 register rtx temp
= gen_reg_rtx (Pmode
);
3005 register rtx val
= force_operand (XEXP (x
, 1), temp
);
3007 emit_move_insn (temp
, val
);
3009 x
= gen_rtx_PLUS (Pmode
, XEXP (x
, 0), temp
);
3012 else if (GET_CODE (XEXP (x
, 1)) == REG
)
3014 register rtx temp
= gen_reg_rtx (Pmode
);
3015 register rtx val
= force_operand (XEXP (x
, 0), temp
);
3017 emit_move_insn (temp
, val
);
3019 x
= gen_rtx_PLUS (Pmode
, temp
, XEXP (x
, 1));
3023 if (constant_term
!= const0_rtx
)
3024 x
= gen_rtx_PLUS (Pmode
, x
, constant_term
);
3029 /* Emit code to copy LEN bytes from SRC to DST.  */
/* NOTE(review): corrupted extraction -- statements are split across lines,
   old line numbers are embedded, and some lines (braces, VOIDmode
   fallback assignment) are missing.  Code kept byte-identical.  */
/* Expand a block move of LEN bytes from SRC to DST.  A constant length
   of 1..256 uses a single gen_movmem_short (MVC takes length-1);
   TARGET_MVCLE uses gen_movmem_long; otherwise a loop moves 256-byte
   chunks (count is decremented once because MVC encodes length-1, then
   blocks = count >> 8) with a trailing partial move via EXRL/EX.  */
3032 s390_expand_movmem (rtx dst
, rtx src
, rtx len
)
3034 if (GET_CODE (len
) == CONST_INT
&& INTVAL (len
) >= 0 && INTVAL (len
) <= 256)
3036 if (INTVAL (len
) > 0)
3037 emit_insn (gen_movmem_short (dst
, src
, GEN_INT (INTVAL (len
) - 1)));
3040 else if (TARGET_MVCLE
)
3042 emit_insn (gen_movmem_long (dst
, src
, convert_to_mode (Pmode
, len
, 1)));
3047 rtx dst_addr
, src_addr
, count
, blocks
, temp
;
3048 rtx loop_start_label
= gen_label_rtx ();
3049 rtx loop_end_label
= gen_label_rtx ();
3050 rtx end_label
= gen_label_rtx ();
3051 enum machine_mode mode
;
3053 mode
= GET_MODE (len
);
3054 if (mode
== VOIDmode
)
3057 dst_addr
= gen_reg_rtx (Pmode
);
3058 src_addr
= gen_reg_rtx (Pmode
);
3059 count
= gen_reg_rtx (mode
);
3060 blocks
= gen_reg_rtx (mode
);
3062 convert_move (count
, len
, 1);
3063 emit_cmp_and_jump_insns (count
, const0_rtx
,
3064 EQ
, NULL_RTX
, mode
, 1, end_label
);
3066 emit_move_insn (dst_addr
, force_operand (XEXP (dst
, 0), NULL_RTX
));
3067 emit_move_insn (src_addr
, force_operand (XEXP (src
, 0), NULL_RTX
));
3068 dst
= change_address (dst
, VOIDmode
, dst_addr
);
3069 src
= change_address (src
, VOIDmode
, src_addr
);
3071 temp
= expand_binop (mode
, add_optab
, count
, constm1_rtx
, count
, 1, 0);
3073 emit_move_insn (count
, temp
);
3075 temp
= expand_binop (mode
, ashr_optab
, count
, GEN_INT (8), blocks
, 1, 0);
3077 emit_move_insn (blocks
, temp
);
3079 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3080 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3082 emit_label (loop_start_label
);
3084 emit_insn (gen_movmem_short (dst
, src
, GEN_INT (255)));
3085 s390_load_address (dst_addr
,
3086 gen_rtx_PLUS (Pmode
, dst_addr
, GEN_INT (256)));
3087 s390_load_address (src_addr
,
3088 gen_rtx_PLUS (Pmode
, src_addr
, GEN_INT (256)));
3090 temp
= expand_binop (mode
, add_optab
, blocks
, constm1_rtx
, blocks
, 1, 0);
3092 emit_move_insn (blocks
, temp
);
3094 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3095 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3097 emit_jump (loop_start_label
);
3098 emit_label (loop_end_label
);
3100 emit_insn (gen_movmem_short (dst
, src
,
3101 convert_to_mode (Pmode
, count
, 1)));
3102 emit_label (end_label
);
3106 /* Emit code to clear LEN bytes at DST. */
/* NOTE(review): corrupted extraction -- statements are split across lines,
   old line numbers are embedded, and some lines are missing.  Code kept
   byte-identical.  */
/* Expand clearing of LEN bytes at DST; mirrors s390_expand_movmem:
   constant 1..256 -> one gen_clrmem_short (XC-style, length-1 encoding),
   TARGET_MVCLE -> gen_clrmem_long, otherwise a 256-byte-chunk loop with a
   trailing partial clear.  src_addr is declared but appears unused here
   -- presumably leftover from the movmem variant.  */
3109 s390_expand_clrmem (rtx dst
, rtx len
)
3111 if (GET_CODE (len
) == CONST_INT
&& INTVAL (len
) >= 0 && INTVAL (len
) <= 256)
3113 if (INTVAL (len
) > 0)
3114 emit_insn (gen_clrmem_short (dst
, GEN_INT (INTVAL (len
) - 1)));
3117 else if (TARGET_MVCLE
)
3119 emit_insn (gen_clrmem_long (dst
, convert_to_mode (Pmode
, len
, 1)));
3124 rtx dst_addr
, src_addr
, count
, blocks
, temp
;
3125 rtx loop_start_label
= gen_label_rtx ();
3126 rtx loop_end_label
= gen_label_rtx ();
3127 rtx end_label
= gen_label_rtx ();
3128 enum machine_mode mode
;
3130 mode
= GET_MODE (len
);
3131 if (mode
== VOIDmode
)
3134 dst_addr
= gen_reg_rtx (Pmode
);
3135 src_addr
= gen_reg_rtx (Pmode
);
3136 count
= gen_reg_rtx (mode
);
3137 blocks
= gen_reg_rtx (mode
);
3139 convert_move (count
, len
, 1);
3140 emit_cmp_and_jump_insns (count
, const0_rtx
,
3141 EQ
, NULL_RTX
, mode
, 1, end_label
);
3143 emit_move_insn (dst_addr
, force_operand (XEXP (dst
, 0), NULL_RTX
));
3144 dst
= change_address (dst
, VOIDmode
, dst_addr
);
3146 temp
= expand_binop (mode
, add_optab
, count
, constm1_rtx
, count
, 1, 0);
3148 emit_move_insn (count
, temp
);
3150 temp
= expand_binop (mode
, ashr_optab
, count
, GEN_INT (8), blocks
, 1, 0);
3152 emit_move_insn (blocks
, temp
);
3154 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3155 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3157 emit_label (loop_start_label
);
3159 emit_insn (gen_clrmem_short (dst
, GEN_INT (255)));
3160 s390_load_address (dst_addr
,
3161 gen_rtx_PLUS (Pmode
, dst_addr
, GEN_INT (256)));
3163 temp
= expand_binop (mode
, add_optab
, blocks
, constm1_rtx
, blocks
, 1, 0);
3165 emit_move_insn (blocks
, temp
);
3167 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3168 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3170 emit_jump (loop_start_label
);
3171 emit_label (loop_end_label
);
3173 emit_insn (gen_clrmem_short (dst
, convert_to_mode (Pmode
, count
, 1)));
3174 emit_label (end_label
);
3178 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3179 and return the result in TARGET. */
/* NOTE(review): corrupted extraction -- statements are split across lines,
   old line numbers are embedded, and some lines (braces, the #if/#else
   guards implied by the "Deactivate for now" comment) are missing.  Code
   kept byte-identical.  */
/* Compare LEN bytes at OP0 and OP1, placing the (-1/0/1 style) result in
   TARGET via gen_cmpint_di/si.  Constant 1..256 uses one
   gen_cmpmem_short (CLC, length-1 encoding); zero length stores 0
   directly.  The looped variant compares 256-byte chunks and exits early
   on inequality by testing CC register 33 (NE -> jump to end_label).  */
3182 s390_expand_cmpmem (rtx target
, rtx op0
, rtx op1
, rtx len
)
3184 rtx (*gen_result
) (rtx
) =
3185 GET_MODE (target
) == DImode
? gen_cmpint_di
: gen_cmpint_si
;
3187 op0
= protect_from_queue (op0
, 0);
3188 op1
= protect_from_queue (op1
, 0);
3189 len
= protect_from_queue (len
, 0);
3191 if (GET_CODE (len
) == CONST_INT
&& INTVAL (len
) >= 0 && INTVAL (len
) <= 256)
3193 if (INTVAL (len
) > 0)
3195 emit_insn (gen_cmpmem_short (op0
, op1
, GEN_INT (INTVAL (len
) - 1)));
3196 emit_insn (gen_result (target
));
3199 emit_move_insn (target
, const0_rtx
);
3202 else /* if (TARGET_MVCLE) */
3204 emit_insn (gen_cmpmem_long (op0
, op1
, convert_to_mode (Pmode
, len
, 1)));
3205 emit_insn (gen_result (target
));
3209 /* Deactivate for now as profile code cannot cope with
3210 CC being live across basic block boundaries. */
3213 rtx addr0
, addr1
, count
, blocks
, temp
;
3214 rtx loop_start_label
= gen_label_rtx ();
3215 rtx loop_end_label
= gen_label_rtx ();
3216 rtx end_label
= gen_label_rtx ();
3217 enum machine_mode mode
;
3219 mode
= GET_MODE (len
);
3220 if (mode
== VOIDmode
)
3223 addr0
= gen_reg_rtx (Pmode
);
3224 addr1
= gen_reg_rtx (Pmode
);
3225 count
= gen_reg_rtx (mode
);
3226 blocks
= gen_reg_rtx (mode
);
3228 convert_move (count
, len
, 1);
3229 emit_cmp_and_jump_insns (count
, const0_rtx
,
3230 EQ
, NULL_RTX
, mode
, 1, end_label
);
3232 emit_move_insn (addr0
, force_operand (XEXP (op0
, 0), NULL_RTX
));
3233 emit_move_insn (addr1
, force_operand (XEXP (op1
, 0), NULL_RTX
));
3234 op0
= change_address (op0
, VOIDmode
, addr0
);
3235 op1
= change_address (op1
, VOIDmode
, addr1
);
3237 temp
= expand_binop (mode
, add_optab
, count
, constm1_rtx
, count
, 1, 0);
3239 emit_move_insn (count
, temp
);
3241 temp
= expand_binop (mode
, ashr_optab
, count
, GEN_INT (8), blocks
, 1, 0);
3243 emit_move_insn (blocks
, temp
);
3245 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3246 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3248 emit_label (loop_start_label
);
3250 emit_insn (gen_cmpmem_short (op0
, op1
, GEN_INT (255)));
3251 temp
= gen_rtx_NE (VOIDmode
, gen_rtx_REG (CCSmode
, 33), const0_rtx
);
3252 temp
= gen_rtx_IF_THEN_ELSE (VOIDmode
, temp
,
3253 gen_rtx_LABEL_REF (VOIDmode
, end_label
), pc_rtx
);
3254 temp
= gen_rtx_SET (VOIDmode
, pc_rtx
, temp
);
3255 emit_jump_insn (temp
);
3257 s390_load_address (addr0
,
3258 gen_rtx_PLUS (Pmode
, addr0
, GEN_INT (256)));
3259 s390_load_address (addr1
,
3260 gen_rtx_PLUS (Pmode
, addr1
, GEN_INT (256)));
3262 temp
= expand_binop (mode
, add_optab
, blocks
, constm1_rtx
, blocks
, 1, 0);
3264 emit_move_insn (blocks
, temp
);
3266 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3267 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3269 emit_jump (loop_start_label
);
3270 emit_label (loop_end_label
);
3272 emit_insn (gen_cmpmem_short (op0
, op1
,
3273 convert_to_mode (Pmode
, count
, 1)));
3274 emit_label (end_label
);
3276 emit_insn (gen_result (target
));
3282 /* Expand conditional increment or decrement using alc/slb instructions.
3283 Should generate code setting DST to either SRC or SRC + INCREMENT,
3284 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3285 Returns true if successful, false otherwise. */
/* NOTE(review): corrupted extraction -- statements are split across lines,
   old line numbers are embedded, and many lines are missing (cmp_mode /
   cc_mode assignments, the RTVEC_ELT stores into P, returns, declarations
   of insn/op_res/p).  Code kept byte-identical.  */
/* Expand conditional increment/decrement via ALC (increment == const1_rtx)
   or SLB (increment == constm1_rtx).  EQ/NE against a nonzero operand is
   canonicalized by XOR-ing the operands and comparing against zero with an
   unsigned condition; some condition codes are handled by swapping the
   comparison.  The result pattern is a PARALLEL of the SET and a CCmode
   clobber; insn_invalid_p is used to add required clobbers on the
   comparison.  */
3288 s390_expand_addcc (enum rtx_code cmp_code
, rtx cmp_op0
, rtx cmp_op1
,
3289 rtx dst
, rtx src
, rtx increment
)
3291 enum machine_mode cmp_mode
;
3292 enum machine_mode cc_mode
;
3297 if ((GET_MODE (cmp_op0
) == SImode
|| GET_MODE (cmp_op0
) == VOIDmode
)
3298 && (GET_MODE (cmp_op1
) == SImode
|| GET_MODE (cmp_op1
) == VOIDmode
))
3300 else if ((GET_MODE (cmp_op0
) == DImode
|| GET_MODE (cmp_op0
) == VOIDmode
)
3301 && (GET_MODE (cmp_op1
) == DImode
|| GET_MODE (cmp_op1
) == VOIDmode
))
3306 /* Try ADD LOGICAL WITH CARRY. */
3307 if (increment
== const1_rtx
)
3309 /* Determine CC mode to use. */
3310 if (cmp_code
== EQ
|| cmp_code
== NE
)
3312 if (cmp_op1
!= const0_rtx
)
3314 cmp_op0
= expand_simple_binop (cmp_mode
, XOR
, cmp_op0
, cmp_op1
,
3315 NULL_RTX
, 0, OPTAB_WIDEN
);
3316 cmp_op1
= const0_rtx
;
3319 cmp_code
= cmp_code
== EQ
? LEU
: GTU
;
3322 if (cmp_code
== LTU
|| cmp_code
== LEU
)
3327 cmp_code
= swap_condition (cmp_code
);
3344 /* Emit comparison instruction pattern. */
3345 if (!register_operand (cmp_op0
, cmp_mode
))
3346 cmp_op0
= force_reg (cmp_mode
, cmp_op0
);
3348 insn
= gen_rtx_SET (VOIDmode
, gen_rtx_REG (cc_mode
, CC_REGNUM
),
3349 gen_rtx_COMPARE (cc_mode
, cmp_op0
, cmp_op1
));
3350 /* We use insn_invalid_p here to add clobbers if required. */
3351 if (insn_invalid_p (emit_insn (insn
)))
3354 /* Emit ALC instruction pattern. */
3355 op_res
= gen_rtx_fmt_ee (cmp_code
, GET_MODE (dst
),
3356 gen_rtx_REG (cc_mode
, CC_REGNUM
),
3359 if (src
!= const0_rtx
)
3361 if (!register_operand (src
, GET_MODE (dst
)))
3362 src
= force_reg (GET_MODE (dst
), src
);
3364 src
= gen_rtx_PLUS (GET_MODE (dst
), src
, const0_rtx
);
3365 op_res
= gen_rtx_PLUS (GET_MODE (dst
), src
, op_res
);
3368 p
= rtvec_alloc (2);
3370 gen_rtx_SET (VOIDmode
, dst
, op_res
);
3372 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (CCmode
, CC_REGNUM
));
3373 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
3378 /* Try SUBTRACT LOGICAL WITH BORROW. */
3379 if (increment
== constm1_rtx
)
3381 /* Determine CC mode to use. */
3382 if (cmp_code
== EQ
|| cmp_code
== NE
)
3384 if (cmp_op1
!= const0_rtx
)
3386 cmp_op0
= expand_simple_binop (cmp_mode
, XOR
, cmp_op0
, cmp_op1
,
3387 NULL_RTX
, 0, OPTAB_WIDEN
);
3388 cmp_op1
= const0_rtx
;
3391 cmp_code
= cmp_code
== EQ
? LEU
: GTU
;
3394 if (cmp_code
== GTU
|| cmp_code
== GEU
)
3399 cmp_code
= swap_condition (cmp_code
);
3416 /* Emit comparison instruction pattern. */
3417 if (!register_operand (cmp_op0
, cmp_mode
))
3418 cmp_op0
= force_reg (cmp_mode
, cmp_op0
);
3420 insn
= gen_rtx_SET (VOIDmode
, gen_rtx_REG (cc_mode
, CC_REGNUM
),
3421 gen_rtx_COMPARE (cc_mode
, cmp_op0
, cmp_op1
));
3422 /* We use insn_invalid_p here to add clobbers if required. */
3423 if (insn_invalid_p (emit_insn (insn
)))
3426 /* Emit SLB instruction pattern. */
3427 if (!register_operand (src
, GET_MODE (dst
)))
3428 src
= force_reg (GET_MODE (dst
), src
);
3430 op_res
= gen_rtx_MINUS (GET_MODE (dst
),
3431 gen_rtx_MINUS (GET_MODE (dst
), src
, const0_rtx
),
3432 gen_rtx_fmt_ee (cmp_code
, GET_MODE (dst
),
3433 gen_rtx_REG (cc_mode
, CC_REGNUM
),
3435 p
= rtvec_alloc (2);
3437 gen_rtx_SET (VOIDmode
, dst
, op_res
);
3439 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (CCmode
, CC_REGNUM
));
3440 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
3449 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3450 We need to emit DTP-relative relocations. */
/* NOTE(review): corrupted extraction -- the switch/case dispatch on SIZE
   (presumably 4 -> .long, 8 -> .quad) and the default/abort are missing.
   Code kept byte-identical.  */
/* Emit a DTP-relative (module-local TLS) relocation for X into FILE,
   as directive + constant + "@DTPOFF" suffix.  */
3453 s390_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
3458 fputs ("\t.long\t", file
);
3461 fputs ("\t.quad\t", file
);
3466 output_addr_const (file
, x
);
3467 fputs ("@DTPOFF", file
);
3470 /* In the name of slightly smaller debug output, and to cater to
3471 general assembler lossage, recognize various UNSPEC sequences
3472 and turn them back into a direct symbol reference. */
/* NOTE(review): corrupted extraction -- the local declarations of x/y,
   the assignments stripping the MEM/CONST wrappers, and the final return
   are missing.  Code kept byte-identical.  */
/* Undo PIC legitimization: for a GOT-register + UNSPEC_GOT sum, or a
   CONST wrapping UNSPEC_GOTENT, return the original symbol stored in the
   UNSPEC's operand vector.  */
3475 s390_delegitimize_address (rtx orig_x
)
3479 if (GET_CODE (x
) != MEM
)
3483 if (GET_CODE (x
) == PLUS
3484 && GET_CODE (XEXP (x
, 1)) == CONST
3485 && GET_CODE (XEXP (x
, 0)) == REG
3486 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
3488 y
= XEXP (XEXP (x
, 1), 0);
3489 if (GET_CODE (y
) == UNSPEC
3490 && XINT (y
, 1) == UNSPEC_GOT
)
3491 return XVECEXP (y
, 0, 0);
3495 if (GET_CODE (x
) == CONST
)
3498 if (GET_CODE (y
) == UNSPEC
3499 && XINT (y
, 1) == UNSPEC_GOTENT
)
3500 return XVECEXP (y
, 0, 0);
3507 /* Output shift count operand OP to stdio stream FILE. */
/* NOTE(review): corrupted extraction -- statements are split across lines,
   old line numbers are embedded, and some lines (op = NULL after
   consuming the constant, op = XEXP (op, 0), the fatal error on a bad
   register, the "if (op)" guard before the register print) are missing.
   Code kept byte-identical.  */
/* Print a shift count operand OP (integer constant, address register, or
   their sum) to FILE; the constant part is masked to the low 6 bits, and
   a register part is printed as "(%rN)".  */
3510 print_shift_count_operand (FILE *file
, rtx op
)
3512 HOST_WIDE_INT offset
= 0;
3514 /* We can have an integer constant, an address register,
3515 or a sum of the two. */
3516 if (GET_CODE (op
) == CONST_INT
)
3518 offset
= INTVAL (op
);
3521 if (op
&& GET_CODE (op
) == PLUS
&& GET_CODE (XEXP (op
, 1)) == CONST_INT
)
3523 offset
= INTVAL (XEXP (op
, 1));
3526 while (op
&& GET_CODE (op
) == SUBREG
)
3527 op
= SUBREG_REG (op
);
3530 if (op
&& (GET_CODE (op
) != REG
3531 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
3532 || REGNO_REG_CLASS (REGNO (op
)) != ADDR_REGS
))
3535 /* Shift counts are truncated to the low six bits anyway. */
3536 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, offset
& 63);
3538 fprintf (file
, "(%s)", reg_names
[REGNO (op
)]);
3541 /* Locate some local-dynamic symbol still in use by this function
3542 so that we can print its name in local-dynamic base patterns. */
/* NOTE(review): corrupted extraction -- the insn declaration, the INSN_P
   guard inside the loop, and the trailing abort are missing.  Code kept
   byte-identical.  */
/* Return the name of some local-dynamic TLS symbol used in this function,
   caching it in cfun->machine->some_ld_name; scans insn patterns via
   for_each_rtx with get_some_local_dynamic_name_1.  */
3545 get_some_local_dynamic_name (void)
3549 if (cfun
->machine
->some_ld_name
)
3550 return cfun
->machine
->some_ld_name
;
3552 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3554 && for_each_rtx (&PATTERN (insn
), get_some_local_dynamic_name_1
, 0))
3555 return cfun
->machine
->some_ld_name
;
/* NOTE(review): corrupted extraction -- the "rtx x = *px;" assignment,
   the "return 1;" on a hit, and the final "return 0;" are missing.  Code
   kept byte-identical.  */
/* for_each_rtx callback: descend into constant-pool entries, and record
   the name of the first SYMBOL_REF with local-dynamic TLS model in
   cfun->machine->some_ld_name.  */
3561 get_some_local_dynamic_name_1 (rtx
*px
, void *data ATTRIBUTE_UNUSED
)
3565 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
3567 x
= get_pool_constant (x
);
3568 return for_each_rtx (&x
, get_some_local_dynamic_name_1
, 0);
3571 if (GET_CODE (x
) == SYMBOL_REF
3572 && tls_symbolic_operand (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
3574 cfun
->machine
->some_ld_name
= XSTR (x
, 0);
3581 /* Output machine-dependent UNSPECs occurring in address constant X
3582 in assembler syntax to stdio stream FILE. Returns true if the
3583 constant X could be recognized, false otherwise. */
/* NOTE(review): corrupted extraction -- most "case UNSPEC_xxx:" labels,
   the "return true;" per case, and the final "return false;" are missing
   (only the GOTNTPOFF and INDNTPOFF labels survive).  Code kept
   byte-identical.  */
/* Print machine-specific UNSPEC address constants as symbol plus
   relocation suffix (@GOT, @GOTENT, @PLT, @TLSGD, @DTPOFF, ...);
   the TLSLDM case prints the module's local-dynamic anchor name.  */
3586 s390_output_addr_const_extra (FILE *file
, rtx x
)
3588 if (GET_CODE (x
) == UNSPEC
&& XVECLEN (x
, 0) == 1)
3589 switch (XINT (x
, 1))
3592 output_addr_const (file
, XVECEXP (x
, 0, 0));
3593 fprintf (file
, "@GOTENT");
3596 output_addr_const (file
, XVECEXP (x
, 0, 0));
3597 fprintf (file
, "@GOT");
3600 output_addr_const (file
, XVECEXP (x
, 0, 0));
3601 fprintf (file
, "@GOTOFF");
3604 output_addr_const (file
, XVECEXP (x
, 0, 0));
3605 fprintf (file
, "@PLT");
3608 output_addr_const (file
, XVECEXP (x
, 0, 0));
3609 fprintf (file
, "@PLTOFF");
3612 output_addr_const (file
, XVECEXP (x
, 0, 0));
3613 fprintf (file
, "@TLSGD");
3616 assemble_name (file
, get_some_local_dynamic_name ());
3617 fprintf (file
, "@TLSLDM");
3620 output_addr_const (file
, XVECEXP (x
, 0, 0));
3621 fprintf (file
, "@DTPOFF");
3624 output_addr_const (file
, XVECEXP (x
, 0, 0));
3625 fprintf (file
, "@NTPOFF");
3627 case UNSPEC_GOTNTPOFF
:
3628 output_addr_const (file
, XVECEXP (x
, 0, 0));
3629 fprintf (file
, "@GOTNTPOFF");
3631 case UNSPEC_INDNTPOFF
:
3632 output_addr_const (file
, XVECEXP (x
, 0, 0));
3633 fprintf (file
, "@INDNTPOFF");
3640 /* Output address operand ADDR in assembler syntax to
3641 stdio stream FILE. */
/* NOTE(review): corrupted extraction -- the "if (ad.disp)" / "else"
   guards around the displacement print and the "else if (ad.base)"
   branch are missing.  Code kept byte-identical.  */
/* Print memory address ADDR in "disp(index,base)" assembler syntax;
   rejects addresses that do not decompose or whose base/index are not
   strictly valid.  */
3644 print_operand_address (FILE *file
, rtx addr
)
3646 struct s390_address ad
;
3648 if (!s390_decompose_address (addr
, &ad
)
3649 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
3650 || (ad
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (ad
.indx
)))
3651 output_operand_lossage ("Cannot decompose address.");
3654 output_addr_const (file
, ad
.disp
);
3656 fprintf (file
, "0");
3658 if (ad
.base
&& ad
.indx
)
3659 fprintf (file
, "(%s,%s)", reg_names
[REGNO (ad
.indx
)],
3660 reg_names
[REGNO (ad
.base
)]);
3662 fprintf (file
, "(%s)", reg_names
[REGNO (ad
.base
)]);
3665 /* Output operand X in assembler syntax to stdio stream FILE.
3666 CODE specified the format flag. The following format flags
3669 'C': print opcode suffix for branch condition.
3670 'D': print opcode suffix for inverse branch condition.
3671 'J': print tls_load/tls_gdcall/tls_ldcall suffix
3672 'O': print only the displacement of a memory reference.
3673 'R': print only the base register of a memory reference.
3674 'N': print the second word of a DImode operand.
3675 'M': print the second word of a TImode operand.
3676 'Y': print shift count operand.
3678 'b': print integer X as if it's an unsigned byte.
3679 'x': print integer X as if it's an unsigned word.
3680 'h': print integer X as if it's a signed word.
3681 'i': print the first nonzero HImode part of X.
3682 'j': print the first HImode part unequal to 0xffff of X. */
/* NOTE(review): corrupted extraction -- the switch on CODE, most case
   labels ('C', 'D', 'J', 'O', 'R', 'N', 'M', 'Y'), abort calls, breaks,
   and several guard lines are missing.  Code kept byte-identical.  */
/* Print operand X with format flag CODE to FILE (see the format-flag
   comment above this function): branch-condition mnemonics, TLS call
   suffixes (:tls_load:/:tls_gdcall:/:tls_ldcall:), displacement-only or
   base-only memory prints, second-word selection for DI/TImode, shift
   counts, and masked/sign-adjusted integer forms for 'b'/'x'/'h'/'i'/'j'.  */
3685 print_operand (FILE *file
, rtx x
, int code
)
3690 fprintf (file
, s390_branch_condition_mnemonic (x
, FALSE
));
3694 fprintf (file
, s390_branch_condition_mnemonic (x
, TRUE
));
3698 if (GET_CODE (x
) == SYMBOL_REF
)
3700 fprintf (file
, "%s", ":tls_load:");
3701 output_addr_const (file
, x
);
3703 else if (GET_CODE (x
) == UNSPEC
&& XINT (x
, 1) == UNSPEC_TLSGD
)
3705 fprintf (file
, "%s", ":tls_gdcall:");
3706 output_addr_const (file
, XVECEXP (x
, 0, 0));
3708 else if (GET_CODE (x
) == UNSPEC
&& XINT (x
, 1) == UNSPEC_TLSLDM
)
3710 fprintf (file
, "%s", ":tls_ldcall:");
3711 assemble_name (file
, get_some_local_dynamic_name ());
3719 struct s390_address ad
;
3721 if (GET_CODE (x
) != MEM
3722 || !s390_decompose_address (XEXP (x
, 0), &ad
)
3723 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
3728 output_addr_const (file
, ad
.disp
);
3730 fprintf (file
, "0");
3736 struct s390_address ad
;
3738 if (GET_CODE (x
) != MEM
3739 || !s390_decompose_address (XEXP (x
, 0), &ad
)
3740 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
3745 fprintf (file
, "%s", reg_names
[REGNO (ad
.base
)]);
3747 fprintf (file
, "0");
3752 if (GET_CODE (x
) == REG
)
3753 x
= gen_rtx_REG (GET_MODE (x
), REGNO (x
) + 1);
3754 else if (GET_CODE (x
) == MEM
)
3755 x
= change_address (x
, VOIDmode
, plus_constant (XEXP (x
, 0), 4));
3761 if (GET_CODE (x
) == REG
)
3762 x
= gen_rtx_REG (GET_MODE (x
), REGNO (x
) + 1);
3763 else if (GET_CODE (x
) == MEM
)
3764 x
= change_address (x
, VOIDmode
, plus_constant (XEXP (x
, 0), 8));
3770 print_shift_count_operand (file
, x
);
3774 switch (GET_CODE (x
))
3777 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
3781 output_address (XEXP (x
, 0));
3788 output_addr_const (file
, x
);
3793 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) & 0xff);
3794 else if (code
== 'x')
3795 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) & 0xffff);
3796 else if (code
== 'h')
3797 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ((INTVAL (x
) & 0xffff) ^ 0x8000) - 0x8000);
3798 else if (code
== 'i')
3799 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
3800 s390_extract_part (x
, HImode
, 0));
3801 else if (code
== 'j')
3802 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
3803 s390_extract_part (x
, HImode
, -1));
3805 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
3809 if (GET_MODE (x
) != VOIDmode
)
3812 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
) & 0xff);
3813 else if (code
== 'x')
3814 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
) & 0xffff);
3815 else if (code
== 'h')
3816 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ((CONST_DOUBLE_LOW (x
) & 0xffff) ^ 0x8000) - 0x8000);
3822 fatal_insn ("UNKNOWN in print_operand !?", x
);
3827 /* Target hook for assembling integer objects. We need to define it
3828 here to work around a bug in some versions of GAS, which couldn't
3829 handle values smaller than INT_MIN when printed in decimal. */
/* NOTE(review): corrupted extraction -- the INTVAL argument of the
   fprintf and the "return true;" in the special case are missing.  Code
   kept byte-identical.  */
/* TARGET_ASM_INTEGER hook: emit an aligned 8-byte CONST_INT below
   INT_MIN as a hexadecimal .quad (works around a GAS decimal-printing
   bug per the comment above); everything else is delegated to
   default_assemble_integer.  */
3832 s390_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
3834 if (size
== 8 && aligned_p
3835 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < INT_MIN
)
3837 fprintf (asm_out_file
, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX
"\n",
3841 return default_assemble_integer (x
, size
, aligned_p
);
3844 /* Returns true if register REGNO is used for forming
3845 a memory address in expression X. */
/* NOTE(review): corrupted extraction -- the declarations of i/j/fmt, the
   MEM case guard, the returns, and the fmt[i] == 'e' test are missing.
   Code kept byte-identical.  */
/* Recursively test whether register REGNO participates in a memory
   address within X: a MEM's address, or the computed target of a SET to
   PC, is checked with refers_to_regno_p; sub-rtxes are walked via the
   RTX format string.  */
3848 reg_used_in_mem_p (int regno
, rtx x
)
3850 enum rtx_code code
= GET_CODE (x
);
3856 if (refers_to_regno_p (regno
, regno
+1,
3860 else if (code
== SET
3861 && GET_CODE (SET_DEST (x
)) == PC
)
3863 if (refers_to_regno_p (regno
, regno
+1,
3868 fmt
= GET_RTX_FORMAT (code
);
3869 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3872 && reg_used_in_mem_p (regno
, XEXP (x
, i
)))
3875 else if (fmt
[i
] == 'E')
3876 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3877 if (reg_used_in_mem_p (regno
, XVECEXP (x
, i
, j
)))
3883 /* Returns true if expression DEP_RTX sets an address register
3884 used by instruction INSN to address memory. */
/* NOTE(review): corrupted extraction -- the declarations of target/pat
   and several returns/braces are missing.  Code kept byte-identical.  */
/* Return whether DEP_RTX sets a register that INSN uses for address
   generation.  LA-type insns (TYPE_LA) only depend through the address
   expression in SET_SRC; AGEN-type insns depend through any memory use
   of the register (reg_used_in_mem_p).  */
3887 addr_generation_dependency_p (rtx dep_rtx
, rtx insn
)
3891 if (GET_CODE (dep_rtx
) == INSN
)
3892 dep_rtx
= PATTERN (dep_rtx
);
3894 if (GET_CODE (dep_rtx
) == SET
)
3896 target
= SET_DEST (dep_rtx
);
3897 if (GET_CODE (target
) == STRICT_LOW_PART
)
3898 target
= XEXP (target
, 0);
3899 while (GET_CODE (target
) == SUBREG
)
3900 target
= SUBREG_REG (target
);
3902 if (GET_CODE (target
) == REG
)
3904 int regno
= REGNO (target
);
3906 if (s390_safe_attr_type (insn
) == TYPE_LA
)
3908 pat
= PATTERN (insn
);
3909 if (GET_CODE (pat
) == PARALLEL
)
3911 if (XVECLEN (pat
, 0) != 2)
3913 pat
= XVECEXP (pat
, 0, 0);
3915 if (GET_CODE (pat
) == SET
)
3916 return refers_to_regno_p (regno
, regno
+1, SET_SRC (pat
), 0);
3920 else if (get_attr_atype (insn
) == ATYPE_AGEN
)
3921 return reg_used_in_mem_p (regno
, PATTERN (insn
));
3927 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
/* NOTE(review): corrupted extraction -- the declaration of i and the
   "return 1;"/"return 0;" lines are missing.  Code kept byte-identical.  */
/* Return 1 if DEP_INSN sets a register that INSN uses in its address
   generation unit; checks a single SET or each element of a PARALLEL
   via addr_generation_dependency_p.  */
3930 s390_agen_dep_p (rtx dep_insn
, rtx insn
)
3932 rtx dep_rtx
= PATTERN (dep_insn
);
3935 if (GET_CODE (dep_rtx
) == SET
3936 && addr_generation_dependency_p (dep_rtx
, insn
))
3938 else if (GET_CODE (dep_rtx
) == PARALLEL
)
3940 for (i
= 0; i
< XVECLEN (dep_rtx
, 0); i
++)
3942 if (addr_generation_dependency_p (XVECEXP (dep_rtx
, 0, i
), insn
))
3949 /* Return the modified cost of the dependency of instruction INSN
3950 on instruction DEP_INSN through the link LINK. COST is the
3951 default cost of that dependency.
3953 Data dependencies are all handled without delay. However, if a
3954 register is modified and subsequently used as base or index
3955 register of a memory reference, at least 4 cycles need to pass
3956 between setting and using the register to avoid pipeline stalls.
3957 An exception is the LA instruction. An address generated by LA can
3958 be used by introducing only a one cycle stall on the pipeline. */
/* NOTE(review): corrupted extraction -- the return statements (0 for
   anti-dependences, COST otherwise, and the reduced cost for the z990
   forwarding case) are missing, as is part of the z990 condition.  Code
   kept byte-identical.  */
/* TARGET_SCHED_ADJUST_COST hook: adjust dependency cost between INSN and
   DEP_INSN; on the 2084 (z990), results of LA/LR/LOAD-type producers can
   be forwarded.  */
3961 s390_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
3963 /* If the dependence is an anti-dependence, there is no cost. For an
3964 output dependence, there is sometimes a cost, but it doesn't seem
3965 worth handling those few cases. */
3967 if (REG_NOTE_KIND (link
) != 0)
3970 /* If we can't recognize the insns, we can't really do anything. */
3971 if (recog_memoized (insn
) < 0 || recog_memoized (dep_insn
) < 0)
3974 /* Operand forward in case of lr, load and la. */
3975 if (s390_tune
== PROCESSOR_2084_Z990
3977 && (s390_safe_attr_type (dep_insn
) == TYPE_LA
3978 || s390_safe_attr_type (dep_insn
) == TYPE_LR
3979 || s390_safe_attr_type (dep_insn
) == TYPE_LOAD
))
3984 /* A C statement (sans semicolon) to update the integer scheduling priority
3985 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
3986 reduce the priority to execute INSN later. Do not define this macro if
3987 you do not need to adjust the scheduling priorities of insns.
3989 A STD instruction should be scheduled earlier,
3990 in order to use the bypass. */
/* NOTE(review): corrupted extraction -- the case labels of the switch
   (presumably the store/FP-store types that feed the bypass), the breaks,
   and the "return priority;" lines are missing.  Code kept
   byte-identical.  */
/* TARGET_SCHED_ADJUST_PRIORITY hook: on the 2084 (z990), boost the
   priority of certain insn types (shift left by 3 or 1) so they are
   scheduled earlier; see the STD-bypass comment above.  */
3993 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED
, int priority
)
3995 if (! INSN_P (insn
))
3998 if (s390_tune
!= PROCESSOR_2084_Z990
)
4001 switch (s390_safe_attr_type (insn
))
4005 priority
= priority
<< 3;
4008 priority
= priority
<< 1;
4016 /* The number of instructions that can be issued per cycle. */
/* NOTE(review): corrupted extraction -- the return statements are
   missing; presumably a larger rate for the z990 and a default otherwise
   -- TODO confirm against pristine sources.  */
/* TARGET_SCHED_ISSUE_RATE hook: instructions issued per cycle,
   distinguishing PROCESSOR_2084_Z990.  */
4019 s390_issue_rate (void)
4021 if (s390_tune
== PROCESSOR_2084_Z990
)
/* NOTE(review): corrupted extraction -- only the signature survives; the
   body (a constant lookahead depth return) is missing -- restore from
   pristine sources.  */
/* TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD hook.  */
4027 s390_first_cycle_multipass_dfa_lookahead (void)
4033 /* Split all branches that exceed the maximum distance.
4034 Returns true if this created a new literal pool entry. */
/* NOTE(review): corrupted extraction -- the label declaration, "continue"
   statements, the new_literal assignments, the TARGET_CPU_ZARCH branch
   structure, the UNSPEC_LTREL_BASE argument, the abort on failed
   validate_change, and the final "return new_literal;" are missing.
   Code kept byte-identical.  */
/* Split branches whose target is out of the 64KB relative range by
   loading the target address (directly or as a literal-pool-relative
   UNSPEC_LTREL_OFFSET entry) into the return register and branching
   indirectly; marks the return register as needing save/restore.
   Returns whether new literal pool entries were created.  */
4037 s390_split_branches (void)
4039 rtx temp_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
4040 int new_literal
= 0;
4041 rtx insn
, pat
, tmp
, target
;
4044 /* We need correct insn addresses. */
4046 shorten_branches (get_insns ());
4048 /* Find all branches that exceed 64KB, and split them. */
4050 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4052 if (GET_CODE (insn
) != JUMP_INSN
)
4055 pat
= PATTERN (insn
);
4056 if (GET_CODE (pat
) == PARALLEL
&& XVECLEN (pat
, 0) > 2)
4057 pat
= XVECEXP (pat
, 0, 0);
4058 if (GET_CODE (pat
) != SET
|| SET_DEST (pat
) != pc_rtx
)
4061 if (GET_CODE (SET_SRC (pat
)) == LABEL_REF
)
4063 label
= &SET_SRC (pat
);
4065 else if (GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
4067 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) == LABEL_REF
)
4068 label
= &XEXP (SET_SRC (pat
), 1);
4069 else if (GET_CODE (XEXP (SET_SRC (pat
), 2)) == LABEL_REF
)
4070 label
= &XEXP (SET_SRC (pat
), 2);
4077 if (get_attr_length (insn
) <= 4)
4080 /* We are going to use the return register as scratch register,
4081 make sure it will be saved/restored by the prologue/epilogue. */
4082 cfun
->machine
->save_return_addr_p
= 1;
4087 tmp
= force_const_mem (Pmode
, *label
);
4088 tmp
= emit_insn_before (gen_rtx_SET (Pmode
, temp_reg
, tmp
), insn
);
4089 INSN_ADDRESSES_NEW (tmp
, -1);
4090 annotate_constant_pool_refs (&PATTERN (tmp
));
4097 target
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, *label
),
4098 UNSPEC_LTREL_OFFSET
);
4099 target
= gen_rtx_CONST (Pmode
, target
);
4100 target
= force_const_mem (Pmode
, target
);
4101 tmp
= emit_insn_before (gen_rtx_SET (Pmode
, temp_reg
, target
), insn
);
4102 INSN_ADDRESSES_NEW (tmp
, -1);
4103 annotate_constant_pool_refs (&PATTERN (tmp
));
4105 target
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, XEXP (target
, 0),
4106 cfun
->machine
->base_reg
),
4108 target
= gen_rtx_PLUS (Pmode
, temp_reg
, target
);
4111 if (!validate_change (insn
, label
, target
, 0))
4118 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4119 Fix up MEMs as required. */
/* NOTE(review): corrupted extraction -- the i/j/fmt declarations, the
   abort for a bare pool SYMBOL_REF, the UNSPEC code argument of each
   gen_rtx_UNSPEC (presumably UNSPEC_LTREF), the "return;" statements,
   and the fmt[i] == 'e' test are missing.  Code kept byte-identical.  */
/* Recursively wrap every literal-pool SYMBOL_REF in X (inside a MEM
   address, a load-address SET_SRC, with or without a constant offset)
   into an UNSPEC pairing it with cfun->machine->base_reg, and extend
   UNSPEC_LTREL_BASE with the base register; a pool reference outside
   these positions is rejected.  */
4122 annotate_constant_pool_refs (rtx
*x
)
4127 if (GET_CODE (*x
) == SYMBOL_REF
4128 && CONSTANT_POOL_ADDRESS_P (*x
))
4131 /* Literal pool references can only occur inside a MEM ... */
4132 if (GET_CODE (*x
) == MEM
)
4134 rtx memref
= XEXP (*x
, 0);
4136 if (GET_CODE (memref
) == SYMBOL_REF
4137 && CONSTANT_POOL_ADDRESS_P (memref
))
4139 rtx base
= cfun
->machine
->base_reg
;
4140 rtx addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, memref
, base
),
4143 *x
= replace_equiv_address (*x
, addr
);
4147 if (GET_CODE (memref
) == CONST
4148 && GET_CODE (XEXP (memref
, 0)) == PLUS
4149 && GET_CODE (XEXP (XEXP (memref
, 0), 1)) == CONST_INT
4150 && GET_CODE (XEXP (XEXP (memref
, 0), 0)) == SYMBOL_REF
4151 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref
, 0), 0)))
4153 HOST_WIDE_INT off
= INTVAL (XEXP (XEXP (memref
, 0), 1));
4154 rtx sym
= XEXP (XEXP (memref
, 0), 0);
4155 rtx base
= cfun
->machine
->base_reg
;
4156 rtx addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, sym
, base
),
4159 *x
= replace_equiv_address (*x
, plus_constant (addr
, off
));
4164 /* ... or a load-address type pattern. */
4165 if (GET_CODE (*x
) == SET
)
4167 rtx addrref
= SET_SRC (*x
);
4169 if (GET_CODE (addrref
) == SYMBOL_REF
4170 && CONSTANT_POOL_ADDRESS_P (addrref
))
4172 rtx base
= cfun
->machine
->base_reg
;
4173 rtx addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, addrref
, base
),
4176 SET_SRC (*x
) = addr
;
4180 if (GET_CODE (addrref
) == CONST
4181 && GET_CODE (XEXP (addrref
, 0)) == PLUS
4182 && GET_CODE (XEXP (XEXP (addrref
, 0), 1)) == CONST_INT
4183 && GET_CODE (XEXP (XEXP (addrref
, 0), 0)) == SYMBOL_REF
4184 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref
, 0), 0)))
4186 HOST_WIDE_INT off
= INTVAL (XEXP (XEXP (addrref
, 0), 1));
4187 rtx sym
= XEXP (XEXP (addrref
, 0), 0);
4188 rtx base
= cfun
->machine
->base_reg
;
4189 rtx addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, sym
, base
),
4192 SET_SRC (*x
) = plus_constant (addr
, off
);
4197 /* Annotate LTREL_BASE as well. */
4198 if (GET_CODE (*x
) == UNSPEC
4199 && XINT (*x
, 1) == UNSPEC_LTREL_BASE
)
4201 rtx base
= cfun
->machine
->base_reg
;
4202 *x
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, XVECEXP (*x
, 0, 0), base
),
4207 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
4208 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
4212 annotate_constant_pool_refs (&XEXP (*x
, i
));
4214 else if (fmt
[i
] == 'E')
4216 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
4217 annotate_constant_pool_refs (&XVECEXP (*x
, i
, j
));
4223 /* Find an annotated literal pool symbol referenced in RTX X,
4224 and store it at REF. Will abort if X contains references to
4225 more than one such pool symbol; multiple references to the same
4226 symbol are allowed, however.
4228 The rtx pointed to by REF must be initialized to NULL_RTX
4229 by the caller before calling this routine. */
4232 find_constant_pool_ref (rtx x
, rtx
*ref
)
4237 /* Ignore LTREL_BASE references. */
4238 if (GET_CODE (x
) == UNSPEC
4239 && XINT (x
, 1) == UNSPEC_LTREL_BASE
)
4241 /* Likewise POOL_ENTRY insns. */
4242 if (GET_CODE (x
) == UNSPEC_VOLATILE
4243 && XINT (x
, 1) == UNSPECV_POOL_ENTRY
)
4246 if (GET_CODE (x
) == SYMBOL_REF
4247 && CONSTANT_POOL_ADDRESS_P (x
))
4250 if (GET_CODE (x
) == UNSPEC
&& XINT (x
, 1) == UNSPEC_LTREF
)
4252 rtx sym
= XVECEXP (x
, 0, 0);
4253 if (GET_CODE (sym
) != SYMBOL_REF
4254 || !CONSTANT_POOL_ADDRESS_P (sym
))
4257 if (*ref
== NULL_RTX
)
4259 else if (*ref
!= sym
)
4265 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4266 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4270 find_constant_pool_ref (XEXP (x
, i
), ref
);
4272 else if (fmt
[i
] == 'E')
4274 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4275 find_constant_pool_ref (XVECEXP (x
, i
, j
), ref
);
4280 /* Replace every reference to the annotated literal pool
4281 symbol REF in X by its base plus OFFSET. */
4284 replace_constant_pool_ref (rtx
*x
, rtx ref
, rtx offset
)
4292 if (GET_CODE (*x
) == UNSPEC
4293 && XINT (*x
, 1) == UNSPEC_LTREF
4294 && XVECEXP (*x
, 0, 0) == ref
)
4296 *x
= gen_rtx_PLUS (Pmode
, XVECEXP (*x
, 0, 1), offset
);
4300 if (GET_CODE (*x
) == PLUS
4301 && GET_CODE (XEXP (*x
, 1)) == CONST_INT
4302 && GET_CODE (XEXP (*x
, 0)) == UNSPEC
4303 && XINT (XEXP (*x
, 0), 1) == UNSPEC_LTREF
4304 && XVECEXP (XEXP (*x
, 0), 0, 0) == ref
)
4306 rtx addr
= gen_rtx_PLUS (Pmode
, XVECEXP (XEXP (*x
, 0), 0, 1), offset
);
4307 *x
= plus_constant (addr
, INTVAL (XEXP (*x
, 1)));
4311 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
4312 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
4316 replace_constant_pool_ref (&XEXP (*x
, i
), ref
, offset
);
4318 else if (fmt
[i
] == 'E')
4320 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
4321 replace_constant_pool_ref (&XVECEXP (*x
, i
, j
), ref
, offset
);
4326 /* Check whether X contains an UNSPEC_LTREL_BASE.
4327 Return its constant pool symbol if found, NULL_RTX otherwise. */
4330 find_ltrel_base (rtx x
)
4335 if (GET_CODE (x
) == UNSPEC
4336 && XINT (x
, 1) == UNSPEC_LTREL_BASE
)
4337 return XVECEXP (x
, 0, 0);
4339 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4340 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4344 rtx fnd
= find_ltrel_base (XEXP (x
, i
));
4348 else if (fmt
[i
] == 'E')
4350 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4352 rtx fnd
= find_ltrel_base (XVECEXP (x
, i
, j
));
4362 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
4365 replace_ltrel_base (rtx
*x
)
4370 if (GET_CODE (*x
) == UNSPEC
4371 && XINT (*x
, 1) == UNSPEC_LTREL_BASE
)
4373 *x
= XVECEXP (*x
, 0, 1);
4377 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
4378 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
4382 replace_ltrel_base (&XEXP (*x
, i
));
4384 else if (fmt
[i
] == 'E')
4386 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
4387 replace_ltrel_base (&XVECEXP (*x
, i
, j
));
4393 /* We keep a list of constants which we have to add to internal
4394 constant tables in the middle of large functions. */
4396 #define NR_C_MODES 7
4397 enum machine_mode constant_modes
[NR_C_MODES
] =
4408 struct constant
*next
;
4413 struct constant_pool
4415 struct constant_pool
*next
;
4420 struct constant
*constants
[NR_C_MODES
];
4425 static struct constant_pool
* s390_mainpool_start (void);
4426 static void s390_mainpool_finish (struct constant_pool
*);
4427 static void s390_mainpool_cancel (struct constant_pool
*);
4429 static struct constant_pool
* s390_chunkify_start (void);
4430 static void s390_chunkify_finish (struct constant_pool
*);
4431 static void s390_chunkify_cancel (struct constant_pool
*);
4433 static struct constant_pool
*s390_start_pool (struct constant_pool
**, rtx
);
4434 static void s390_end_pool (struct constant_pool
*, rtx
);
4435 static void s390_add_pool_insn (struct constant_pool
*, rtx
);
4436 static struct constant_pool
*s390_find_pool (struct constant_pool
*, rtx
);
4437 static void s390_add_constant (struct constant_pool
*, rtx
, enum machine_mode
);
4438 static rtx
s390_find_constant (struct constant_pool
*, rtx
, enum machine_mode
);
4439 static rtx
s390_dump_pool (struct constant_pool
*, bool);
4440 static struct constant_pool
*s390_alloc_pool (void);
4441 static void s390_free_pool (struct constant_pool
*);
4443 /* Create new constant pool covering instructions starting at INSN
4444 and chain it to the end of POOL_LIST. */
4446 static struct constant_pool
*
4447 s390_start_pool (struct constant_pool
**pool_list
, rtx insn
)
4449 struct constant_pool
*pool
, **prev
;
4451 pool
= s390_alloc_pool ();
4452 pool
->first_insn
= insn
;
4454 for (prev
= pool_list
; *prev
; prev
= &(*prev
)->next
)
4461 /* End range of instructions covered by POOL at INSN and emit
4462 placeholder insn representing the pool. */
4465 s390_end_pool (struct constant_pool
*pool
, rtx insn
)
4467 rtx pool_size
= GEN_INT (pool
->size
+ 8 /* alignment slop */);
4470 insn
= get_last_insn ();
4472 pool
->pool_insn
= emit_insn_after (gen_pool (pool_size
), insn
);
4473 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
4476 /* Add INSN to the list of insns covered by POOL. */
4479 s390_add_pool_insn (struct constant_pool
*pool
, rtx insn
)
4481 bitmap_set_bit (pool
->insns
, INSN_UID (insn
));
4484 /* Return pool out of POOL_LIST that covers INSN. */
4486 static struct constant_pool
*
4487 s390_find_pool (struct constant_pool
*pool_list
, rtx insn
)
4489 struct constant_pool
*pool
;
4491 for (pool
= pool_list
; pool
; pool
= pool
->next
)
4492 if (bitmap_bit_p (pool
->insns
, INSN_UID (insn
)))
4498 /* Add constant VAL of mode MODE to the constant pool POOL. */
4501 s390_add_constant (struct constant_pool
*pool
, rtx val
, enum machine_mode mode
)
4506 for (i
= 0; i
< NR_C_MODES
; i
++)
4507 if (constant_modes
[i
] == mode
)
4509 if (i
== NR_C_MODES
)
4512 for (c
= pool
->constants
[i
]; c
!= NULL
; c
= c
->next
)
4513 if (rtx_equal_p (val
, c
->value
))
4518 c
= (struct constant
*) xmalloc (sizeof *c
);
4520 c
->label
= gen_label_rtx ();
4521 c
->next
= pool
->constants
[i
];
4522 pool
->constants
[i
] = c
;
4523 pool
->size
+= GET_MODE_SIZE (mode
);
4527 /* Find constant VAL of mode MODE in the constant pool POOL.
4528 Return an RTX describing the distance from the start of
4529 the pool to the location of the new constant. */
4532 s390_find_constant (struct constant_pool
*pool
, rtx val
,
4533 enum machine_mode mode
)
4539 for (i
= 0; i
< NR_C_MODES
; i
++)
4540 if (constant_modes
[i
] == mode
)
4542 if (i
== NR_C_MODES
)
4545 for (c
= pool
->constants
[i
]; c
!= NULL
; c
= c
->next
)
4546 if (rtx_equal_p (val
, c
->value
))
4552 offset
= gen_rtx_MINUS (Pmode
, gen_rtx_LABEL_REF (Pmode
, c
->label
),
4553 gen_rtx_LABEL_REF (Pmode
, pool
->label
));
4554 offset
= gen_rtx_CONST (Pmode
, offset
);
4558 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
4559 do not emit the pool base label. */
4562 s390_dump_pool (struct constant_pool
*pool
, bool remote_label
)
4568 /* Pool start insn switches to proper section
4569 and guarantees necessary alignment. */
4570 if (TARGET_CPU_ZARCH
)
4571 insn
= emit_insn_after (gen_pool_start_64 (), pool
->pool_insn
);
4573 insn
= emit_insn_after (gen_pool_start_31 (), pool
->pool_insn
);
4574 INSN_ADDRESSES_NEW (insn
, -1);
4578 insn
= emit_label_after (pool
->label
, insn
);
4579 INSN_ADDRESSES_NEW (insn
, -1);
4582 /* Dump constants in descending alignment requirement order,
4583 ensuring proper alignment for every constant. */
4584 for (i
= 0; i
< NR_C_MODES
; i
++)
4585 for (c
= pool
->constants
[i
]; c
; c
= c
->next
)
4587 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
4588 rtx value
= c
->value
;
4589 if (GET_CODE (value
) == CONST
4590 && GET_CODE (XEXP (value
, 0)) == UNSPEC
4591 && XINT (XEXP (value
, 0), 1) == UNSPEC_LTREL_OFFSET
4592 && XVECLEN (XEXP (value
, 0), 0) == 1)
4594 value
= gen_rtx_MINUS (Pmode
, XVECEXP (XEXP (value
, 0), 0, 0),
4595 gen_rtx_LABEL_REF (VOIDmode
, pool
->label
));
4596 value
= gen_rtx_CONST (VOIDmode
, value
);
4599 insn
= emit_label_after (c
->label
, insn
);
4600 INSN_ADDRESSES_NEW (insn
, -1);
4602 value
= gen_rtx_UNSPEC_VOLATILE (constant_modes
[i
],
4603 gen_rtvec (1, value
),
4604 UNSPECV_POOL_ENTRY
);
4605 insn
= emit_insn_after (value
, insn
);
4606 INSN_ADDRESSES_NEW (insn
, -1);
4609 /* Pool end insn switches back to previous section
4610 and guarantees necessary alignment. */
4611 if (TARGET_CPU_ZARCH
)
4612 insn
= emit_insn_after (gen_pool_end_64 (), insn
);
4614 insn
= emit_insn_after (gen_pool_end_31 (), insn
);
4615 INSN_ADDRESSES_NEW (insn
, -1);
4617 insn
= emit_barrier_after (insn
);
4618 INSN_ADDRESSES_NEW (insn
, -1);
4620 /* Remove placeholder insn. */
4621 remove_insn (pool
->pool_insn
);
4626 /* Allocate new constant_pool structure. */
4628 static struct constant_pool
*
4629 s390_alloc_pool (void)
4631 struct constant_pool
*pool
;
4634 pool
= (struct constant_pool
*) xmalloc (sizeof *pool
);
4636 for (i
= 0; i
< NR_C_MODES
; i
++)
4637 pool
->constants
[i
] = NULL
;
4639 pool
->label
= gen_label_rtx ();
4640 pool
->first_insn
= NULL_RTX
;
4641 pool
->pool_insn
= NULL_RTX
;
4642 pool
->insns
= BITMAP_XMALLOC ();
4648 /* Free all memory used by POOL. */
4651 s390_free_pool (struct constant_pool
*pool
)
4655 for (i
= 0; i
< NR_C_MODES
; i
++)
4657 struct constant
*c
= pool
->constants
[i
];
4660 struct constant
*next
= c
->next
;
4666 BITMAP_XFREE (pool
->insns
);
4671 /* Collect main literal pool. Return NULL on overflow. */
4673 static struct constant_pool
*
4674 s390_mainpool_start (void)
4676 struct constant_pool
*pool
;
4679 pool
= s390_alloc_pool ();
4681 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4683 if (GET_CODE (insn
) == INSN
4684 && GET_CODE (PATTERN (insn
)) == SET
4685 && GET_CODE (SET_SRC (PATTERN (insn
))) == UNSPEC_VOLATILE
4686 && XINT (SET_SRC (PATTERN (insn
)), 1) == UNSPECV_MAIN_POOL
)
4688 if (pool
->pool_insn
)
4690 pool
->pool_insn
= insn
;
4693 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
4695 rtx pool_ref
= NULL_RTX
;
4696 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
4699 rtx constant
= get_pool_constant (pool_ref
);
4700 enum machine_mode mode
= get_pool_mode (pool_ref
);
4701 s390_add_constant (pool
, constant
, mode
);
4706 if (!pool
->pool_insn
)
4709 if (pool
->size
>= 4096)
4711 /* We're going to chunkify the pool, so remove the main
4712 pool placeholder insn. */
4713 remove_insn (pool
->pool_insn
);
4715 s390_free_pool (pool
);
4722 /* POOL holds the main literal pool as collected by s390_mainpool_start.
4723 Modify the current function to output the pool constants as well as
4724 the pool register setup instruction. */
4727 s390_mainpool_finish (struct constant_pool
*pool
)
4729 rtx base_reg
= SET_DEST (PATTERN (pool
->pool_insn
));
4732 /* If the pool is empty, we're done. */
4733 if (pool
->size
== 0)
4735 remove_insn (pool
->pool_insn
);
4736 s390_free_pool (pool
);
4740 /* We need correct insn addresses. */
4741 shorten_branches (get_insns ());
4743 /* On zSeries, we use a LARL to load the pool register. The pool is
4744 located in the .rodata section, so we emit it after the function. */
4745 if (TARGET_CPU_ZARCH
)
4747 insn
= gen_main_base_64 (base_reg
, pool
->label
);
4748 insn
= emit_insn_after (insn
, pool
->pool_insn
);
4749 INSN_ADDRESSES_NEW (insn
, -1);
4750 remove_insn (pool
->pool_insn
);
4752 insn
= get_last_insn ();
4753 pool
->pool_insn
= emit_insn_after (gen_pool (const0_rtx
), insn
);
4754 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
4756 s390_dump_pool (pool
, 0);
4759 /* On S/390, if the total size of the function's code plus literal pool
4760 does not exceed 4096 bytes, we use BASR to set up a function base
4761 pointer, and emit the literal pool at the end of the function. */
4762 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
4763 + pool
->size
+ 8 /* alignment slop */ < 4096)
4765 insn
= gen_main_base_31_small (base_reg
, pool
->label
);
4766 insn
= emit_insn_after (insn
, pool
->pool_insn
);
4767 INSN_ADDRESSES_NEW (insn
, -1);
4768 remove_insn (pool
->pool_insn
);
4770 insn
= emit_label_after (pool
->label
, insn
);
4771 INSN_ADDRESSES_NEW (insn
, -1);
4773 insn
= get_last_insn ();
4774 pool
->pool_insn
= emit_insn_after (gen_pool (const0_rtx
), insn
);
4775 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
4777 s390_dump_pool (pool
, 1);
4780 /* Otherwise, we emit an inline literal pool and use BASR to branch
4781 over it, setting up the pool register at the same time. */
4784 rtx pool_end
= gen_label_rtx ();
4786 insn
= gen_main_base_31_large (base_reg
, pool
->label
, pool_end
);
4787 insn
= emit_insn_after (insn
, pool
->pool_insn
);
4788 INSN_ADDRESSES_NEW (insn
, -1);
4789 remove_insn (pool
->pool_insn
);
4791 insn
= emit_label_after (pool
->label
, insn
);
4792 INSN_ADDRESSES_NEW (insn
, -1);
4794 pool
->pool_insn
= emit_insn_after (gen_pool (const0_rtx
), insn
);
4795 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
4797 insn
= emit_label_after (pool_end
, pool
->pool_insn
);
4798 INSN_ADDRESSES_NEW (insn
, -1);
4800 s390_dump_pool (pool
, 1);
4804 /* Replace all literal pool references. */
4806 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4809 replace_ltrel_base (&PATTERN (insn
));
4811 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
4813 rtx addr
, pool_ref
= NULL_RTX
;
4814 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
4817 addr
= s390_find_constant (pool
, get_pool_constant (pool_ref
),
4818 get_pool_mode (pool_ref
));
4819 replace_constant_pool_ref (&PATTERN (insn
), pool_ref
, addr
);
4820 INSN_CODE (insn
) = -1;
4826 /* Free the pool. */
4827 s390_free_pool (pool
);
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */

static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* We didn't actually change the instruction stream, so simply
     free the pool memory.  */
  s390_free_pool (pool);
}
4842 /* Chunkify the literal pool. */
4844 #define S390_POOL_CHUNK_MIN 0xc00
4845 #define S390_POOL_CHUNK_MAX 0xe00
4847 static struct constant_pool
*
4848 s390_chunkify_start (void)
4850 struct constant_pool
*curr_pool
= NULL
, *pool_list
= NULL
;
4853 rtx pending_ltrel
= NULL_RTX
;
4856 rtx (*gen_reload_base
) (rtx
, rtx
) =
4857 TARGET_CPU_ZARCH
? gen_reload_base_64
: gen_reload_base_31
;
4860 /* We need correct insn addresses. */
4862 shorten_branches (get_insns ());
4864 /* Scan all insns and move literals to pool chunks. */
4866 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4868 /* Check for pending LTREL_BASE. */
4871 rtx ltrel_base
= find_ltrel_base (PATTERN (insn
));
4874 if (ltrel_base
== pending_ltrel
)
4875 pending_ltrel
= NULL_RTX
;
4881 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
4883 rtx pool_ref
= NULL_RTX
;
4884 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
4887 rtx constant
= get_pool_constant (pool_ref
);
4888 enum machine_mode mode
= get_pool_mode (pool_ref
);
4891 curr_pool
= s390_start_pool (&pool_list
, insn
);
4893 s390_add_constant (curr_pool
, constant
, mode
);
4894 s390_add_pool_insn (curr_pool
, insn
);
4896 /* Don't split the pool chunk between a LTREL_OFFSET load
4897 and the corresponding LTREL_BASE. */
4898 if (GET_CODE (constant
) == CONST
4899 && GET_CODE (XEXP (constant
, 0)) == UNSPEC
4900 && XINT (XEXP (constant
, 0), 1) == UNSPEC_LTREL_OFFSET
)
4904 pending_ltrel
= pool_ref
;
4909 if (GET_CODE (insn
) == JUMP_INSN
|| GET_CODE (insn
) == CODE_LABEL
)
4912 s390_add_pool_insn (curr_pool
, insn
);
4913 /* An LTREL_BASE must follow within the same basic block. */
4919 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn
)
4920 || INSN_ADDRESSES (INSN_UID (insn
)) == -1)
4923 if (TARGET_CPU_ZARCH
)
4925 if (curr_pool
->size
< S390_POOL_CHUNK_MAX
)
4928 s390_end_pool (curr_pool
, NULL_RTX
);
4933 int chunk_size
= INSN_ADDRESSES (INSN_UID (insn
))
4934 - INSN_ADDRESSES (INSN_UID (curr_pool
->first_insn
))
4937 /* We will later have to insert base register reload insns.
4938 Those will have an effect on code size, which we need to
4939 consider here. This calculation makes rather pessimistic
4940 worst-case assumptions. */
4941 if (GET_CODE (insn
) == CODE_LABEL
)
4944 if (chunk_size
< S390_POOL_CHUNK_MIN
4945 && curr_pool
->size
< S390_POOL_CHUNK_MIN
)
4948 /* Pool chunks can only be inserted after BARRIERs ... */
4949 if (GET_CODE (insn
) == BARRIER
)
4951 s390_end_pool (curr_pool
, insn
);
4956 /* ... so if we don't find one in time, create one. */
4957 else if ((chunk_size
> S390_POOL_CHUNK_MAX
4958 || curr_pool
->size
> S390_POOL_CHUNK_MAX
))
4960 rtx label
, jump
, barrier
;
4962 /* We can insert the barrier only after a 'real' insn. */
4963 if (GET_CODE (insn
) != INSN
&& GET_CODE (insn
) != CALL_INSN
)
4965 if (get_attr_length (insn
) == 0)
4968 /* Don't separate LTREL_BASE from the corresponding
4969 LTREL_OFFSET load. */
4973 label
= gen_label_rtx ();
4974 jump
= emit_jump_insn_after (gen_jump (label
), insn
);
4975 barrier
= emit_barrier_after (jump
);
4976 insn
= emit_label_after (label
, barrier
);
4977 JUMP_LABEL (jump
) = label
;
4978 LABEL_NUSES (label
) = 1;
4980 INSN_ADDRESSES_NEW (jump
, -1);
4981 INSN_ADDRESSES_NEW (barrier
, -1);
4982 INSN_ADDRESSES_NEW (insn
, -1);
4984 s390_end_pool (curr_pool
, barrier
);
4992 s390_end_pool (curr_pool
, NULL_RTX
);
4997 /* Find all labels that are branched into
4998 from an insn belonging to a different chunk. */
5000 far_labels
= BITMAP_XMALLOC ();
5002 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5004 /* Labels marked with LABEL_PRESERVE_P can be target
5005 of non-local jumps, so we have to mark them.
5006 The same holds for named labels.
5008 Don't do that, however, if it is the label before
5011 if (GET_CODE (insn
) == CODE_LABEL
5012 && (LABEL_PRESERVE_P (insn
) || LABEL_NAME (insn
)))
5014 rtx vec_insn
= next_real_insn (insn
);
5015 rtx vec_pat
= vec_insn
&& GET_CODE (vec_insn
) == JUMP_INSN
?
5016 PATTERN (vec_insn
) : NULL_RTX
;
5018 || !(GET_CODE (vec_pat
) == ADDR_VEC
5019 || GET_CODE (vec_pat
) == ADDR_DIFF_VEC
))
5020 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (insn
));
5023 /* If we have a direct jump (conditional or unconditional)
5024 or a casesi jump, check all potential targets. */
5025 else if (GET_CODE (insn
) == JUMP_INSN
)
5027 rtx pat
= PATTERN (insn
);
5028 if (GET_CODE (pat
) == PARALLEL
&& XVECLEN (pat
, 0) > 2)
5029 pat
= XVECEXP (pat
, 0, 0);
5031 if (GET_CODE (pat
) == SET
)
5033 rtx label
= JUMP_LABEL (insn
);
5036 if (s390_find_pool (pool_list
, label
)
5037 != s390_find_pool (pool_list
, insn
))
5038 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (label
));
5041 else if (GET_CODE (pat
) == PARALLEL
5042 && XVECLEN (pat
, 0) == 2
5043 && GET_CODE (XVECEXP (pat
, 0, 0)) == SET
5044 && GET_CODE (XVECEXP (pat
, 0, 1)) == USE
5045 && GET_CODE (XEXP (XVECEXP (pat
, 0, 1), 0)) == LABEL_REF
)
5047 /* Find the jump table used by this casesi jump. */
5048 rtx vec_label
= XEXP (XEXP (XVECEXP (pat
, 0, 1), 0), 0);
5049 rtx vec_insn
= next_real_insn (vec_label
);
5050 rtx vec_pat
= vec_insn
&& GET_CODE (vec_insn
) == JUMP_INSN
?
5051 PATTERN (vec_insn
) : NULL_RTX
;
5053 && (GET_CODE (vec_pat
) == ADDR_VEC
5054 || GET_CODE (vec_pat
) == ADDR_DIFF_VEC
))
5056 int i
, diff_p
= GET_CODE (vec_pat
) == ADDR_DIFF_VEC
;
5058 for (i
= 0; i
< XVECLEN (vec_pat
, diff_p
); i
++)
5060 rtx label
= XEXP (XVECEXP (vec_pat
, diff_p
, i
), 0);
5062 if (s390_find_pool (pool_list
, label
)
5063 != s390_find_pool (pool_list
, insn
))
5064 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (label
));
5071 /* Insert base register reload insns before every pool. */
5073 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
5075 rtx new_insn
= gen_reload_base (cfun
->machine
->base_reg
,
5077 rtx insn
= curr_pool
->first_insn
;
5078 INSN_ADDRESSES_NEW (emit_insn_before (new_insn
, insn
), -1);
5081 /* Insert base register reload insns at every far label. */
5083 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5084 if (GET_CODE (insn
) == CODE_LABEL
5085 && bitmap_bit_p (far_labels
, CODE_LABEL_NUMBER (insn
)))
5087 struct constant_pool
*pool
= s390_find_pool (pool_list
, insn
);
5090 rtx new_insn
= gen_reload_base (cfun
->machine
->base_reg
,
5092 INSN_ADDRESSES_NEW (emit_insn_after (new_insn
, insn
), -1);
5097 BITMAP_XFREE (far_labels
);
5100 /* Recompute insn addresses. */
5102 init_insn_lengths ();
5103 shorten_branches (get_insns ());
5108 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5109 After we have decided to use this list, finish implementing
5110 all changes to the current function as required. */
5113 s390_chunkify_finish (struct constant_pool
*pool_list
)
5115 struct constant_pool
*curr_pool
= NULL
;
5119 /* Replace all literal pool references. */
5121 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5124 replace_ltrel_base (&PATTERN (insn
));
5126 curr_pool
= s390_find_pool (pool_list
, insn
);
5130 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
5132 rtx addr
, pool_ref
= NULL_RTX
;
5133 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
5136 addr
= s390_find_constant (curr_pool
, get_pool_constant (pool_ref
),
5137 get_pool_mode (pool_ref
));
5138 replace_constant_pool_ref (&PATTERN (insn
), pool_ref
, addr
);
5139 INSN_CODE (insn
) = -1;
5144 /* Dump out all literal pools. */
5146 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
5147 s390_dump_pool (curr_pool
, 0);
5149 /* Free pool list. */
5153 struct constant_pool
*next
= pool_list
->next
;
5154 s390_free_pool (pool_list
);
5159 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5160 We have decided we cannot use this list, so revert all changes
5161 to the current function that were done by s390_chunkify_start. */
5164 s390_chunkify_cancel (struct constant_pool
*pool_list
)
5166 struct constant_pool
*curr_pool
= NULL
;
5169 /* Remove all pool placeholder insns. */
5171 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
5173 /* Did we insert an extra barrier? Remove it. */
5174 rtx barrier
= PREV_INSN (curr_pool
->pool_insn
);
5175 rtx jump
= barrier
? PREV_INSN (barrier
) : NULL_RTX
;
5176 rtx label
= NEXT_INSN (curr_pool
->pool_insn
);
5178 if (jump
&& GET_CODE (jump
) == JUMP_INSN
5179 && barrier
&& GET_CODE (barrier
) == BARRIER
5180 && label
&& GET_CODE (label
) == CODE_LABEL
5181 && GET_CODE (PATTERN (jump
)) == SET
5182 && SET_DEST (PATTERN (jump
)) == pc_rtx
5183 && GET_CODE (SET_SRC (PATTERN (jump
))) == LABEL_REF
5184 && XEXP (SET_SRC (PATTERN (jump
)), 0) == label
)
5187 remove_insn (barrier
);
5188 remove_insn (label
);
5191 remove_insn (curr_pool
->pool_insn
);
5194 /* Remove all base register reload insns. */
5196 for (insn
= get_insns (); insn
; )
5198 rtx next_insn
= NEXT_INSN (insn
);
5200 if (GET_CODE (insn
) == INSN
5201 && GET_CODE (PATTERN (insn
)) == SET
5202 && GET_CODE (SET_SRC (PATTERN (insn
))) == UNSPEC
5203 && XINT (SET_SRC (PATTERN (insn
)), 1) == UNSPEC_RELOAD_BASE
)
5209 /* Free pool list. */
5213 struct constant_pool
*next
= pool_list
->next
;
5214 s390_free_pool (pool_list
);
5220 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
5223 s390_output_pool_entry (rtx exp
, enum machine_mode mode
, unsigned int align
)
5227 switch (GET_MODE_CLASS (mode
))
5230 if (GET_CODE (exp
) != CONST_DOUBLE
)
5233 REAL_VALUE_FROM_CONST_DOUBLE (r
, exp
);
5234 assemble_real (r
, mode
, align
);
5238 assemble_integer (exp
, GET_MODE_SIZE (mode
), align
, 1);
5247 /* Rework the prolog/epilog to avoid saving/restoring
5248 registers unnecessarily. BASE_USED specifies whether
5249 the literal pool base register needs to be saved. */
5252 s390_optimize_prolog (bool base_used
)
5254 rtx insn
, new_insn
, next_insn
;
5256 /* Do a final recompute of the frame-related data. */
5258 s390_frame_info (base_used
, cfun
->machine
->save_return_addr_p
);
5259 regs_ever_live
[BASE_REGISTER
] = base_used
;
5260 regs_ever_live
[RETURN_REGNUM
] = cfun
->machine
->save_return_addr_p
;
5261 regs_ever_live
[STACK_POINTER_REGNUM
] = cfun
->machine
->frame_size
> 0;
5263 /* If all special registers are in fact used, there's nothing we
5264 can do, so no point in walking the insn list. */
5266 if (cfun
->machine
->first_save_gpr
<= BASE_REGISTER
5267 && cfun
->machine
->last_save_gpr
>= BASE_REGISTER
5268 && (TARGET_CPU_ZARCH
5269 || (cfun
->machine
->first_save_gpr
<= RETURN_REGNUM
5270 && cfun
->machine
->last_save_gpr
>= RETURN_REGNUM
)))
5273 /* Search for prolog/epilog insns and replace them. */
5275 for (insn
= get_insns (); insn
; insn
= next_insn
)
5277 int first
, last
, off
;
5278 rtx set
, base
, offset
;
5280 next_insn
= NEXT_INSN (insn
);
5282 if (GET_CODE (insn
) != INSN
)
5285 if (GET_CODE (PATTERN (insn
)) == PARALLEL
5286 && store_multiple_operation (PATTERN (insn
), VOIDmode
))
5288 set
= XVECEXP (PATTERN (insn
), 0, 0);
5289 first
= REGNO (SET_SRC (set
));
5290 last
= first
+ XVECLEN (PATTERN (insn
), 0) - 1;
5291 offset
= const0_rtx
;
5292 base
= eliminate_constant_term (XEXP (SET_DEST (set
), 0), &offset
);
5293 off
= INTVAL (offset
) - first
* UNITS_PER_WORD
;
5295 if (GET_CODE (base
) != REG
|| off
< 0)
5297 if (first
> BASE_REGISTER
|| last
< BASE_REGISTER
)
5300 if (cfun
->machine
->first_save_gpr
!= -1)
5302 new_insn
= save_gprs (base
, off
, cfun
->machine
->first_save_gpr
,
5303 cfun
->machine
->last_save_gpr
);
5304 new_insn
= emit_insn_before (new_insn
, insn
);
5305 INSN_ADDRESSES_NEW (new_insn
, -1);
5312 if (GET_CODE (PATTERN (insn
)) == SET
5313 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
5314 && REGNO (SET_SRC (PATTERN (insn
))) == BASE_REGISTER
5315 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
)
5317 set
= PATTERN (insn
);
5318 offset
= const0_rtx
;
5319 base
= eliminate_constant_term (XEXP (SET_DEST (set
), 0), &offset
);
5320 off
= INTVAL (offset
) - BASE_REGISTER
* UNITS_PER_WORD
;
5322 if (GET_CODE (base
) != REG
|| off
< 0)
5325 if (cfun
->machine
->first_save_gpr
!= -1)
5327 new_insn
= save_gprs (base
, off
, cfun
->machine
->first_save_gpr
,
5328 cfun
->machine
->last_save_gpr
);
5329 new_insn
= emit_insn_before (new_insn
, insn
);
5330 INSN_ADDRESSES_NEW (new_insn
, -1);
5337 if (GET_CODE (PATTERN (insn
)) == PARALLEL
5338 && load_multiple_operation (PATTERN (insn
), VOIDmode
))
5340 set
= XVECEXP (PATTERN (insn
), 0, 0);
5341 first
= REGNO (SET_DEST (set
));
5342 last
= first
+ XVECLEN (PATTERN (insn
), 0) - 1;
5343 offset
= const0_rtx
;
5344 base
= eliminate_constant_term (XEXP (SET_SRC (set
), 0), &offset
);
5345 off
= INTVAL (offset
) - first
* UNITS_PER_WORD
;
5347 if (GET_CODE (base
) != REG
|| off
< 0)
5349 if (first
> BASE_REGISTER
|| last
< BASE_REGISTER
)
5352 if (cfun
->machine
->first_restore_gpr
!= -1)
5354 new_insn
= restore_gprs (base
, off
, cfun
->machine
->first_restore_gpr
,
5355 cfun
->machine
->last_restore_gpr
);
5356 new_insn
= emit_insn_before (new_insn
, insn
);
5357 INSN_ADDRESSES_NEW (new_insn
, -1);
5364 if (GET_CODE (PATTERN (insn
)) == SET
5365 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
5366 && REGNO (SET_DEST (PATTERN (insn
))) == BASE_REGISTER
5367 && GET_CODE (SET_SRC (PATTERN (insn
))) == MEM
)
5369 set
= PATTERN (insn
);
5370 offset
= const0_rtx
;
5371 base
= eliminate_constant_term (XEXP (SET_SRC (set
), 0), &offset
);
5372 off
= INTVAL (offset
) - BASE_REGISTER
* UNITS_PER_WORD
;
5374 if (GET_CODE (base
) != REG
|| off
< 0)
5377 if (cfun
->machine
->first_restore_gpr
!= -1)
5379 new_insn
= restore_gprs (base
, off
, cfun
->machine
->first_restore_gpr
,
5380 cfun
->machine
->last_restore_gpr
);
5381 new_insn
= emit_insn_before (new_insn
, insn
);
5382 INSN_ADDRESSES_NEW (new_insn
, -1);
5391 /* Perform machine-dependent processing. */
5396 bool base_used
= false;
5397 bool pool_overflow
= false;
5399 /* Make sure all splits have been performed; splits after
5400 machine_dependent_reorg might confuse insn length counts. */
5401 split_all_insns_noflow ();
5404 /* Install the main literal pool and the associated base
5405 register load insns.
5407 In addition, there are two problematic situations we need
5410 - the literal pool might be > 4096 bytes in size, so that
5411 some of its elements cannot be directly accessed
5413 - a branch target might be > 64K away from the branch, so that
5414 it is not possible to use a PC-relative instruction.
5416 To fix those, we split the single literal pool into multiple
5417 pool chunks, reloading the pool base register at various
5418 points throughout the function to ensure it always points to
5419 the pool chunk the following code expects, and / or replace
5420 PC-relative branches by absolute branches.
5422 However, the two problems are interdependent: splitting the
5423 literal pool can move a branch further away from its target,
5424 causing the 64K limit to overflow, and on the other hand,
5425 replacing a PC-relative branch by an absolute branch means
5426 we need to put the branch target address into the literal
5427 pool, possibly causing it to overflow.
5429 So, we loop trying to fix up both problems until we manage
5430 to satisfy both conditions at the same time. Note that the
5431 loop is guaranteed to terminate as every pass of the loop
5432 strictly decreases the total number of PC-relative branches
5433 in the function. (This is not completely true as there
5434 might be branch-over-pool insns introduced by chunkify_start.
5435 Those never need to be split however.) */
5439 struct constant_pool
*pool
= NULL
;
5441 /* Collect the literal pool. */
5444 pool
= s390_mainpool_start ();
5446 pool_overflow
= true;
5449 /* If literal pool overflowed, start to chunkify it. */
5451 pool
= s390_chunkify_start ();
5453 /* Split out-of-range branches. If this has created new
5454 literal pool entries, cancel current chunk list and
5455 recompute it. zSeries machines have large branch
5456 instructions, so we never need to split a branch. */
5457 if (!TARGET_CPU_ZARCH
&& s390_split_branches ())
5460 s390_chunkify_cancel (pool
);
5462 s390_mainpool_cancel (pool
);
5467 /* If we made it up to here, both conditions are satisfied.
5468 Finish up literal pool related changes. */
5469 if ((pool_overflow
|| pool
->size
> 0)
5470 && REGNO (cfun
->machine
->base_reg
) == BASE_REGISTER
)
5474 s390_chunkify_finish (pool
);
5476 s390_mainpool_finish (pool
);
5481 s390_optimize_prolog (base_used
);
5485 /* Return an RTL expression representing the value of the return address
5486 for the frame COUNT steps up from the current frame. FRAME is the
5487 frame pointer of that frame. */
/* NOTE(review): extraction dropped interior source lines here (return type,
   braces, the declaration of `addr', and the early-return bodies); the
   comments below describe only the statements that are visible.  */
5490 s390_return_addr_rtx (int count
, rtx frame
)
5494 /* Without backchain, we fail for all but the current frame. */
5496 if (!TARGET_BACKCHAIN
&& count
> 0)
5499 /* For the current frame, we need to make sure the initial
5500 value of RETURN_REGNUM is actually saved. */
5503 cfun
->machine
->save_return_addr_p
= true;
5505 /* To retrieve the return address we read the stack slot where the
5506 corresponding RETURN_REGNUM value was saved. */
/* Slot address is FRAME + RETURN_REGNUM * UNITS_PER_WORD, legitimized
   for Pmode before building the MEM.  */
5508 addr
= plus_constant (frame
, RETURN_REGNUM
* UNITS_PER_WORD
);
5509 addr
= memory_address (Pmode
, addr
);
5510 return gen_rtx_MEM (Pmode
, addr
);
5513 /* Find first call clobbered register unused in a function.
5514 This could be used as base register in a leaf function
5515 or for holding the return address before epilogue. */
/* NOTE(review): the loop body's `return' and the fallback return value are
   elided by the extraction; only the scan over gprs 0..5 is visible.  */
5518 find_unused_clobbered_reg (void)
5521 for (i
= 0; i
< 6; i
++)
5522 if (!regs_ever_live
[i
])
5527 /* Fill cfun->machine with info about frame of current function.
5528 BASE_USED and RETURN_ADDR_USED specify whether we assume the
5529 base and return address register will need to be saved. */
5532 s390_frame_info (int base_used
, int return_addr_used
)
5536 HOST_WIDE_INT fsize
= get_frame_size ();
5538 if (!TARGET_64BIT
&& fsize
> 0x7fff0000)
5539 fatal_error ("Total size of local variables exceeds architecture limit.");
5541 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5542 cfun
->machine
->save_fprs_p
= 0;
5544 for (i
= 24; i
< 32; i
++)
5545 if (regs_ever_live
[i
] && !global_regs
[i
])
5547 cfun
->machine
->save_fprs_p
= 1;
5551 cfun
->machine
->frame_size
= fsize
+ cfun
->machine
->save_fprs_p
* 64;
5553 /* Does function need to setup frame and save area. */
5555 if (!current_function_is_leaf
5556 || TARGET_TPF_PROFILING
5557 || cfun
->machine
->frame_size
> 0
5558 || current_function_calls_alloca
5559 || current_function_stdarg
)
5560 cfun
->machine
->frame_size
+= STARTING_FRAME_OFFSET
;
5562 /* Find first and last gpr to be saved. We trust regs_ever_live
5563 data, except that we don't save and restore global registers.
5565 Also, all registers with special meaning to the compiler need
5566 to be handled extra. */
5568 for (i
= 0; i
< 16; i
++)
5569 live_regs
[i
] = regs_ever_live
[i
] && !global_regs
[i
];
5572 live_regs
[PIC_OFFSET_TABLE_REGNUM
] =
5573 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
];
5575 live_regs
[BASE_REGISTER
] = base_used
;
5576 live_regs
[RETURN_REGNUM
] = return_addr_used
;
5577 live_regs
[STACK_POINTER_REGNUM
] = cfun
->machine
->frame_size
> 0;
5579 for (i
= 6; i
< 16; i
++)
5582 for (j
= 15; j
> i
; j
--)
5588 /* Nothing to save/restore. */
5589 cfun
->machine
->first_save_gpr
= -1;
5590 cfun
->machine
->first_restore_gpr
= -1;
5591 cfun
->machine
->last_save_gpr
= -1;
5592 cfun
->machine
->last_restore_gpr
= -1;
5596 /* Save / Restore from gpr i to j. */
5597 cfun
->machine
->first_save_gpr
= i
;
5598 cfun
->machine
->first_restore_gpr
= i
;
5599 cfun
->machine
->last_save_gpr
= j
;
5600 cfun
->machine
->last_restore_gpr
= j
;
5603 /* Varargs functions need to save gprs 2 to 6. */
5604 if (current_function_stdarg
)
5606 if (cfun
->machine
->first_save_gpr
== -1
5607 || cfun
->machine
->first_save_gpr
> 2)
5608 cfun
->machine
->first_save_gpr
= 2;
5610 if (cfun
->machine
->last_save_gpr
== -1
5611 || cfun
->machine
->last_save_gpr
< 6)
5612 cfun
->machine
->last_save_gpr
= 6;
5616 /* Return offset between argument pointer and frame pointer
5617 initially after prologue. */
5620 s390_arg_frame_offset (void)
5622 /* See the comment in s390_emit_prologue about the assumptions we make
5623 whether or not the base and return address register need to be saved. */
/* Mirror the prologue's decision: the return-address register counts as
   used for non-leaf functions, TPF profiling, live RETURN_REGNUM, or an
   explicit save request recorded in cfun->machine.  */
5624 int return_addr_used
= !current_function_is_leaf
5625 || TARGET_TPF_PROFILING
5626 || regs_ever_live
[RETURN_REGNUM
]
5627 || cfun
->machine
->save_return_addr_p
;
/* Recompute frame layout (base register assumed used; on non-zArch the
   return address is always assumed saved) before reading frame_size.  */
5629 s390_frame_info (1, !TARGET_CPU_ZARCH
|| return_addr_used
);
5630 return cfun
->machine
->frame_size
+ STACK_POINTER_OFFSET
;
5633 /* Emit insn to save fpr REGNUM at offset OFFSET relative
5634 to register BASE. Return generated insn. */
/* NOTE(review): the declaration of `addr' is elided by the extraction.  */
5637 save_fpr (rtx base
, int offset
, int regnum
)
/* Build a DFmode memory slot at BASE+OFFSET in the save-area alias set,
   then store the fpr into it.  */
5640 addr
= gen_rtx_MEM (DFmode
, plus_constant (base
, offset
));
5641 set_mem_alias_set (addr
, s390_sr_alias_set
);
5643 return emit_move_insn (addr
, gen_rtx_REG (DFmode
, regnum
));
5646 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
5647 to register BASE. Return generated insn. */
/* NOTE(review): the declaration of `addr' is elided by the extraction.
   Exact mirror of save_fpr with source and destination swapped.  */
5650 restore_fpr (rtx base
, int offset
, int regnum
)
5653 addr
= gen_rtx_MEM (DFmode
, plus_constant (base
, offset
));
5654 set_mem_alias_set (addr
, s390_sr_alias_set
);
5656 return emit_move_insn (gen_rtx_REG (DFmode
, regnum
), addr
);
5659 /* Generate insn to save registers FIRST to LAST into
5660 the register save area located at offset OFFSET
5661 relative to register BASE. */
5664 save_gprs (rtx base
, int offset
, int first
, int last
)
5666 rtx addr
, insn
, note
;
5669 addr
= plus_constant (base
, offset
+ first
* UNITS_PER_WORD
);
5670 addr
= gen_rtx_MEM (Pmode
, addr
);
5671 set_mem_alias_set (addr
, s390_sr_alias_set
);
5673 /* Special-case single register. */
5677 insn
= gen_movdi (addr
, gen_rtx_REG (Pmode
, first
));
5679 insn
= gen_movsi (addr
, gen_rtx_REG (Pmode
, first
));
5681 RTX_FRAME_RELATED_P (insn
) = 1;
5686 insn
= gen_store_multiple (addr
,
5687 gen_rtx_REG (Pmode
, first
),
5688 GEN_INT (last
- first
+ 1));
5691 /* We need to set the FRAME_RELATED flag on all SETs
5692 inside the store-multiple pattern.
5694 However, we must not emit DWARF records for registers 2..5
5695 if they are stored for use by variable arguments ...
5697 ??? Unfortunately, it is not enough to simply not the the
5698 FRAME_RELATED flags for those SETs, because the first SET
5699 of the PARALLEL is always treated as if it had the flag
5700 set, even if it does not. Therefore we emit a new pattern
5701 without those registers as REG_FRAME_RELATED_EXPR note. */
5705 rtx pat
= PATTERN (insn
);
5707 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
5708 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
5709 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, i
)) = 1;
5711 RTX_FRAME_RELATED_P (insn
) = 1;
5715 addr
= plus_constant (base
, offset
+ 6 * UNITS_PER_WORD
);
5716 note
= gen_store_multiple (gen_rtx_MEM (Pmode
, addr
),
5717 gen_rtx_REG (Pmode
, 6),
5718 GEN_INT (last
- 6 + 1));
5719 note
= PATTERN (note
);
5722 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
5723 note
, REG_NOTES (insn
));
5725 for (i
= 0; i
< XVECLEN (note
, 0); i
++)
5726 if (GET_CODE (XVECEXP (note
, 0, i
)) == SET
)
5727 RTX_FRAME_RELATED_P (XVECEXP (note
, 0, i
)) = 1;
5729 RTX_FRAME_RELATED_P (insn
) = 1;
5735 /* Generate insn to restore registers FIRST to LAST from
5736 the register save area located at offset OFFSET
5737 relative to register BASE. */
/* NOTE(review): the conditionals selecting between the three insn forms
   below (presumably single-register movdi/movsi vs. load-multiple, split
   by word size — TODO confirm against full source) are elided.  */
5740 restore_gprs (rtx base
, int offset
, int first
, int last
)
/* Memory slot for the first register of the range, tagged with the
   save-area alias set.  */
5744 addr
= plus_constant (base
, offset
+ first
* UNITS_PER_WORD
);
5745 addr
= gen_rtx_MEM (Pmode
, addr
);
5746 set_mem_alias_set (addr
, s390_sr_alias_set
);
5748 /* Special-case single register. */
5752 insn
= gen_movdi (gen_rtx_REG (Pmode
, first
), addr
);
5754 insn
= gen_movsi (gen_rtx_REG (Pmode
, first
), addr
);
/* Range of more than one register: one load-multiple covering
   LAST - FIRST + 1 registers starting at FIRST.  */
5759 insn
= gen_load_multiple (gen_rtx_REG (Pmode
, first
),
5761 GEN_INT (last
- first
+ 1));
5765 /* Return insn sequence to load the GOT register. */
5767 static GTY(()) rtx got_symbol
;
5769 s390_load_got (void)
5775 got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
5776 SYMBOL_REF_FLAGS (got_symbol
) = SYMBOL_FLAG_LOCAL
;
5781 if (TARGET_CPU_ZARCH
)
5783 emit_move_insn (pic_offset_table_rtx
, got_symbol
);
5789 offset
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, got_symbol
),
5790 UNSPEC_LTREL_OFFSET
);
5791 offset
= gen_rtx_CONST (Pmode
, offset
);
5792 offset
= force_const_mem (Pmode
, offset
);
5794 emit_move_insn (pic_offset_table_rtx
, offset
);
5796 offset
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, XEXP (offset
, 0)),
5798 offset
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
5800 emit_move_insn (pic_offset_table_rtx
, offset
);
5803 insns
= get_insns ();
5808 /* Expand the prologue into a bunch of separate insns. */
5811 s390_emit_prologue (void)
5817 /* At this point, we decide whether we'll need to save/restore the
5818 return address register. This decision is final on zSeries machines;
5819 on S/390 it can still be overridden in s390_split_branches. */
5821 if (!current_function_is_leaf
5822 || TARGET_TPF_PROFILING
5823 || regs_ever_live
[RETURN_REGNUM
])
5824 cfun
->machine
->save_return_addr_p
= 1;
5826 /* Decide which register to use as literal pool base. In small leaf
5827 functions, try to use an unused call-clobbered register as base
5828 register to avoid save/restore overhead. */
5830 if (current_function_is_leaf
&& !regs_ever_live
[5])
5831 cfun
->machine
->base_reg
= gen_rtx_REG (Pmode
, 5);
5833 cfun
->machine
->base_reg
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
5835 regs_ever_live
[REGNO (cfun
->machine
->base_reg
)] = 1;
5837 /* Compute frame info. Note that at this point, we assume the base
5838 register and -on S/390- the return register always need to be saved.
5839 This is done because the usage of these registers might change even
5840 after the prolog was emitted. If it turns out later that we really
5841 don't need them, the prolog/epilog code is modified again. */
5843 s390_frame_info (1, !TARGET_CPU_ZARCH
|| cfun
->machine
->save_return_addr_p
);
5845 /* We need to update regs_ever_live to avoid data-flow problems. */
5847 regs_ever_live
[BASE_REGISTER
] = 1;
5848 regs_ever_live
[RETURN_REGNUM
] = !TARGET_CPU_ZARCH
5849 || cfun
->machine
->save_return_addr_p
;
5850 regs_ever_live
[STACK_POINTER_REGNUM
] = cfun
->machine
->frame_size
> 0;
5852 /* Annotate all constant pool references to let the scheduler know
5853 they implicitly use the base register. */
5855 push_topmost_sequence ();
5857 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5859 annotate_constant_pool_refs (&PATTERN (insn
));
5861 pop_topmost_sequence ();
5863 /* Choose best register to use for temp use within prologue.
5864 See below for why TPF must use the register 1. */
5866 if (!current_function_is_leaf
5867 && !TARGET_TPF_PROFILING
)
5868 temp_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
5870 temp_reg
= gen_rtx_REG (Pmode
, 1);
5872 /* Save call saved gprs. */
5874 insn
= save_gprs (stack_pointer_rtx
, 0,
5875 cfun
->machine
->first_save_gpr
, cfun
->machine
->last_save_gpr
);
5878 /* Dummy insn to mark literal pool slot. */
5880 emit_insn (gen_main_pool (cfun
->machine
->base_reg
));
5882 /* Save fprs for variable args. */
5884 if (current_function_stdarg
)
5885 for (i
= 16; i
< (TARGET_64BIT
? 20 : 18); i
++)
5886 save_fpr (stack_pointer_rtx
, 16*UNITS_PER_WORD
+ 8*(i
-16), i
);
5888 /* Save fprs 4 and 6 if used (31 bit ABI). */
5891 for (i
= 18; i
< 20; i
++)
5892 if (regs_ever_live
[i
] && !global_regs
[i
])
5894 insn
= save_fpr (stack_pointer_rtx
, 16*UNITS_PER_WORD
+ 8*(i
-16), i
);
5895 RTX_FRAME_RELATED_P (insn
) = 1;
5898 /* Decrement stack pointer. */
5900 if (cfun
->machine
->frame_size
> 0)
5902 rtx frame_off
= GEN_INT (-cfun
->machine
->frame_size
);
5904 /* Save incoming stack pointer into temp reg. */
5906 if (TARGET_BACKCHAIN
|| cfun
->machine
->save_fprs_p
)
5908 insn
= emit_insn (gen_move_insn (temp_reg
, stack_pointer_rtx
));
5911 /* Subtract frame size from stack pointer. */
5913 if (DISP_IN_RANGE (INTVAL (frame_off
)))
5915 insn
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
5916 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
5918 insn
= emit_insn (insn
);
5922 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off
), 'K', "K"))
5923 frame_off
= force_const_mem (Pmode
, frame_off
);
5925 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, frame_off
));
5926 annotate_constant_pool_refs (&PATTERN (insn
));
5929 RTX_FRAME_RELATED_P (insn
) = 1;
5931 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
5932 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
5933 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
5934 GEN_INT (-cfun
->machine
->frame_size
))),
5937 /* Set backchain. */
5939 if (TARGET_BACKCHAIN
)
5941 addr
= gen_rtx_MEM (Pmode
, stack_pointer_rtx
);
5942 set_mem_alias_set (addr
, s390_sr_alias_set
);
5943 insn
= emit_insn (gen_move_insn (addr
, temp_reg
));
5946 /* If we support asynchronous exceptions (e.g. for Java),
5947 we need to make sure the backchain pointer is set up
5948 before any possibly trapping memory access. */
5950 if (TARGET_BACKCHAIN
&& flag_non_call_exceptions
)
5952 addr
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
5953 emit_insn (gen_rtx_CLOBBER (VOIDmode
, addr
));
5957 /* Save fprs 8 - 15 (64 bit ABI). */
5959 if (cfun
->machine
->save_fprs_p
)
5961 insn
= emit_insn (gen_add2_insn (temp_reg
, GEN_INT(-64)));
5963 for (i
= 24; i
< 32; i
++)
5964 if (regs_ever_live
[i
] && !global_regs
[i
])
5966 rtx addr
= plus_constant (stack_pointer_rtx
,
5967 cfun
->machine
->frame_size
- 64 + (i
-24)*8);
5969 insn
= save_fpr (temp_reg
, (i
-24)*8, i
);
5970 RTX_FRAME_RELATED_P (insn
) = 1;
5972 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
5973 gen_rtx_SET (VOIDmode
,
5974 gen_rtx_MEM (DFmode
, addr
),
5975 gen_rtx_REG (DFmode
, i
)),
5980 /* Set frame pointer, if needed. */
5982 if (frame_pointer_needed
)
5984 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
5985 RTX_FRAME_RELATED_P (insn
) = 1;
5988 /* Set up got pointer, if needed. */
5990 if (flag_pic
&& regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
])
5992 rtx insns
= s390_load_got ();
5994 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5996 annotate_constant_pool_refs (&PATTERN (insn
));
5998 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, NULL_RTX
,
6005 if (TARGET_TPF_PROFILING
)
6007 /* Generate a BAS instruction to serve as a function
6008 entry intercept to facilitate the use of tracing
6009 algorithms located at the branch target. */
6010 emit_insn (gen_prologue_tpf ());
6012 /* Emit a blockage here so that all code
6013 lies between the profiling mechanisms. */
6014 emit_insn (gen_blockage ());
6018 /* Expand the epilogue into a bunch of separate insns. */
6021 s390_emit_epilogue (bool sibcall
)
6023 rtx frame_pointer
, return_reg
;
6024 int area_bottom
, area_top
, offset
= 0;
6028 if (TARGET_TPF_PROFILING
)
6031 /* Generate a BAS instruction to serve as a function
6032 entry intercept to facilitate the use of tracing
6033 algorithms located at the branch target. */
6035 /* Emit a blockage here so that all code
6036 lies between the profiling mechanisms. */
6037 emit_insn (gen_blockage ());
6039 emit_insn (gen_epilogue_tpf ());
6042 /* Check whether to use frame or stack pointer for restore. */
6044 frame_pointer
= frame_pointer_needed
?
6045 hard_frame_pointer_rtx
: stack_pointer_rtx
;
6047 /* Compute which parts of the save area we need to access. */
6049 if (cfun
->machine
->first_restore_gpr
!= -1)
6051 area_bottom
= cfun
->machine
->first_restore_gpr
* UNITS_PER_WORD
;
6052 area_top
= (cfun
->machine
->last_restore_gpr
+ 1) * UNITS_PER_WORD
;
6056 area_bottom
= INT_MAX
;
6062 if (cfun
->machine
->save_fprs_p
)
6064 if (area_bottom
> -64)
6072 for (i
= 18; i
< 20; i
++)
6073 if (regs_ever_live
[i
] && !global_regs
[i
])
6075 if (area_bottom
> 16*UNITS_PER_WORD
+ 8*(i
-16))
6076 area_bottom
= 16*UNITS_PER_WORD
+ 8*(i
-16);
6077 if (area_top
< 16*UNITS_PER_WORD
+ 8*(i
-16) + 8)
6078 area_top
= 16*UNITS_PER_WORD
+ 8*(i
-16) + 8;
6082 /* Check whether we can access the register save area.
6083 If not, increment the frame pointer as required. */
6085 if (area_top
<= area_bottom
)
6087 /* Nothing to restore. */
6089 else if (DISP_IN_RANGE (cfun
->machine
->frame_size
+ area_bottom
)
6090 && DISP_IN_RANGE (cfun
->machine
->frame_size
+ area_top
-1))
6092 /* Area is in range. */
6093 offset
= cfun
->machine
->frame_size
;
6097 rtx insn
, frame_off
;
6099 offset
= area_bottom
< 0 ? -area_bottom
: 0;
6100 frame_off
= GEN_INT (cfun
->machine
->frame_size
- offset
);
6102 if (DISP_IN_RANGE (INTVAL (frame_off
)))
6104 insn
= gen_rtx_SET (VOIDmode
, frame_pointer
,
6105 gen_rtx_PLUS (Pmode
, frame_pointer
, frame_off
));
6106 insn
= emit_insn (insn
);
6110 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off
), 'K', "K"))
6111 frame_off
= force_const_mem (Pmode
, frame_off
);
6113 insn
= emit_insn (gen_add2_insn (frame_pointer
, frame_off
));
6114 annotate_constant_pool_refs (&PATTERN (insn
));
6118 /* Restore call saved fprs. */
6122 if (cfun
->machine
->save_fprs_p
)
6123 for (i
= 24; i
< 32; i
++)
6124 if (regs_ever_live
[i
] && !global_regs
[i
])
6125 restore_fpr (frame_pointer
,
6126 offset
- 64 + (i
-24) * 8, i
);
6130 for (i
= 18; i
< 20; i
++)
6131 if (regs_ever_live
[i
] && !global_regs
[i
])
6132 restore_fpr (frame_pointer
,
6133 offset
+ 16*UNITS_PER_WORD
+ 8*(i
-16), i
);
6136 /* Return register. */
6138 return_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
6140 /* Restore call saved gprs. */
6142 if (cfun
->machine
->first_restore_gpr
!= -1)
6147 /* Check for global register and save them
6148 to stack location from where they get restored. */
6150 for (i
= cfun
->machine
->first_restore_gpr
;
6151 i
<= cfun
->machine
->last_restore_gpr
;
6154 /* These registers are special and need to be
6155 restored in any case. */
6156 if (i
== STACK_POINTER_REGNUM
6157 || i
== RETURN_REGNUM
6158 || i
== BASE_REGISTER
6159 || (flag_pic
&& i
== (int)PIC_OFFSET_TABLE_REGNUM
))
6164 addr
= plus_constant (frame_pointer
,
6165 offset
+ i
* UNITS_PER_WORD
);
6166 addr
= gen_rtx_MEM (Pmode
, addr
);
6167 set_mem_alias_set (addr
, s390_sr_alias_set
);
6168 emit_move_insn (addr
, gen_rtx_REG (Pmode
, i
));
6174 /* Fetch return address from stack before load multiple,
6175 this will do good for scheduling. */
6177 if (cfun
->machine
->save_return_addr_p
6178 || (cfun
->machine
->first_restore_gpr
< BASE_REGISTER
6179 && cfun
->machine
->last_restore_gpr
> RETURN_REGNUM
))
6181 int return_regnum
= find_unused_clobbered_reg();
6184 return_reg
= gen_rtx_REG (Pmode
, return_regnum
);
6186 addr
= plus_constant (frame_pointer
,
6187 offset
+ RETURN_REGNUM
* UNITS_PER_WORD
);
6188 addr
= gen_rtx_MEM (Pmode
, addr
);
6189 set_mem_alias_set (addr
, s390_sr_alias_set
);
6190 emit_move_insn (return_reg
, addr
);
6194 insn
= restore_gprs (frame_pointer
, offset
,
6195 cfun
->machine
->first_restore_gpr
,
6196 cfun
->machine
->last_restore_gpr
);
6203 /* Return to caller. */
6205 p
= rtvec_alloc (2);
6207 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
6208 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
, return_reg
);
6209 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
6214 /* Return the size in bytes of a function argument of
6215 type TYPE and/or mode MODE. At least one of TYPE or
6216 MODE must be specified. */
/* NOTE(review): the guard testing TYPE for non-NULL before the first
   return, and the abort call, are elided by the extraction.  */
6219 s390_function_arg_size (enum machine_mode mode
, tree type
)
/* Prefer the precise type size when type information exists.  */
6222 return int_size_in_bytes (type
);
6224 /* No type info available for some library calls ... */
6225 if (mode
!= BLKmode
)
6226 return GET_MODE_SIZE (mode
);
6228 /* If we have neither type nor mode, abort */
6232 /* Return true if a function argument of type TYPE and mode MODE
6233 is to be passed in a floating-point register, if available. */
6236 s390_function_arg_float (enum machine_mode mode
, tree type
)
6238 int size
= s390_function_arg_size (mode
, type
);
6242 /* Soft-float changes the ABI: no floating-point registers are used. */
6243 if (TARGET_SOFT_FLOAT
)
6246 /* No type info available for some library calls ... */
6248 return mode
== SFmode
|| mode
== DFmode
;
6250 /* The ABI says that record types with a single member are treated
6251 just like that member would be. */
6252 while (TREE_CODE (type
) == RECORD_TYPE
)
6254 tree field
, single
= NULL_TREE
;
6256 for (field
= TYPE_FIELDS (type
); field
; field
= TREE_CHAIN (field
))
6258 if (TREE_CODE (field
) != FIELD_DECL
)
6261 if (single
== NULL_TREE
)
6262 single
= TREE_TYPE (field
);
6267 if (single
== NULL_TREE
)
6273 return TREE_CODE (type
) == REAL_TYPE
;
6276 /* Return true if a function argument of type TYPE and mode MODE
6277 is to be passed in an integer register, or a pair of integer
6278 registers, if available. */
6281 s390_function_arg_integer (enum machine_mode mode
, tree type
)
6283 int size
= s390_function_arg_size (mode
, type
);
6287 /* No type info available for some library calls ... */
6289 return GET_MODE_CLASS (mode
) == MODE_INT
6290 || (TARGET_SOFT_FLOAT
&& GET_MODE_CLASS (mode
) == MODE_FLOAT
);
6292 /* We accept small integral (and similar) types. */
6293 if (INTEGRAL_TYPE_P (type
)
6294 || POINTER_TYPE_P (type
)
6295 || TREE_CODE (type
) == OFFSET_TYPE
6296 || (TARGET_SOFT_FLOAT
&& TREE_CODE (type
) == REAL_TYPE
))
6299 /* We also accept structs of size 1, 2, 4, 8 that are not
6300 passed in floating-point registers. */
6301 if (AGGREGATE_TYPE_P (type
)
6302 && exact_log2 (size
) >= 0
6303 && !s390_function_arg_float (mode
, type
))
6309 /* Return 1 if a function argument of type TYPE and mode MODE
6310 is to be passed by reference. The ABI specifies that only
6311 structures of size 1, 2, 4, or 8 bytes are passed by value,
6312 all other structures (and complex numbers) are passed by
/* NOTE(review): the TYPE non-NULL guard and the `return' statements of
   both branches are elided by the extraction.  */
6316 s390_function_arg_pass_by_reference (enum machine_mode mode
, tree type
)
6318 int size
= s390_function_arg_size (mode
, type
);
/* Aggregates whose size is not a power of two go by reference.  */
6324 if (AGGREGATE_TYPE_P (type
) && exact_log2 (size
) < 0)
/* Complex numbers and vectors likewise go by reference.  */
6327 if (TREE_CODE (type
) == COMPLEX_TYPE
6328 || TREE_CODE (type
) == VECTOR_TYPE
)
6335 /* Update the data in CUM to advance over an argument of mode MODE and
6336 data type TYPE. (TYPE is null for libcalls where that information
6337 may not be available.). The boolean NAMED specifies whether the
6338 argument is a named argument (as opposed to an unnamed argument
6339 matching an ellipsis). */
/* NOTE(review): the bodies of the by-reference and float branches
   (presumably bumping cum->gprs by 1 and cum->fprs by 1 — TODO confirm)
   are elided by the extraction; only the integer branch is visible.  */
6342 s390_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
6343 tree type
, int named ATTRIBUTE_UNUSED
)
6345 if (s390_function_arg_pass_by_reference (mode
, type
))
6349 else if (s390_function_arg_float (mode
, type
))
6353 else if (s390_function_arg_integer (mode
, type
))
6355 int size
= s390_function_arg_size (mode
, type
);
/* Integer arguments consume one gpr per word, rounded up.  */
6356 cum
->gprs
+= ((size
+ UNITS_PER_WORD
-1) / UNITS_PER_WORD
);
6362 /* Define where to put the arguments to a function.
6363 Value is zero to push the argument on the stack,
6364 or a hard register in which to store the argument.
6366 MODE is the argument's machine mode.
6367 TYPE is the data type of the argument (as a tree).
6368 This is null for libcalls where that information may
6370 CUM is a variable of type CUMULATIVE_ARGS which gives info about
6371 the preceding args and about the function being called.
6372 NAMED is nonzero if this argument is a named parameter
6373 (otherwise it is an extra parameter matching an ellipsis).
6375 On S/390, we use general purpose registers 2 through 6 to
6376 pass integer, pointer, and certain structure arguments, and
6377 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
6378 to pass floating point arguments. All remaining arguments
6379 are pushed to the stack. */
6382 s390_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
6383 int named ATTRIBUTE_UNUSED
)
6385 if (s390_function_arg_pass_by_reference (mode
, type
))
6388 if (s390_function_arg_float (mode
, type
))
6390 if (cum
->fprs
+ 1 > (TARGET_64BIT
? 4 : 2))
6393 return gen_rtx_REG (mode
, cum
->fprs
+ 16);
6395 else if (s390_function_arg_integer (mode
, type
))
6397 int size
= s390_function_arg_size (mode
, type
);
6398 int n_gprs
= (size
+ UNITS_PER_WORD
-1) / UNITS_PER_WORD
;
6400 if (cum
->gprs
+ n_gprs
> 5)
6403 return gen_rtx_REG (mode
, cum
->gprs
+ 2);
6406 /* After the real arguments, expand_call calls us once again
6407 with a void_type_node type. Whatever we return here is
6408 passed as operand 2 to the call expanders.
6410 We don't need this feature ... */
6411 else if (type
== void_type_node
)
6417 /* Return true if return values of type TYPE should be returned
6418 in a memory buffer whose address is passed by the caller as
6419 hidden first argument. */
/* NOTE(review): the `return true' statements of the aggregate branch and
   the fallback are elided by the extraction.  */
6422 s390_return_in_memory (tree type
, tree fundecl ATTRIBUTE_UNUSED
)
6424 /* We accept small integral (and similar) types. */
/* Scalars fit in registers only up to 8 bytes.  */
6425 if (INTEGRAL_TYPE_P (type
)
6426 || POINTER_TYPE_P (type
)
6427 || TREE_CODE (type
) == OFFSET_TYPE
6428 || TREE_CODE (type
) == REAL_TYPE
)
6429 return int_size_in_bytes (type
) > 8;
6431 /* Aggregates and similar constructs are always returned
6433 if (AGGREGATE_TYPE_P (type
)
6434 || TREE_CODE (type
) == COMPLEX_TYPE
6435 || TREE_CODE (type
) == VECTOR_TYPE
)
6438 /* ??? We get called on all sorts of random stuff from
6439 aggregate_value_p. We can't abort, but it's not clear
6440 what's safe to return. Pretend it's a struct I guess. */
6444 /* Define where to return a (scalar) value of type TYPE.
6445 If TYPE is null, define where to return a (scalar)
6446 value of mode MODE from a libcall. */
/* NOTE(review): the TYPE non-NULL guard around the promotion and the
   abort calls after the mode-class / size checks are elided by the
   extraction.  */
6449 s390_function_value (tree type
, enum machine_mode mode
)
/* With a type in hand, apply the normal scalar return-value promotion.  */
6453 int unsignedp
= TYPE_UNSIGNED (type
);
6454 mode
= promote_mode (type
, TYPE_MODE (type
), &unsignedp
, 1);
6457 if (GET_MODE_CLASS (mode
) != MODE_INT
6458 && GET_MODE_CLASS (mode
) != MODE_FLOAT
)
6460 if (GET_MODE_SIZE (mode
) > 8)
/* Float results come back in f0 (hard regno 16), everything else in
   gpr 2.  */
6463 if (TARGET_HARD_FLOAT
&& GET_MODE_CLASS (mode
) == MODE_FLOAT
)
6464 return gen_rtx_REG (mode
, 16);
6466 return gen_rtx_REG (mode
, 2);
6470 /* Create and return the va_list datatype.
6472 On S/390, va_list is an array type equivalent to
6474 typedef struct __va_list_tag
6478 void *__overflow_arg_area;
6479 void *__reg_save_area;
6482 where __gpr and __fpr hold the number of general purpose
6483 or floating point arguments used up to now, respectively,
6484 __overflow_arg_area points to the stack location of the
6485 next argument passed on the stack, and __reg_save_area
6486 always points to the start of the register area in the
6487 call frame of the current function. The function prologue
6488 saves all registers used for argument passing into this
6489 area if the function uses variable arguments. */
6492 s390_build_builtin_va_list (void)
6494 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
6496 record
= lang_hooks
.types
.make_type (RECORD_TYPE
);
6499 build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
6501 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("__gpr"),
6502 long_integer_type_node
);
6503 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("__fpr"),
6504 long_integer_type_node
);
6505 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("__overflow_arg_area"),
6507 f_sav
= build_decl (FIELD_DECL
, get_identifier ("__reg_save_area"),
6510 DECL_FIELD_CONTEXT (f_gpr
) = record
;
6511 DECL_FIELD_CONTEXT (f_fpr
) = record
;
6512 DECL_FIELD_CONTEXT (f_ovf
) = record
;
6513 DECL_FIELD_CONTEXT (f_sav
) = record
;
6515 TREE_CHAIN (record
) = type_decl
;
6516 TYPE_NAME (record
) = type_decl
;
6517 TYPE_FIELDS (record
) = f_gpr
;
6518 TREE_CHAIN (f_gpr
) = f_fpr
;
6519 TREE_CHAIN (f_fpr
) = f_ovf
;
6520 TREE_CHAIN (f_ovf
) = f_sav
;
6522 layout_type (record
);
6524 /* The correct type is an array type of one element. */
6525 return build_array_type (record
, build_index_type (size_zero_node
));
6528 /* Implement va_start by filling the va_list structure VALIST.
6529 STDARG_P is always true, and ignored.
6530 NEXTARG points to the first anonymous stack argument.
6532 The following global variables are used to initialize
6533 the va_list structure:
6535 current_function_args_info:
6536 holds number of gprs and fprs used for named arguments.
6537 current_function_arg_offset_rtx:
6538 holds the offset of the first anonymous stack argument
6539 (relative to the virtual arg pointer). */
6542 s390_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
6544 HOST_WIDE_INT n_gpr
, n_fpr
;
6546 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
6547 tree gpr
, fpr
, ovf
, sav
, t
;
6549 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
6550 f_fpr
= TREE_CHAIN (f_gpr
);
6551 f_ovf
= TREE_CHAIN (f_fpr
);
6552 f_sav
= TREE_CHAIN (f_ovf
);
6554 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
6555 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
6556 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
6557 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
6558 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
6560 /* Count number of gp and fp argument registers used. */
6562 n_gpr
= current_function_args_info
.gprs
;
6563 n_fpr
= current_function_args_info
.fprs
;
6565 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
6566 TREE_SIDE_EFFECTS (t
) = 1;
6567 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6569 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
6570 TREE_SIDE_EFFECTS (t
) = 1;
6571 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6573 /* Find the overflow area. */
6574 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
6576 off
= INTVAL (current_function_arg_offset_rtx
);
6577 off
= off
< 0 ? 0 : off
;
6578 if (TARGET_DEBUG_ARG
)
6579 fprintf (stderr
, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
6580 (int)n_gpr
, (int)n_fpr
, off
);
6582 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
, build_int_2 (off
, 0));
6584 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
6585 TREE_SIDE_EFFECTS (t
) = 1;
6586 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6588 /* Find the register save area. */
6589 t
= make_tree (TREE_TYPE (sav
), virtual_incoming_args_rtx
);
6590 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
6591 build_int_2 (-STACK_POINTER_OFFSET
, -1));
6592 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
6593 TREE_SIDE_EFFECTS (t
) = 1;
6594 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6597 /* Implement va_arg by updating the va_list structure
6598 VALIST as required to retrieve an argument of type
6599 TYPE, and returning that argument.
6601 Generates code equivalent to:
6603 if (integral value) {
6604 if (size <= 4 && args.gpr < 5 ||
6605 size > 4 && args.gpr < 4 )
6606 ret = args.reg_save_area[args.gpr+8]
6608 ret = *args.overflow_arg_area++;
6609 } else if (float value) {
6611 ret = args.reg_save_area[args.fpr+64]
6613 ret = *args.overflow_arg_area++;
6614 } else if (aggregate value) {
6616 ret = *args.reg_save_area[args.gpr]
6618 ret = **args.overflow_arg_area++;
6622 s390_gimplify_va_arg (tree valist
, tree type
, tree
*pre_p
,
6623 tree
*post_p ATTRIBUTE_UNUSED
)
6625 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
6626 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
6627 int indirect_p
, size
, n_reg
, sav_ofs
, sav_scale
, max_reg
;
6628 tree lab_false
, lab_over
, addr
;
6630 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
6631 f_fpr
= TREE_CHAIN (f_gpr
);
6632 f_ovf
= TREE_CHAIN (f_fpr
);
6633 f_sav
= TREE_CHAIN (f_ovf
);
6635 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
6636 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
6637 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
6638 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
6639 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
6641 size
= int_size_in_bytes (type
);
6643 if (s390_function_arg_pass_by_reference (TYPE_MODE (type
), type
))
6645 if (TARGET_DEBUG_ARG
)
6647 fprintf (stderr
, "va_arg: aggregate type");
6651 /* Aggregates are passed by reference. */
6655 sav_ofs
= 2 * UNITS_PER_WORD
;
6656 sav_scale
= UNITS_PER_WORD
;
6657 size
= UNITS_PER_WORD
;
6660 else if (s390_function_arg_float (TYPE_MODE (type
), type
))
6662 if (TARGET_DEBUG_ARG
)
6664 fprintf (stderr
, "va_arg: float type");
6668 /* FP args go in FP registers, if present. */
6672 sav_ofs
= 16 * UNITS_PER_WORD
;
6674 /* TARGET_64BIT has up to 4 parameter in fprs */
6675 max_reg
= TARGET_64BIT
? 3 : 1;
6679 if (TARGET_DEBUG_ARG
)
6681 fprintf (stderr
, "va_arg: other type");
6685 /* Otherwise into GP registers. */
6688 n_reg
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
6689 sav_ofs
= 2 * UNITS_PER_WORD
;
6691 if (size
< UNITS_PER_WORD
)
6692 sav_ofs
+= UNITS_PER_WORD
- size
;
6694 sav_scale
= UNITS_PER_WORD
;
6701 /* Pull the value out of the saved registers ... */
6703 lab_false
= create_artificial_label ();
6704 lab_over
= create_artificial_label ();
6705 addr
= create_tmp_var (ptr_type_node
, "addr");
6707 t
= build_int_2 (max_reg
, 0);
6708 TREE_TYPE (t
) = TREE_TYPE (reg
);
6709 t
= build2 (GT_EXPR
, boolean_type_node
, reg
, t
);
6710 u
= build1 (GOTO_EXPR
, void_type_node
, lab_false
);
6711 t
= build3 (COND_EXPR
, void_type_node
, t
, u
, NULL_TREE
);
6712 gimplify_and_add (t
, pre_p
);
6715 t
= build2 (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
6719 u
= build2 (MULT_EXPR
, long_integer_type_node
,
6720 reg
, build_int_2 (sav_scale
, 0));
6721 t
= build2 (PLUS_EXPR
, ptr_type_node
, t
, u
);
6723 t
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
6724 gimplify_and_add (t
, pre_p
);
6726 t
= build1 (GOTO_EXPR
, void_type_node
, lab_over
);
6727 gimplify_and_add (t
, pre_p
);
6729 t
= build1 (LABEL_EXPR
, void_type_node
, lab_false
);
6730 append_to_statement_list (t
, pre_p
);
6733 /* ... Otherwise out of the overflow area. */
6736 if (size
< UNITS_PER_WORD
)
6737 t
= build2 (PLUS_EXPR
, TREE_TYPE (t
), t
,
6738 build_int_2 (UNITS_PER_WORD
- size
, 0));
6740 gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
6742 u
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
6743 gimplify_and_add (u
, pre_p
);
6745 t
= build2 (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
6746 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
6747 gimplify_and_add (t
, pre_p
);
6749 t
= build1 (LABEL_EXPR
, void_type_node
, lab_over
);
6750 append_to_statement_list (t
, pre_p
);
6753 /* Increment register save count. */
6755 u
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
6756 build_int_2 (n_reg
, 0));
6757 gimplify_and_add (u
, pre_p
);
6761 t
= build_pointer_type (build_pointer_type (type
));
6762 addr
= fold_convert (t
, addr
);
6763 addr
= build_fold_indirect_ref (addr
);
6767 t
= build_pointer_type (type
);
6768 addr
= fold_convert (t
, addr
);
6771 return build_fold_indirect_ref (addr
);
6779 S390_BUILTIN_THREAD_POINTER
,
6780 S390_BUILTIN_SET_THREAD_POINTER
,
6785 static unsigned int const code_for_builtin_64
[S390_BUILTIN_max
] = {
6790 static unsigned int const code_for_builtin_31
[S390_BUILTIN_max
] = {
6796 s390_init_builtins (void)
6800 ftype
= build_function_type (ptr_type_node
, void_list_node
);
6801 builtin_function ("__builtin_thread_pointer", ftype
,
6802 S390_BUILTIN_THREAD_POINTER
, BUILT_IN_MD
,
6805 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
6806 builtin_function ("__builtin_set_thread_pointer", ftype
,
6807 S390_BUILTIN_SET_THREAD_POINTER
, BUILT_IN_MD
,
6811 /* Expand an expression EXP that calls a built-in function,
6812 with result going to TARGET if that's convenient
6813 (and in mode MODE if that's convenient).
6814 SUBTARGET may be used as the target for computing one of EXP's operands.
6815 IGNORE is nonzero if the value is to be ignored. */
6818 s390_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
6819 enum machine_mode mode ATTRIBUTE_UNUSED
,
6820 int ignore ATTRIBUTE_UNUSED
)
6824 unsigned int const *code_for_builtin
=
6825 TARGET_64BIT
? code_for_builtin_64
: code_for_builtin_31
;
6827 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6828 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6829 tree arglist
= TREE_OPERAND (exp
, 1);
6830 enum insn_code icode
;
6831 rtx op
[MAX_ARGS
], pat
;
6835 if (fcode
>= S390_BUILTIN_max
)
6836 internal_error ("bad builtin fcode");
6837 icode
= code_for_builtin
[fcode
];
6839 internal_error ("bad builtin fcode");
6841 nonvoid
= TREE_TYPE (TREE_TYPE (fndecl
)) != void_type_node
;
6843 for (arglist
= TREE_OPERAND (exp
, 1), arity
= 0;
6845 arglist
= TREE_CHAIN (arglist
), arity
++)
6847 const struct insn_operand_data
*insn_op
;
6849 tree arg
= TREE_VALUE (arglist
);
6850 if (arg
== error_mark_node
)
6852 if (arity
> MAX_ARGS
)
6855 insn_op
= &insn_data
[icode
].operand
[arity
+ nonvoid
];
6857 op
[arity
] = expand_expr (arg
, NULL_RTX
, insn_op
->mode
, 0);
6859 if (!(*insn_op
->predicate
) (op
[arity
], insn_op
->mode
))
6860 op
[arity
] = copy_to_mode_reg (insn_op
->mode
, op
[arity
]);
6865 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6867 || GET_MODE (target
) != tmode
6868 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6869 target
= gen_reg_rtx (tmode
);
6875 pat
= GEN_FCN (icode
) (target
);
6879 pat
= GEN_FCN (icode
) (target
, op
[0]);
6881 pat
= GEN_FCN (icode
) (op
[0]);
6884 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1]);
6900 /* Output assembly code for the trampoline template to
6903 On S/390, we use gpr 1 internally in the trampoline code;
6904 gpr 0 is used to hold the static chain. */
6907 s390_trampoline_template (FILE *file
)
6911 fprintf (file
, "larl\t%s,0f\n", reg_names
[1]);
6912 fprintf (file
, "lg\t%s,0(%s)\n", reg_names
[0], reg_names
[1]);
6913 fprintf (file
, "lg\t%s,8(%s)\n", reg_names
[1], reg_names
[1]);
6914 fprintf (file
, "br\t%s\n", reg_names
[1]);
6915 fprintf (file
, "0:\t.quad\t0\n");
6916 fprintf (file
, ".quad\t0\n");
6920 fprintf (file
, "basr\t%s,0\n", reg_names
[1]);
6921 fprintf (file
, "l\t%s,10(%s)\n", reg_names
[0], reg_names
[1]);
6922 fprintf (file
, "l\t%s,14(%s)\n", reg_names
[1], reg_names
[1]);
6923 fprintf (file
, "br\t%s\n", reg_names
[1]);
6924 fprintf (file
, ".long\t0\n");
6925 fprintf (file
, ".long\t0\n");
6929 /* Emit RTL insns to initialize the variable parts of a trampoline.
6930 FNADDR is an RTX for the address of the function's pure code.
6931 CXT is an RTX for the static chain value for the function. */
6934 s390_initialize_trampoline (rtx addr
, rtx fnaddr
, rtx cxt
)
6936 emit_move_insn (gen_rtx_MEM (Pmode
,
6937 memory_address (Pmode
,
6938 plus_constant (addr
, (TARGET_64BIT
? 20 : 12) ))), cxt
);
6939 emit_move_insn (gen_rtx_MEM (Pmode
,
6940 memory_address (Pmode
,
6941 plus_constant (addr
, (TARGET_64BIT
? 28 : 16) ))), fnaddr
);
6944 /* Return rtx for 64-bit constant formed from the 32-bit subwords
6945 LOW and HIGH, independent of the host word size. */
6948 s390_gen_rtx_const_DI (int high
, int low
)
6950 #if HOST_BITS_PER_WIDE_INT >= 64
6952 val
= (HOST_WIDE_INT
)high
;
6954 val
|= (HOST_WIDE_INT
)low
;
6956 return GEN_INT (val
);
6958 #if HOST_BITS_PER_WIDE_INT >= 32
6959 return immed_double_const ((HOST_WIDE_INT
)low
, (HOST_WIDE_INT
)high
, DImode
);
6966 /* Output assembler code to FILE to increment profiler label # LABELNO
6967 for profiling a function entry. */
6970 s390_function_profiler (FILE *file
, int labelno
)
6975 ASM_GENERATE_INTERNAL_LABEL (label
, "LP", labelno
);
6977 fprintf (file
, "# function profiler \n");
6979 op
[0] = gen_rtx_REG (Pmode
, RETURN_REGNUM
);
6980 op
[1] = gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
6981 op
[1] = gen_rtx_MEM (Pmode
, plus_constant (op
[1], UNITS_PER_WORD
));
6983 op
[2] = gen_rtx_REG (Pmode
, 1);
6984 op
[3] = gen_rtx_SYMBOL_REF (Pmode
, label
);
6985 SYMBOL_REF_FLAGS (op
[3]) = SYMBOL_FLAG_LOCAL
;
6987 op
[4] = gen_rtx_SYMBOL_REF (Pmode
, "_mcount");
6990 op
[4] = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op
[4]), UNSPEC_PLT
);
6991 op
[4] = gen_rtx_CONST (Pmode
, op
[4]);
6996 output_asm_insn ("stg\t%0,%1", op
);
6997 output_asm_insn ("larl\t%2,%3", op
);
6998 output_asm_insn ("brasl\t%0,%4", op
);
6999 output_asm_insn ("lg\t%0,%1", op
);
7003 op
[6] = gen_label_rtx ();
7005 output_asm_insn ("st\t%0,%1", op
);
7006 output_asm_insn ("bras\t%2,%l6", op
);
7007 output_asm_insn (".long\t%4", op
);
7008 output_asm_insn (".long\t%3", op
);
7009 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[6]));
7010 output_asm_insn ("l\t%0,0(%2)", op
);
7011 output_asm_insn ("l\t%2,4(%2)", op
);
7012 output_asm_insn ("basr\t%0,%0", op
);
7013 output_asm_insn ("l\t%0,%1", op
);
7017 op
[5] = gen_label_rtx ();
7018 op
[6] = gen_label_rtx ();
7020 output_asm_insn ("st\t%0,%1", op
);
7021 output_asm_insn ("bras\t%2,%l6", op
);
7022 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[5]));
7023 output_asm_insn (".long\t%4-%l5", op
);
7024 output_asm_insn (".long\t%3-%l5", op
);
7025 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[6]));
7026 output_asm_insn ("lr\t%0,%2", op
);
7027 output_asm_insn ("a\t%0,0(%2)", op
);
7028 output_asm_insn ("a\t%2,4(%2)", op
);
7029 output_asm_insn ("basr\t%0,%0", op
);
7030 output_asm_insn ("l\t%0,%1", op
);
7034 /* Select section for constant in constant pool. In 32-bit mode,
7035 constants go in the function section; in 64-bit mode in .rodata. */
7038 s390_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED
,
7039 rtx x ATTRIBUTE_UNUSED
,
7040 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
7042 if (TARGET_CPU_ZARCH
)
7043 readonly_data_section ();
7045 function_section (current_function_decl
);
7048 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
7049 into its SYMBOL_REF_FLAGS. */
7052 s390_encode_section_info (tree decl
, rtx rtl
, int first
)
7054 default_encode_section_info (decl
, rtl
, first
);
7056 /* If a variable has a forced alignment to < 2 bytes, mark it with
7057 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
7058 if (TREE_CODE (decl
) == VAR_DECL
7059 && DECL_USER_ALIGN (decl
) && DECL_ALIGN (decl
) < 16)
7060 SYMBOL_REF_FLAGS (XEXP (rtl
, 0)) |= SYMBOL_FLAG_ALIGN1
;
7063 /* Output thunk to FILE that implements a C++ virtual function call (with
7064 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
7065 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
7066 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
7067 relative to the resulting this pointer. */
7070 s390_output_mi_thunk (FILE *file
, tree thunk ATTRIBUTE_UNUSED
,
7071 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
7077 /* Operand 0 is the target function. */
7078 op
[0] = XEXP (DECL_RTL (function
), 0);
7079 if (flag_pic
&& !SYMBOL_REF_LOCAL_P (op
[0]))
7082 op
[0] = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op
[0]),
7083 TARGET_64BIT
? UNSPEC_PLT
: UNSPEC_GOT
);
7084 op
[0] = gen_rtx_CONST (Pmode
, op
[0]);
7087 /* Operand 1 is the 'this' pointer. */
7088 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
7089 op
[1] = gen_rtx_REG (Pmode
, 3);
7091 op
[1] = gen_rtx_REG (Pmode
, 2);
7093 /* Operand 2 is the delta. */
7094 op
[2] = GEN_INT (delta
);
7096 /* Operand 3 is the vcall_offset. */
7097 op
[3] = GEN_INT (vcall_offset
);
7099 /* Operand 4 is the temporary register. */
7100 op
[4] = gen_rtx_REG (Pmode
, 1);
7102 /* Operands 5 to 8 can be used as labels. */
7108 /* Operand 9 can be used for temporary register. */
7111 /* Generate code. */
7114 /* Setup literal pool pointer if required. */
7115 if ((!DISP_IN_RANGE (delta
)
7116 && !CONST_OK_FOR_CONSTRAINT_P (delta
, 'K', "K"))
7117 || (!DISP_IN_RANGE (vcall_offset
)
7118 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'K', "K")))
7120 op
[5] = gen_label_rtx ();
7121 output_asm_insn ("larl\t%4,%5", op
);
7124 /* Add DELTA to this pointer. */
7127 if (CONST_OK_FOR_CONSTRAINT_P (delta
, 'J', "J"))
7128 output_asm_insn ("la\t%1,%2(%1)", op
);
7129 else if (DISP_IN_RANGE (delta
))
7130 output_asm_insn ("lay\t%1,%2(%1)", op
);
7131 else if (CONST_OK_FOR_CONSTRAINT_P (delta
, 'K', "K"))
7132 output_asm_insn ("aghi\t%1,%2", op
);
7135 op
[6] = gen_label_rtx ();
7136 output_asm_insn ("agf\t%1,%6-%5(%4)", op
);
7140 /* Perform vcall adjustment. */
7143 if (DISP_IN_RANGE (vcall_offset
))
7145 output_asm_insn ("lg\t%4,0(%1)", op
);
7146 output_asm_insn ("ag\t%1,%3(%4)", op
);
7148 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'K', "K"))
7150 output_asm_insn ("lghi\t%4,%3", op
);
7151 output_asm_insn ("ag\t%4,0(%1)", op
);
7152 output_asm_insn ("ag\t%1,0(%4)", op
);
7156 op
[7] = gen_label_rtx ();
7157 output_asm_insn ("llgf\t%4,%7-%5(%4)", op
);
7158 output_asm_insn ("ag\t%4,0(%1)", op
);
7159 output_asm_insn ("ag\t%1,0(%4)", op
);
7163 /* Jump to target. */
7164 output_asm_insn ("jg\t%0", op
);
7166 /* Output literal pool if required. */
7169 output_asm_insn (".align\t4", op
);
7170 targetm
.asm_out
.internal_label (file
, "L",
7171 CODE_LABEL_NUMBER (op
[5]));
7175 targetm
.asm_out
.internal_label (file
, "L",
7176 CODE_LABEL_NUMBER (op
[6]));
7177 output_asm_insn (".long\t%2", op
);
7181 targetm
.asm_out
.internal_label (file
, "L",
7182 CODE_LABEL_NUMBER (op
[7]));
7183 output_asm_insn (".long\t%3", op
);
7188 /* Setup base pointer if required. */
7190 || (!DISP_IN_RANGE (delta
)
7191 && !CONST_OK_FOR_CONSTRAINT_P (delta
, 'K', "K"))
7192 || (!DISP_IN_RANGE (delta
)
7193 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'K', "K")))
7195 op
[5] = gen_label_rtx ();
7196 output_asm_insn ("basr\t%4,0", op
);
7197 targetm
.asm_out
.internal_label (file
, "L",
7198 CODE_LABEL_NUMBER (op
[5]));
7201 /* Add DELTA to this pointer. */
7204 if (CONST_OK_FOR_CONSTRAINT_P (delta
, 'J', "J"))
7205 output_asm_insn ("la\t%1,%2(%1)", op
);
7206 else if (DISP_IN_RANGE (delta
))
7207 output_asm_insn ("lay\t%1,%2(%1)", op
);
7208 else if (CONST_OK_FOR_CONSTRAINT_P (delta
, 'K', "K"))
7209 output_asm_insn ("ahi\t%1,%2", op
);
7212 op
[6] = gen_label_rtx ();
7213 output_asm_insn ("a\t%1,%6-%5(%4)", op
);
7217 /* Perform vcall adjustment. */
7220 if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'J', "J"))
7222 output_asm_insn ("lg\t%4,0(%1)", op
);
7223 output_asm_insn ("a\t%1,%3(%4)", op
);
7225 else if (DISP_IN_RANGE (vcall_offset
))
7227 output_asm_insn ("lg\t%4,0(%1)", op
);
7228 output_asm_insn ("ay\t%1,%3(%4)", op
);
7230 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'K', "K"))
7232 output_asm_insn ("lhi\t%4,%3", op
);
7233 output_asm_insn ("a\t%4,0(%1)", op
);
7234 output_asm_insn ("a\t%1,0(%4)", op
);
7238 op
[7] = gen_label_rtx ();
7239 output_asm_insn ("l\t%4,%7-%5(%4)", op
);
7240 output_asm_insn ("a\t%4,0(%1)", op
);
7241 output_asm_insn ("a\t%1,0(%4)", op
);
7244 /* We had to clobber the base pointer register.
7245 Re-setup the base pointer (with a different base). */
7246 op
[5] = gen_label_rtx ();
7247 output_asm_insn ("basr\t%4,0", op
);
7248 targetm
.asm_out
.internal_label (file
, "L",
7249 CODE_LABEL_NUMBER (op
[5]));
7252 /* Jump to target. */
7253 op
[8] = gen_label_rtx ();
7256 output_asm_insn ("l\t%4,%8-%5(%4)", op
);
7258 output_asm_insn ("a\t%4,%8-%5(%4)", op
);
7259 /* We cannot call through .plt, since .plt requires %r12 loaded. */
7260 else if (flag_pic
== 1)
7262 output_asm_insn ("a\t%4,%8-%5(%4)", op
);
7263 output_asm_insn ("l\t%4,%0(%4)", op
);
7265 else if (flag_pic
== 2)
7267 op
[9] = gen_rtx_REG (Pmode
, 0);
7268 output_asm_insn ("l\t%9,%8-4-%5(%4)", op
);
7269 output_asm_insn ("a\t%4,%8-%5(%4)", op
);
7270 output_asm_insn ("ar\t%4,%9", op
);
7271 output_asm_insn ("l\t%4,0(%4)", op
);
7274 output_asm_insn ("br\t%4", op
);
7276 /* Output literal pool. */
7277 output_asm_insn (".align\t4", op
);
7279 if (nonlocal
&& flag_pic
== 2)
7280 output_asm_insn (".long\t%0", op
);
7283 op
[0] = gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
7284 SYMBOL_REF_FLAGS (op
[0]) = SYMBOL_FLAG_LOCAL
;
7287 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[8]));
7289 output_asm_insn (".long\t%0", op
);
7291 output_asm_insn (".long\t%0-%5", op
);
7295 targetm
.asm_out
.internal_label (file
, "L",
7296 CODE_LABEL_NUMBER (op
[6]));
7297 output_asm_insn (".long\t%2", op
);
7301 targetm
.asm_out
.internal_label (file
, "L",
7302 CODE_LABEL_NUMBER (op
[7]));
7303 output_asm_insn (".long\t%3", op
);
7309 s390_valid_pointer_mode (enum machine_mode mode
)
7311 return (mode
== SImode
|| (TARGET_64BIT
&& mode
== DImode
));
7314 /* How to allocate a 'struct machine_function'. */
7316 static struct machine_function
*
7317 s390_init_machine_status (void)
7319 return ggc_alloc_cleared (sizeof (struct machine_function
));
7322 /* Checks whether the given ARGUMENT_LIST would use a caller
7323 saved register. This is used to decide whether sibling call
7324 optimization could be performed on the respective function
7328 s390_call_saved_register_used (tree argument_list
)
7330 CUMULATIVE_ARGS cum
;
7332 enum machine_mode mode
;
7337 INIT_CUMULATIVE_ARGS (cum
, NULL
, NULL
, 0, 0);
7339 while (argument_list
)
7341 parameter
= TREE_VALUE (argument_list
);
7342 argument_list
= TREE_CHAIN (argument_list
);
7347 /* For an undeclared variable passed as parameter we will get
7348 an ERROR_MARK node here. */
7349 if (TREE_CODE (parameter
) == ERROR_MARK
)
7352 if (! (type
= TREE_TYPE (parameter
)))
7355 if (! (mode
= TYPE_MODE (TREE_TYPE (parameter
))))
7358 if (s390_function_arg_pass_by_reference (mode
, type
))
7361 type
= build_pointer_type (type
);
7364 parm_rtx
= s390_function_arg (&cum
, mode
, type
, 0);
7366 s390_function_arg_advance (&cum
, mode
, type
, 0);
7368 if (parm_rtx
&& REG_P (parm_rtx
))
7371 reg
< HARD_REGNO_NREGS (REGNO (parm_rtx
), GET_MODE (parm_rtx
));
7373 if (! call_used_regs
[reg
+ REGNO (parm_rtx
)])
7380 /* Return true if the given call expression can be
7381 turned into a sibling call.
7382 DECL holds the declaration of the function to be called whereas
7383 EXP is the call expression itself. */
7386 s390_function_ok_for_sibcall (tree decl
, tree exp
)
7388 /* The TPF epilogue uses register 1. */
7389 if (TARGET_TPF_PROFILING
)
7392 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
7393 which would have to be restored before the sibcall. */
7394 if (!TARGET_64BIT
&& flag_pic
&& decl
&& TREE_PUBLIC (decl
))
7397 /* Register 6 on s390 is available as an argument register but unfortunately
7398 "caller saved". This makes functions needing this register for arguments
7399 not suitable for sibcalls. */
7400 if (TREE_OPERAND (exp
, 1)
7401 && s390_call_saved_register_used (TREE_OPERAND (exp
, 1)))
7407 /* This function is used by the call expanders of the machine description.
7408 It emits the call insn itself together with the necessary operations
7409 to adjust the target address and returns the emitted insn.
7410 ADDR_LOCATION is the target address rtx
7411 TLS_CALL the location of the thread-local symbol
7412 RESULT_REG the register where the result of the call should be stored
7413 RETADDR_REG the register where the return address should be stored
7414 If this parameter is NULL_RTX the call is considered
7415 to be a sibling call. */
7418 s390_emit_call (rtx addr_location
, rtx tls_call
, rtx result_reg
,
7421 bool plt_call
= false;
7427 /* Direct function calls need special treatment. */
7428 if (GET_CODE (addr_location
) == SYMBOL_REF
)
7430 /* When calling a global routine in PIC mode, we must
7431 replace the symbol itself with the PLT stub. */
7432 if (flag_pic
&& !SYMBOL_REF_LOCAL_P (addr_location
))
7434 addr_location
= gen_rtx_UNSPEC (Pmode
,
7435 gen_rtvec (1, addr_location
),
7437 addr_location
= gen_rtx_CONST (Pmode
, addr_location
);
7441 /* Unless we can use the bras(l) insn, force the
7442 routine address into a register. */
7443 if (!TARGET_SMALL_EXEC
&& !TARGET_CPU_ZARCH
)
7446 addr_location
= legitimize_pic_address (addr_location
, 0);
7448 addr_location
= force_reg (Pmode
, addr_location
);
7452 /* If it is already an indirect call or the code above moved the
7453 SYMBOL_REF to somewhere else make sure the address can be found in
7455 if (retaddr_reg
== NULL_RTX
7456 && GET_CODE (addr_location
) != SYMBOL_REF
7459 emit_move_insn (gen_rtx_REG (Pmode
, SIBCALL_REGNUM
), addr_location
);
7460 addr_location
= gen_rtx_REG (Pmode
, SIBCALL_REGNUM
);
7463 addr_location
= gen_rtx_MEM (QImode
, addr_location
);
7464 call
= gen_rtx_CALL (VOIDmode
, addr_location
, const0_rtx
);
7466 if (result_reg
!= NULL_RTX
)
7467 call
= gen_rtx_SET (VOIDmode
, result_reg
, call
);
7469 if (retaddr_reg
!= NULL_RTX
)
7471 clobber
= gen_rtx_CLOBBER (VOIDmode
, retaddr_reg
);
7473 if (tls_call
!= NULL_RTX
)
7474 vec
= gen_rtvec (3, call
, clobber
,
7475 gen_rtx_USE (VOIDmode
, tls_call
));
7477 vec
= gen_rtvec (2, call
, clobber
);
7479 call
= gen_rtx_PARALLEL (VOIDmode
, vec
);
7482 insn
= emit_call_insn (call
);
7484 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
7485 if ((!TARGET_64BIT
&& plt_call
) || tls_call
!= NULL_RTX
)
7487 /* s390_function_ok_for_sibcall should
7488 have denied sibcalls in this case. */
7489 if (retaddr_reg
== NULL_RTX
)
7492 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), pic_offset_table_rtx
);
7497 /* Implement CONDITIONAL_REGISTER_USAGE. */
7500 s390_conditional_register_usage (void)
7506 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
7507 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
7509 if (TARGET_CPU_ZARCH
)
7511 fixed_regs
[RETURN_REGNUM
] = 0;
7512 call_used_regs
[RETURN_REGNUM
] = 0;
7516 for (i
= 24; i
< 32; i
++)
7517 call_used_regs
[i
] = call_really_used_regs
[i
] = 0;
7521 for (i
= 18; i
< 20; i
++)
7522 call_used_regs
[i
] = call_really_used_regs
[i
] = 0;
7527 #include "gt-s390.h"