/* Subroutines for insn-output.c for Sun SPARC.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)
   64 bit SPARC V9 support by Michael Tiemann, Jim Wilson, and Doug Evans,
   at Cygnus Support.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "target-def.h"
/* 1 if the caller has placed an "unimp" insn immediately after the call.
   This is used in v8 code when calling a function that returns a structure.
   v9 doesn't have this.  Be careful to have this test be the same as that
   used on the call.  */

#define SKIP_CALLERS_UNIMP_P  \
(!TARGET_ARCH64 && current_function_returns_struct			\
 && ! integer_zerop (DECL_SIZE (DECL_RESULT (current_function_decl)))	\
 && (TREE_CODE (DECL_SIZE (DECL_RESULT (current_function_decl)))	\
     == INTEGER_CST))
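/* Added background note (not from the original source): in the 32-bit v8
   ABI a caller expecting an aggregate return value places an "unimp <size>"
   word immediately after the call, and the callee returns to %i7+12 instead
   of the usual %i7+8 so that word is skipped.  SKIP_CALLERS_UNIMP_P is
   nonzero exactly when the current function must use that longer return.  */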
/* Global variables for machine-dependent things.  */

/* Size of frame.  Need to know this to emit return insns from leaf procedures.
   ACTUAL_FSIZE is set by compute_frame_size() which is called during the
   reload pass.  This is important as the value is later used in insn
   scheduling (to see what can go in a delay slot).
   APPARENT_FSIZE is the size of the stack less the register save area and less
   the outgoing argument area.  It is used when saving call preserved regs.  */
static int apparent_fsize;
static int actual_fsize;

/* Number of live general or floating point registers needed to be
   saved (as 4-byte quantities).  */
static int num_gfregs;

/* Save the operands last given to a compare for use when we
   generate a scc or bcc insn.  */
rtx sparc_compare_op0, sparc_compare_op1;

/* Coordinate with the md file wrt special insns created by
   sparc_nonflat_function_epilogue.  */
bool sparc_emitting_epilogue;
/* Vector to say how input registers are mapped to output registers.
   HARD_FRAME_POINTER_REGNUM cannot be remapped by this function to
   eliminate it.  You must use -fomit-frame-pointer to get that.  */
char leaf_reg_remap[] =
{ 0, 1, 2, 3, 4, 5, 6, 7,
  -1, -1, -1, -1, -1, -1, 14, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,
  8, 9, 10, 11, 12, 13, -1, 15,

  32, 33, 34, 35, 36, 37, 38, 39,
  40, 41, 42, 43, 44, 45, 46, 47,
  48, 49, 50, 51, 52, 53, 54, 55,
  56, 57, 58, 59, 60, 61, 62, 63,
  64, 65, 66, 67, 68, 69, 70, 71,
  72, 73, 74, 75, 76, 77, 78, 79,
  80, 81, 82, 83, 84, 85, 86, 87,
  88, 89, 90, 91, 92, 93, 94, 95,
  96, 97, 98, 99, 100};
/* Vector, indexed by hard register number, which contains 1
   for a register that is allowable in a candidate for leaf
   function treatment.  */
char sparc_leaf_regs[] =
{ 1, 1, 1, 1, 1, 1, 1, 1,
  0, 0, 0, 0, 0, 0, 1, 0,
  0, 0, 0, 0, 0, 0, 0, 0,
  1, 1, 1, 1, 1, 1, 0, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1};
/* Name of where we pretend to think the frame pointer points.
   Normally, this is "%fp", but if we are in a leaf procedure,
   this is "%sp+something".  We record "something" separately as it may be
   too big for reg+constant addressing.  */

static const char *frame_base_name;
static int frame_base_offset;
static void sparc_init_modes PARAMS ((void));
static int save_regs PARAMS ((FILE *, int, int, const char *,
			      int, int, int));
static int restore_regs PARAMS ((FILE *, int, int, const char *, int, int));
static void build_big_number PARAMS ((FILE *, int, const char *));
static int function_arg_slotno PARAMS ((const CUMULATIVE_ARGS *,
					enum machine_mode, tree, int, int,
					int *, int *));

static int supersparc_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int hypersparc_adjust_cost PARAMS ((rtx, rtx, rtx, int));

static void sparc_output_addr_vec PARAMS ((rtx));
static void sparc_output_addr_diff_vec PARAMS ((rtx));
static void sparc_output_deferred_case_vectors PARAMS ((void));
static void sparc_add_gc_roots PARAMS ((void));
static int check_return_regs PARAMS ((rtx));
static int epilogue_renumber PARAMS ((rtx *, int));
static bool sparc_assemble_integer PARAMS ((rtx, unsigned int, int));
static int set_extends PARAMS ((rtx));
static void output_restore_regs PARAMS ((FILE *, int));
static void sparc_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
static void sparc_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
static void sparc_flat_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
static void sparc_flat_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
static void sparc_nonflat_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT,
						     int));
static void sparc_nonflat_function_prologue PARAMS ((FILE *, HOST_WIDE_INT,
						     int));
#ifdef OBJECT_FORMAT_ELF
static void sparc_elf_asm_named_section PARAMS ((const char *, unsigned int));
#endif

static int sparc_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int sparc_issue_rate PARAMS ((void));
static void sparc_sched_init PARAMS ((FILE *, int, int));
static int sparc_use_dfa_pipeline_interface PARAMS ((void));
static int sparc_use_sched_lookahead PARAMS ((void));

static void emit_soft_tfmode_libcall PARAMS ((const char *, int, rtx *));
static void emit_soft_tfmode_binop PARAMS ((enum rtx_code, rtx *));
static void emit_soft_tfmode_unop PARAMS ((enum rtx_code, rtx *));
static void emit_soft_tfmode_cvt PARAMS ((enum rtx_code, rtx *));
static void emit_hard_tfmode_operation PARAMS ((enum rtx_code, rtx *));
/* Option handling.  */

/* Code model option as passed by user.  */
const char *sparc_cmodel_string;
/* Parsed value.  */
enum cmodel sparc_cmodel;

char sparc_hard_reg_printed[8];

struct sparc_cpu_select sparc_select[] =
{
  /* switch	name,		tune	arch */
  { (char *)0,	"default",	1,	1 },
  { (char *)0,	"-mcpu=",	1,	1 },
  { (char *)0,	"-mtune=",	1,	0 },
  { 0, 0, 0, 0 }
};

/* CPU type.  This is set from TARGET_CPU_DEFAULT and -m{cpu,tune}=xxx.  */
enum processor_type sparc_cpu;
/* Initialize the GCC target structure.  */

/* The sparc default is to use .half rather than .short for aligned
   HI objects.  Use .word instead of .long on non-ELF systems.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
#ifndef OBJECT_FORMAT_ELF
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#endif

#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.uahalf\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.uaword\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.uaxword\t"

/* The target hook has to handle DI-mode values.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER sparc_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE sparc_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE sparc_output_function_epilogue

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST sparc_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE sparc_issue_rate
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT sparc_sched_init
#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE sparc_use_dfa_pipeline_interface
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD sparc_use_sched_lookahead

struct gcc_target targetm = TARGET_INITIALIZER;
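/* Added note: TARGET_INITIALIZER (from target-def.h) expands to an
   aggregate initializer whose fields pick up the TARGET_* macros redefined
   above, so the hooks take effect simply by being #define'd before this
   line.  */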
/* Validate and override various options, and do some machine dependent
   initialization.  */

void
sparc_override_options ()
{
  static struct code_model {
    const char *const name;
    const int value;
  } const cmodels[] = {
    { "medlow", CM_MEDLOW },
    { "medmid", CM_MEDMID },
    { "medany", CM_MEDANY },
    { "embmedany", CM_EMBMEDANY },
    { 0, 0 }
  };
  const struct code_model *cmodel;
  /* Map TARGET_CPU_DEFAULT to value for -m{arch,tune}=.  */
  static struct cpu_default {
    const int cpu;
    const char *const name;
  } const cpu_default[] = {
    /* There must be one entry here for each TARGET_CPU value.  */
    { TARGET_CPU_sparc, "cypress" },
    { TARGET_CPU_sparclet, "tsc701" },
    { TARGET_CPU_sparclite, "f930" },
    { TARGET_CPU_v8, "v8" },
    { TARGET_CPU_hypersparc, "hypersparc" },
    { TARGET_CPU_sparclite86x, "sparclite86x" },
    { TARGET_CPU_supersparc, "supersparc" },
    { TARGET_CPU_v9, "v9" },
    { TARGET_CPU_ultrasparc, "ultrasparc" },
    { TARGET_CPU_ultrasparc3, "ultrasparc3" },
    { 0, 0 }
  };
  const struct cpu_default *def;
  /* Table of values for -m{cpu,tune}=.  */
  static struct cpu_table {
    const char *const name;
    const enum processor_type processor;
    const int disable;
    const int enable;
  } const cpu_table[] = {
    { "v7",         PROCESSOR_V7, MASK_ISA, 0 },
    { "cypress",    PROCESSOR_CYPRESS, MASK_ISA, 0 },
    { "v8",         PROCESSOR_V8, MASK_ISA, MASK_V8 },
    /* TI TMS390Z55 supersparc */
    { "supersparc", PROCESSOR_SUPERSPARC, MASK_ISA, MASK_V8 },
    { "sparclite",  PROCESSOR_SPARCLITE, MASK_ISA, MASK_SPARCLITE },
    /* The Fujitsu MB86930 is the original sparclite chip, with no fpu.
       The Fujitsu MB86934 is the recent sparclite chip, with an fpu.  */
    { "f930",       PROCESSOR_F930, MASK_ISA|MASK_FPU, MASK_SPARCLITE },
    { "f934",       PROCESSOR_F934, MASK_ISA, MASK_SPARCLITE|MASK_FPU },
    { "hypersparc", PROCESSOR_HYPERSPARC, MASK_ISA, MASK_V8|MASK_FPU },
    { "sparclite86x", PROCESSOR_SPARCLITE86X, MASK_ISA|MASK_FPU,
      MASK_SPARCLITE },
    { "sparclet",   PROCESSOR_SPARCLET, MASK_ISA, MASK_SPARCLET },
    { "tsc701",     PROCESSOR_TSC701, MASK_ISA, MASK_SPARCLET },
    { "v9",         PROCESSOR_V9, MASK_ISA, MASK_V9 },
    /* TI ultrasparc I, II, IIi */
    { "ultrasparc", PROCESSOR_ULTRASPARC, MASK_ISA, MASK_V9
    /* Although insns using %y are deprecated, it is a clear win on current
       ultrasparcs.  */
						    |MASK_DEPRECATED_V8_INSNS },
    /* TI ultrasparc III */
    /* ??? Check if %y issue still holds true in ultra3.  */
    { "ultrasparc3", PROCESSOR_ULTRASPARC3, MASK_ISA, MASK_V9|MASK_DEPRECATED_V8_INSNS },
    { 0, 0, 0, 0 }
  };
  const struct cpu_table *cpu;
  const struct sparc_cpu_select *sel;
  int fpu;
#ifndef SPARC_BI_ARCH
  /* Check for unsupported architecture size.  */
  if (! TARGET_64BIT != DEFAULT_ARCH32_P)
    error ("%s is not supported by this configuration",
	   DEFAULT_ARCH32_P ? "-m64" : "-m32");
#endif

  /* We force all 64bit archs to use 128 bit long double */
  if (TARGET_64BIT && ! TARGET_LONG_DOUBLE_128)
    {
      error ("-mlong-double-64 not allowed with -m64");
      target_flags |= MASK_LONG_DOUBLE_128;
    }

  /* Code model selection.  */
  sparc_cmodel = SPARC_DEFAULT_CMODEL;

#ifdef SPARC_BI_ARCH
  if (TARGET_ARCH32)
    sparc_cmodel = CM_32;
#endif
  if (sparc_cmodel_string != NULL)
    {
      if (TARGET_ARCH64)
	{
	  for (cmodel = &cmodels[0]; cmodel->name; cmodel++)
	    if (strcmp (sparc_cmodel_string, cmodel->name) == 0)
	      break;
	  if (cmodel->name == NULL)
	    error ("bad value (%s) for -mcmodel= switch", sparc_cmodel_string);
	  else
	    sparc_cmodel = cmodel->value;
	}
      else
	error ("-mcmodel= is not supported on 32 bit systems");
    }
  fpu = TARGET_FPU; /* save current -mfpu status */

  /* Set the default CPU.  */
  for (def = &cpu_default[0]; def->name; ++def)
    if (def->cpu == TARGET_CPU_DEFAULT)
      break;
  if (! def->name)
    abort ();
  sparc_select[0].string = def->name;

  for (sel = &sparc_select[0]; sel->name; ++sel)
    {
      if (sel->string != NULL)
	{
	  for (cpu = &cpu_table[0]; cpu->name; ++cpu)
	    if (! strcmp (sel->string, cpu->name))
	      {
		if (sel->set_tune_p)
		  sparc_cpu = cpu->processor;

		if (sel->set_arch_p)
		  {
		    target_flags &= ~cpu->disable;
		    target_flags |= cpu->enable;
		  }
		break;
	      }

	  if (! cpu->name)
	    error ("bad value (%s) for %s switch", sel->string, sel->name);
	}
    }
  /* If -mfpu or -mno-fpu was explicitly used, don't override with
     the processor default.  Clear MASK_FPU_SET to avoid confusing
     the reverse mapping from switch values to names.  */
  if (TARGET_FPU_SET)
    {
      target_flags = (target_flags & ~MASK_FPU) | fpu;
      target_flags &= ~MASK_FPU_SET;
    }

  /* Don't allow -mvis if FPU is disabled.  */
  if (! TARGET_FPU)
    target_flags &= ~MASK_VIS;
  /* -mvis assumes UltraSPARC+, so we are sure v9 instructions
     are available.
     -m64 also implies v9.  */
  if (TARGET_VIS || TARGET_ARCH64)
    {
      target_flags |= MASK_V9;
      target_flags &= ~(MASK_V8 | MASK_SPARCLET | MASK_SPARCLITE);
    }

  /* Use the deprecated v8 insns for sparc64 in 32 bit mode.  */
  if (TARGET_V9 && TARGET_ARCH32)
    target_flags |= MASK_DEPRECATED_V8_INSNS;

  /* V8PLUS requires V9, makes no sense in 64 bit mode.  */
  if (! TARGET_V9 || TARGET_ARCH64)
    target_flags &= ~MASK_V8PLUS;

  /* Don't use stack biasing in 32 bit mode.  */
  if (TARGET_ARCH32)
    target_flags &= ~MASK_STACK_BIAS;

  /* Supply a default value for align_functions.  */
  if (align_functions == 0
      && (sparc_cpu == PROCESSOR_ULTRASPARC
	  || sparc_cpu == PROCESSOR_ULTRASPARC3))
    align_functions = 32;
  /* Validate PCC_STRUCT_RETURN.  */
  if (flag_pcc_struct_return == DEFAULT_PCC_STRUCT_RETURN)
    flag_pcc_struct_return = (TARGET_ARCH64 ? 0 : 1);

  /* Only use .uaxword when compiling for a 64-bit target.  */
  if (! TARGET_ARCH64)
    targetm.asm_out.unaligned_op.di = NULL;

  /* Do various machine dependent initializations.  */
  sparc_init_modes ();

  /* Register global variables with the garbage collector.  */
  sparc_add_gc_roots ();
}
/* Miscellaneous utilities.  */

/* Nonzero if CODE, a comparison, is suitable for use in v9 conditional move
   or branch on register contents instructions.  */

int
v9_regcmp_p (code)
     enum rtx_code code;
{
  return (code == EQ || code == NE || code == GE || code == LT
	  || code == LE || code == GT);
}
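/* Added note: the v9 branch-on-register-contents insns (brz, brlez, brlz,
   brnz, brgz, brgez) and the matching movr variants only test a register
   against zero with a signed relation, which is exactly the set
   EQ/NE/GE/LT/LE/GT accepted above; unsigned codes such as GEU or LTU
   cannot be expressed this way.  */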
/* Operand constraints.  */

/* Return non-zero only if OP is a register of mode MODE,
   or const0_rtx.  */

int
reg_or_0_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (register_operand (op, mode))
    return 1;
  if (op == const0_rtx)
    return 1;
  if (GET_MODE (op) == VOIDmode && GET_CODE (op) == CONST_DOUBLE
      && CONST_DOUBLE_HIGH (op) == 0
      && CONST_DOUBLE_LOW (op) == 0)
    return 1;
  if (fp_zero_operand (op, mode))
    return 1;
  return 0;
}

/* Return non-zero only if OP is const1_rtx.  */

int
const1_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return op == const1_rtx;
}
/* Nonzero if OP is a floating point value with value 0.0.  */

int
fp_zero_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE_CLASS (GET_MODE (op)) != MODE_FLOAT)
    return 0;
  return op == CONST0_RTX (mode);
}

/* Nonzero if OP is a register operand in floating point register.  */

int
fp_register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (! register_operand (op, mode))
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return GET_CODE (op) == REG && SPARC_FP_REG_P (REGNO (op));
}
/* Nonzero if OP is a floating point constant which can
   be loaded into an integer register using a single
   sethi instruction.  */

int
fp_sethi_p (op)
     rtx op;
{
  if (GET_CODE (op) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r;
      long i;

      REAL_VALUE_FROM_CONST_DOUBLE (r, op);
      if (REAL_VALUES_EQUAL (r, dconst0) &&
	  ! REAL_VALUE_MINUS_ZERO (r))
	return 0;
      REAL_VALUE_TO_TARGET_SINGLE (r, i);
      if (SPARC_SETHI_P (i))
	return 1;
    }

  return 0;
}

/* Nonzero if OP is a floating point constant which can
   be loaded into an integer register using a single
   mov instruction.  */

int
fp_mov_p (op)
     rtx op;
{
  if (GET_CODE (op) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r;
      long i;

      REAL_VALUE_FROM_CONST_DOUBLE (r, op);
      if (REAL_VALUES_EQUAL (r, dconst0) &&
	  ! REAL_VALUE_MINUS_ZERO (r))
	return 0;
      REAL_VALUE_TO_TARGET_SINGLE (r, i);
      if (SPARC_SIMM13_P (i))
	return 1;
    }

  return 0;
}

/* Nonzero if OP is a floating point constant which can
   be loaded into an integer register using a high/losum
   instruction sequence.  */

int
fp_high_losum_p (op)
     rtx op;
{
  /* The constraints calling this should only be in
     SFmode move insns, so any constant which cannot
     be moved using a single insn will do.  */
  if (GET_CODE (op) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r;
      long i;

      REAL_VALUE_FROM_CONST_DOUBLE (r, op);
      if (REAL_VALUES_EQUAL (r, dconst0) &&
	  ! REAL_VALUE_MINUS_ZERO (r))
	return 0;
      REAL_VALUE_TO_TARGET_SINGLE (r, i);
      if (! SPARC_SETHI_P (i)
	  && ! SPARC_SIMM13_P (i))
	return 1;
    }

  return 0;
}
/* Nonzero if OP is an integer register.  */

int
intreg_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return (register_operand (op, SImode)
	  || (TARGET_ARCH64 && register_operand (op, DImode)));
}

/* Nonzero if OP is a floating point condition code register.  */

int
fcc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* This can happen when recog is called from combine.  Op may be a MEM.
     Fail instead of calling abort in this case.  */
  if (GET_CODE (op) != REG)
    return 0;

  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;
  if (mode == VOIDmode
      && (GET_MODE (op) != CCFPmode && GET_MODE (op) != CCFPEmode))
    return 0;

#if 0	/* ??? ==> 1 when %fcc0-3 are pseudos first.  See gen_compare_reg().  */
  if (reg_renumber == 0)
    return REGNO (op) >= FIRST_PSEUDO_REGISTER;
  return REGNO_OK_FOR_CCFP_P (REGNO (op));
#else
  return (unsigned) REGNO (op) - SPARC_FIRST_V9_FCC_REG < 4;
#endif
}
/* Nonzero if OP is a floating point condition code fcc0 register.  */

int
fcc0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* This can happen when recog is called from combine.  Op may be a MEM.
     Fail instead of calling abort in this case.  */
  if (GET_CODE (op) != REG)
    return 0;

  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;
  if (mode == VOIDmode
      && (GET_MODE (op) != CCFPmode && GET_MODE (op) != CCFPEmode))
    return 0;

  return REGNO (op) == SPARC_FCC_REG;
}

/* Nonzero if OP is an integer or floating point condition code register.  */

int
icc_or_fcc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == REG && REGNO (op) == SPARC_ICC_REG)
    {
      if (mode != VOIDmode && mode != GET_MODE (op))
	return 0;
      if (mode == VOIDmode
	  && GET_MODE (op) != CCmode && GET_MODE (op) != CCXmode)
	return 0;
      return 1;
    }

  return fcc_reg_operand (op, mode);
}

/* Nonzero if OP can appear as the dest of a RESTORE insn.  */
int
restore_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (GET_CODE (op) == REG && GET_MODE (op) == mode
	  && (REGNO (op) < 8 || (REGNO (op) >= 24 && REGNO (op) < 32)));
}
/* Call insn on SPARC can take a PC-relative constant address, or any regular
   memory address.  */

int
call_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    abort ();
  op = XEXP (op, 0);
  return (symbolic_operand (op, mode) || memory_address_p (Pmode, op));
}

int
call_operand_address (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (symbolic_operand (op, mode) || memory_address_p (Pmode, op));
}

/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  */

int
symbolic_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum machine_mode omode = GET_MODE (op);

  if (omode != mode && omode != VOIDmode && mode != VOIDmode)
    return 0;

  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    case CONST:
      op = XEXP (op, 0);
      return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);

    default:
      return 0;
    }
}
/* Return truth value of statement that OP is a symbolic memory
   operand of mode MODE.  */

int
symbolic_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
	  || GET_CODE (op) == HIGH || GET_CODE (op) == LABEL_REF);
}

/* Return truth value of statement that OP is a LABEL_REF of mode MODE.  */

int
label_ref_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != LABEL_REF)
    return 0;
  if (GET_MODE (op) != mode)
    return 0;
  return 1;
}
/* Return 1 if the operand is an argument used in generating pic references
   in either the medium/low or medium/anywhere code models of sparc64.  */

int
sp64_medium_pic_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Check for (const (minus (symbol_ref:GOT)
                             (const (minus (label) (pc))))).  */
  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);
  if (GET_CODE (op) != MINUS)
    return 0;
  if (GET_CODE (XEXP (op, 0)) != SYMBOL_REF)
    return 0;
  /* ??? Ensure symbol is GOT.  */
  if (GET_CODE (XEXP (op, 1)) != CONST)
    return 0;
  if (GET_CODE (XEXP (XEXP (op, 1), 0)) != MINUS)
    return 0;
  return 1;
}
/* Return 1 if the operand is a data segment reference.  This includes
   the readonly data segment, or in other words anything but the text segment.
   This is needed in the medium/anywhere code model on v9.  These values
   are accessed with EMBMEDANY_BASE_REG.  */

int
data_segment_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
      return ! SYMBOL_REF_FLAG (op);
    case PLUS :
      /* Assume canonical format of symbol + constant.
	 Fall through.  */
    case CONST :
      return data_segment_operand (XEXP (op, 0), VOIDmode);
    default :
      return 0;
    }
}

/* Return 1 if the operand is a text segment reference.
   This is needed in the medium/anywhere code model on v9.  */

int
text_segment_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  switch (GET_CODE (op))
    {
    case LABEL_REF :
      return 1;
    case SYMBOL_REF :
      return SYMBOL_REF_FLAG (op);
    case PLUS :
      /* Assume canonical format of symbol + constant.
	 Fall through.  */
    case CONST :
      return text_segment_operand (XEXP (op, 0), VOIDmode);
    default :
      return 0;
    }
}
/* Return 1 if the operand is either a register or a memory operand that is
   not symbolic.  */

int
reg_or_nonsymb_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (register_operand (op, mode))
    return 1;

  if (memory_operand (op, mode) && ! symbolic_memory_operand (op, mode))
    return 1;

  return 0;
}

int
splittable_symbolic_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != MEM)
    return 0;
  if (! symbolic_operand (XEXP (op, 0), Pmode))
    return 0;
  return 1;
}

int
splittable_immediate_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != MEM)
    return 0;
  if (! immediate_operand (XEXP (op, 0), Pmode))
    return 0;
  return 1;
}
/* Return truth value of whether OP is EQ or NE.  */

int
eq_or_neq (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return (GET_CODE (op) == EQ || GET_CODE (op) == NE);
}

/* Return 1 if this is a comparison operator, but not an EQ, NE, GEU,
   or LTU for non-floating-point.  We handle those specially.  */

int
normal_comp_operator (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (op);

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  if (GET_MODE (XEXP (op, 0)) == CCFPmode
      || GET_MODE (XEXP (op, 0)) == CCFPEmode)
    return 1;

  return (code != NE && code != EQ && code != GEU && code != LTU);
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
noov_compare_op (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (op);

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  if (GET_MODE (XEXP (op, 0)) == CC_NOOVmode
      || GET_MODE (XEXP (op, 0)) == CCX_NOOVmode)
    /* These are the only branches which work with CC_NOOVmode.  */
    return (code == EQ || code == NE || code == GE || code == LT);
  return 1;
}

/* Return 1 if this is a 64-bit comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
noov_compare64_op (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (op);

  if (! TARGET_V9)
    return 0;

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  if (GET_MODE (XEXP (op, 0)) == CCX_NOOVmode)
    /* These are the only branches which work with CCX_NOOVmode.  */
    return (code == EQ || code == NE || code == GE || code == LT);
  return (GET_MODE (XEXP (op, 0)) == CCXmode);
}
/* Nonzero if OP is a comparison operator suitable for use in v9
   conditional move or branch on register contents instructions.  */

int
v9_regcmp_op (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (op);

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  return v9_regcmp_p (code);
}

/* Return 1 if this is a SIGN_EXTEND or ZERO_EXTEND operation.  */

int
extend_op (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return GET_CODE (op) == SIGN_EXTEND || GET_CODE (op) == ZERO_EXTEND;
}
/* Return nonzero if OP is an operator of mode MODE which can set
   the condition codes explicitly.  We do not include PLUS and MINUS
   because these require CC_NOOVmode, which we handle explicitly.  */

int
cc_arithop (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == AND
      || GET_CODE (op) == IOR
      || GET_CODE (op) == XOR)
    return 1;

  return 0;
}

/* Return nonzero if OP is an operator of mode MODE which can bitwise
   complement its second operand and set the condition codes explicitly.  */

int
cc_arithopn (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* XOR is not here because combine canonicalizes (xor (not ...) ...)
     and (xor ... (not ...)) to (not (xor ...)).  */
  return (GET_CODE (op) == AND
	  || GET_CODE (op) == IOR);
}
/* Return true if OP is a register, or is a CONST_INT that can fit in a
   signed 13 bit immediate field.  This is an acceptable SImode operand for
   most 3 address instructions.  */

int
arith_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (register_operand (op, mode))
    return 1;
  if (GET_CODE (op) != CONST_INT)
    return 0;
  return SMALL_INT32 (op);
}
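/* Added note: SMALL_INT32/SPARC_SIMM13_P accept the signed 13-bit
   immediate range of SPARC arithmetic instructions, i.e. -4096 through
   4095 inclusive; anything outside that range must first be built with a
   sethi/or pair.  */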
/* Return true if OP is a constant 4096  */

int
arith_4096_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;
  return INTVAL (op) == 4096;
}

/* Return true if OP is suitable as second operand for add/sub */

int
arith_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return arith_operand (op, mode) || arith_4096_operand (op, mode);
}
/* Return true if OP is a CONST_INT or a CONST_DOUBLE which can fit in the
   immediate field of OR and XOR instructions.  Used for 64-bit
   constant formation patterns.  */
int
const64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return ((GET_CODE (op) == CONST_INT
	   && SPARC_SIMM13_P (INTVAL (op)))
#if HOST_BITS_PER_WIDE_INT != 64
	  || (GET_CODE (op) == CONST_DOUBLE
	      && SPARC_SIMM13_P (CONST_DOUBLE_LOW (op))
	      && (CONST_DOUBLE_HIGH (op) ==
		  ((CONST_DOUBLE_LOW (op) & 0x80000000) != 0 ?
		   (HOST_WIDE_INT)-1 : 0)))
#endif
	  );
}

/* The same, but only for sethi instructions.  */
int
const64_high_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((GET_CODE (op) == CONST_INT
	   && (INTVAL (op) & ~(HOST_WIDE_INT)0x3ff) != 0
	   && SPARC_SETHI_P (INTVAL (op) & GET_MODE_MASK (mode)))
	  || (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_HIGH (op) == 0
	      && (CONST_DOUBLE_LOW (op) & ~(HOST_WIDE_INT)0x3ff) != 0
	      && SPARC_SETHI_P (CONST_DOUBLE_LOW (op))));
}
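/* Added note: sethi places a 22-bit immediate into bits 31..10 of the
   destination and clears the low 10 bits, so only constants whose low 10
   bits are zero qualify; the (x & ~0x3ff) != 0 test additionally rejects
   values living entirely in the low 10 bits, which a plain mov handles.  */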
/* Return true if OP is a register, or is a CONST_INT that can fit in a
   signed 11 bit immediate field.  This is an acceptable SImode operand for
   the movcc instructions.  */

int
arith11_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && SPARC_SIMM11_P (INTVAL (op))));
}

/* Return true if OP is a register, or is a CONST_INT that can fit in a
   signed 10 bit immediate field.  This is an acceptable SImode operand for
   the movrcc instructions.  */

int
arith10_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && SPARC_SIMM10_P (INTVAL (op))));
}
/* Return true if OP is a register, is a CONST_INT that fits in a 13 bit
   immediate field, or is a CONST_DOUBLE whose both parts fit in a 13 bit
   immediate field.
   v9: Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that
   can fit in a 13 bit immediate field.  This is an acceptable DImode operand
   for most 3 address instructions.  */

int
arith_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && SMALL_INT (op))
	  || (! TARGET_ARCH64
	      && GET_CODE (op) == CONST_DOUBLE
	      && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_LOW (op) + 0x1000) < 0x2000
	      && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_HIGH (op) + 0x1000) < 0x2000)
	  || (TARGET_ARCH64
	      && GET_CODE (op) == CONST_DOUBLE
	      && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_LOW (op) + 0x1000) < 0x2000
	      && ((CONST_DOUBLE_HIGH (op) == -1
		   && (CONST_DOUBLE_LOW (op) & 0x1000) == 0x1000)
		  || (CONST_DOUBLE_HIGH (op) == 0
		      && (CONST_DOUBLE_LOW (op) & 0x1000) == 0))));
}

/* Return true if OP is a constant 4096 for DImode on ARCH64 */

int
arith_double_4096_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return (TARGET_ARCH64 &&
	  ((GET_CODE (op) == CONST_INT && INTVAL (op) == 4096) ||
	   (GET_CODE (op) == CONST_DOUBLE &&
	    CONST_DOUBLE_LOW (op) == 4096 &&
	    CONST_DOUBLE_HIGH (op) == 0)));
}

/* Return true if OP is suitable as second operand for add/sub in DImode */

int
arith_double_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return arith_double_operand (op, mode) || arith_double_4096_operand (op, mode);
}
/* Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that
   can fit in an 11 bit immediate field.  This is an acceptable DImode
   operand for the movcc instructions.  */
/* ??? Replace with arith11_operand?  */

int
arith11_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_DOUBLE
	      && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
	      && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_LOW (op) + 0x400) < 0x800
	      && ((CONST_DOUBLE_HIGH (op) == -1
		   && (CONST_DOUBLE_LOW (op) & 0x400) == 0x400)
		  || (CONST_DOUBLE_HIGH (op) == 0
		      && (CONST_DOUBLE_LOW (op) & 0x400) == 0)))
	  || (GET_CODE (op) == CONST_INT
	      && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
	      && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x400) < 0x800));
}

/* Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that
   can fit in a 10 bit immediate field.  This is an acceptable DImode
   operand for the movrcc instructions.  */
/* ??? Replace with arith10_operand?  */

int
arith10_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_DOUBLE
	      && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
	      && (unsigned) (CONST_DOUBLE_LOW (op) + 0x200) < 0x400
	      && ((CONST_DOUBLE_HIGH (op) == -1
		   && (CONST_DOUBLE_LOW (op) & 0x200) == 0x200)
		  || (CONST_DOUBLE_HIGH (op) == 0
		      && (CONST_DOUBLE_LOW (op) & 0x200) == 0)))
	  || (GET_CODE (op) == CONST_INT
	      && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
	      && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x200) < 0x400));
}
/* Return truth value of whether OP is an integer which fits the
   range constraining immediate operands in most three-address insns,
   which have a 13 bit immediate field.  */

int
small_int (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return (GET_CODE (op) == CONST_INT && SMALL_INT (op));
}

int
small_int_or_double (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return ((GET_CODE (op) == CONST_INT && SMALL_INT (op))
	  || (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_HIGH (op) == 0
	      && SPARC_SIMM13_P (CONST_DOUBLE_LOW (op))));
}
/* Recognize operand values for the umul instruction.  That instruction sign
   extends immediate values just like all other sparc instructions, but
   interprets the extended result as an unsigned number.  */

int
uns_small_int (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* All allowed constants will fit a CONST_INT.  */
  return (GET_CODE (op) == CONST_INT
	  && ((INTVAL (op) >= 0 && INTVAL (op) < 0x1000)
	      || (INTVAL (op) >= 0xFFFFF000
		  && INTVAL (op) <= 0xFFFFFFFF)));
#else
  return ((GET_CODE (op) == CONST_INT && (unsigned) INTVAL (op) < 0x1000)
	  || (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_HIGH (op) == 0
	      && (unsigned) CONST_DOUBLE_LOW (op) - 0xFFFFF000 < 0x1000));
#endif
}
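/* Added worked example: the hardware sign extends the 13-bit immediate, so
   a negative simm13 such as -0x800 is seen by umul as the unsigned value
   0xFFFFF800.  The acceptable constants are therefore 0x0..0xFFF
   (non-negative simm13 values) plus 0xFFFFF000..0xFFFFFFFF (sign-extended
   negative simm13 values), matching the two ranges tested above.  */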
int
uns_arith_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return register_operand (op, mode) || uns_small_int (op, mode);
}
/* Return truth value of statement that OP is a call-clobbered register.  */
int
clobbered_register (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return (GET_CODE (op) == REG && call_used_regs[REGNO (op)]);
}
1263 /* Return 1 if OP is a valid operand for the source of a move insn. */
1266 input_operand (op
, mode
)
1268 enum machine_mode mode
;
1270 /* If both modes are non-void they must be the same. */
1271 if (mode
!= VOIDmode
&& GET_MODE (op
) != VOIDmode
&& mode
!= GET_MODE (op
))
1274 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1275 if (GET_CODE (op
) == CONST
&& GET_CODE (XEXP (op
, 0)) == CONSTANT_P_RTX
)
1278 /* Allow any one instruction integer constant, and all CONST_INT
1279 variants when we are working in DImode and !arch64. */
1280 if (GET_MODE_CLASS (mode
) == MODE_INT
1281 && ((GET_CODE (op
) == CONST_INT
1282 && (SPARC_SETHI_P (INTVAL (op
) & GET_MODE_MASK (mode
))
1283 || SPARC_SIMM13_P (INTVAL (op
))
1285 && ! TARGET_ARCH64
)))
1287 && GET_CODE (op
) == CONST_DOUBLE
1288 && ((CONST_DOUBLE_HIGH (op
) == 0
1289 && SPARC_SETHI_P (CONST_DOUBLE_LOW (op
)))
1291 #if HOST_BITS_PER_WIDE_INT == 64
1292 (CONST_DOUBLE_HIGH (op
) == 0
1293 && SPARC_SIMM13_P (CONST_DOUBLE_LOW (op
)))
1295 (SPARC_SIMM13_P (CONST_DOUBLE_LOW (op
))
1296 && (((CONST_DOUBLE_LOW (op
) & 0x80000000) == 0
1297 && CONST_DOUBLE_HIGH (op
) == 0)
1298 || (CONST_DOUBLE_HIGH (op
) == -1
1299 && CONST_DOUBLE_LOW (op
) & 0x80000000) != 0))
1304 /* If !arch64 and this is a DImode const, allow it so that
1305 the splits can be generated. */
1308 && GET_CODE (op
) == CONST_DOUBLE
)
1311 if (register_operand (op
, mode
))
1314 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1315 && GET_CODE (op
) == CONST_DOUBLE
)
1318 /* If this is a SUBREG, look inside so that we handle
1319 paradoxical ones. */
1320 if (GET_CODE (op
) == SUBREG
)
1321 op
= SUBREG_REG (op
);
1323 /* Check for valid MEM forms. */
1324 if (GET_CODE (op
) == MEM
)
1326 rtx inside
= XEXP (op
, 0);
1328 if (GET_CODE (inside
) == LO_SUM
)
1330 /* We can't allow these because all of the splits
1331 (eventually as they trickle down into DFmode
1332 splits) require offsettable memory references. */
1334 && GET_MODE (op
) == TFmode
)
1337 return (register_operand (XEXP (inside
, 0), Pmode
)
1338 && CONSTANT_P (XEXP (inside
, 1)));
1340 return memory_address_p (mode
, inside
);
/* We know it can't be done in one insn when we get here,
   the movsi expander guarantees this.  */

void
sparc_emit_set_const32 (op0, op1)
     rtx op0;
     rtx op1;
{
  enum machine_mode mode = GET_MODE (op0);
  rtx temp;

  if (GET_CODE (op1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op1);

      if (SPARC_SETHI_P (value & GET_MODE_MASK (mode))
	  || SPARC_SIMM13_P (value))
	abort ();
    }

  /* Full 2-insn decomposition is needed.  */
  if (reload_in_progress || reload_completed)
    temp = op0;
  else
    temp = gen_reg_rtx (mode);

  if (GET_CODE (op1) == CONST_INT)
    {
      /* Emit them as real moves instead of a HIGH/LO_SUM,
	 this way CSE can see everything and reuse intermediate
	 values if it wants.  */
      if (TARGET_ARCH64
	  && HOST_BITS_PER_WIDE_INT != 64
	  && (INTVAL (op1) & 0x80000000) != 0)
	emit_insn (gen_rtx_SET
		   (VOIDmode, temp,
		    gen_rtx_CONST_DOUBLE (VOIDmode,
					  INTVAL (op1) & ~(HOST_WIDE_INT)0x3ff,
					  0)));
      else
	emit_insn (gen_rtx_SET (VOIDmode, temp,
				GEN_INT (INTVAL (op1)
					 & ~(HOST_WIDE_INT)0x3ff)));

      emit_insn (gen_rtx_SET (VOIDmode,
			      op0,
			      gen_rtx_IOR (mode, temp,
					   GEN_INT (INTVAL (op1) & 0x3ff))));
    }
  else
    {
      /* A symbol, emit in the traditional way.  */
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_HIGH (mode, op1)));
      emit_insn (gen_rtx_SET (VOIDmode,
			      op0, gen_rtx_LO_SUM (mode, temp, op1)));
    }
}
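/* Added illustration of the decomposition above: for op1 = 0x12345678 the
   CONST_INT branch emits the equivalent of

	sethi	%hi(0x12345678), %temp		! %temp = 0x12345400
	or	%temp, 0x278, %op0		! 0x12345400 | 0x278 = 0x12345678

   i.e. the value is split into (x & ~0x3ff) and (x & 0x3ff).  The symbolic
   branch emits the same HIGH/LO_SUM pair but leaves the split to the
   assembler's %hi/%lo relocations.  */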
1407 /* Sparc-v9 code-model support. */
1409 sparc_emit_set_symbolic_const64 (op0
, op1
, temp1
)
1416 if (temp1
&& GET_MODE (temp1
) == TImode
)
1419 temp1
= gen_rtx_REG (DImode
, REGNO (temp1
));
1422 switch (sparc_cmodel
)
1425 /* The range spanned by all instructions in the object is less
1426 than 2^31 bytes (2GB) and the distance from any instruction
1427 to the location of the label _GLOBAL_OFFSET_TABLE_ is less
1428 than 2^31 bytes (2GB).
1430 The executable must be in the low 4TB of the virtual address
1433 sethi %hi(symbol), %temp
1434 or %temp, %lo(symbol), %reg */
1435 emit_insn (gen_rtx_SET (VOIDmode
, temp1
, gen_rtx_HIGH (DImode
, op1
)));
1436 emit_insn (gen_rtx_SET (VOIDmode
, op0
, gen_rtx_LO_SUM (DImode
, temp1
, op1
)));
1440 /* The range spanned by all instructions in the object is less
1441 than 2^31 bytes (2GB) and the distance from any instruction
1442 to the location of the label _GLOBAL_OFFSET_TABLE_ is less
1443 than 2^31 bytes (2GB).
1445 The executable must be in the low 16TB of the virtual address
1448 sethi %h44(symbol), %temp1
1449 or %temp1, %m44(symbol), %temp2
1450 sllx %temp2, 12, %temp3
1451 or %temp3, %l44(symbol), %reg */
1452 emit_insn (gen_seth44 (op0
, op1
));
1453 emit_insn (gen_setm44 (op0
, op0
, op1
));
1454 emit_insn (gen_rtx_SET (VOIDmode
, temp1
,
1455 gen_rtx_ASHIFT (DImode
, op0
, GEN_INT (12))));
1456 emit_insn (gen_setl44 (op0
, temp1
, op1
));
1460 /* The range spanned by all instructions in the object is less
1461 than 2^31 bytes (2GB) and the distance from any instruction
1462 to the location of the label _GLOBAL_OFFSET_TABLE_ is less
1463 than 2^31 bytes (2GB).
1465 The executable can be placed anywhere in the virtual address
1468 sethi %hh(symbol), %temp1
1469 sethi %lm(symbol), %temp2
1470 or %temp1, %hm(symbol), %temp3
1471 or %temp2, %lo(symbol), %temp4
1472 sllx %temp3, 32, %temp5
1473 or %temp4, %temp5, %reg */
1475 /* It is possible that one of the registers we got for operands[2]
1476 might coincide with that of operands[0] (which is why we made
1477 it TImode). Pick the other one to use as our scratch. */
1478 if (rtx_equal_p (temp1
, op0
))
1481 temp1
= gen_rtx_REG (DImode
, REGNO (temp1
) + 1);
1486 emit_insn (gen_sethh (op0
, op1
));
1487 emit_insn (gen_setlm (temp1
, op1
));
1488 emit_insn (gen_sethm (op0
, op0
, op1
));
1489 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1490 gen_rtx_ASHIFT (DImode
, op0
, GEN_INT (32))));
1491 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1492 gen_rtx_PLUS (DImode
, op0
, temp1
)));
1493 emit_insn (gen_setlo (op0
, op0
, op1
));
1497 /* Old old old backwards compatibility kruft here.
1498 Essentially it is MEDLOW with a fixed 64-bit
1499 virtual base added to all data segment addresses.
1500 Text-segment stuff is computed like MEDANY, we can't
1501 reuse the code above because the relocation knobs
1504 Data segment: sethi %hi(symbol), %temp1
1505 or %temp1, %lo(symbol), %temp2
1506 add %temp2, EMBMEDANY_BASE_REG, %reg
1508 Text segment: sethi %uhi(symbol), %temp1
1509 sethi %hi(symbol), %temp2
1510 or %temp1, %ulo(symbol), %temp3
1511 or %temp2, %lo(symbol), %temp4
1512 sllx %temp3, 32, %temp5
1513 or %temp4, %temp5, %reg */
1514 if (data_segment_operand (op1
, GET_MODE (op1
)))
1516 emit_insn (gen_embmedany_sethi (temp1
, op1
));
1517 emit_insn (gen_embmedany_brsum (op0
, temp1
));
1518 emit_insn (gen_embmedany_losum (op0
, op0
, op1
));
1522 /* It is possible that one of the registers we got for operands[2]
1523 might coincide with that of operands[0] (which is why we made
1524 it TImode). Pick the other one to use as our scratch. */
1525 if (rtx_equal_p (temp1
, op0
))
1528 temp1
= gen_rtx_REG (DImode
, REGNO (temp1
) + 1);
1533 emit_insn (gen_embmedany_textuhi (op0
, op1
));
1534 emit_insn (gen_embmedany_texthi (temp1
, op1
));
1535 emit_insn (gen_embmedany_textulo (op0
, op0
, op1
));
1536 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1537 gen_rtx_ASHIFT (DImode
, op0
, GEN_INT (32))));
1538 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1539 gen_rtx_PLUS (DImode
, op0
, temp1
)));
1540 emit_insn (gen_embmedany_textlo (op0
, op0
, op1
));
/* These avoid problems when cross compiling.  If we do not
   go through all this hair then the optimizer will see
   invalid REG_EQUAL notes or in some cases none at all.  */
static void sparc_emit_set_safe_HIGH64 PARAMS ((rtx, HOST_WIDE_INT));
static rtx gen_safe_SET64 PARAMS ((rtx, HOST_WIDE_INT));
static rtx gen_safe_OR64 PARAMS ((rtx, HOST_WIDE_INT));
static rtx gen_safe_XOR64 PARAMS ((rtx, HOST_WIDE_INT));

#if HOST_BITS_PER_WIDE_INT == 64
#define GEN_HIGHINT64(__x)		GEN_INT ((__x) & ~(HOST_WIDE_INT)0x3ff)
#define GEN_INT64(__x)			GEN_INT (__x)
#else
#define GEN_HIGHINT64(__x) \
	gen_rtx_CONST_DOUBLE (VOIDmode, (__x) & ~(HOST_WIDE_INT)0x3ff, 0)
#define GEN_INT64(__x) \
	gen_rtx_CONST_DOUBLE (VOIDmode, (__x) & 0xffffffff, \
			      ((__x) & 0x80000000 \
			       ? -1 : 0))
#endif

/* The optimizer is not to assume anything about exactly
   which bits are set for a HIGH, they are unspecified.
   Unfortunately this leads to many missed optimizations
   during CSE.  We mask out the non-HIGH bits, and match
   a plain movdi, to alleviate this problem.  */

static void
sparc_emit_set_safe_HIGH64 (dest, val)
     rtx dest;
     HOST_WIDE_INT val;
{
  emit_insn (gen_rtx_SET (VOIDmode, dest, GEN_HIGHINT64 (val)));
}

static rtx
gen_safe_SET64 (dest, val)
     rtx dest;
     HOST_WIDE_INT val;
{
  return gen_rtx_SET (VOIDmode, dest, GEN_INT64 (val));
}

static rtx
gen_safe_OR64 (src, val)
     rtx src;
     HOST_WIDE_INT val;
{
  return gen_rtx_IOR (DImode, src, GEN_INT64 (val));
}

static rtx
gen_safe_XOR64 (src, val)
     rtx src;
     HOST_WIDE_INT val;
{
  return gen_rtx_XOR (DImode, src, GEN_INT64 (val));
}
1606 /* Worker routines for 64-bit constant formation on arch64.
1607 One of the key things to be doing in these emissions is
1608 to create as many temp REGs as possible. This makes it
1609 possible for half-built constants to be used later when
1610 such values are similar to something required later on.
1611 Without doing this, the optimizer cannot see such
1614 static void sparc_emit_set_const64_quick1
1615 PARAMS ((rtx
, rtx
, unsigned HOST_WIDE_INT
, int));
1618 sparc_emit_set_const64_quick1 (op0
, temp
, low_bits
, is_neg
)
1621 unsigned HOST_WIDE_INT low_bits
;
1624 unsigned HOST_WIDE_INT high_bits
;
1627 high_bits
= (~low_bits
) & 0xffffffff;
1629 high_bits
= low_bits
;
1631 sparc_emit_set_safe_HIGH64 (temp
, high_bits
);
1634 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1635 gen_safe_OR64 (temp
, (high_bits
& 0x3ff))));
1639 /* If we are XOR'ing with -1, then we should emit a one's complement
1640 instead. This way the combiner will notice logical operations
1641 such as ANDN later on and substitute. */
1642 if ((low_bits
& 0x3ff) == 0x3ff)
1644 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1645 gen_rtx_NOT (DImode
, temp
)));
1649 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1650 gen_safe_XOR64 (temp
,
1651 (-(HOST_WIDE_INT
)0x400
1652 | (low_bits
& 0x3ff)))));
1657 static void sparc_emit_set_const64_quick2
1658 PARAMS ((rtx
, rtx
, unsigned HOST_WIDE_INT
,
1659 unsigned HOST_WIDE_INT
, int));
1662 sparc_emit_set_const64_quick2 (op0
, temp
, high_bits
, low_immediate
, shift_count
)
1665 unsigned HOST_WIDE_INT high_bits
;
1666 unsigned HOST_WIDE_INT low_immediate
;
1671 if ((high_bits
& 0xfffffc00) != 0)
1673 sparc_emit_set_safe_HIGH64 (temp
, high_bits
);
1674 if ((high_bits
& ~0xfffffc00) != 0)
1675 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1676 gen_safe_OR64 (temp
, (high_bits
& 0x3ff))));
1682 emit_insn (gen_safe_SET64 (temp
, high_bits
));
1686 /* Now shift it up into place. */
1687 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1688 gen_rtx_ASHIFT (DImode
, temp2
,
1689 GEN_INT (shift_count
))));
1691 /* If there is a low immediate part piece, finish up by
1692 putting that in as well. */
1693 if (low_immediate
!= 0)
1694 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1695 gen_safe_OR64 (op0
, low_immediate
)));
1698 static void sparc_emit_set_const64_longway
1699 PARAMS ((rtx
, rtx
, unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
));
1701 /* Full 64-bit constant decomposition. Even though this is the
1702 'worst' case, we still optimize a few things away. */
1704 sparc_emit_set_const64_longway (op0
, temp
, high_bits
, low_bits
)
1707 unsigned HOST_WIDE_INT high_bits
;
1708 unsigned HOST_WIDE_INT low_bits
;
1712 if (reload_in_progress
|| reload_completed
)
1715 sub_temp
= gen_reg_rtx (DImode
);
1717 if ((high_bits
& 0xfffffc00) != 0)
1719 sparc_emit_set_safe_HIGH64 (temp
, high_bits
);
1720 if ((high_bits
& ~0xfffffc00) != 0)
1721 emit_insn (gen_rtx_SET (VOIDmode
,
1723 gen_safe_OR64 (temp
, (high_bits
& 0x3ff))));
1729 emit_insn (gen_safe_SET64 (temp
, high_bits
));
1733 if (!reload_in_progress
&& !reload_completed
)
1735 rtx temp2
= gen_reg_rtx (DImode
);
1736 rtx temp3
= gen_reg_rtx (DImode
);
1737 rtx temp4
= gen_reg_rtx (DImode
);
1739 emit_insn (gen_rtx_SET (VOIDmode
, temp4
,
1740 gen_rtx_ASHIFT (DImode
, sub_temp
,
1743 sparc_emit_set_safe_HIGH64 (temp2
, low_bits
);
1744 if ((low_bits
& ~0xfffffc00) != 0)
1746 emit_insn (gen_rtx_SET (VOIDmode
, temp3
,
1747 gen_safe_OR64 (temp2
, (low_bits
& 0x3ff))));
1748 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1749 gen_rtx_PLUS (DImode
, temp4
, temp3
)));
1753 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1754 gen_rtx_PLUS (DImode
, temp4
, temp2
)));
1759 rtx low1
= GEN_INT ((low_bits
>> (32 - 12)) & 0xfff);
1760 rtx low2
= GEN_INT ((low_bits
>> (32 - 12 - 12)) & 0xfff);
1761 rtx low3
= GEN_INT ((low_bits
>> (32 - 12 - 12 - 8)) & 0x0ff);
1764 /* We are in the middle of reload, so this is really
1765 painful. However we do still make an attempt to
1766 avoid emitting truly stupid code. */
1767 if (low1
!= const0_rtx
)
1769 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1770 gen_rtx_ASHIFT (DImode
, sub_temp
,
1771 GEN_INT (to_shift
))));
1772 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1773 gen_rtx_IOR (DImode
, op0
, low1
)));
1781 if (low2
!= const0_rtx
)
1783 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1784 gen_rtx_ASHIFT (DImode
, sub_temp
,
1785 GEN_INT (to_shift
))));
1786 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1787 gen_rtx_IOR (DImode
, op0
, low2
)));
1795 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1796 gen_rtx_ASHIFT (DImode
, sub_temp
,
1797 GEN_INT (to_shift
))));
1798 if (low3
!= const0_rtx
)
1799 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
1800 gen_rtx_IOR (DImode
, op0
, low3
)));
/* Analyze a 64-bit constant for certain properties.  */
static void analyze_64bit_constant
	PARAMS ((unsigned HOST_WIDE_INT,
		 unsigned HOST_WIDE_INT,
		 int *, int *, int *));

static void
analyze_64bit_constant (high_bits, low_bits, hbsp, lbsp, abbasp)
     unsigned HOST_WIDE_INT high_bits, low_bits;
     int *hbsp, *lbsp, *abbasp;
{
  int lowest_bit_set, highest_bit_set, all_bits_between_are_set;
  int i;

  lowest_bit_set = highest_bit_set = -1;
  i = 0;
  do
    {
      if ((lowest_bit_set == -1)
	  && ((low_bits >> i) & 1))
	lowest_bit_set = i;
      if ((highest_bit_set == -1)
	  && ((high_bits >> (32 - i - 1)) & 1))
	highest_bit_set = (64 - i - 1);
    }
  while (++i < 32
	 && ((highest_bit_set == -1)
	     || (lowest_bit_set == -1)));
  if (i == 32)
    {
      i = 0;
      do
	{
	  if ((lowest_bit_set == -1)
	      && ((high_bits >> i) & 1))
	    lowest_bit_set = i + 32;
	  if ((highest_bit_set == -1)
	      && ((low_bits >> (32 - i - 1)) & 1))
	    highest_bit_set = 32 - i - 1;
	}
      while (++i < 32
	     && ((highest_bit_set == -1)
		 || (lowest_bit_set == -1)));
    }
  /* If there are no bits set this should have gone out
     as one instruction!  */
  if (lowest_bit_set == -1
      || highest_bit_set == -1)
    abort ();
  all_bits_between_are_set = 1;
  for (i = lowest_bit_set; i <= highest_bit_set; i++)
    {
      if (i < 32)
	{
	  if ((low_bits & (1 << i)) != 0)
	    continue;
	}
      else
	{
	  if ((high_bits & (1 << (i - 32))) != 0)
	    continue;
	}
      all_bits_between_are_set = 0;
      break;
    }
  *hbsp = highest_bit_set;
  *lbsp = lowest_bit_set;
  *abbasp = all_bits_between_are_set;
}
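/* Added worked example: for the constant 0x00000000003fc000 (bits 14..21
   set) the routine reports lowest_bit_set = 14, highest_bit_set = 21 and
   all_bits_between_are_set = 1, telling the caller that the value is a
   small contiguous bit field which can be materialized and shifted into
   place in two instructions.  */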
static int const64_is_2insns
	PARAMS ((unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT));

static int
const64_is_2insns (high_bits, low_bits)
     unsigned HOST_WIDE_INT high_bits, low_bits;
{
  int highest_bit_set, lowest_bit_set, all_bits_between_are_set;

  if (high_bits == 0
      || high_bits == 0xffffffff)
    return 1;

  analyze_64bit_constant (high_bits, low_bits,
			  &highest_bit_set, &lowest_bit_set,
			  &all_bits_between_are_set);

  if ((highest_bit_set == 63
       || lowest_bit_set == 0)
      && all_bits_between_are_set != 0)
    return 1;

  if ((highest_bit_set - lowest_bit_set) < 21)
    return 1;

  return 0;
}

static unsigned HOST_WIDE_INT create_simple_focus_bits
	PARAMS ((unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
		 int, int));

static unsigned HOST_WIDE_INT
create_simple_focus_bits (high_bits, low_bits, lowest_bit_set, shift)
     unsigned HOST_WIDE_INT high_bits, low_bits;
     int lowest_bit_set, shift;
{
  HOST_WIDE_INT hi, lo;

  if (lowest_bit_set < 32)
    {
      lo = (low_bits >> lowest_bit_set) << shift;
      hi = ((high_bits << (32 - lowest_bit_set)) << shift);
    }
  else
    {
      lo = 0;
      hi = ((high_bits >> (lowest_bit_set - 32)) << shift);
    }
  return hi | lo;
}
1929 /* Here we are sure to be arch64 and this is an integer constant
1930 being loaded into a register. Emit the most efficient
1931 insn sequence possible. Detection of all the 1-insn cases
1932 has been done already. */
1934 sparc_emit_set_const64 (op0
, op1
)
1938 unsigned HOST_WIDE_INT high_bits
, low_bits
;
1939 int lowest_bit_set
, highest_bit_set
;
1940 int all_bits_between_are_set
;
1943 /* Sanity check that we know what we are working with. */
1944 if (! TARGET_ARCH64
)
1947 if (GET_CODE (op0
) != SUBREG
)
1949 if (GET_CODE (op0
) != REG
1950 || (REGNO (op0
) >= SPARC_FIRST_FP_REG
1951 && REGNO (op0
) <= SPARC_LAST_V9_FP_REG
))
1955 if (reload_in_progress
|| reload_completed
)
1958 temp
= gen_reg_rtx (DImode
);
1960 if (GET_CODE (op1
) != CONST_DOUBLE
1961 && GET_CODE (op1
) != CONST_INT
)
1963 sparc_emit_set_symbolic_const64 (op0
, op1
, temp
);
1967 if (GET_CODE (op1
) == CONST_DOUBLE
)
1969 #if HOST_BITS_PER_WIDE_INT == 64
1970 high_bits
= (CONST_DOUBLE_LOW (op1
) >> 32) & 0xffffffff;
1971 low_bits
= CONST_DOUBLE_LOW (op1
) & 0xffffffff;
1973 high_bits
= CONST_DOUBLE_HIGH (op1
);
1974 low_bits
= CONST_DOUBLE_LOW (op1
);
1979 #if HOST_BITS_PER_WIDE_INT == 64
1980 high_bits
= ((INTVAL (op1
) >> 32) & 0xffffffff);
1981 low_bits
= (INTVAL (op1
) & 0xffffffff);
1983 high_bits
= ((INTVAL (op1
) < 0) ?
1986 low_bits
= INTVAL (op1
);
1990 /* low_bits bits 0 --> 31
1991 high_bits bits 32 --> 63 */
1993 analyze_64bit_constant (high_bits
, low_bits
,
1994 &highest_bit_set
, &lowest_bit_set
,
1995 &all_bits_between_are_set
);
1997 /* First try for a 2-insn sequence. */
1999 /* These situations are preferred because the optimizer can
2000 * do more things with them:
2002 * sllx %reg, shift, %reg
2004 * srlx %reg, shift, %reg
2005 * 3) mov some_small_const, %reg
2006 * sllx %reg, shift, %reg
2008 if (((highest_bit_set
== 63
2009 || lowest_bit_set
== 0)
2010 && all_bits_between_are_set
!= 0)
2011 || ((highest_bit_set
- lowest_bit_set
) < 12))
2013 HOST_WIDE_INT the_const
= -1;
2014 int shift
= lowest_bit_set
;
2016 if ((highest_bit_set
!= 63
2017 && lowest_bit_set
!= 0)
2018 || all_bits_between_are_set
== 0)
2021 create_simple_focus_bits (high_bits
, low_bits
,
2024 else if (lowest_bit_set
== 0)
2025 shift
= -(63 - highest_bit_set
);
2027 if (! SPARC_SIMM13_P (the_const
))
2030 emit_insn (gen_safe_SET64 (temp
, the_const
));
2032 emit_insn (gen_rtx_SET (VOIDmode
,
2034 gen_rtx_ASHIFT (DImode
,
2038 emit_insn (gen_rtx_SET (VOIDmode
,
2040 gen_rtx_LSHIFTRT (DImode
,
2042 GEN_INT (-shift
))));
2048 /* Now a range of 22 or less bits set somewhere.
2049 * 1) sethi %hi(focus_bits), %reg
2050 * sllx %reg, shift, %reg
2051 * 2) sethi %hi(focus_bits), %reg
2052 * srlx %reg, shift, %reg
2054 if ((highest_bit_set
- lowest_bit_set
) < 21)
2056 unsigned HOST_WIDE_INT focus_bits
=
2057 create_simple_focus_bits (high_bits
, low_bits
,
2058 lowest_bit_set
, 10);
2060 if (! SPARC_SETHI_P (focus_bits
))
2063 sparc_emit_set_safe_HIGH64 (temp
, focus_bits
);
2065 /* If lowest_bit_set == 10 then a sethi alone could have done it. */
2066 if (lowest_bit_set
< 10)
2067 emit_insn (gen_rtx_SET (VOIDmode
,
2069 gen_rtx_LSHIFTRT (DImode
, temp
,
2070 GEN_INT (10 - lowest_bit_set
))));
2071 else if (lowest_bit_set
> 10)
2072 emit_insn (gen_rtx_SET (VOIDmode
,
2074 gen_rtx_ASHIFT (DImode
, temp
,
2075 GEN_INT (lowest_bit_set
- 10))));
2081 /* 1) sethi %hi(low_bits), %reg
2082 * or %reg, %lo(low_bits), %reg
2083 * 2) sethi %hi(~low_bits), %reg
2084 * xor %reg, %lo(-0x400 | (low_bits & 0x3ff)), %reg
2087 || high_bits
== 0xffffffff)
2089 sparc_emit_set_const64_quick1 (op0
, temp
, low_bits
,
2090 (high_bits
== 0xffffffff));
2094 /* Now, try 3-insn sequences. */
2096 /* 1) sethi %hi(high_bits), %reg
2097 * or %reg, %lo(high_bits), %reg
2098 * sllx %reg, 32, %reg
2102 sparc_emit_set_const64_quick2 (op0
, temp
, high_bits
, 0, 32);
2106 /* We may be able to do something quick
2107 when the constant is negated, so try that. */
2108 if (const64_is_2insns ((~high_bits
) & 0xffffffff,
2109 (~low_bits
) & 0xfffffc00))
2111 /* NOTE: The trailing bits get XOR'd so we need the
2112 non-negated bits, not the negated ones. */
2113 unsigned HOST_WIDE_INT trailing_bits
= low_bits
& 0x3ff;
2115 if ((((~high_bits
) & 0xffffffff) == 0
2116 && ((~low_bits
) & 0x80000000) == 0)
2117 || (((~high_bits
) & 0xffffffff) == 0xffffffff
2118 && ((~low_bits
) & 0x80000000) != 0))
2120 int fast_int
= (~low_bits
& 0xffffffff);
2122 if ((SPARC_SETHI_P (fast_int
)
2123 && (~high_bits
& 0xffffffff) == 0)
2124 || SPARC_SIMM13_P (fast_int
))
2125 emit_insn (gen_safe_SET64 (temp
, fast_int
));
2127 sparc_emit_set_const64 (temp
, GEN_INT64 (fast_int
));
2132 #if HOST_BITS_PER_WIDE_INT == 64
2133 negated_const
= GEN_INT (((~low_bits
) & 0xfffffc00) |
2134 (((HOST_WIDE_INT
)((~high_bits
) & 0xffffffff))<<32));
2136 negated_const
= gen_rtx_CONST_DOUBLE (DImode
,
2137 (~low_bits
) & 0xfffffc00,
2138 (~high_bits
) & 0xffffffff);
2140 sparc_emit_set_const64 (temp
, negated_const
);
2143 /* If we are XOR'ing with -1, then we should emit a one's complement
2144 instead. This way the combiner will notice logical operations
2145 such as ANDN later on and substitute. */
2146 if (trailing_bits
== 0x3ff)
2148 emit_insn (gen_rtx_SET (VOIDmode
, op0
,
2149 gen_rtx_NOT (DImode
, temp
)));
2153 emit_insn (gen_rtx_SET (VOIDmode
,
2155 gen_safe_XOR64 (temp
,
2156 (-0x400 | trailing_bits
))));
2161 /* 1) sethi %hi(xxx), %reg
2162 * or %reg, %lo(xxx), %reg
2163 * sllx %reg, yyy, %reg
2165 * ??? This is just a generalized version of the low_bits==0
2166 * thing above, FIXME...
2168 if ((highest_bit_set
- lowest_bit_set
) < 32)
2170 unsigned HOST_WIDE_INT focus_bits
=
2171 create_simple_focus_bits (high_bits
, low_bits
,
2174 /* We can't get here in this state. */
2175 if (highest_bit_set
< 32
2176 || lowest_bit_set
>= 32)
2179 /* So what we know is that the set bits straddle the
2180 middle of the 64-bit word. */
2181 sparc_emit_set_const64_quick2 (op0
, temp
,
2187 /* 1) sethi %hi(high_bits), %reg
2188 * or %reg, %lo(high_bits), %reg
2189 * sllx %reg, 32, %reg
2190 * or %reg, low_bits, %reg
2192 if (SPARC_SIMM13_P(low_bits
)
2193 && ((int)low_bits
> 0))
2195 sparc_emit_set_const64_quick2 (op0
, temp
, high_bits
, low_bits
, 32);
2199 /* The easiest way when all else fails, is full decomposition. */
2201 printf ("sparc_emit_set_const64: Hard constant [%08lx%08lx] neg[%08lx%08lx]\n",
2202 high_bits
, low_bits
, ~high_bits
, ~low_bits
);
2204 sparc_emit_set_const64_longway (op0
, temp
, high_bits
, low_bits
);
/* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
   return the mode to be used for the comparison.  For floating-point,
   CCFP[E]mode is used.  CC_NOOVmode should be used when the first operand
   is a PLUS, MINUS, NEG, or ASHIFT.  CCmode should be used when no special
   processing is needed.  */

enum machine_mode
select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y ATTRIBUTE_UNUSED;
{
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      switch (op)
	{
	case EQ:
	case NE:
	case UNORDERED:
	case ORDERED:
	case UNLT:
	case UNLE:
	case UNGT:
	case UNGE:
	case UNEQ:
	case LTGT:
	  return CCFPmode;

	case LT:
	case LE:
	case GT:
	case GE:
	  return CCFPEmode;

	default:
	  abort ();
	}
    }
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == NEG || GET_CODE (x) == ASHIFT)
    {
      if (TARGET_ARCH64 && GET_MODE (x) == DImode)
	return CCX_NOOVmode;
      else
	return CC_NOOVmode;
    }
  else
    {
      if (TARGET_ARCH64 && GET_MODE (x) == DImode)
	return CCXmode;
      else
	return CCmode;
    }
}
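/* Added note on the NOOV modes chosen above: when the first operand of the
   COMPARE is itself a PLUS, MINUS, NEG or ASHIFT, the condition codes come
   from that arithmetic insn rather than from a subcc against zero, so the
   overflow bit does not describe the comparison.  CC_NOOVmode/CCX_NOOVmode
   record this, and noov_compare_op above correspondingly accepts only EQ,
   NE, GE and LT, which can be tested without the overflow bit.  */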
/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for the cc reg in the proper mode.  */

rtx
gen_compare_reg (code, x, y)
     enum rtx_code code;
     rtx x, y;
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  /* ??? We don't have movcc patterns so we cannot generate pseudo regs for the
     fcc regs (cse can't tell they're really call clobbered regs and will
     remove a duplicate comparison even if there is an intervening function
     call - it will then try to reload the cc reg via an int reg which is why
     we need the movcc patterns).  It is possible to provide the movcc
     patterns by using the ldxfsr/stxfsr v9 insns.  I tried it: you need two
     registers (say %g1,%g5) and it takes about 6 insns.  A better fix would be
     to tell cse that CCFPE mode registers (even pseudos) are call
     clobbered.  */

  /* ??? This is an experiment.  Rather than making changes to cse which may
     or may not be easy/clean, we do our own cse.  This is possible because
     we will generate hard registers.  Cse knows they're call clobbered (it
     doesn't know the same thing about pseudos).  If we guess wrong, no big
     deal, but if we win, great!  */

  if (TARGET_V9 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
#if 1 /* experiment */
    {
      int reg;
      /* We cycle through the registers to ensure they're all exercised.  */
      static int next_fcc_reg = 0;
      /* Previous x,y for each fcc reg.  */
      static rtx prev_args[4][2];

      /* Scan prev_args for x,y.  */
      for (reg = 0; reg < 4; reg++)
        if (prev_args[reg][0] == x && prev_args[reg][1] == y)
          break;
      if (reg == 4)
        {
          reg = next_fcc_reg;
          prev_args[reg][0] = x;
          prev_args[reg][1] = y;
          next_fcc_reg = (next_fcc_reg + 1) & 3;
        }
      cc_reg = gen_rtx_REG (mode, reg + SPARC_FIRST_V9_FCC_REG);
    }
#else
    cc_reg = gen_reg_rtx (mode);
#endif /* ! experiment */
  else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    cc_reg = gen_rtx_REG (mode, SPARC_FCC_REG);
  else
    cc_reg = gen_rtx_REG (mode, SPARC_ICC_REG);

  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
                          gen_rtx_COMPARE (mode, x, y)));

  return cc_reg;
}
/* This function is used for v9 only.
   CODE is the code for an Scc's comparison.
   OPERANDS[0] is the target of the Scc insn.
   OPERANDS[1] is the value we compare against const0_rtx (which hasn't
   been generated yet).

   This function is needed to turn

           (gt (reg:CCX 100 %icc)
               (const_int 0))
   into
           (gt:DI (reg:CCX 100 %icc)
               (const_int 0))

   IE: The instruction recognizer needs to see the mode of the comparison to
   find the right instruction.  We could use "gt:DI" right in the
   define_expand, but leaving it out allows us to handle DI, SI, etc.

   We refer to the global sparc compare operands sparc_compare_op0 and
   sparc_compare_op1.  */
2349 gen_v9_scc (compare_code
, operands
)
2350 enum rtx_code compare_code
;
2351 register rtx
*operands
;
2356 && (GET_MODE (sparc_compare_op0
) == DImode
2357 || GET_MODE (operands
[0]) == DImode
))
2360 op0
= sparc_compare_op0
;
2361 op1
= sparc_compare_op1
;
2363 /* Try to use the movrCC insns. */
2365 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
2366 && op1
== const0_rtx
2367 && v9_regcmp_p (compare_code
))
2369 /* Special case for op0 != 0. This can be done with one instruction if
2370 operands[0] == sparc_compare_op0. */
2372 if (compare_code
== NE
2373 && GET_MODE (operands
[0]) == DImode
2374 && rtx_equal_p (op0
, operands
[0]))
2376 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0],
2377 gen_rtx_IF_THEN_ELSE (DImode
,
2378 gen_rtx_fmt_ee (compare_code
, DImode
,
2385 if (reg_overlap_mentioned_p (operands
[0], op0
))
2387 /* Handle the case where operands[0] == sparc_compare_op0.
2388 We "early clobber" the result. */
2389 op0
= gen_reg_rtx (GET_MODE (sparc_compare_op0
));
2390 emit_move_insn (op0
, sparc_compare_op0
);
2393 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], const0_rtx
));
2394 if (GET_MODE (op0
) != DImode
)
2396 temp
= gen_reg_rtx (DImode
);
2397 convert_move (temp
, op0
, 0);
2401 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0],
2402 gen_rtx_IF_THEN_ELSE (GET_MODE (operands
[0]),
2403 gen_rtx_fmt_ee (compare_code
, DImode
,
2411 operands
[1] = gen_compare_reg (compare_code
, op0
, op1
);
2413 switch (GET_MODE (operands
[1]))
2423 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], const0_rtx
));
2424 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0],
2425 gen_rtx_IF_THEN_ELSE (GET_MODE (operands
[0]),
2426 gen_rtx_fmt_ee (compare_code
,
2427 GET_MODE (operands
[1]),
2428 operands
[1], const0_rtx
),
2429 const1_rtx
, operands
[0])));
/* Emit a conditional jump insn for the v9 architecture using comparison code
   CODE and jump target LABEL.
   This function exists to take advantage of the v9 brxx insns.  */

void
emit_v9_brxx_insn (code, op0, label)
     enum rtx_code code;
     rtx op0, label;
{
  emit_jump_insn (gen_rtx_SET (VOIDmode,
                               pc_rtx,
                               gen_rtx_IF_THEN_ELSE (VOIDmode,
                                    gen_rtx_fmt_ee (code, GET_MODE (op0),
                                                    op0, const0_rtx),
                                    gen_rtx_LABEL_REF (VOIDmode, label),
                                    pc_rtx)));
}
/* Generate a DFmode part of a hard TFmode register.
   REG is the TFmode hard register, LOW is 1 for the
   low 64bit of the register and 0 otherwise.  */

rtx
gen_df_reg (reg, low)
     rtx reg;
     int low;
{
  int regno = REGNO (reg);

  if ((WORDS_BIG_ENDIAN == 0) ^ (low != 0))
    regno += (TARGET_ARCH64 && regno < 32) ? 1 : 2;
  return gen_rtx_REG (DFmode, regno);
}
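/* Hypothetical usage sketch of gen_df_reg (assumes the usual numbering in
   which %f0 is hard register 32); kept under #if 0 because it is purely
   illustrative.  */
#if 0
static void
example_split_tf_reg (rtx tf_reg, rtx *high_part, rtx *low_part)
{
  /* For a TFmode value in %f0..%f3 this yields %d0 (regno 32) ...  */
  *high_part = gen_df_reg (tf_reg, 0);
  /* ... and %d2 (regno 34), since SPARC is big-endian at the word level.  */
  *low_part = gen_df_reg (tf_reg, 1);
}
#endif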
2468 /* Generate a call to FUNC with OPERANDS. Operand 0 is the return value.
2469 Unlike normal calls, TFmode operands are passed by reference. It is
2470 assumed that no more than 3 operands are required. */
2473 emit_soft_tfmode_libcall (func_name
, nargs
, operands
)
2474 const char *func_name
;
2478 rtx ret_slot
= NULL
, arg
[3], func_sym
;
2481 /* We only expect to be called for conversions, unary, and binary ops. */
2482 if (nargs
< 2 || nargs
> 3)
2485 for (i
= 0; i
< nargs
; ++i
)
2487 rtx this_arg
= operands
[i
];
2490 /* TFmode arguments and return values are passed by reference. */
2491 if (GET_MODE (this_arg
) == TFmode
)
2493 int force_stack_temp
;
2495 force_stack_temp
= 0;
2496 if (TARGET_BUGGY_QP_LIB
&& i
== 0)
2497 force_stack_temp
= 1;
2499 if (GET_CODE (this_arg
) == MEM
2500 && ! force_stack_temp
)
2501 this_arg
= XEXP (this_arg
, 0);
2502 else if (CONSTANT_P (this_arg
)
2503 && ! force_stack_temp
)
2505 this_slot
= force_const_mem (TFmode
, this_arg
);
2506 this_arg
= XEXP (this_slot
, 0);
2510 this_slot
= assign_stack_temp (TFmode
, GET_MODE_SIZE (TFmode
), 0);
2512 /* Operand 0 is the return value. We'll copy it out later. */
2514 emit_move_insn (this_slot
, this_arg
);
2516 ret_slot
= this_slot
;
2518 this_arg
= XEXP (this_slot
, 0);
  func_sym = gen_rtx_SYMBOL_REF (Pmode, func_name);

  if (GET_MODE (operands[0]) == TFmode)
    {
      if (nargs == 2)
        emit_library_call (func_sym, LCT_NORMAL, VOIDmode, 2,
                           arg[0], GET_MODE (arg[0]),
                           arg[1], GET_MODE (arg[1]));
      else
        emit_library_call (func_sym, LCT_NORMAL, VOIDmode, 3,
                           arg[0], GET_MODE (arg[0]),
                           arg[1], GET_MODE (arg[1]),
                           arg[2], GET_MODE (arg[2]));

      if (ret_slot)
        emit_move_insn (operands[0], ret_slot);
    }
  else
    {
      rtx ret;

      if (nargs != 2)
        abort ();

      ret = emit_library_call_value (func_sym, operands[0], LCT_NORMAL,
                                     GET_MODE (operands[0]), 1,
                                     arg[1], GET_MODE (arg[1]));

      if (ret != operands[0])
        emit_move_insn (operands[0], ret);
    }
}
2558 /* Expand soft-float TFmode calls to sparc abi routines. */
2561 emit_soft_tfmode_binop (code
, operands
)
2585 emit_soft_tfmode_libcall (func
, 3, operands
);
2589 emit_soft_tfmode_unop (code
, operands
)
2604 emit_soft_tfmode_libcall (func
, 2, operands
);
2608 emit_soft_tfmode_cvt (code
, operands
)
2617 switch (GET_MODE (operands
[1]))
2630 case FLOAT_TRUNCATE
:
2631 switch (GET_MODE (operands
[0]))
2645 switch (GET_MODE (operands
[1]))
2658 case UNSIGNED_FLOAT
:
2659 switch (GET_MODE (operands
[1]))
2673 switch (GET_MODE (operands
[0]))
2687 switch (GET_MODE (operands
[0]))
2704 emit_soft_tfmode_libcall (func
, 2, operands
);
/* Expand a hard-float tfmode operation.  All arguments must be in
   registers.  */

static void
emit_hard_tfmode_operation (code, operands)
     enum rtx_code code;
     rtx *operands;
{
  rtx op, dest;

  if (GET_RTX_CLASS (code) == '1')
    {
      operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
      op = gen_rtx_fmt_e (code, GET_MODE (operands[0]), operands[1]);
    }
  else
    {
      operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
      operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
      op = gen_rtx_fmt_ee (code, GET_MODE (operands[0]),
                           operands[1], operands[2]);
    }

  if (register_operand (operands[0], VOIDmode))
    dest = operands[0];
  else
    dest = gen_reg_rtx (GET_MODE (operands[0]));

  emit_insn (gen_rtx_SET (VOIDmode, dest, op));

  if (dest != operands[0])
    emit_move_insn (operands[0], dest);
}

void
emit_tfmode_binop (code, operands)
     enum rtx_code code;
     rtx *operands;
{
  if (TARGET_HARD_QUAD)
    emit_hard_tfmode_operation (code, operands);
  else
    emit_soft_tfmode_binop (code, operands);
}

void
emit_tfmode_unop (code, operands)
     enum rtx_code code;
     rtx *operands;
{
  if (TARGET_HARD_QUAD)
    emit_hard_tfmode_operation (code, operands);
  else
    emit_soft_tfmode_unop (code, operands);
}

void
emit_tfmode_cvt (code, operands)
     enum rtx_code code;
     rtx *operands;
{
  if (TARGET_HARD_QUAD)
    emit_hard_tfmode_operation (code, operands);
  else
    emit_soft_tfmode_cvt (code, operands);
}
/* Return nonzero if a return peephole merging return with
   setting of output register is ok.  */
int
leaf_return_peephole_ok ()
{
  return (actual_fsize == 0);
}

/* Return nonzero if a branch/jump/call instruction will be emitting
   nop into its delay slot.  */
int
empty_delay_slot (insn)
     rtx insn;
{
  rtx seq;

  /* If no previous instruction (should not happen), return true.  */
  if (PREV_INSN (insn) == NULL)
    return 1;

  seq = NEXT_INSN (PREV_INSN (insn));
  if (GET_CODE (PATTERN (seq)) == SEQUENCE)
    return 0;

  return 1;
}
2802 /* Return nonzero if TRIAL can go into the function epilogue's
2803 delay slot. SLOT is the slot we are trying to fill. */
2806 eligible_for_epilogue_delay (trial
, slot
)
2815 if (GET_CODE (trial
) != INSN
|| GET_CODE (PATTERN (trial
)) != SET
)
2818 if (get_attr_length (trial
) != 1)
2821 /* If there are any call-saved registers, we should scan TRIAL if it
2822 does not reference them. For now just make it easy. */
2826 /* If the function uses __builtin_eh_return, the eh_return machinery
2827 occupies the delay slot. */
2828 if (current_function_calls_eh_return
)
2831 /* In the case of a true leaf function, anything can go into the delay slot.
2832 A delay slot only exists however if the frame size is zero, otherwise
2833 we will put an insn to adjust the stack after the return. */
2834 if (current_function_uses_only_leaf_regs
)
2836 if (leaf_return_peephole_ok ())
2837 return ((get_attr_in_uncond_branch_delay (trial
)
2838 == IN_BRANCH_DELAY_TRUE
));
2842 pat
= PATTERN (trial
);
2844 /* Otherwise, only operations which can be done in tandem with
2845 a `restore' or `return' insn can go into the delay slot. */
2846 if (GET_CODE (SET_DEST (pat
)) != REG
2847 || REGNO (SET_DEST (pat
)) < 24)
2850 /* If this instruction sets up floating point register and we have a return
2851 instruction, it can probably go in. But restore will not work
2853 if (REGNO (SET_DEST (pat
)) >= 32)
2855 if (TARGET_V9
&& ! epilogue_renumber (&pat
, 1)
2856 && (get_attr_in_uncond_branch_delay (trial
) == IN_BRANCH_DELAY_TRUE
))
2861 /* The set of insns matched here must agree precisely with the set of
2862 patterns paired with a RETURN in sparc.md. */
2864 src
= SET_SRC (pat
);
2866 /* This matches "*return_[qhs]i" or even "*return_di" on TARGET_ARCH64. */
2867 if (GET_MODE_CLASS (GET_MODE (src
)) != MODE_FLOAT
2868 && arith_operand (src
, GET_MODE (src
)))
2871 return GET_MODE_SIZE (GET_MODE (src
)) <= GET_MODE_SIZE (DImode
);
2873 return GET_MODE_SIZE (GET_MODE (src
)) <= GET_MODE_SIZE (SImode
);
2876 /* This matches "*return_di". */
2877 else if (GET_MODE_CLASS (GET_MODE (src
)) != MODE_FLOAT
2878 && arith_double_operand (src
, GET_MODE (src
)))
2879 return GET_MODE_SIZE (GET_MODE (src
)) <= GET_MODE_SIZE (DImode
);
2881 /* This matches "*return_sf_no_fpu". */
2882 else if (! TARGET_FPU
&& restore_operand (SET_DEST (pat
), SFmode
)
2883 && register_operand (src
, SFmode
))
2886 /* If we have return instruction, anything that does not use
2887 local or output registers and can go into a delay slot wins. */
2888 else if (TARGET_V9
&& ! epilogue_renumber (&pat
, 1)
2889 && (get_attr_in_uncond_branch_delay (trial
) == IN_BRANCH_DELAY_TRUE
))
2892 /* This matches "*return_addsi". */
2893 else if (GET_CODE (src
) == PLUS
2894 && arith_operand (XEXP (src
, 0), SImode
)
2895 && arith_operand (XEXP (src
, 1), SImode
)
2896 && (register_operand (XEXP (src
, 0), SImode
)
2897 || register_operand (XEXP (src
, 1), SImode
)))
2900 /* This matches "*return_adddi". */
2901 else if (GET_CODE (src
) == PLUS
2902 && arith_double_operand (XEXP (src
, 0), DImode
)
2903 && arith_double_operand (XEXP (src
, 1), DImode
)
2904 && (register_operand (XEXP (src
, 0), DImode
)
2905 || register_operand (XEXP (src
, 1), DImode
)))
  /* This can match "*return_losum_[sd]i".
     Catch only some cases, so that return_losum* don't have
     to be too big.  */
  else if (GET_CODE (src) == LO_SUM
           && ! TARGET_CM_MEDMID
           && ((register_operand (XEXP (src, 0), SImode)
                && immediate_operand (XEXP (src, 1), SImode))
               || (TARGET_ARCH64
                   && register_operand (XEXP (src, 0), DImode)
                   && immediate_operand (XEXP (src, 1), DImode))))
    return 1;

  /* sll{,x} reg,1,reg2 is add reg,reg,reg2 as well.  */
  else if (GET_CODE (src) == ASHIFT
           && (register_operand (XEXP (src, 0), SImode)
               || register_operand (XEXP (src, 0), DImode))
           && XEXP (src, 1) == const1_rtx)
    return 1;

  return 0;
}
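/* Illustrative example (assumed insn, not taken from sparc.md): a trailing
   "mov %g1, %i0" -- a SET of an %i register from an arith_operand -- passes
   the tests above and can be paired with the return/restore, whereas a store
   or a set of a local register is rejected.  */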
2930 /* Return nonzero if TRIAL can go into the sibling call
2934 eligible_for_sibcall_delay (trial
)
2939 if (GET_CODE (trial
) != INSN
|| GET_CODE (PATTERN (trial
)) != SET
)
2942 if (get_attr_length (trial
) != 1)
2945 pat
= PATTERN (trial
);
2947 if (current_function_uses_only_leaf_regs
)
2949 /* If the tail call is done using the call instruction,
2950 we have to restore %o7 in the delay slot. */
2951 if ((TARGET_ARCH64
&& ! TARGET_CM_MEDLOW
) || flag_pic
)
2954 /* %g1 is used to build the function address */
2955 if (reg_mentioned_p (gen_rtx_REG (Pmode
, 1), pat
))
2961 /* Otherwise, only operations which can be done in tandem with
2962 a `restore' insn can go into the delay slot. */
2963 if (GET_CODE (SET_DEST (pat
)) != REG
2964 || REGNO (SET_DEST (pat
)) < 24
2965 || REGNO (SET_DEST (pat
)) >= 32)
2968 /* If it mentions %o7, it can't go in, because sibcall will clobber it
2970 if (reg_mentioned_p (gen_rtx_REG (Pmode
, 15), pat
))
2973 src
= SET_SRC (pat
);
2975 if (GET_MODE_CLASS (GET_MODE (src
)) != MODE_FLOAT
2976 && arith_operand (src
, GET_MODE (src
)))
2979 return GET_MODE_SIZE (GET_MODE (src
)) <= GET_MODE_SIZE (DImode
);
2981 return GET_MODE_SIZE (GET_MODE (src
)) <= GET_MODE_SIZE (SImode
);
2984 else if (GET_MODE_CLASS (GET_MODE (src
)) != MODE_FLOAT
2985 && arith_double_operand (src
, GET_MODE (src
)))
2986 return GET_MODE_SIZE (GET_MODE (src
)) <= GET_MODE_SIZE (DImode
);
2988 else if (! TARGET_FPU
&& restore_operand (SET_DEST (pat
), SFmode
)
2989 && register_operand (src
, SFmode
))
2992 else if (GET_CODE (src
) == PLUS
2993 && arith_operand (XEXP (src
, 0), SImode
)
2994 && arith_operand (XEXP (src
, 1), SImode
)
2995 && (register_operand (XEXP (src
, 0), SImode
)
2996 || register_operand (XEXP (src
, 1), SImode
)))
2999 else if (GET_CODE (src
) == PLUS
3000 && arith_double_operand (XEXP (src
, 0), DImode
)
3001 && arith_double_operand (XEXP (src
, 1), DImode
)
3002 && (register_operand (XEXP (src
, 0), DImode
)
3003 || register_operand (XEXP (src
, 1), DImode
)))
3006 else if (GET_CODE (src
) == LO_SUM
3007 && ! TARGET_CM_MEDMID
3008 && ((register_operand (XEXP (src
, 0), SImode
)
3009 && immediate_operand (XEXP (src
, 1), SImode
))
3011 && register_operand (XEXP (src
, 0), DImode
)
3012 && immediate_operand (XEXP (src
, 1), DImode
))))
3015 else if (GET_CODE (src
) == ASHIFT
3016 && (register_operand (XEXP (src
, 0), SImode
)
3017 || register_operand (XEXP (src
, 0), DImode
))
3018 && XEXP (src
, 1) == const1_rtx
)
3025 check_return_regs (x
)
3028 switch (GET_CODE (x
))
3031 return IN_OR_GLOBAL_P (x
);
3046 if (check_return_regs (XEXP (x
, 1)) == 0)
3051 return check_return_regs (XEXP (x
, 0));
3059 /* Return 1 if TRIAL references only in and global registers. */
3061 eligible_for_return_delay (trial
)
3064 if (GET_CODE (PATTERN (trial
)) != SET
)
3067 return check_return_regs (PATTERN (trial
));
3071 short_branch (uid1
, uid2
)
3074 int delta
= INSN_ADDRESSES (uid1
) - INSN_ADDRESSES (uid2
);
3076 /* Leave a few words of "slop". */
3077 if (delta
>= -1023 && delta
<= 1022)
3083 /* Return non-zero if REG is not used after INSN.
3084 We assume REG is a reload reg, and therefore does
3085 not live past labels or calls or jumps. */
3087 reg_unused_after (reg
, insn
)
3091 enum rtx_code code
, prev_code
= UNKNOWN
;
3093 while ((insn
= NEXT_INSN (insn
)))
3095 if (prev_code
== CALL_INSN
&& call_used_regs
[REGNO (reg
)])
3098 code
= GET_CODE (insn
);
3099 if (GET_CODE (insn
) == CODE_LABEL
)
3102 if (GET_RTX_CLASS (code
) == 'i')
3104 rtx set
= single_set (insn
);
3105 int in_src
= set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
));
3108 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
3110 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
/* The table we use to reference PIC data.  */
static rtx global_offset_table;

/* The function we use to get at it.  */
static rtx get_pc_symbol;
static char get_pc_symbol_name[256];

/* Ensure that we are not using patterns that are not OK with PIC.  */

int
check_pic (i)
     int i;
{
  switch (flag_pic)
    {
    case 1:
      if (GET_CODE (recog_data.operand[i]) == SYMBOL_REF
          || (GET_CODE (recog_data.operand[i]) == CONST
              && ! (GET_CODE (XEXP (recog_data.operand[i], 0)) == MINUS
                    && (XEXP (XEXP (recog_data.operand[i], 0), 0)
                        == global_offset_table)
                    && (GET_CODE (XEXP (XEXP (recog_data.operand[i], 0), 1))
                        == CONST))))
        abort ();
    case 2:
    default:
      return 1;
    }
}

/* Return true if X is an address which needs a temporary register when
   reloaded while generating PIC code.  */

int
pic_address_needs_scratch (x)
     rtx x;
{
  /* An address which is a symbolic plus a non SMALL_INT needs a temp reg.  */
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && ! SMALL_INT (XEXP (XEXP (x, 0), 1)))
    return 1;

  return 0;
}
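/* Example with illustrative constants: (const (plus (symbol_ref "foo")
   (const_int 74565))) needs a scratch register because the offset does not
   fit the 13-bit immediate field, while (const (plus (symbol_ref "foo")
   (const_int 8))) does not.  */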
3165 /* Legitimize PIC addresses. If the address is already position-independent,
3166 we return ORIG. Newly generated position-independent addresses go into a
3167 reg. This is REG if non zero, otherwise we allocate register(s) as
3171 legitimize_pic_address (orig
, mode
, reg
)
3173 enum machine_mode mode ATTRIBUTE_UNUSED
;
3176 if (GET_CODE (orig
) == SYMBOL_REF
)
3178 rtx pic_ref
, address
;
3183 if (reload_in_progress
|| reload_completed
)
3186 reg
= gen_reg_rtx (Pmode
);
3191 /* If not during reload, allocate another temp reg here for loading
3192 in the address, so that these instructions can be optimized
3194 rtx temp_reg
= ((reload_in_progress
|| reload_completed
)
3195 ? reg
: gen_reg_rtx (Pmode
));
3197 /* Must put the SYMBOL_REF inside an UNSPEC here so that cse
3198 won't get confused into thinking that these two instructions
3199 are loading in the true address of the symbol. If in the
3200 future a PIC rtx exists, that should be used instead. */
3201 if (Pmode
== SImode
)
3203 emit_insn (gen_movsi_high_pic (temp_reg
, orig
));
3204 emit_insn (gen_movsi_lo_sum_pic (temp_reg
, temp_reg
, orig
));
3208 emit_insn (gen_movdi_high_pic (temp_reg
, orig
));
3209 emit_insn (gen_movdi_lo_sum_pic (temp_reg
, temp_reg
, orig
));
3216 pic_ref
= gen_rtx_MEM (Pmode
,
3217 gen_rtx_PLUS (Pmode
,
3218 pic_offset_table_rtx
, address
));
3219 current_function_uses_pic_offset_table
= 1;
3220 RTX_UNCHANGING_P (pic_ref
) = 1;
3221 insn
= emit_move_insn (reg
, pic_ref
);
3222 /* Put a REG_EQUAL note on this insn, so that it can be optimized
3224 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_EQUAL
, orig
,
3228 else if (GET_CODE (orig
) == CONST
)
3232 if (GET_CODE (XEXP (orig
, 0)) == PLUS
3233 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
3238 if (reload_in_progress
|| reload_completed
)
3241 reg
= gen_reg_rtx (Pmode
);
3244 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
3246 base
= legitimize_pic_address (XEXP (XEXP (orig
, 0), 0), Pmode
, reg
);
3247 offset
= legitimize_pic_address (XEXP (XEXP (orig
, 0), 1), Pmode
,
3248 base
== reg
? 0 : reg
);
3253 if (GET_CODE (offset
) == CONST_INT
)
3255 if (SMALL_INT (offset
))
3256 return plus_constant (base
, INTVAL (offset
));
3257 else if (! reload_in_progress
&& ! reload_completed
)
3258 offset
= force_reg (Pmode
, offset
);
3260 /* If we reach here, then something is seriously wrong. */
3263 return gen_rtx_PLUS (Pmode
, base
, offset
);
3265 else if (GET_CODE (orig
) == LABEL_REF
)
3266 /* ??? Why do we do this? */
3267 /* Now movsi_pic_label_ref uses it, but we ought to be checking that
3268 the register is live instead, in case it is eliminated. */
3269 current_function_uses_pic_offset_table
= 1;
/* Emit special PIC prologues.  */

void
load_pic_register ()
{
  /* Labels to get the PC in the prologue of this function.  */
  int orig_flag_pic = flag_pic;

  if (! flag_pic)
    abort ();

  /* If we haven't emitted the special get_pc helper function, do so now.  */
  if (get_pc_symbol_name[0] == 0)
    {
      int align;

      ASM_GENERATE_INTERNAL_LABEL (get_pc_symbol_name, "LGETPC", 0);
      text_section ();

      align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
      if (align > 0)
        ASM_OUTPUT_ALIGN (asm_out_file, align);
      ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LGETPC", 0);
      fputs ("\tretl\n\tadd\t%o7, %l7, %l7\n", asm_out_file);
    }

  /* Initialize every time through, since we can't easily
     know this to be permanent.  */
  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  get_pc_symbol = gen_rtx_SYMBOL_REF (Pmode, get_pc_symbol_name);
  flag_pic = 0;

  emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
                         get_pc_symbol));

  flag_pic = orig_flag_pic;

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.
     ??? In the case where we don't obey regdecls, this is not sufficient
     since we may not fall out the bottom.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
}
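/* Approximate shape of the resulting prologue, for illustration only (the
   exact operands come from the get_pc pattern in sparc.md):
        sethi   %hi(_GLOBAL_OFFSET_TABLE_-4), %l7
        call    <LGETPC helper>         ! retl; add %o7, %l7, %l7
         add    %l7, %lo(_GLOBAL_OFFSET_TABLE_+4), %l7
   leaving the GOT address in %l7 for later PIC references.  */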
/* Return 1 if RTX is a MEM which is known to be aligned to at
   least a DESIRED byte boundary.  */

int
mem_min_alignment (mem, desired)
     rtx mem;
     int desired;
{
  rtx addr, base, offset;

  /* If it's not a MEM we can't accept it.  */
  if (GET_CODE (mem) != MEM)
    return 0;

  addr = XEXP (mem, 0);
  base = offset = NULL_RTX;
  if (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG)
        {
          base = XEXP (addr, 0);

          /* What we are saying here is that if the base
             REG is aligned properly, the compiler will make
             sure any REG based index upon it will be so
             as well.  */
          if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
            offset = XEXP (addr, 1);
          else
            offset = const0_rtx;
        }
    }
  else if (GET_CODE (addr) == REG)
    {
      base = addr;
      offset = const0_rtx;
    }

  if (base != NULL_RTX)
    {
      int regno = REGNO (base);

      if (regno != HARD_FRAME_POINTER_REGNUM && regno != STACK_POINTER_REGNUM)
        {
          /* Check if the compiler has recorded some information
             about the alignment of the base REG.  If reload has
             completed, we already matched with proper alignments.
             If not running global_alloc, reload might give us
             unaligned pointer to local stack though.  */
          if (((cfun != 0
                && REGNO_POINTER_ALIGN (regno) >= desired * BITS_PER_UNIT)
               || (optimize && reload_completed))
              && (INTVAL (offset) & (desired - 1)) == 0)
            return 1;
        }
      else
        {
          if (((INTVAL (offset) - SPARC_STACK_BIAS) & (desired - 1)) == 0)
            return 1;
        }
    }
  else if (! TARGET_UNALIGNED_DOUBLES
           || CONSTANT_P (addr)
           || GET_CODE (addr) == LO_SUM)
    {
      /* Anything else we know is properly aligned unless TARGET_UNALIGNED_DOUBLES
         is true, in which case we can only assume that an access is aligned if
         it is to a constant address, or the address involves a LO_SUM.  */
      return 1;
    }

  /* An obviously unaligned address.  */
  return 0;
}
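/* Examples of the intent (illustrative, assuming the 32-bit ABI where
   SPARC_STACK_BIAS is 0): a MEM at (plus %fp -16) is accepted for an 8-byte
   access because %fp is known to be aligned and -16 is a multiple of 8,
   while (plus %fp -12) is rejected for the same access size.  */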
/* Vectors to keep interesting information about registers where it can easily
   be got.  We use to use the actual mode value as the bit number, but there
   are more than 32 modes now.  Instead we use two tables: one indexed by
   hard register number, and one indexed by mode.  */

/* The purpose of sparc_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32 bit word (again).  Each real mode is
   mapped into one sparc_mode_class mode.  */

enum sparc_mode_class {
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE,
  CC_MODE, CCFP_MODE
};
3409 /* Modes for single-word and smaller quantities. */
3410 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
3412 /* Modes for double-word and smaller quantities. */
3413 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
3415 /* Modes for quad-word and smaller quantities. */
3416 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
3418 /* Modes for 8-word and smaller quantities. */
3419 #define O_MODES (T_MODES | (1 << (int) O_MODE) | (1 << (int) OF_MODE))
3421 /* Modes for single-float quantities. We must allow any single word or
3422 smaller quantity. This is because the fix/float conversion instructions
3423 take integer inputs/outputs from the float registers. */
3424 #define SF_MODES (S_MODES)
3426 /* Modes for double-float and smaller quantities. */
3427 #define DF_MODES (S_MODES | D_MODES)
3429 /* Modes for double-float only quantities. */
3430 #define DF_MODES_NO_S ((1 << (int) D_MODE) | (1 << (int) DF_MODE))
3432 /* Modes for quad-float only quantities. */
3433 #define TF_ONLY_MODES (1 << (int) TF_MODE)
3435 /* Modes for quad-float and smaller quantities. */
3436 #define TF_MODES (DF_MODES | TF_ONLY_MODES)
3438 /* Modes for quad-float and double-float quantities. */
3439 #define TF_MODES_NO_S (DF_MODES_NO_S | TF_ONLY_MODES)
3441 /* Modes for quad-float pair only quantities. */
3442 #define OF_ONLY_MODES (1 << (int) OF_MODE)
3444 /* Modes for quad-float pairs and smaller quantities. */
3445 #define OF_MODES (TF_MODES | OF_ONLY_MODES)
3447 #define OF_MODES_NO_S (TF_MODES_NO_S | OF_ONLY_MODES)
3449 /* Modes for condition codes. */
3450 #define CC_MODES (1 << (int) CC_MODE)
3451 #define CCFP_MODES (1 << (int) CCFP_MODE)
3453 /* Value is 1 if register/mode pair is acceptable on sparc.
3454 The funny mixture of D and T modes is because integer operations
3455 do not specially operate on tetra quantities, so non-quad-aligned
3456 registers can hold quadword quantities (except %o4 and %i4 because
3457 they cross fixed registers). */
3459 /* This points to either the 32 bit or the 64 bit version. */
3460 const int *hard_regno_mode_classes
;
3462 static const int hard_32bit_mode_classes
[] = {
3463 S_MODES
, S_MODES
, T_MODES
, S_MODES
, T_MODES
, S_MODES
, D_MODES
, S_MODES
,
3464 T_MODES
, S_MODES
, T_MODES
, S_MODES
, D_MODES
, S_MODES
, D_MODES
, S_MODES
,
3465 T_MODES
, S_MODES
, T_MODES
, S_MODES
, T_MODES
, S_MODES
, D_MODES
, S_MODES
,
3466 T_MODES
, S_MODES
, T_MODES
, S_MODES
, D_MODES
, S_MODES
, D_MODES
, S_MODES
,
3468 OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
, OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
,
3469 OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
, OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
,
3470 OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
, OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
,
3471 OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
, TF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
,
3473 /* FP regs f32 to f63. Only the even numbered registers actually exist,
3474 and none can hold SFmode/SImode values. */
3475 OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0, OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0,
3476 OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0, OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0,
3477 OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0, OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0,
3478 OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0, TF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0,
3481 CCFP_MODES
, CCFP_MODES
, CCFP_MODES
, CCFP_MODES
,
3487 static const int hard_64bit_mode_classes
[] = {
3488 D_MODES
, D_MODES
, T_MODES
, D_MODES
, T_MODES
, D_MODES
, T_MODES
, D_MODES
,
3489 O_MODES
, D_MODES
, T_MODES
, D_MODES
, T_MODES
, D_MODES
, T_MODES
, D_MODES
,
3490 T_MODES
, D_MODES
, T_MODES
, D_MODES
, T_MODES
, D_MODES
, T_MODES
, D_MODES
,
3491 O_MODES
, D_MODES
, T_MODES
, D_MODES
, T_MODES
, D_MODES
, T_MODES
, D_MODES
,
3493 OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
, OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
,
3494 OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
, OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
,
3495 OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
, OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
,
3496 OF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
, TF_MODES
, SF_MODES
, DF_MODES
, SF_MODES
,
3498 /* FP regs f32 to f63. Only the even numbered registers actually exist,
3499 and none can hold SFmode/SImode values. */
3500 OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0, OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0,
3501 OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0, OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0,
3502 OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0, OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0,
3503 OF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0, TF_MODES_NO_S
, 0, DF_MODES_NO_S
, 0,
3506 CCFP_MODES
, CCFP_MODES
, CCFP_MODES
, CCFP_MODES
,
3512 int sparc_mode_class
[NUM_MACHINE_MODES
];
3514 enum reg_class sparc_regno_reg_class
[FIRST_PSEUDO_REGISTER
];
3521 for (i
= 0; i
< NUM_MACHINE_MODES
; i
++)
3523 switch (GET_MODE_CLASS (i
))
3526 case MODE_PARTIAL_INT
:
3527 case MODE_COMPLEX_INT
:
3528 if (GET_MODE_SIZE (i
) <= 4)
3529 sparc_mode_class
[i
] = 1 << (int) S_MODE
;
3530 else if (GET_MODE_SIZE (i
) == 8)
3531 sparc_mode_class
[i
] = 1 << (int) D_MODE
;
3532 else if (GET_MODE_SIZE (i
) == 16)
3533 sparc_mode_class
[i
] = 1 << (int) T_MODE
;
3534 else if (GET_MODE_SIZE (i
) == 32)
3535 sparc_mode_class
[i
] = 1 << (int) O_MODE
;
3537 sparc_mode_class
[i
] = 0;
3540 case MODE_COMPLEX_FLOAT
:
3541 if (GET_MODE_SIZE (i
) <= 4)
3542 sparc_mode_class
[i
] = 1 << (int) SF_MODE
;
3543 else if (GET_MODE_SIZE (i
) == 8)
3544 sparc_mode_class
[i
] = 1 << (int) DF_MODE
;
3545 else if (GET_MODE_SIZE (i
) == 16)
3546 sparc_mode_class
[i
] = 1 << (int) TF_MODE
;
3547 else if (GET_MODE_SIZE (i
) == 32)
3548 sparc_mode_class
[i
] = 1 << (int) OF_MODE
;
3550 sparc_mode_class
[i
] = 0;
3554 /* mode_class hasn't been initialized yet for EXTRA_CC_MODES, so
3555 we must explicitly check for them here. */
3556 if (i
== (int) CCFPmode
|| i
== (int) CCFPEmode
)
3557 sparc_mode_class
[i
] = 1 << (int) CCFP_MODE
;
3558 else if (i
== (int) CCmode
|| i
== (int) CC_NOOVmode
3559 || i
== (int) CCXmode
|| i
== (int) CCX_NOOVmode
)
3560 sparc_mode_class
[i
] = 1 << (int) CC_MODE
;
3562 sparc_mode_class
[i
] = 0;
3568 hard_regno_mode_classes
= hard_64bit_mode_classes
;
3570 hard_regno_mode_classes
= hard_32bit_mode_classes
;
3572 /* Initialize the array used by REGNO_REG_CLASS. */
3573 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3575 if (i
< 16 && TARGET_V8PLUS
)
3576 sparc_regno_reg_class
[i
] = I64_REGS
;
3577 else if (i
< 32 || i
== FRAME_POINTER_REGNUM
)
3578 sparc_regno_reg_class
[i
] = GENERAL_REGS
;
3580 sparc_regno_reg_class
[i
] = FP_REGS
;
3582 sparc_regno_reg_class
[i
] = EXTRA_FP_REGS
;
3584 sparc_regno_reg_class
[i
] = FPCC_REGS
;
3586 sparc_regno_reg_class
[i
] = NO_REGS
;
3590 /* Save non call used registers from LOW to HIGH at BASE+OFFSET.
3591 N_REGS is the number of 4-byte regs saved thus far. This applies even to
3592 v9 int regs as it simplifies the code. */
3595 save_regs (file
, low
, high
, base
, offset
, n_regs
, real_offset
)
3605 if (TARGET_ARCH64
&& high
<= 32)
3607 for (i
= low
; i
< high
; i
++)
3609 if (regs_ever_live
[i
] && ! call_used_regs
[i
])
3611 fprintf (file
, "\tstx\t%s, [%s+%d]\n",
3612 reg_names
[i
], base
, offset
+ 4 * n_regs
);
3613 if (dwarf2out_do_frame ())
3614 dwarf2out_reg_save ("", i
, real_offset
+ 4 * n_regs
);
3621 for (i
= low
; i
< high
; i
+= 2)
3623 if (regs_ever_live
[i
] && ! call_used_regs
[i
])
3625 if (regs_ever_live
[i
+1] && ! call_used_regs
[i
+1])
3627 fprintf (file
, "\tstd\t%s, [%s+%d]\n",
3628 reg_names
[i
], base
, offset
+ 4 * n_regs
);
3629 if (dwarf2out_do_frame ())
3631 char *l
= dwarf2out_cfi_label ();
3632 dwarf2out_reg_save (l
, i
, real_offset
+ 4 * n_regs
);
3633 dwarf2out_reg_save (l
, i
+1, real_offset
+ 4 * n_regs
+ 4);
3639 fprintf (file
, "\tst\t%s, [%s+%d]\n",
3640 reg_names
[i
], base
, offset
+ 4 * n_regs
);
3641 if (dwarf2out_do_frame ())
3642 dwarf2out_reg_save ("", i
, real_offset
+ 4 * n_regs
);
3648 if (regs_ever_live
[i
+1] && ! call_used_regs
[i
+1])
3650 fprintf (file
, "\tst\t%s, [%s+%d]\n",
3651 reg_names
[i
+1], base
, offset
+ 4 * n_regs
+ 4);
3652 if (dwarf2out_do_frame ())
3653 dwarf2out_reg_save ("", i
+ 1, real_offset
+ 4 * n_regs
+ 4);
3662 /* Restore non call used registers from LOW to HIGH at BASE+OFFSET.
3664 N_REGS is the number of 4-byte regs saved thus far. This applies even to
3665 v9 int regs as it simplifies the code. */
3668 restore_regs (file
, low
, high
, base
, offset
, n_regs
)
3677 if (TARGET_ARCH64
&& high
<= 32)
3679 for (i
= low
; i
< high
; i
++)
3681 if (regs_ever_live
[i
] && ! call_used_regs
[i
])
3682 fprintf (file
, "\tldx\t[%s+%d], %s\n",
3683 base
, offset
+ 4 * n_regs
, reg_names
[i
]),
3689 for (i
= low
; i
< high
; i
+= 2)
3691 if (regs_ever_live
[i
] && ! call_used_regs
[i
])
3692 if (regs_ever_live
[i
+1] && ! call_used_regs
[i
+1])
3693 fprintf (file
, "\tldd\t[%s+%d], %s\n",
3694 base
, offset
+ 4 * n_regs
, reg_names
[i
]),
3697 fprintf (file
, "\tld\t[%s+%d], %s\n",
3698 base
, offset
+ 4 * n_regs
, reg_names
[i
]),
3700 else if (regs_ever_live
[i
+1] && ! call_used_regs
[i
+1])
3701 fprintf (file
, "\tld\t[%s+%d], %s\n",
3702 base
, offset
+ 4 * n_regs
+ 4, reg_names
[i
+1]),
/* Compute the frame size required by the function.  This function is called
   during the reload pass and also by output_function_prologue().  */

int
compute_frame_size (size, leaf_function)
     int size;
     int leaf_function;
{
  int n_regs = 0, i;
  int outgoing_args_size = (current_function_outgoing_args_size
                            + REG_PARM_STACK_SPACE (current_function_decl));

  /* N_REGS is the number of 4-byte regs saved thus far.  This applies
     even to v9 int regs to be consistent with save_regs/restore_regs.  */

  if (TARGET_ARCH64)
    {
      for (i = 0; i < 8; i++)
        if (regs_ever_live[i] && ! call_used_regs[i])
          n_regs += 2;
    }
  else
    {
      for (i = 0; i < 8; i += 2)
        if ((regs_ever_live[i] && ! call_used_regs[i])
            || (regs_ever_live[i+1] && ! call_used_regs[i+1]))
          n_regs += 2;
    }

  for (i = 32; i < (TARGET_V9 ? 96 : 64); i += 2)
    if ((regs_ever_live[i] && ! call_used_regs[i])
        || (regs_ever_live[i+1] && ! call_used_regs[i+1]))
      n_regs += 2;

  /* Set up values for use in `function_epilogue'.  */
  num_gfregs = n_regs;

  if (leaf_function && n_regs == 0
      && size == 0 && current_function_outgoing_args_size == 0)
    {
      actual_fsize = apparent_fsize = 0;
    }
  else
    {
      /* We subtract STARTING_FRAME_OFFSET, remember it's negative.  */
      apparent_fsize = (size - STARTING_FRAME_OFFSET + 7) & -8;
      apparent_fsize += n_regs * 4;
      actual_fsize = apparent_fsize + ((outgoing_args_size + 7) & -8);
    }

  /* Make sure nothing can clobber our register windows.
     If a SAVE must be done, or there is a stack-local variable,
     the register window area must be allocated.
     ??? For v8 we apparently need an additional 8 bytes of reserved space.  */
  if (leaf_function == 0 || size > 0)
    actual_fsize += (16 * UNITS_PER_WORD) + (TARGET_ARCH64 ? 0 : 8);

  return SPARC_STACK_ALIGN (actual_fsize);
}
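/* Qualitative worked example (exact numbers depend on STARTING_FRAME_OFFSET
   and REG_PARM_STACK_SPACE): for a non-leaf v8 function with a few words of
   locals and no saved global or FP registers, apparent_fsize is the rounded
   local size, actual_fsize additionally gets the rounded outgoing-argument
   area, the 16-word register window save area and the extra 8 v8 bytes, and
   the sum is then passed through SPARC_STACK_ALIGN before the prologue
   emits its save instruction.  */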
/* Build a (32 bit) big number in a register.  */
/* ??? We may be able to use the set macro here too.  */

static void
build_big_number (file, num, reg)
     FILE *file;
     int num;
     const char *reg;
{
  if (num >= 0 || ! TARGET_ARCH64)
    {
      fprintf (file, "\tsethi\t%%hi(%d), %s\n", num, reg);
      if ((num & 0x3ff) != 0)
        fprintf (file, "\tor\t%s, %%lo(%d), %s\n", reg, num, reg);
    }
  else /* num < 0 && TARGET_ARCH64 */
    {
      /* Sethi does not sign extend, so we must use a little trickery
         to use it for negative numbers.  Invert the constant before
         loading it in, then use xor immediate to invert the loaded bits
         (along with the upper 32 bits) to the desired constant.  This
         works because the sethi and immediate fields overlap.  */
      int asize = num;
      int inv = ~asize;
      int low = -0x400 + (asize & 0x3FF);

      fprintf (file, "\tsethi\t%%hi(%d), %s\n\txor\t%s, %d, %s\n",
               inv, reg, reg, low, reg);
    }
}
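/* Host-side sketch of the negative-number arithmetic above (illustrative,
   not part of the original sources): for num = -4104, inv is 4103, sethi
   loads 0x1000, and the xor immediate is -8, which recreates the
   sign-extended constant.  */
#if 0
static int
build_big_number_arith_check (void)
{
  int num = -4104;
  int inv = ~num;                           /* 4103 */
  int low = -0x400 + (num & 0x3FF);         /* -8 */
  long long loaded = inv & ~0x3ff;          /* what sethi %hi(inv) loads */
  return (loaded ^ (long long) low) == (long long) num;  /* 1 */
}
#endif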
3800 /* Output any necessary .register pseudo-ops. */
3802 sparc_output_scratch_registers (file
)
3803 FILE *file ATTRIBUTE_UNUSED
;
3805 #ifdef HAVE_AS_REGISTER_PSEUDO_OP
3811 /* Check if %g[2367] were used without
3812 .register being printed for them already. */
3813 for (i
= 2; i
< 8; i
++)
3815 if (regs_ever_live
[i
]
3816 && ! sparc_hard_reg_printed
[i
])
3818 sparc_hard_reg_printed
[i
] = 1;
3819 fprintf (file
, "\t.register\t%%g%d, #scratch\n", i
);
3826 /* This function generates the assembly code for function entry.
3827 FILE is a stdio stream to output the code to.
3828 SIZE is an int: how many units of temporary storage to allocate.
3829 Refer to the array `regs_ever_live' to determine which registers
3830 to save; `regs_ever_live[I]' is nonzero if register number I
3831 is ever used in the function. This macro is responsible for
3832 knowing which registers should not be saved even if used. */
3834 /* On SPARC, move-double insns between fpu and cpu need an 8-byte block
3835 of memory. If any fpu reg is used in the function, we allocate
3836 such a block here, at the bottom of the frame, just in case it's needed.
3838 If this function is a leaf procedure, then we may choose not
3839 to do a "save" insn. The decision about whether or not
3840 to do this is made in regclass.c. */
3843 sparc_output_function_prologue (file
, size
)
3848 sparc_flat_function_prologue (file
, size
);
3850 sparc_nonflat_function_prologue (file
, size
,
3851 current_function_uses_only_leaf_regs
);
3854 /* Output code for the function prologue. */
3857 sparc_nonflat_function_prologue (file
, size
, leaf_function
)
3862 sparc_output_scratch_registers (file
);
3864 /* Need to use actual_fsize, since we are also allocating
3865 space for our callee (and our own register save area). */
3866 actual_fsize
= compute_frame_size (size
, leaf_function
);
3870 frame_base_name
= "%sp";
3871 frame_base_offset
= actual_fsize
+ SPARC_STACK_BIAS
;
3875 frame_base_name
= "%fp";
3876 frame_base_offset
= SPARC_STACK_BIAS
;
3879 /* This is only for the human reader. */
3880 fprintf (file
, "\t%s#PROLOGUE# 0\n", ASM_COMMENT_START
);
3882 if (actual_fsize
== 0)
3884 else if (! leaf_function
)
3886 if (actual_fsize
<= 4096)
3887 fprintf (file
, "\tsave\t%%sp, -%d, %%sp\n", actual_fsize
);
3888 else if (actual_fsize
<= 8192)
3890 fprintf (file
, "\tsave\t%%sp, -4096, %%sp\n");
3891 fprintf (file
, "\tadd\t%%sp, -%d, %%sp\n", actual_fsize
- 4096);
3895 build_big_number (file
, -actual_fsize
, "%g1");
3896 fprintf (file
, "\tsave\t%%sp, %%g1, %%sp\n");
3899 else /* leaf function */
3901 if (actual_fsize
<= 4096)
3902 fprintf (file
, "\tadd\t%%sp, -%d, %%sp\n", actual_fsize
);
3903 else if (actual_fsize
<= 8192)
3905 fprintf (file
, "\tadd\t%%sp, -4096, %%sp\n");
3906 fprintf (file
, "\tadd\t%%sp, -%d, %%sp\n", actual_fsize
- 4096);
3910 build_big_number (file
, -actual_fsize
, "%g1");
3911 fprintf (file
, "\tadd\t%%sp, %%g1, %%sp\n");
3915 if (dwarf2out_do_frame () && actual_fsize
)
3917 char *label
= dwarf2out_cfi_label ();
3919 /* The canonical frame address refers to the top of the frame. */
3920 dwarf2out_def_cfa (label
, (leaf_function
? STACK_POINTER_REGNUM
3921 : HARD_FRAME_POINTER_REGNUM
),
3924 if (! leaf_function
)
3926 /* Note the register window save. This tells the unwinder that
3927 it needs to restore the window registers from the previous
3928 frame's window save area at 0(cfa). */
3929 dwarf2out_window_save (label
);
3931 /* The return address (-8) is now in %i7. */
3932 dwarf2out_return_reg (label
, 31);
3936 /* If doing anything with PIC, do it now. */
3938 fprintf (file
, "\t%s#PROLOGUE# 1\n", ASM_COMMENT_START
);
3940 /* Call saved registers are saved just above the outgoing argument area. */
3943 int offset
, real_offset
, n_regs
;
3946 real_offset
= -apparent_fsize
;
3947 offset
= -apparent_fsize
+ frame_base_offset
;
3948 if (offset
< -4096 || offset
+ num_gfregs
* 4 > 4096)
3950 /* ??? This might be optimized a little as %g1 might already have a
3951 value close enough that a single add insn will do. */
3952 /* ??? Although, all of this is probably only a temporary fix
3953 because if %g1 can hold a function result, then
3954 output_function_epilogue will lose (the result will get
3956 build_big_number (file
, offset
, "%g1");
3957 fprintf (file
, "\tadd\t%s, %%g1, %%g1\n", frame_base_name
);
3963 base
= frame_base_name
;
3966 n_regs
= save_regs (file
, 0, 8, base
, offset
, 0, real_offset
);
3967 save_regs (file
, 32, TARGET_V9
? 96 : 64, base
, offset
, n_regs
,
3972 /* Output code to restore any call saved registers. */
3975 output_restore_regs (file
, leaf_function
)
3977 int leaf_function ATTRIBUTE_UNUSED
;
3982 offset
= -apparent_fsize
+ frame_base_offset
;
3983 if (offset
< -4096 || offset
+ num_gfregs
* 4 > 4096 - 8 /*double*/)
3985 build_big_number (file
, offset
, "%g1");
3986 fprintf (file
, "\tadd\t%s, %%g1, %%g1\n", frame_base_name
);
3992 base
= frame_base_name
;
3995 n_regs
= restore_regs (file
, 0, 8, base
, offset
, 0);
3996 restore_regs (file
, 32, TARGET_V9
? 96 : 64, base
, offset
, n_regs
);
3999 /* This function generates the assembly code for function exit,
4000 on machines that need it.
4002 The function epilogue should not depend on the current stack pointer!
4003 It should use the frame pointer only. This is mandatory because
4004 of alloca; we also take advantage of it to omit stack adjustments
4005 before returning. */
4008 sparc_output_function_epilogue (file
, size
)
4013 sparc_flat_function_epilogue (file
, size
);
4015 sparc_nonflat_function_epilogue (file
, size
,
4016 current_function_uses_only_leaf_regs
);
4019 /* Output code for the function epilogue. */
4022 sparc_nonflat_function_epilogue (file
, size
, leaf_function
)
4024 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
4029 if (current_function_epilogue_delay_list
== 0)
4031 /* If code does not drop into the epilogue, we need
4032 do nothing except output pending case vectors.
4034 We have to still output a dummy nop for the sake of
4035 sane backtraces. Otherwise, if the last two instructions
4036 of a function were call foo; dslot; this can make the return
4037 PC of foo (ie. address of call instruction plus 8) point to
4038 the first instruction in the next function. */
4041 fputs("\tnop\n", file
);
4043 insn
= get_last_insn ();
4044 if (GET_CODE (insn
) == NOTE
)
4045 insn
= prev_nonnote_insn (insn
);
4046 if (insn
&& GET_CODE (insn
) == BARRIER
)
4047 goto output_vectors
;
4051 output_restore_regs (file
, leaf_function
);
4053 /* Work out how to skip the caller's unimp instruction if required. */
4055 ret
= (SKIP_CALLERS_UNIMP_P
? "jmp\t%o7+12" : "retl");
4057 ret
= (SKIP_CALLERS_UNIMP_P
? "jmp\t%i7+12" : "ret");
4059 if (! leaf_function
)
4061 if (current_function_calls_eh_return
)
4063 if (current_function_epilogue_delay_list
)
4065 if (SKIP_CALLERS_UNIMP_P
)
4068 fputs ("\trestore\n\tretl\n\tadd\t%sp, %g1, %sp\n", file
);
4070 /* If we wound up with things in our delay slot, flush them here. */
4071 else if (current_function_epilogue_delay_list
)
4073 rtx delay
= PATTERN (XEXP (current_function_epilogue_delay_list
, 0));
4075 if (TARGET_V9
&& ! epilogue_renumber (&delay
, 1))
4077 epilogue_renumber (&delay
, 0);
4078 fputs (SKIP_CALLERS_UNIMP_P
4079 ? "\treturn\t%i7+12\n"
4080 : "\treturn\t%i7+8\n", file
);
4081 final_scan_insn (XEXP (current_function_epilogue_delay_list
, 0),
4088 if (GET_CODE (delay
) != SET
)
4091 src
= SET_SRC (delay
);
4092 if (GET_CODE (src
) == ASHIFT
)
4094 if (XEXP (src
, 1) != const1_rtx
)
4097 = gen_rtx_PLUS (GET_MODE (src
), XEXP (src
, 0),
4101 insn
= gen_rtx_PARALLEL (VOIDmode
,
4102 gen_rtvec (2, delay
,
4103 gen_rtx_RETURN (VOIDmode
)));
4104 insn
= emit_jump_insn (insn
);
4106 sparc_emitting_epilogue
= true;
4107 final_scan_insn (insn
, file
, 1, 0, 1);
4108 sparc_emitting_epilogue
= false;
4111 else if (TARGET_V9
&& ! SKIP_CALLERS_UNIMP_P
)
4112 fputs ("\treturn\t%i7+8\n\tnop\n", file
);
4114 fprintf (file
, "\t%s\n\trestore\n", ret
);
4116 /* All of the following cases are for leaf functions. */
4117 else if (current_function_calls_eh_return
)
4119 else if (current_function_epilogue_delay_list
)
4121 /* eligible_for_epilogue_delay_slot ensures that if this is a
4122 leaf function, then we will only have insn in the delay slot
4123 if the frame size is zero, thus no adjust for the stack is
4125 if (actual_fsize
!= 0)
4127 fprintf (file
, "\t%s\n", ret
);
4128 final_scan_insn (XEXP (current_function_epilogue_delay_list
, 0),
4131 /* Output 'nop' instead of 'sub %sp,-0,%sp' when no frame, so as to
4132 avoid generating confusing assembly language output. */
4133 else if (actual_fsize
== 0)
4134 fprintf (file
, "\t%s\n\tnop\n", ret
);
4135 else if (actual_fsize
<= 4096)
4136 fprintf (file
, "\t%s\n\tsub\t%%sp, -%d, %%sp\n", ret
, actual_fsize
);
4137 else if (actual_fsize
<= 8192)
4138 fprintf (file
, "\tsub\t%%sp, -4096, %%sp\n\t%s\n\tsub\t%%sp, -%d, %%sp\n",
4139 ret
, actual_fsize
- 4096);
4140 else if ((actual_fsize
& 0x3ff) == 0)
4141 fprintf (file
, "\tsethi\t%%hi(%d), %%g1\n\t%s\n\tadd\t%%sp, %%g1, %%sp\n",
4144 fprintf (file
, "\tsethi\t%%hi(%d), %%g1\n\tor\t%%g1, %%lo(%d), %%g1\n\t%s\n\tadd\t%%sp, %%g1, %%sp\n",
4145 actual_fsize
, actual_fsize
, ret
);
4148 sparc_output_deferred_case_vectors ();
4151 /* Output a sibling call. */
4154 output_sibcall (insn
, call_operand
)
4155 rtx insn
, call_operand
;
4157 int leaf_regs
= current_function_uses_only_leaf_regs
;
4159 int delay_slot
= dbr_sequence_length () > 0;
4163 /* Call to restore global regs might clobber
4164 the delay slot. Instead of checking for this
4165 output the delay slot now. */
4168 rtx delay
= NEXT_INSN (insn
);
4173 final_scan_insn (delay
, asm_out_file
, 1, 0, 1);
4174 PATTERN (delay
) = gen_blockage ();
4175 INSN_CODE (delay
) = -1;
4178 output_restore_regs (asm_out_file
, leaf_regs
);
4181 operands
[0] = call_operand
;
4185 #ifdef HAVE_AS_RELAX_OPTION
4186 /* If as and ld are relaxing tail call insns into branch always,
4187 use or %o7,%g0,X; call Y; or X,%g0,%o7 always, so that it can
4188 be optimized. With sethi/jmpl as nor ld has no easy way how to
4189 find out if somebody does not branch between the sethi and jmpl. */
4192 int spare_slot
= ((TARGET_ARCH32
|| TARGET_CM_MEDLOW
) && ! flag_pic
);
4196 if ((actual_fsize
|| ! spare_slot
) && delay_slot
)
4198 rtx delay
= NEXT_INSN (insn
);
4203 final_scan_insn (delay
, asm_out_file
, 1, 0, 1);
4204 PATTERN (delay
) = gen_blockage ();
4205 INSN_CODE (delay
) = -1;
4210 if (actual_fsize
<= 4096)
4211 size
= actual_fsize
;
4212 else if (actual_fsize
<= 8192)
4214 fputs ("\tsub\t%sp, -4096, %sp\n", asm_out_file
);
4215 size
= actual_fsize
- 4096;
4217 else if ((actual_fsize
& 0x3ff) == 0)
4218 fprintf (asm_out_file
,
4219 "\tsethi\t%%hi(%d), %%g1\n\tadd\t%%sp, %%g1, %%sp\n",
4223 fprintf (asm_out_file
,
4224 "\tsethi\t%%hi(%d), %%g1\n\tor\t%%g1, %%lo(%d), %%g1\n",
4225 actual_fsize
, actual_fsize
);
4226 fputs ("\tadd\t%%sp, %%g1, %%sp\n", asm_out_file
);
4231 output_asm_insn ("sethi\t%%hi(%a0), %%g1", operands
);
4232 output_asm_insn ("jmpl\t%%g1 + %%lo(%a0), %%g0", operands
);
4234 fprintf (asm_out_file
, "\t sub\t%%sp, -%d, %%sp\n", size
);
4235 else if (! delay_slot
)
4236 fputs ("\t nop\n", asm_out_file
);
4241 fprintf (asm_out_file
, "\tsub\t%%sp, -%d, %%sp\n", size
);
4242 /* Use or with rs2 %%g0 instead of mov, so that as/ld can optimize
4243 it into branch if possible. */
4244 output_asm_insn ("or\t%%o7, %%g0, %%g1", operands
);
4245 output_asm_insn ("call\t%a0, 0", operands
);
4246 output_asm_insn (" or\t%%g1, %%g0, %%o7", operands
);
4251 output_asm_insn ("call\t%a0, 0", operands
);
4254 rtx delay
= NEXT_INSN (insn
), pat
;
4259 pat
= PATTERN (delay
);
4260 if (GET_CODE (pat
) != SET
)
4263 operands
[0] = SET_DEST (pat
);
4264 pat
= SET_SRC (pat
);
4265 switch (GET_CODE (pat
))
4268 operands
[1] = XEXP (pat
, 0);
4269 operands
[2] = XEXP (pat
, 1);
4270 output_asm_insn (" restore %r1, %2, %Y0", operands
);
4273 operands
[1] = XEXP (pat
, 0);
4274 operands
[2] = XEXP (pat
, 1);
4275 output_asm_insn (" restore %r1, %%lo(%a2), %Y0", operands
);
4278 operands
[1] = XEXP (pat
, 0);
4279 output_asm_insn (" restore %r1, %r1, %Y0", operands
);
4283 output_asm_insn (" restore %%g0, %1, %Y0", operands
);
4286 PATTERN (delay
) = gen_blockage ();
4287 INSN_CODE (delay
) = -1;
4290 fputs ("\t restore\n", asm_out_file
);
/* Functions for handling argument passing.

   For v8 the first six args are normally in registers and the rest are
   pushed.  Any arg that starts within the first 6 words is at least
   partially passed in a register unless its data type forbids.

   For v9, the argument registers are laid out as an array of 16 elements
   and arguments are added sequentially.  The first 6 int args and up to the
   first 16 fp args (depending on size) are passed in regs.

   Slot    Stack   Integral   Float   Float in structure   Double   Long Double
   ----    -----   --------   -----   ------------------   ------   -----------
    15   [SP+248]              %f31       %f30,%f31         %d30
    14   [SP+240]              %f29       %f28,%f29         %d28       %q28
    13   [SP+232]              %f27       %f26,%f27         %d26
    12   [SP+224]              %f25       %f24,%f25         %d24       %q24
    11   [SP+216]              %f23       %f22,%f23         %d22
    10   [SP+208]              %f21       %f20,%f21         %d20       %q20
     9   [SP+200]              %f19       %f18,%f19         %d18
     8   [SP+192]              %f17       %f16,%f17         %d16       %q16
     7   [SP+184]              %f15       %f14,%f15         %d14
     6   [SP+176]              %f13       %f12,%f13         %d12       %q12
     5   [SP+168]     %o5      %f11       %f10,%f11         %d10
     4   [SP+160]     %o4       %f9        %f8,%f9           %d8        %q8
     3   [SP+152]     %o3       %f7        %f6,%f7           %d6
     2   [SP+144]     %o2       %f5        %f4,%f5           %d4        %q4
     1   [SP+136]     %o1       %f3        %f2,%f3           %d2
     0   [SP+128]     %o0       %f1        %f0,%f1           %d0        %q0

   Here SP = %sp if -mno-stack-bias or %sp+stack_bias otherwise.

   Integral arguments are always passed as 64 bit quantities appropriately
   extended.

   Passing of floating point values is handled as follows.
   If a prototype is in scope:
     If the value is in a named argument (i.e. not a stdarg function or a
     value not part of the `...') then the value is passed in the appropriate
     fp reg.
     If the value is part of the `...' and is passed in one of the first 6
     slots then the value is passed in the appropriate int reg.
     If the value is part of the `...' and is not passed in one of the first 6
     slots then the value is passed in memory.
   If a prototype is not in scope:
     If the value is one of the first 6 arguments the value is passed in the
     appropriate integer reg and the appropriate fp reg.
     If the value is not one of the first 6 arguments the value is passed in
     the appropriate fp reg and in memory.  */
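/* Illustrative example of the rules above (not additional ABI text): for a
   prototyped v9 call f (int a, double b), A is passed in %o0 (slot 0) and B
   in %d2 (slot 1); without a prototype in scope, B is passed in both the fp
   reg and the corresponding int reg (%o1) so a K&R-style callee can still
   find it.  */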
/* Maximum number of int regs for args.  */
#define SPARC_INT_ARG_MAX 6
/* Maximum number of fp regs for args.  */
#define SPARC_FP_ARG_MAX 16

#define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4351 /* Handle the INIT_CUMULATIVE_ARGS macro.
4352 Initialize a variable CUM of type CUMULATIVE_ARGS
4353 for a call to a function whose data type is FNTYPE.
4354 For a library call, FNTYPE is 0. */
4357 init_cumulative_args (cum
, fntype
, libname
, indirect
)
4358 CUMULATIVE_ARGS
*cum
;
4360 rtx libname ATTRIBUTE_UNUSED
;
4361 int indirect ATTRIBUTE_UNUSED
;
4364 cum
->prototype_p
= fntype
&& TYPE_ARG_TYPES (fntype
);
4365 cum
->libcall_p
= fntype
== 0;
4368 /* Compute the slot number to pass an argument in.
4369 Returns the slot number or -1 if passing on the stack.
4371 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4372 the preceding args and about the function being called.
4373 MODE is the argument's machine mode.
4374 TYPE is the data type of the argument (as a tree).
4375 This is null for libcalls where that information may
4377 NAMED is nonzero if this argument is a named parameter
4378 (otherwise it is an extra parameter matching an ellipsis).
4379 INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
4380 *PREGNO records the register number to use if scalar type.
4381 *PPADDING records the amount of padding needed in words. */
4384 function_arg_slotno (cum
, mode
, type
, named
, incoming_p
, pregno
, ppadding
)
4385 const CUMULATIVE_ARGS
*cum
;
4386 enum machine_mode mode
;
4393 int regbase
= (incoming_p
4394 ? SPARC_INCOMING_INT_ARG_FIRST
4395 : SPARC_OUTGOING_INT_ARG_FIRST
);
4396 int slotno
= cum
->words
;
4401 if (type
!= 0 && TREE_ADDRESSABLE (type
))
4404 && type
!= 0 && mode
== BLKmode
4405 && TYPE_ALIGN (type
) % PARM_BOUNDARY
!= 0)
4411 /* MODE is VOIDmode when generating the actual call.
4415 case QImode
: case CQImode
:
4416 case HImode
: case CHImode
:
4417 case SImode
: case CSImode
:
4418 case DImode
: case CDImode
:
4419 case TImode
: case CTImode
:
4420 if (slotno
>= SPARC_INT_ARG_MAX
)
4422 regno
= regbase
+ slotno
;
4425 case SFmode
: case SCmode
:
4426 case DFmode
: case DCmode
:
4427 case TFmode
: case TCmode
:
4430 if (slotno
>= SPARC_INT_ARG_MAX
)
4432 regno
= regbase
+ slotno
;
4436 if ((mode
== TFmode
|| mode
== TCmode
)
4437 && (slotno
& 1) != 0)
4438 slotno
++, *ppadding
= 1;
4439 if (TARGET_FPU
&& named
)
4441 if (slotno
>= SPARC_FP_ARG_MAX
)
4443 regno
= SPARC_FP_ARG_FIRST
+ slotno
* 2;
4449 if (slotno
>= SPARC_INT_ARG_MAX
)
4451 regno
= regbase
+ slotno
;
4457 /* For sparc64, objects requiring 16 byte alignment get it. */
4460 if (type
&& TYPE_ALIGN (type
) == 128 && (slotno
& 1) != 0)
4461 slotno
++, *ppadding
= 1;
4465 || (type
&& TREE_CODE (type
) == UNION_TYPE
))
4467 if (slotno
>= SPARC_INT_ARG_MAX
)
4469 regno
= regbase
+ slotno
;
4474 int intregs_p
= 0, fpregs_p
= 0;
4475 /* The ABI obviously doesn't specify how packed
4476 structures are passed. These are defined to be passed
4477 in int regs if possible, otherwise memory. */
4480 /* First see what kinds of registers we need. */
4481 for (field
= TYPE_FIELDS (type
); field
; field
= TREE_CHAIN (field
))
4483 if (TREE_CODE (field
) == FIELD_DECL
)
4485 if (TREE_CODE (TREE_TYPE (field
)) == REAL_TYPE
4490 if (DECL_PACKED (field
))
4494 if (packed_p
|| !named
)
4495 fpregs_p
= 0, intregs_p
= 1;
4497 /* If all arg slots are filled, then must pass on stack. */
4498 if (fpregs_p
&& slotno
>= SPARC_FP_ARG_MAX
)
4500 /* If there are only int args and all int arg slots are filled,
4501 then must pass on stack. */
4502 if (!fpregs_p
&& intregs_p
&& slotno
>= SPARC_INT_ARG_MAX
)
4504 /* Note that even if all int arg slots are filled, fp members may
4505 still be passed in regs if such regs are available.
4506 *PREGNO isn't set because there may be more than one, it's up
4507 to the caller to compute them. */
4520 /* Handle recursive register counting for structure field layout. */
4522 struct function_arg_record_value_parms
4525 int slotno
, named
, regbase
;
4530 static void function_arg_record_value_3
4531 PARAMS ((HOST_WIDE_INT
, struct function_arg_record_value_parms
*));
4532 static void function_arg_record_value_2
4533 PARAMS ((tree
, HOST_WIDE_INT
,
4534 struct function_arg_record_value_parms
*));
4535 static void function_arg_record_value_1
4536 PARAMS ((tree
, HOST_WIDE_INT
,
4537 struct function_arg_record_value_parms
*));
4538 static rtx function_arg_record_value
4539 PARAMS ((tree
, enum machine_mode
, int, int, int));
/* A subroutine of function_arg_record_value.  Traverse the structure
   recursively and determine how many registers will be required.  */

function_arg_record_value_1 (type, startbitpos, parms)
     HOST_WIDE_INT startbitpos;
     struct function_arg_record_value_parms *parms;

  /* The ABI obviously doesn't specify how packed structures are
     passed.  These are defined to be passed in int regs if possible,
     otherwise memory.  */

  /* We need to compute how many registers are needed so we can
     allocate the PARALLEL but before we can do that we need to know
     whether there are any packed fields.  If there are, int regs are
     used regardless of whether there are fp values present.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && DECL_PACKED (field))

  /* Compute how many registers we need.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
      if (TREE_CODE (field) == FIELD_DECL)
          HOST_WIDE_INT bitpos = startbitpos;

          if (DECL_SIZE (field) != 0
              && host_integerp (bit_position (field), 1))
            bitpos += int_bit_position (field);

          /* ??? FIXME: else assume zero offset.  */

          if (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE)
            function_arg_record_value_1 (TREE_TYPE (field), bitpos, parms);
          else if ((TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
                    || (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE
                        && (TREE_CODE (TREE_TYPE (TREE_TYPE (field)))

              if (parms->intoffset != -1)
                  int intslots, this_slotno;

                  intslots = (bitpos - parms->intoffset + BITS_PER_WORD - 1)
                  this_slotno = parms->slotno + parms->intoffset

                  intslots = MIN (intslots, SPARC_INT_ARG_MAX - this_slotno);
                  intslots = MAX (intslots, 0);
                  parms->nregs += intslots;
                  parms->intoffset = -1;

              /* There's no need to check this_slotno < SPARC_FP_ARG_MAX.
                 If it wasn't true we wouldn't be here.  */
              if (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE)

              if (parms->intoffset == -1)
                parms->intoffset = bitpos;
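/* For illustration (assuming BITS_PER_WORD == 64): if integer fields start
   at parms->intoffset == 0 and the next floating point field is found at
   bitpos == 128, the pending integer span needs (128 - 0 + 63) / 64 == 2
   word slots, so parms->nregs is bumped by 2 (clamped against
   SPARC_INT_ARG_MAX) before the fp field itself is handled.  */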
/* A subroutine of function_arg_record_value.  Assign the bits of the
   structure between parms->intoffset and bitpos to integer registers.  */

function_arg_record_value_3 (bitpos, parms)
     HOST_WIDE_INT bitpos;
     struct function_arg_record_value_parms *parms;

  enum machine_mode mode;
  unsigned int startbit, endbit;
  int this_slotno, intslots, intoffset;

  if (parms->intoffset == -1)

  intoffset = parms->intoffset;
  parms->intoffset = -1;

  startbit = intoffset & -BITS_PER_WORD;
  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
  intslots = (endbit - startbit) / BITS_PER_WORD;
  this_slotno = parms->slotno + intoffset / BITS_PER_WORD;

  intslots = MIN (intslots, SPARC_INT_ARG_MAX - this_slotno);

      /* If this is the trailing part of a word, only load that much into
	 the register.  Otherwise load the whole register.  Note that in
	 the latter case we may pick up unwanted bits.  It's not a problem
	 at the moment but may wish to revisit.  */
      if (intoffset % BITS_PER_WORD != 0)
	mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,

      intoffset /= BITS_PER_UNIT;

      regno = parms->regbase + this_slotno;
      reg = gen_rtx_REG (mode, regno);
      XVECEXP (parms->ret, 0, parms->nregs)
	= gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));

      intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
  while (intslots > 0);
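/* Worked example (assuming BITS_PER_WORD == 64, UNITS_PER_WORD == 8):
   with intoffset == 96 and bitpos == 192,
     startbit    = 96 & -64         = 64
     endbit      = (192 + 63) & -64 = 192
     intslots    = (192 - 64) / 64  = 2
     this_slotno = parms->slotno + 1
   and since 96 % 64 == 32, the first register only gets a 32 bit mode
   (mode_for_size (64 - 32, ...)), i.e. the trailing half of that word.  */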
/* A subroutine of function_arg_record_value.  Traverse the structure
   recursively and assign bits to floating point registers.  Track which
   bits in between need integer registers; invoke function_arg_record_value_3
   to make that happen.  */

function_arg_record_value_2 (type, startbitpos, parms)
     HOST_WIDE_INT startbitpos;
     struct function_arg_record_value_parms *parms;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && DECL_PACKED (field))

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
      if (TREE_CODE (field) == FIELD_DECL)
          HOST_WIDE_INT bitpos = startbitpos;

          if (DECL_SIZE (field) != 0
              && host_integerp (bit_position (field), 1))
            bitpos += int_bit_position (field);

          /* ??? FIXME: else assume zero offset.  */

          if (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE)
            function_arg_record_value_2 (TREE_TYPE (field), bitpos, parms);
          else if ((TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
                    || (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE
                        && (TREE_CODE (TREE_TYPE (TREE_TYPE (field)))

              int this_slotno = parms->slotno + bitpos / BITS_PER_WORD;
              enum machine_mode mode = DECL_MODE (field);

              function_arg_record_value_3 (bitpos, parms);
              regno = SPARC_FP_ARG_FIRST + this_slotno * 2
                      + ((mode == SFmode || mode == SCmode)
                         && (bitpos & 32) != 0);

                case SCmode: mode = SFmode; break;
                case DCmode: mode = DFmode; break;
                case TCmode: mode = TFmode; break;

              reg = gen_rtx_REG (mode, regno);
              XVECEXP (parms->ret, 0, parms->nregs)
                = gen_rtx_EXPR_LIST (VOIDmode, reg,
                                     GEN_INT (bitpos / BITS_PER_UNIT));

              if (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE)
                  regno += GET_MODE_SIZE (mode) / 4;
                  reg = gen_rtx_REG (mode, regno);
                  XVECEXP (parms->ret, 0, parms->nregs)
                    = gen_rtx_EXPR_LIST (VOIDmode, reg,
                                         GEN_INT ((bitpos + GET_MODE_BITSIZE (mode))

              if (parms->intoffset == -1)
                parms->intoffset = bitpos;
/* Used by function_arg and function_value to implement the complex
   Sparc64 structure calling conventions.  */

function_arg_record_value (type, mode, slotno, named, regbase)
     enum machine_mode mode;
     int slotno, named, regbase;

  HOST_WIDE_INT typesize = int_size_in_bytes (type);
  struct function_arg_record_value_parms parms;

  parms.ret = NULL_RTX;
  parms.slotno = slotno;
  parms.named = named;
  parms.regbase = regbase;

  /* Compute how many registers we need.  */
  parms.intoffset = 0;
  function_arg_record_value_1 (type, 0, &parms);

  if (parms.intoffset != -1)
      unsigned int startbit, endbit;
      int intslots, this_slotno;

      startbit = parms.intoffset & -BITS_PER_WORD;
      endbit = (typesize*BITS_PER_UNIT + BITS_PER_WORD - 1) & -BITS_PER_WORD;
      intslots = (endbit - startbit) / BITS_PER_WORD;
      this_slotno = slotno + parms.intoffset / BITS_PER_WORD;

      intslots = MIN (intslots, SPARC_INT_ARG_MAX - this_slotno);
      intslots = MAX (intslots, 0);

      parms.nregs += intslots;

  nregs = parms.nregs;

  /* Allocate the vector and handle some annoying special cases.  */

      /* ??? Empty structure has no value?  Duh?  */

      /* Though there's nothing really to store, return a word register
	 anyway so the rest of gcc doesn't go nuts.  Returning a PARALLEL
	 leads to breakage due to the fact that there are zero bytes to
	 store.  */
      return gen_rtx_REG (mode, regbase);

      /* ??? C++ has structures with no fields, and yet a size.  Give up
	 for now and pass everything back in integer registers.  */
      nregs = (typesize + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      if (nregs + slotno > SPARC_INT_ARG_MAX)
	nregs = SPARC_INT_ARG_MAX - slotno;

  parms.ret = gen_rtx_PARALLEL (mode, rtvec_alloc (nregs));

  /* Fill in the entries.  */
  parms.intoffset = 0;
  function_arg_record_value_2 (type, 0, &parms);
  function_arg_record_value_3 (typesize * BITS_PER_UNIT, &parms);

  if (parms.nregs != nregs)
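/* For illustration (an assumption about the resulting RTL, not taken from
   the original sources): for an outgoing argument of type
   struct { double d; int i; } starting in slot 0 on sparc64, the PARALLEL
   built here would describe the double in the first fp argument register
   at byte offset 0, and the word holding `i' in the second integer
   argument register (e.g. %o1) at byte offset 8.  */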
/* Handle the FUNCTION_ARG macro.
   Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may not be available.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).
   INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.  */

function_arg (cum, mode, type, named, incoming_p)
     const CUMULATIVE_ARGS *cum;
     enum machine_mode mode;

  int regbase = (incoming_p
		 ? SPARC_INCOMING_INT_ARG_FIRST
		 : SPARC_OUTGOING_INT_ARG_FIRST);
  int slotno, regno, padding;

  slotno = function_arg_slotno (cum, mode, type, named, incoming_p,

      reg = gen_rtx_REG (mode, regno);

  /* v9 fp args in reg slots beyond the int reg slots get passed in regs
     but also have the slot allocated for them.
     If no prototype is in scope fp values in register slots get passed
     in two places, either fp regs and int regs or fp regs and memory.  */
  if ((GET_MODE_CLASS (mode) == MODE_FLOAT
       || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
      && SPARC_FP_REG_P (regno))
      reg = gen_rtx_REG (mode, regno);
      if (cum->prototype_p || cum->libcall_p)
	  /* "* 2" because fp reg numbers are recorded in 4 byte

	  /* ??? This will cause the value to be passed in the fp reg and
	     in the stack.  When a prototype exists we want to pass the
	     value in the reg but reserve space on the stack.  That's an
	     optimization, and is deferred [for a bit].  */
	  if ((regno - SPARC_FP_ARG_FIRST) >= SPARC_INT_ARG_MAX * 2)
	    return gen_rtx_PARALLEL (mode,
			gen_rtx_EXPR_LIST (VOIDmode,
					   NULL_RTX, const0_rtx),
			gen_rtx_EXPR_LIST (VOIDmode,

	  /* ??? It seems that passing back a register even when past
	     the area declared by REG_PARM_STACK_SPACE will allocate
	     space appropriately, and will not copy the data onto the
	     stack, exactly as we desire.

	     This is due to locate_and_pad_parm being called in
	     expand_call whenever reg_parm_stack_space > 0, which
	     while beneficial to our example here, would seem to be
	     in error from what had been intended.  Ho hum...  -- r~ */

	  if ((regno - SPARC_FP_ARG_FIRST) < SPARC_INT_ARG_MAX * 2)
	      /* On incoming, we don't need to know that the value
		 is passed in %f0 and %i0, and it confuses other parts
		 causing needless spillage even on the simplest cases.  */

	      intreg = (SPARC_OUTGOING_INT_ARG_FIRST
			+ (regno - SPARC_FP_ARG_FIRST) / 2);

	      v0 = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
	      v1 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (mode, intreg),
	      return gen_rtx_PARALLEL (mode, gen_rtvec (2, v0, v1));

	      v0 = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
	      v1 = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
	      return gen_rtx_PARALLEL (mode, gen_rtvec (2, v0, v1));

  else if (type && TREE_CODE (type) == RECORD_TYPE)
      /* Structures up to 16 bytes in size are passed in arg slots on the
	 stack and are promoted to registers where possible.  */
      if (int_size_in_bytes (type) > 16)
	abort (); /* shouldn't get here */

      return function_arg_record_value (type, mode, slotno, named, regbase);

  else if (type && TREE_CODE (type) == UNION_TYPE)
      enum machine_mode mode;
      int bytes = int_size_in_bytes (type);

      mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 0);
      reg = gen_rtx_REG (mode, regno);

      /* Scalar or complex int.  */
      reg = gen_rtx_REG (mode, regno);
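/* For illustration (a hedged reading of the PARALLEL construction above,
   not taken from the original sources): with no prototype in scope, an
   outgoing double in the first argument slot ends up described by a
   two-entry PARALLEL, one EXPR_LIST naming the fp argument register and
   one naming the corresponding integer register (%o0 here), so the callee
   can pick the value up from either place.  With a prototype, only the
   fp register copy is produced.  */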
/* Handle the FUNCTION_ARG_PARTIAL_NREGS macro.
   For an arg passed partly in registers and partly in memory,
   this is the number of registers used.
   For args passed entirely in registers or entirely in memory, zero.

   Any arg that starts in the first 6 regs but won't entirely fit in them
   needs partial registers on v8.  On v9, structures with integer
   values in arg slots 5,6 will be passed in %o5 and SP+176, and complex fp
   values that begin in the last fp reg [where "last fp reg" varies with the
   mode] will be split between that reg and memory.  */

function_arg_partial_nregs (cum, mode, type, named)
     const CUMULATIVE_ARGS *cum;
     enum machine_mode mode;

  int slotno, regno, padding;

  /* We pass 0 for incoming_p here, it doesn't matter.  */
  slotno = function_arg_slotno (cum, mode, type, named, 0, &regno, &padding);

      if ((slotno + (mode == BLKmode
		     ? ROUND_ADVANCE (int_size_in_bytes (type))
		     : ROUND_ADVANCE (GET_MODE_SIZE (mode))))
	  > NPARM_REGS (SImode))
	return NPARM_REGS (SImode) - slotno;

      if (type && AGGREGATE_TYPE_P (type))
	  int size = int_size_in_bytes (type);
	  int align = TYPE_ALIGN (type);

	    slotno += slotno & 1;
	  if (size > 8 && size <= 16
	      && slotno == SPARC_INT_ARG_MAX - 1)

      else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	       || (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT

	  if (GET_MODE_ALIGNMENT (mode) == 128)
	      slotno += slotno & 1;
	      if (slotno == SPARC_INT_ARG_MAX - 2)

	  if (slotno == SPARC_INT_ARG_MAX - 1)

      else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	  if (GET_MODE_ALIGNMENT (mode) == 128)
	    slotno += slotno & 1;
	  if ((slotno + GET_MODE_SIZE (mode) / UNITS_PER_WORD)
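/* Worked example for the v8 case above: a DImode argument starting in slot
   5 gives 5 + ROUND_ADVANCE (8) = 5 + 2 = 7 > NPARM_REGS (SImode) = 6, so
   NPARM_REGS (SImode) - slotno = 1 register (%o5) is used and the remaining
   word goes to memory.  */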
/* Handle the FUNCTION_ARG_PASS_BY_REFERENCE macro.
   !v9: The SPARC ABI stipulates passing struct arguments (of any size) and
   quad-precision floats by invisible reference.
   v9: Aggregates greater than 16 bytes are passed by reference.
   For Pascal, also pass arrays by reference.  */

function_arg_pass_by_reference (cum, mode, type, named)
     const CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
     enum machine_mode mode;
     int named ATTRIBUTE_UNUSED;

      return ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode || mode == TCmode);

      return ((type && TREE_CODE (type) == ARRAY_TYPE)
	      /* Consider complex values as aggregates, so care for TCmode.  */
	      || GET_MODE_SIZE (mode) > 16
		  && AGGREGATE_TYPE_P (type)
		  && (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 16));
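/* For illustration: under the v8 rule above, every struct argument and
   every TFmode/TCmode value is passed by invisible reference; under the v9
   rule, an 8 or 16 byte struct is still passed by value in argument slots,
   while a 24 byte struct (or any aggregate over 16 bytes) is passed by
   reference.  */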
/* Handle the FUNCTION_ARG_ADVANCE macro.
   Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   TYPE is null for libcalls where that information may not be available.  */

function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;

  int slotno, regno, padding;

  /* We pass 0 for incoming_p here, it doesn't matter.  */
  slotno = function_arg_slotno (cum, mode, type, named, 0, &regno, &padding);

  /* If register required leading padding, add it.  */
  cum->words += padding;

      cum->words += (mode != BLKmode
		     ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
		     : ROUND_ADVANCE (int_size_in_bytes (type)));

      if (type && AGGREGATE_TYPE_P (type))
	  int size = int_size_in_bytes (type);

	  else if (size <= 16)
	  else /* passed by reference */

      else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
      else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	cum->words += GET_MODE_SIZE (mode) / UNITS_PER_WORD;

	cum->words += (mode != BLKmode
		       ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
		       : ROUND_ADVANCE (int_size_in_bytes (type)));
/* Handle the FUNCTION_ARG_PADDING macro.
   For the 64 bit ABI structs are always stored left shifted in their
   argument slot.  */

function_arg_padding (mode, type)
     enum machine_mode mode;

  if (TARGET_ARCH64 && type != 0 && AGGREGATE_TYPE_P (type))

  /* This is the default definition.  */
  return (! BYTES_BIG_ENDIAN
	     ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
		&& int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
	     : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
	    ? downward : upward));
/* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE macros.
   For v9, function return values are subject to the same rules as arguments,
   except that up to 32-bytes may be returned in registers.  */

function_value (type, mode, incoming_p)
     enum machine_mode mode;

  int regbase = (incoming_p
		 ? SPARC_OUTGOING_INT_ARG_FIRST
		 : SPARC_INCOMING_INT_ARG_FIRST);

  if (TARGET_ARCH64 && type)
      if (TREE_CODE (type) == RECORD_TYPE)
	  /* Structures up to 32 bytes in size are passed in registers,
	     promoted to fp registers where possible.  */
	  if (int_size_in_bytes (type) > 32)
	    abort (); /* shouldn't get here */

	  return function_arg_record_value (type, mode, 0, 1, regbase);

      else if (AGGREGATE_TYPE_P (type))
	  /* All other aggregate types are passed in an integer register
	     in a mode corresponding to the size of the type.  */
	  HOST_WIDE_INT bytes = int_size_in_bytes (type);

	  mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 0);

      && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
      && type && ! AGGREGATE_TYPE_P (type))

    regno = BASE_RETURN_VALUE_REG (mode);
    regno = BASE_OUTGOING_VALUE_REG (mode);

  return gen_rtx_REG (mode, regno);
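/* For illustration (an assumption, not from the original sources): on
   sparc64 an 8 byte union returned by value gets
   mode_for_size (64, MODE_INT, 0) == DImode and so comes back in a single
   integer register, while a record type such as struct { double d; } goes
   through function_arg_record_value and is promoted to an fp register
   instead.  */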
/* Do what is necessary for `va_start'.  We look at the current function
   to determine if stdarg or varargs is used and return the address of
   the first unnamed parameter.  */

sparc_builtin_saveregs ()
  int first_reg = current_function_args_info.words;

  for (regno = first_reg; regno < NPARM_REGS (word_mode); regno++)
    emit_move_insn (gen_rtx_MEM (word_mode,
				 gen_rtx_PLUS (Pmode,
					       GEN_INT (FIRST_PARM_OFFSET (0)
		    gen_rtx_REG (word_mode,
				 BASE_INCOMING_ARG_REG (word_mode) + regno));

  address = gen_rtx_PLUS (Pmode,
			  GEN_INT (FIRST_PARM_OFFSET (0)
				   + UNITS_PER_WORD * first_reg));

/* Implement `va_start' for varargs and stdarg.  */

sparc_va_start (stdarg_p, valist, nextarg)
     int stdarg_p ATTRIBUTE_UNUSED;

  nextarg = expand_builtin_saveregs ();
  std_expand_builtin_va_start (1, valist, nextarg);
/* Implement `va_arg'.  */

sparc_va_arg (valist, type)
  HOST_WIDE_INT size, rsize, align;

  /* Round up sizeof(type) to a word.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  if (TYPE_ALIGN (type) >= 2 * (unsigned) BITS_PER_WORD)
    align = 2 * UNITS_PER_WORD;

  if (AGGREGATE_TYPE_P (type))
      if ((unsigned HOST_WIDE_INT) size > 16)
	  size = rsize = UNITS_PER_WORD;

      /* SPARC v9 ABI states that structures up to 8 bytes in size are
	 given one 8 byte slot.  */
	size = rsize = UNITS_PER_WORD;

      if (AGGREGATE_TYPE_P (type)
	  || TYPE_MODE (type) == TFmode
	  || TYPE_MODE (type) == TCmode)
	  size = rsize = UNITS_PER_WORD;

      incr = fold (build (PLUS_EXPR, ptr_type_node, incr,
			  build_int_2 (align - 1, 0)));
      incr = fold (build (BIT_AND_EXPR, ptr_type_node, incr,
			  build_int_2 (-align, -1)));

  addr = incr = save_expr (incr);
  if (BYTES_BIG_ENDIAN && size < rsize)
      addr = fold (build (PLUS_EXPR, ptr_type_node, incr,
			  build_int_2 (rsize - size, 0)));
  incr = fold (build (PLUS_EXPR, ptr_type_node, incr,
		      build_int_2 (rsize, 0)));

  incr = build (MODIFY_EXPR, ptr_type_node, valist, incr);
  TREE_SIDE_EFFECTS (incr) = 1;
  expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);

  addr_rtx = expand_expr (addr, NULL, Pmode, EXPAND_NORMAL);

  /* If the address isn't aligned properly for the type,
     we may need to copy to a temporary.
     FIXME: This is inefficient.  Usually we can do this
      && TYPE_ALIGN (type) > BITS_PER_WORD

      /* FIXME: We really need to specify that the temporary is live
	 for the whole function because expand_builtin_va_arg wants
	 the alias set to be get_varargs_alias_set (), but in this
	 case the alias set is that for TYPE and if the memory gets
	 reused it will be reused with alias set TYPE.  */
      rtx tmp = assign_temp (type, 0, 1, 0);

      addr_rtx = force_reg (Pmode, addr_rtx);
      addr_rtx = gen_rtx_MEM (BLKmode, addr_rtx);
      set_mem_alias_set (addr_rtx, get_varargs_alias_set ());
      set_mem_align (addr_rtx, BITS_PER_WORD);
      tmp = shallow_copy_rtx (tmp);
      PUT_MODE (tmp, BLKmode);
      set_mem_alias_set (tmp, 0);

      dest_addr = emit_block_move (tmp, addr_rtx, GEN_INT (rsize));
      if (dest_addr != NULL_RTX)
	addr_rtx = dest_addr;

	addr_rtx = XCEXP (tmp, 0, MEM);

      addr_rtx = force_reg (Pmode, addr_rtx);
      addr_rtx = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (addr_rtx, get_varargs_alias_set ());
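/* Worked example (assuming UNITS_PER_WORD == 8 on v9): for a 12 byte
   struct, size = 12 and rsize = (12 + 7) & -8 = 16, so va_arg advances
   VALIST by two slots; for a 24 byte struct the value itself was passed by
   reference, so only the 8 byte pointer lives in the argument area and
   size = rsize = UNITS_PER_WORD.  */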
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.  OP is the conditional expression.
   XEXP (OP, 0) is assumed to be a condition code register (integer or
   floating point) and its mode specifies what kind of comparison we made.

   REVERSED is non-zero if we should reverse the sense of the comparison.

   ANNUL is non-zero if we should generate an annulling branch.

   NOOP is non-zero if we have to follow this branch by a noop.

   INSN, if set, is the insn.  */

output_cbranch (op, dest, label, reversed, annul, noop, insn)
     int reversed, annul, noop;

  static char string[50];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  const char *labelno, *branch;
  int spaces = 8, far;

  /* v9 branches are limited to +-1MB.  If it is too far away,

       fbne,a,pn %fcc2, .LC29
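     For illustration, one plausible rewrite of the branch above (an
     assumption; the exact replacement sequence did not survive in this
     fragment) is to invert the condition, hop over a short window and
     reach the distant label with an unconditional branch:

       fbe,pt %fcc2, .+16
       nop
       ba .LC29  */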
  far = get_attr_length (insn) >= 3;

      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode || mode == CCFPEmode)
	code = reverse_condition_maybe_unordered (code);
	code = reverse_condition (code);

  /* Start by writing the branch condition.  */
  if (mode == CCFPmode || mode == CCFPEmode)

      /* ??? !v9: FP branches cannot be preceded by another floating point
	 insn.  Because there is currently no concept of pre-delay slots,
	 we can fix this only by always emitting a nop before a floating
	 point branch.  */
      strcpy (string, "nop\n\t");
      strcat (string, branch);

      if (mode == CC_NOOVmode || mode == CCX_NOOVmode)
      if (mode == CC_NOOVmode || mode == CCX_NOOVmode)

      strcpy (string, branch);

  spaces -= strlen (branch);
  p = strchr (string, '\0');

  /* Now add the annulling, the label, and a possible noop.  */

  if (! far && insn && INSN_ADDRESSES_SET_P ())
      int delta = (INSN_ADDRESSES (INSN_UID (dest))
		   - INSN_ADDRESSES (INSN_UID (insn)));
      /* Leave some instructions for "slop".  */
      if (delta < -260000 || delta >= 260000)

  if (mode == CCFPmode || mode == CCFPEmode)
      static char v9_fcc_labelno[] = "%%fccX, ";
      /* Set the char indicating the number of the fcc reg to use.  */
      v9_fcc_labelno[5] = REGNO (cc_reg) - SPARC_FIRST_V9_FCC_REG + '0';
      labelno = v9_fcc_labelno;

      if (REGNO (cc_reg) == SPARC_FCC_REG)

  else if (mode == CCXmode || mode == CCX_NOOVmode)
      labelno = "%%xcc, ";
      labelno = "%%icc, ";

  if (*labelno && insn && (note = find_reg_note (insn, REG_BR_PROB, NULL_RTX)))
      ((INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2) ^ far)

  strcpy (p, labelno);
  p = strchr (p, '\0');

      strcpy (p, ".+12\n\tnop\n\tb\t");

  /* Set the char indicating the number of the operand containing the

    strcpy (p, "\n\tnop");
/* Emit a library call comparison between floating point X and Y.
   COMPARISON is the rtl operator to compare with (EQ, NE, GT, etc.).
   TARGET_ARCH64 uses _Qp_* functions, which use pointers to TFmode
   values as arguments instead of the TFmode registers themselves,
   that's why we cannot call emit_float_lib_cmp.  */

sparc_emit_float_lib_cmp (x, y, comparison)
     enum rtx_code comparison;

  rtx slot0, slot1, result, tem, tem2;
  enum machine_mode mode;

      qpfunc = (TARGET_ARCH64) ? "_Qp_feq" : "_Q_feq";
      qpfunc = (TARGET_ARCH64) ? "_Qp_fne" : "_Q_fne";
      qpfunc = (TARGET_ARCH64) ? "_Qp_fgt" : "_Q_fgt";
      qpfunc = (TARGET_ARCH64) ? "_Qp_fge" : "_Q_fge";
      qpfunc = (TARGET_ARCH64) ? "_Qp_flt" : "_Q_flt";
      qpfunc = (TARGET_ARCH64) ? "_Qp_fle" : "_Q_fle";
      qpfunc = (TARGET_ARCH64) ? "_Qp_cmp" : "_Q_cmp";

      if (GET_CODE (x) != MEM)
	  slot0 = assign_stack_temp (TFmode, GET_MODE_SIZE(TFmode), 0);
	  emit_insn (gen_rtx_SET (VOIDmode, slot0, x));

      if (GET_CODE (y) != MEM)
	  slot1 = assign_stack_temp (TFmode, GET_MODE_SIZE(TFmode), 0);
	  emit_insn (gen_rtx_SET (VOIDmode, slot1, y));

      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, qpfunc), LCT_NORMAL,
			 XEXP (slot0, 0), Pmode,
			 XEXP (slot1, 0), Pmode);

      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, qpfunc), LCT_NORMAL,
			 x, TFmode, y, TFmode);

  /* Immediately move the result of the libcall into a pseudo
     register so reload doesn't clobber the value if it needs
     the return register for a spill reg.  */
  result = gen_reg_rtx (mode);
  emit_move_insn (result, hard_libcall_value (mode));

      emit_cmp_insn (result, const0_rtx, NE, NULL_RTX, mode, 0);

      emit_cmp_insn (result, GEN_INT(3), comparison == UNORDERED ? EQ : NE,

      emit_cmp_insn (result, const1_rtx,
		     comparison == UNGT ? GT : NE, NULL_RTX, mode, 0);

      emit_cmp_insn (result, const2_rtx, NE, NULL_RTX, mode, 0);

      tem = gen_reg_rtx (mode);
	emit_insn (gen_andsi3 (tem, result, const1_rtx));
	emit_insn (gen_anddi3 (tem, result, const1_rtx));
      emit_cmp_insn (tem, const0_rtx, NE, NULL_RTX, mode, 0);

      tem = gen_reg_rtx (mode);
	emit_insn (gen_addsi3 (tem, result, const1_rtx));
	emit_insn (gen_adddi3 (tem, result, const1_rtx));
      tem2 = gen_reg_rtx (mode);
	emit_insn (gen_andsi3 (tem2, tem, const2_rtx));
	emit_insn (gen_anddi3 (tem2, tem, const2_rtx));
      emit_cmp_insn (tem2, const0_rtx, comparison == UNEQ ? EQ : NE,
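/* For illustration (inferred from the comparisons above, not documented in
   this fragment): the _Q_cmp/_Qp_cmp result appears to encode 0 = equal,
   1 = less, 2 = greater, 3 = unordered.  UNORDERED then tests result == 3,
   UNGT tests result > 1 (greater or unordered), an unordered-or-less test
   checks (result & 1) != 0, and UNEQ tests ((result + 1) & 2) == 0, i.e.
   the result is neither "less" nor "greater".  */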
/* Generate an unsigned DImode to FP conversion.  This is the same code
   optabs would emit if we didn't have TFmode patterns.  */

sparc_emit_floatunsdi (operands)
  rtx neglab, donelab, i0, i1, f0, in, out;
  enum machine_mode mode;

  in = force_reg (DImode, operands[1]);
  mode = GET_MODE (out);
  neglab = gen_label_rtx ();
  donelab = gen_label_rtx ();
  i0 = gen_reg_rtx (DImode);
  i1 = gen_reg_rtx (DImode);
  f0 = gen_reg_rtx (mode);

  emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);

  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in)));
  emit_jump_insn (gen_jump (donelab));

  emit_label (neglab);

  emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
  emit_insn (gen_anddi3 (i1, in, const1_rtx));
  emit_insn (gen_iordi3 (i0, i0, i1));
  emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0)));
  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0)));

  emit_label (donelab);
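/* Worked example of the negative path above: for an input with the top bit
   set, say IN = 2^63 + 3, we compute i0 = (IN >> 1) | (IN & 1) = 2^62 + 1,
   which does fit in a signed DImode value.  The OR keeps the discarded low
   bit around as a sticky bit, so converting i0 and then doubling it
   (f0 + f0) still rounds to the same result as converting IN directly
   would have.  */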
/* Return the string to output a conditional branch to LABEL, testing
   register REG.  LABEL is the operand number of the label; REG is the
   operand number of the reg.  OP is the conditional expression.  The mode
   of REG says what kind of comparison we made.

   REVERSED is non-zero if we should reverse the sense of the comparison.

   ANNUL is non-zero if we should generate an annulling branch.

   NOOP is non-zero if we have to follow this branch by a noop.  */

output_v9branch (op, dest, reg, label, reversed, annul, noop, insn)
     int reversed, annul, noop;

  static char string[50];
  enum rtx_code code = GET_CODE (op);
  enum machine_mode mode = GET_MODE (XEXP (op, 0));

  /* Branches on a register are limited to +-128KB.  If it is too far away,

       brgez,a,pn %o1, .LC29

       ba,pt %xcc, .LC29  */

  far = get_attr_length (insn) >= 3;

  /* If not floating-point or if EQ or NE, we can just reverse the code.  */
    code = reverse_condition (code);

  /* Only 64 bit versions of these instructions exist.  */

  /* Start by writing the branch condition.  */
      strcpy (string, "brnz");
      strcpy (string, "brz");
      strcpy (string, "brgez");
      strcpy (string, "brlz");
      strcpy (string, "brlez");
      strcpy (string, "brgz");

  p = strchr (string, '\0');

  /* Now add the annulling, reg, label, and nop.  */

  if (insn && (note = find_reg_note (insn, REG_BR_PROB, NULL_RTX)))
      ((INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2) ^ far)

  *p = p < string + 8 ? '\t' : ' ';

      int veryfar = 1, delta;

      if (INSN_ADDRESSES_SET_P ())
	  delta = (INSN_ADDRESSES (INSN_UID (dest))
		   - INSN_ADDRESSES (INSN_UID (insn)));
	  /* Leave some instructions for "slop".  */
	  if (delta >= -260000 && delta < 260000)

      strcpy (p, ".+12\n\tnop\n\t");
      strcpy (p, "ba,pt\t%%xcc, ");

    strcpy (p, "\n\tnop");
/* Return 1, if any of the registers of the instruction are %l[0-7] or %o[0-7].
   Such instructions cannot be used in the delay slot of return insn on v9.
   If TEST is 0, also rename all %i[0-7] registers to their %o[0-7] counterparts.  */

epilogue_renumber (where, test)
     register rtx *where;

  register const char *fmt;
  register enum rtx_code code;

  code = GET_CODE (*where);

      if (REGNO (*where) >= 8 && REGNO (*where) < 24)	/* oX or lX */

      if (! test && REGNO (*where) >= 24 && REGNO (*where) < 32)
	*where = gen_rtx (REG, GET_MODE (*where), OUTGOING_REGNO (REGNO(*where)));

      /* Do not replace the frame pointer with the stack pointer because
	 it can cause the delayed instruction to load below the stack.
	 This occurs when instructions like:

	 (set (reg/i:SI 24 %i0)
	      (mem/f:SI (plus:SI (reg/f:SI 30 %fp)
				 (const_int -20 [0xffffffec])) 0))

	 are in the return delayed slot.  */
      if (GET_CODE (XEXP (*where, 0)) == REG
	  && REGNO (XEXP (*where, 0)) == HARD_FRAME_POINTER_REGNUM
	  && (GET_CODE (XEXP (*where, 1)) != CONST_INT
	      || INTVAL (XEXP (*where, 1)) < SPARC_STACK_BIAS))

      if (SPARC_STACK_BIAS
	  && GET_CODE (XEXP (*where, 0)) == REG
	  && REGNO (XEXP (*where, 0)) == HARD_FRAME_POINTER_REGNUM)

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
	    if (epilogue_renumber (&(XVECEXP (*where, i, j)), test))
      else if (fmt[i] == 'e'
	       && epilogue_renumber (&(XEXP (*where, i)), test))

/* Leaf functions and non-leaf functions have different needs.  */

reg_leaf_alloc_order[] = REG_LEAF_ALLOC_ORDER;

reg_nonleaf_alloc_order[] = REG_ALLOC_ORDER;

static const int *const reg_alloc_orders[] = {
  reg_leaf_alloc_order,
  reg_nonleaf_alloc_order };

order_regs_for_local_alloc ()
  static int last_order_nonleaf = 1;

  if (regs_ever_live[15] != last_order_nonleaf)
      last_order_nonleaf = !last_order_nonleaf;
      memcpy ((char *) reg_alloc_order,
	      (const char *) reg_alloc_orders[last_order_nonleaf],
	      FIRST_PSEUDO_REGISTER * sizeof (int));
/* Return 1 if REG and MEM are legitimate enough to allow the various
   mem<-->reg splits to be run.  */

sparc_splitdi_legitimate (reg, mem)
  /* Punt if we are here by mistake.  */
  if (! reload_completed)

  /* We must have an offsettable memory reference.  */
  if (! offsettable_memref_p (mem))

  /* If we have legitimate args for ldd/std, we do not want
     the split to happen.  */
  if ((REGNO (reg) % 2) == 0
      && mem_min_alignment (mem, 8))

/* Return 1 if x and y are some kind of REG and they refer to
   different hard registers.  This test is guaranteed to be
   run after reload.  */

sparc_absnegfloat_split_legitimate (x, y)
  if (GET_CODE (x) != REG)
  if (GET_CODE (y) != REG)
  if (REGNO (x) == REGNO (y))

/* Return 1 if REGNO (reg1) is even and REGNO (reg1) == REGNO (reg2) - 1.
   This makes them candidates for using ldd and std insns.

   Note reg1 and reg2 *must* be hard registers.  */

registers_ok_for_ldd_peep (reg1, reg2)
  /* We might have been passed a SUBREG.  */
  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)

  if (REGNO (reg1) % 2 != 0)

  /* Integer ldd is deprecated in SPARC V9 */
  if (TARGET_V9 && REGNO (reg1) < 32)

  return (REGNO (reg1) == REGNO (reg2) - 1);

/* Return 1 if the addresses in mem1 and mem2 are suitable for use in
   an ldd or std insn.

   This can only happen when addr1 and addr2, the addresses in mem1
   and mem2, are consecutive memory locations (addr1 + 4 == addr2).
   addr1 must also be aligned on a 64-bit boundary.

   Also iff dependent_reg_rtx is not null it should not be used to
   compute the address for mem1, i.e. we cannot optimize a sequence
   like:

   But, note that the transformation from:

   is perfectly fine.  Thus, the peephole2 patterns always pass us
   the destination register of the first load, never the second one.

   For stores we don't have a similar problem, so dependent_reg_rtx is
   NULL_RTX.  */

mems_ok_for_ldd_peep (mem1, mem2, dependent_reg_rtx)
     rtx mem1, mem2, dependent_reg_rtx;

  /* The mems cannot be volatile.  */
  if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))

  /* MEM1 should be aligned on a 64-bit boundary.  */
  if (MEM_ALIGN (mem1) < 64)

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  /* Extract a register number and offset (if used) from the first addr.  */
  if (GET_CODE (addr1) == PLUS)
      /* If not a REG, return zero.  */
      if (GET_CODE (XEXP (addr1, 0)) != REG)

      reg1 = REGNO (XEXP (addr1, 0));

      /* The offset must be constant!  */
      if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)

      offset1 = INTVAL (XEXP (addr1, 1));

  else if (GET_CODE (addr1) != REG)

      reg1 = REGNO (addr1);
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */

  /* Make sure the second address is a (mem (plus (reg) (const_int))).  */
  if (GET_CODE (addr2) != PLUS)

  if (GET_CODE (XEXP (addr2, 0)) != REG
      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)

  if (reg1 != REGNO (XEXP (addr2, 0)))

  if (dependent_reg_rtx != NULL_RTX && reg1 == REGNO (dependent_reg_rtx))

  /* The first offset must be evenly divisible by 8 to ensure the
     address is 64 bit aligned.  */
  if (offset1 % 8 != 0)

  /* The offset for the second addr must be 4 more than the first addr.  */
  if (INTVAL (XEXP (addr2, 1)) != offset1 + 4)

  /* All the tests passed.  addr1 and addr2 are valid for ldd and std
     insns.  */

/* Return 1 if reg is a pseudo, or is the first register in
   a hard register pair.  This makes it a candidate for use in
   ldd and std insns.  */

register_ok_for_ldd (reg)
  /* We might have been passed a SUBREG.  */
  if (GET_CODE (reg) != REG)

  if (REGNO (reg) < FIRST_PSEUDO_REGISTER)
    return (REGNO (reg) % 2 == 0);
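/* For illustration of mems_ok_for_ldd_peep above: [%o0] and [%o0+4], or
   [%o0+8] and [%o0+12], qualify (first offset divisible by 8, second
   exactly 4 larger), whereas [%o0+4] and [%o0+8] do not, since the first
   word would not be 64-bit aligned.  */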
/* Print operand X (an rtx) in assembler syntax to file FILE.
   CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
   For `%' followed by punctuation, CODE is the punctuation and X is null.  */

print_operand (file, x, code)
      /* Output a 'nop' if there's nothing for the delay slot.  */
      if (dbr_sequence_length () == 0)
	fputs ("\n\t nop", file);

      /* Output an annul flag if there's nothing for the delay slot and we
	 are optimizing.  This is always used with '(' below.  */
      /* Sun OS 4.1.1 dbx can't handle an annulled unconditional branch;
	 this is a dbx bug.  So, we only do this when optimizing.  */
      /* On UltraSPARC, a branch in a delay slot causes a pipeline flush.
	 Always emit a nop in case the next instruction is a branch.  */
      if (dbr_sequence_length () == 0
	  && (optimize && (int)sparc_cpu < PROCESSOR_V9))

      /* Output a 'nop' if there's nothing for the delay slot and we are
	 not optimizing.  This is always used with '*' above.  */
      if (dbr_sequence_length () == 0
	  && ! (optimize && (int)sparc_cpu < PROCESSOR_V9))
	fputs ("\n\t nop", file);

      /* Output the Embedded Medium/Anywhere code model base register.  */
      fputs (EMBMEDANY_BASE_REG, file);

      /* Print out what we are using as the frame pointer.  This might
	 be %fp, or might be %sp+offset.  */
      /* ??? What if offset is too big? Perhaps the caller knows it isn't? */
      fprintf (file, "%s+%d", frame_base_name, frame_base_offset);

      /* Adjust the operand to take into account a RESTORE operation.  */
      if (GET_CODE (x) == CONST_INT)
      else if (GET_CODE (x) != REG)
	output_operand_lossage ("invalid %%Y operand");
      else if (REGNO (x) < 8)
	fputs (reg_names[REGNO (x)], file);
      else if (REGNO (x) >= 24 && REGNO (x) < 32)
	fputs (reg_names[REGNO (x)-16], file);
	output_operand_lossage ("invalid %%Y operand");

      /* Print out the low order register name of a register pair.  */
      if (WORDS_BIG_ENDIAN)
	fputs (reg_names[REGNO (x)+1], file);
	fputs (reg_names[REGNO (x)], file);

      /* Print out the high order register name of a register pair.  */
      if (WORDS_BIG_ENDIAN)
	fputs (reg_names[REGNO (x)], file);
	fputs (reg_names[REGNO (x)+1], file);

      /* Print out the second register name of a register pair or quad.
	 I.e., R (%o0) => %o1.  */
      fputs (reg_names[REGNO (x)+1], file);

      /* Print out the third register name of a register quad.
	 I.e., S (%o0) => %o2.  */
      fputs (reg_names[REGNO (x)+2], file);

      /* Print out the fourth register name of a register quad.
	 I.e., T (%o0) => %o3.  */
      fputs (reg_names[REGNO (x)+3], file);

      /* Print a condition code register.  */
      if (REGNO (x) == SPARC_ICC_REG)
	  /* We don't handle CC[X]_NOOVmode because they're not supposed
	     to occur here.  */
	  if (GET_MODE (x) == CCmode)
	    fputs ("%icc", file);
	  else if (GET_MODE (x) == CCXmode)
	    fputs ("%xcc", file);

	/* %fccN register */
	fputs (reg_names[REGNO (x)], file);

      /* Print the operand's address only.  */
      output_address (XEXP (x, 0));

      /* In this case we need a register.  Use %g0 if the
	 operand is const0_rtx.  */
	  || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
	  fputs ("%g0", file);

      switch (GET_CODE (x))
	case IOR: fputs ("or", file); break;
	case AND: fputs ("and", file); break;
	case XOR: fputs ("xor", file); break;
	default: output_operand_lossage ("invalid %%A operand");

      switch (GET_CODE (x))
	case IOR: fputs ("orn", file); break;
	case AND: fputs ("andn", file); break;
	case XOR: fputs ("xnor", file); break;
	default: output_operand_lossage ("invalid %%B operand");

      /* These are used by the conditional move instructions.  */
	enum rtx_code rc = GET_CODE (x);

	    enum machine_mode mode = GET_MODE (XEXP (x, 0));
	    if (mode == CCFPmode || mode == CCFPEmode)
	      rc = reverse_condition_maybe_unordered (GET_CODE (x));
	      rc = reverse_condition (GET_CODE (x));

	  case NE: fputs ("ne", file); break;
	  case EQ: fputs ("e", file); break;
	  case GE: fputs ("ge", file); break;
	  case GT: fputs ("g", file); break;
	  case LE: fputs ("le", file); break;
	  case LT: fputs ("l", file); break;
	  case GEU: fputs ("geu", file); break;
	  case GTU: fputs ("gu", file); break;
	  case LEU: fputs ("leu", file); break;
	  case LTU: fputs ("lu", file); break;
	  case LTGT: fputs ("lg", file); break;
	  case UNORDERED: fputs ("u", file); break;
	  case ORDERED: fputs ("o", file); break;
	  case UNLT: fputs ("ul", file); break;
	  case UNLE: fputs ("ule", file); break;
	  case UNGT: fputs ("ug", file); break;
	  case UNGE: fputs ("uge", file); break;
	  case UNEQ: fputs ("ue", file); break;
	  default: output_operand_lossage (code == 'c'
					   ? "invalid %%c operand"
					   : "invalid %%C operand");

      /* These are used by the movr instruction pattern.  */
	enum rtx_code rc = (code == 'd'
			    ? reverse_condition (GET_CODE (x))

	  case NE: fputs ("ne", file); break;
	  case EQ: fputs ("e", file); break;
	  case GE: fputs ("gez", file); break;
	  case LT: fputs ("lz", file); break;
	  case LE: fputs ("lez", file); break;
	  case GT: fputs ("gz", file); break;
	  default: output_operand_lossage (code == 'd'
					   ? "invalid %%d operand"
					   : "invalid %%D operand");

	/* Print a sign-extended character.  */
	int i = trunc_int_for_mode (INTVAL (x), QImode);
	fprintf (file, "%d", i);

      /* Operand must be a MEM; write its address.  */
      if (GET_CODE (x) != MEM)
	output_operand_lossage ("invalid %%f operand");
      output_address (XEXP (x, 0));

      /* Do nothing special.  */

      /* Undocumented flag.  */
      output_operand_lossage ("invalid operand output code");

  if (GET_CODE (x) == REG)
    fputs (reg_names[REGNO (x)], file);
  else if (GET_CODE (x) == MEM)
      /* Poor Sun assembler doesn't understand absolute addressing.  */
      if (CONSTANT_P (XEXP (x, 0)))
	fputs ("%g0+", file);
      output_address (XEXP (x, 0));
  else if (GET_CODE (x) == HIGH)
      fputs ("%hi(", file);
      output_addr_const (file, XEXP (x, 0));
  else if (GET_CODE (x) == LO_SUM)
      print_operand (file, XEXP (x, 0), 0);
      if (TARGET_CM_MEDMID)
	fputs ("+%l44(", file);
	fputs ("+%lo(", file);
      output_addr_const (file, XEXP (x, 1));
  else if (GET_CODE (x) == CONST_DOUBLE
	   && (GET_MODE (x) == VOIDmode
	       || GET_MODE_CLASS (GET_MODE (x)) == MODE_INT))
      if (CONST_DOUBLE_HIGH (x) == 0)
	fprintf (file, "%u", (unsigned int) CONST_DOUBLE_LOW (x));
      else if (CONST_DOUBLE_HIGH (x) == -1
	       && CONST_DOUBLE_LOW (x) < 0)
	fprintf (file, "%d", (int) CONST_DOUBLE_LOW (x));
	output_operand_lossage ("long long constant not a valid immediate operand");
  else if (GET_CODE (x) == CONST_DOUBLE)
    output_operand_lossage ("floating point constant not a valid immediate operand");
  else { output_addr_const (file, x); }
/* Target hook for assembling integer objects.  The sparc version has
   special handling for aligned DI-mode objects.  */

sparc_assemble_integer (x, size, aligned_p)
  /* ??? We only output .xword's for symbols and only then in environments
     where the assembler can handle them.  */
  if (aligned_p && size == 8
      && (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_DOUBLE))
	  assemble_integer_with_op ("\t.xword\t", x);

	  assemble_aligned_integer (4, const0_rtx);
	  assemble_aligned_integer (4, x);

  return default_assemble_integer (x, size, aligned_p);

/* Return the value of a code used in the .proc pseudo-op that says
   what kind of result this function returns.  For non-C types, we pick
   the closest C type.  */

#ifndef SHORT_TYPE_SIZE
#define SHORT_TYPE_SIZE (BITS_PER_UNIT * 2)
#endif

#ifndef INT_TYPE_SIZE
#define INT_TYPE_SIZE BITS_PER_WORD
#endif

#ifndef LONG_TYPE_SIZE
#define LONG_TYPE_SIZE BITS_PER_WORD
#endif

#ifndef LONG_LONG_TYPE_SIZE
#define LONG_LONG_TYPE_SIZE (BITS_PER_WORD * 2)
#endif

#ifndef FLOAT_TYPE_SIZE
#define FLOAT_TYPE_SIZE BITS_PER_WORD
#endif

#ifndef DOUBLE_TYPE_SIZE
#define DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
#endif

#ifndef LONG_DOUBLE_TYPE_SIZE
#define LONG_DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
#endif

sparc_type_code (type)
  register unsigned long qualifiers = 0;
  register unsigned shift;

  /* Only the first 30 bits of the qualifier are valid.  We must refrain from
     setting more, since some assemblers will give an error for this.  Also,
     we must be careful to avoid shifts of 32 bits or more to avoid getting
     unpredictable results.  */

  for (shift = 6; shift < 30; shift += 2, type = TREE_TYPE (type))
      switch (TREE_CODE (type))
	  qualifiers |= (3 << shift);

	  qualifiers |= (2 << shift);

	case REFERENCE_TYPE:
	  qualifiers |= (1 << shift);

	  return (qualifiers | 8);

	case QUAL_UNION_TYPE:
	  return (qualifiers | 9);

	  return (qualifiers | 10);

	  return (qualifiers | 16);

	  /* If this is a range type, consider it to be the underlying
	     type.  */
	  if (TREE_TYPE (type) != 0)

	  /* Carefully distinguish all the standard types of C,
	     without messing up if the language is not C.  We do this by
	     testing TYPE_PRECISION and TREE_UNSIGNED.  The old code used to
	     look at both the names and the above fields, but that's redundant.
	     Any type whose size is between two C types will be considered
	     to be the wider of the two types.  Also, we do not have a
	     special code to use for "long long", so anything wider than
	     long is treated the same.  Note that we can't distinguish
	     between "int" and "long" in this code if they are the same
	     size, but that's fine, since neither can the assembler.  */

	  if (TYPE_PRECISION (type) <= CHAR_TYPE_SIZE)
	    return (qualifiers | (TREE_UNSIGNED (type) ? 12 : 2));

	  else if (TYPE_PRECISION (type) <= SHORT_TYPE_SIZE)
	    return (qualifiers | (TREE_UNSIGNED (type) ? 13 : 3));

	  else if (TYPE_PRECISION (type) <= INT_TYPE_SIZE)
	    return (qualifiers | (TREE_UNSIGNED (type) ? 14 : 4));

	    return (qualifiers | (TREE_UNSIGNED (type) ? 15 : 5));

	  /* If this is a range type, consider it to be the underlying
	     type.  */
	  if (TREE_TYPE (type) != 0)

	  /* Carefully distinguish all the standard types of C,
	     without messing up if the language is not C.  */

	  if (TYPE_PRECISION (type) == FLOAT_TYPE_SIZE)
	    return (qualifiers | 6);

	    return (qualifiers | 7);

	case COMPLEX_TYPE:	/* GNU Fortran COMPLEX type.  */
	  /* ??? We need to distinguish between double and float complex types,
	     but I don't know how yet because I can't reach this code from
	     existing front-ends.  */
	  return (qualifiers | 7);	/* Who knows? */

	case CHAR_TYPE:		/* GNU Pascal CHAR type.  Not used in C.  */
	case BOOLEAN_TYPE:	/* GNU Fortran BOOLEAN type.  */
	case FILE_TYPE:		/* GNU Pascal FILE type.  */
	case SET_TYPE:		/* GNU Pascal SET type.  */
	case LANG_TYPE:		/* ? */

	  abort ();		/* Not a type! */
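/* For illustration of the encoding above: each level of indirection or
   array-ness occupies the next 2-bit field starting at bit 6, and the base
   type code goes in the low bits.  For a type like `int **' (reading the
   codes off this switch) the loop would accumulate (1 << 6) | (1 << 8) and
   then return that value | 4, the code for a signed int-sized integer.  */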
/* Nested function support.  */

/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.

   This takes 16 insns: 2 shifts & 2 ands (to split up addresses), 4 sethi
   (to load in opcodes), 4 iors (to merge address and opcodes), and 4 writes
   (to store insns).  This is a bit excessive.  Perhaps a different
   mechanism would be better here.

   Emit enough FLUSH insns to synchronize the data and instruction caches.  */

sparc_initialize_trampoline (tramp, fnaddr, cxt)
     rtx tramp, fnaddr, cxt;

  /* SPARC 32 bit trampoline:

	sethi	%hi(static), %g2
	or	%g2, %lo(static), %g2

    SETHI i,r  = 00rr rrr1 00ii iiii iiii iiii iiii iiii
    JMPL r+i,d = 10dd ddd1 1100 0rrr rr1i iiii iiii iiii  */

#ifdef TRANSFER_FROM_TRAMPOLINE
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__enable_execute_stack"),
		     LCT_NORMAL, VOIDmode, 1, tramp, Pmode);
#endif

    (gen_rtx_MEM (SImode, plus_constant (tramp, 0)),
     expand_binop (SImode, ior_optab,
		   expand_shift (RSHIFT_EXPR, SImode, fnaddr,
				 size_int (10), 0, 1),
		   GEN_INT (trunc_int_for_mode (0x03000000, SImode)),
		   NULL_RTX, 1, OPTAB_DIRECT));

    (gen_rtx_MEM (SImode, plus_constant (tramp, 4)),
     expand_binop (SImode, ior_optab,
		   expand_shift (RSHIFT_EXPR, SImode, cxt,
				 size_int (10), 0, 1),
		   GEN_INT (trunc_int_for_mode (0x05000000, SImode)),
		   NULL_RTX, 1, OPTAB_DIRECT));

    (gen_rtx_MEM (SImode, plus_constant (tramp, 8)),
     expand_binop (SImode, ior_optab,
		   expand_and (SImode, fnaddr, GEN_INT (0x3ff), NULL_RTX),
		   GEN_INT (trunc_int_for_mode (0x81c06000, SImode)),
		   NULL_RTX, 1, OPTAB_DIRECT));

    (gen_rtx_MEM (SImode, plus_constant (tramp, 12)),
     expand_binop (SImode, ior_optab,
		   expand_and (SImode, cxt, GEN_INT (0x3ff), NULL_RTX),
		   GEN_INT (trunc_int_for_mode (0x8410a000, SImode)),
		   NULL_RTX, 1, OPTAB_DIRECT));

  /* On UltraSPARC a flush flushes an entire cache line.  The trampoline is
     aligned on a 16 byte boundary so one flush clears it all.  */
  emit_insn (gen_flush (validize_mem (gen_rtx_MEM (SImode, tramp))));
  if (sparc_cpu != PROCESSOR_ULTRASPARC
      && sparc_cpu != PROCESSOR_ULTRASPARC3)
    emit_insn (gen_flush (validize_mem (gen_rtx_MEM (SImode,
						     plus_constant (tramp, 8)))));
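/* Decoding note for the opcode templates above (my reading of the SPARC
   encodings, not stated in this fragment): 0x03000000 is `sethi %hi(0),%g1'
   and 0x05000000 is `sethi %hi(0),%g2', so OR-ing in the address shifted
   right by 10 fills the imm22 field with %hi(fnaddr) / %hi(cxt);
   0x81c06000 is `jmp %g1+0' and 0x8410a000 is `or %g2,0,%g2', so OR-ing in
   the low 10 bits supplies the %lo() parts.  */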
/* The 64 bit version is simpler because it makes more sense to load the
   values as "immediate" data out of the trampoline.  It's also easier since
   we can read the PC without clobbering a register.  */

sparc64_initialize_trampoline (tramp, fnaddr, cxt)
     rtx tramp, fnaddr, cxt;

#ifdef TRANSFER_FROM_TRAMPOLINE
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__enable_execute_stack"),
		     LCT_NORMAL, VOIDmode, 1, tramp, Pmode);
#endif

  emit_move_insn (gen_rtx_MEM (SImode, tramp),
		  GEN_INT (trunc_int_for_mode (0x83414000, SImode)));
  emit_move_insn (gen_rtx_MEM (SImode, plus_constant (tramp, 4)),
		  GEN_INT (trunc_int_for_mode (0xca586018, SImode)));
  emit_move_insn (gen_rtx_MEM (SImode, plus_constant (tramp, 8)),
		  GEN_INT (trunc_int_for_mode (0x81c14000, SImode)));
  emit_move_insn (gen_rtx_MEM (SImode, plus_constant (tramp, 12)),
		  GEN_INT (trunc_int_for_mode (0xca586010, SImode)));
  emit_move_insn (gen_rtx_MEM (DImode, plus_constant (tramp, 16)), cxt);
  emit_move_insn (gen_rtx_MEM (DImode, plus_constant (tramp, 24)), fnaddr);
  emit_insn (gen_flushdi (validize_mem (gen_rtx_MEM (DImode, tramp))));

  if (sparc_cpu != PROCESSOR_ULTRASPARC
      && sparc_cpu != PROCESSOR_ULTRASPARC3)
    emit_insn (gen_flushdi (validize_mem (gen_rtx_MEM (DImode, plus_constant (tramp, 8)))));
/* Subroutines to support a flat (single) register window calling
   convention.  */

/* Single-register window sparc stack frames look like (parts of the
   original diagram did not survive; the recoverable labels and offsets
   are kept):

	     Before call		      After call
	+-----------------------+	+-----------------------+
   high	|  caller's temps.	|	|  caller's temps.	|
   mem	+-----------------------+	+-----------------------+
	|  arguments on stack.	|	|  arguments on stack.	|
	+-----------------------+FP+92->+-----------------------+
	|  6 words to save	|	|  6 words to save	|
	|  arguments passed	|	|  arguments passed	|
	|  in registers, even	|	|  in registers, even	|
	|  if not passed.	|	|  if not passed.	|
 SP+68->+-----------------------+FP+68->+-----------------------+
	|  1 word struct addr	|	|  1 word struct addr	|
	+-----------------------+FP+64->+-----------------------+
	| 16 word reg save area	|	| 16 word reg save area	|
    SP->+-----------------------+   FP->+-----------------------+
					|  fp/alu reg moves	|
				 FP-16->+-----------------------+
					|  fp register save	|
					+-----------------------+
					|  gp register save	|
					+-----------------------+
					|  alloca allocations	|
					+-----------------------+
					|  arguments on stack	|
				 SP+92->+-----------------------+
					|  arguments passed	|
					|  in registers, even	|
				   low	|  if not passed.	|
				 memory	+-----------------------+
				 SP+68->|  1 word struct addr	|
				 SP+64->+-----------------------+
					| 16 word reg save area	|
				    SP->+-----------------------+  */

/* Structure to be filled in by sparc_flat_compute_frame_size with register
   save masks, and offsets for the current function.  */

struct sparc_frame_info
  unsigned long total_size;	/* # bytes that the entire frame takes up.  */
  unsigned long var_size;	/* # bytes that variables take up.  */
  unsigned long args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned long extra_size;	/* # bytes of extra gunk.  */
  unsigned int  gp_reg_size;	/* # bytes needed to store gp regs.  */
  unsigned int  fp_reg_size;	/* # bytes needed to store fp regs.  */
  unsigned long gmask;		/* Mask of saved gp registers.  */
  unsigned long fmask;		/* Mask of saved fp registers.  */
  unsigned long reg_offset;	/* Offset from new sp to store regs.  */
  int           initialized;	/* Nonzero if frame size already calculated.  */

/* Current frame information calculated by sparc_flat_compute_frame_size.  */
struct sparc_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
struct sparc_frame_info zero_frame_info;

/* Tell prologue and epilogue if register REGNO should be saved / restored.  */

#define RETURN_ADDR_REGNUM 15
#define HARD_FRAME_POINTER_MASK (1 << (HARD_FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))

#define MUST_SAVE_REGISTER(regno) \
 ((regs_ever_live[regno] && !call_used_regs[regno]) \
  || (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed) \
  || (regno == RETURN_ADDR_REGNUM && regs_ever_live[RETURN_ADDR_REGNUM]))
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  */

sparc_flat_compute_frame_size (size)
     int size;			/* # of var. bytes allocated.  */

  unsigned long total_size;	/* # bytes that the entire frame takes up.  */
  unsigned long var_size;	/* # bytes that variables take up.  */
  unsigned long args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned long extra_size;	/* # extra bytes.  */
  unsigned int  gp_reg_size;	/* # bytes needed to store gp regs.  */
  unsigned int  fp_reg_size;	/* # bytes needed to store fp regs.  */
  unsigned long gmask;		/* Mask of saved gp registers.  */
  unsigned long fmask;		/* Mask of saved fp registers.  */
  unsigned long reg_offset;	/* Offset to register save area.  */
  int           need_aligned_p;	/* 1 if need the save area 8 byte aligned.  */

  /* This is the size of the 16 word reg save area, 1 word struct addr
     area, and 4 word fp/alu register copy area.  */
  extra_size = -STARTING_FRAME_OFFSET + FIRST_PARM_OFFSET(0);

  if (!leaf_function_p ())
      /* Also include the size needed for the 6 parameter registers.  */
      args_size = current_function_outgoing_args_size + 24;

  total_size = var_size + args_size;

  /* Calculate space needed for gp registers.  */
  for (regno = 1; regno <= 31; regno++)
      if (MUST_SAVE_REGISTER (regno))
	  /* If we need to save two regs in a row, ensure there's room to bump
	     up the address to align it to a doubleword boundary.  */
	  if ((regno & 0x1) == 0 && MUST_SAVE_REGISTER (regno+1))
	      if (gp_reg_size % 8 != 0)

	      gp_reg_size += 2 * UNITS_PER_WORD;
	      gmask |= 3 << regno;

	      gp_reg_size += UNITS_PER_WORD;
	      gmask |= 1 << regno;

  /* Calculate space needed for fp registers.  */
  for (regno = 32; regno <= 63; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
	  fp_reg_size += UNITS_PER_WORD;
	  fmask |= 1 << (regno - 32);

      reg_offset = FIRST_PARM_OFFSET(0) + args_size;
      /* Ensure save area is 8 byte aligned if we need it.  */
      if (need_aligned_p && n != 0)
	  total_size += 8 - n;
	  reg_offset += 8 - n;

      total_size += gp_reg_size + fp_reg_size;

  /* If we must allocate a stack frame at all, we must also allocate
     room for register window spillage, so as to be binary compatible
     with libraries and operating systems that do not use -mflat.  */
    total_size += extra_size;

  total_size = SPARC_STACK_ALIGN (total_size);

  /* Save other computed information.  */
  current_frame_info.total_size  = total_size;
  current_frame_info.var_size    = var_size;
  current_frame_info.args_size   = args_size;
  current_frame_info.extra_size  = extra_size;
  current_frame_info.gp_reg_size = gp_reg_size;
  current_frame_info.fp_reg_size = fp_reg_size;
  current_frame_info.gmask       = gmask;
  current_frame_info.fmask       = fmask;
  current_frame_info.reg_offset  = reg_offset;
  current_frame_info.initialized = reload_completed;

  /* Ok, we're done.  */
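/* For illustration of the gp loop above: if an even/odd pair such as
   %l0/%l1 (regnos 16 and 17) both satisfy MUST_SAVE_REGISTER, the save area
   grows by 2 * UNITS_PER_WORD in one step and gmask picks up both bits at
   once (gmask |= 3 << 16), keeping the pair doubleword aligned for
   std/ldd.  */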
/* Save/restore registers in GMASK and FMASK at register BASE_REG plus offset
   OFFSET.

   BASE_REG must be 8 byte aligned.  This allows us to test OFFSET for
   appropriate alignment and use DOUBLEWORD_OP when we can.  We assume
   [BASE_REG+OFFSET] will always be a valid address.

   WORD_OP is either "st" for save, "ld" for restore.
   DOUBLEWORD_OP is either "std" for save, "ldd" for restore.  */

static void
sparc_flat_save_restore (file, base_reg, offset, gmask, fmask, word_op,
                         doubleword_op, base_offset)
     FILE *file;
     const char *base_reg;
     unsigned int offset;
     unsigned long gmask;
     unsigned long fmask;
     const char *word_op;
     const char *doubleword_op;
     unsigned long base_offset;
{
  int regno;

  if (gmask == 0 && fmask == 0)
    return;

  /* Save registers starting from high to low.  We've already saved the
     previous frame pointer and previous return address for the debugger's
     sake.  The debugger allows us to not need a nop in the epilog if at least
     one register is reloaded in addition to return address.  */

  if (gmask)
    {
      for (regno = 1; regno <= 31; regno++)
        {
          if ((gmask & (1L << regno)) != 0)
            {
              if ((regno & 0x1) == 0 && ((gmask & (1L << (regno+1))) != 0))
                {
                  /* We can save two registers in a row.  If we're not at a
                     double word boundary, move to one.
                     sparc_flat_compute_frame_size ensures there's room to do
                     this.  */
                  if (offset % 8 != 0)
                    offset += UNITS_PER_WORD;

                  if (word_op[0] == 's')
                    {
                      fprintf (file, "\t%s\t%s, [%s+%d]\n",
                               doubleword_op, reg_names[regno],
                               base_reg, offset);
                      if (dwarf2out_do_frame ())
                        {
                          char *l = dwarf2out_cfi_label ();
                          dwarf2out_reg_save (l, regno, offset + base_offset);
                          dwarf2out_reg_save
                            (l, regno+1, offset+base_offset + UNITS_PER_WORD);
                        }
                    }
                  else
                    fprintf (file, "\t%s\t[%s+%d], %s\n",
                             doubleword_op, base_reg, offset,
                             reg_names[regno]);

                  offset += 2 * UNITS_PER_WORD;
                  regno++;
                }
              else
                {
                  if (word_op[0] == 's')
                    {
                      fprintf (file, "\t%s\t%s, [%s+%d]\n",
                               word_op, reg_names[regno],
                               base_reg, offset);
                      if (dwarf2out_do_frame ())
                        dwarf2out_reg_save ("", regno, offset + base_offset);
                    }
                  else
                    fprintf (file, "\t%s\t[%s+%d], %s\n",
                             word_op, base_reg, offset, reg_names[regno]);

                  offset += UNITS_PER_WORD;
                }
            }
        }
    }

  if (fmask)
    {
      for (regno = 32; regno <= 63; regno++)
        {
          if ((fmask & (1L << (regno - 32))) != 0)
            {
              if (word_op[0] == 's')
                {
                  fprintf (file, "\t%s\t%s, [%s+%d]\n",
                           word_op, reg_names[regno],
                           base_reg, offset);
                  if (dwarf2out_do_frame ())
                    dwarf2out_reg_save ("", regno, offset + base_offset);
                }
              else
                fprintf (file, "\t%s\t[%s+%d], %s\n",
                         word_op, base_reg, offset, reg_names[regno]);

              offset += UNITS_PER_WORD;
            }
        }
    }
}
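
/* Added illustration: with word_op "st" / doubleword_op "std" (a save),
   BASE_REG "%sp", OFFSET 96 and a gmask covering %l0-%l3 (hard regs 16-19),
   the loop above pairs the even/odd registers and emits

	std	%l0, [%sp+96]
	std	%l2, [%sp+104]

   advancing OFFSET by 8 for each pair (assuming 4-byte UNITS_PER_WORD); an
   isolated register gets a plain "st"/"ld" and advances OFFSET by 4.  */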
/* Set up the stack and frame (if desired) for the function.  */

static void
sparc_flat_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size;
{
  const char *sp_str = reg_names[STACK_POINTER_REGNUM];
  unsigned long gmask = current_frame_info.gmask;

  sparc_output_scratch_registers (file);

  /* This is only for the human reader.  */
  fprintf (file, "\t%s#PROLOGUE# 0\n", ASM_COMMENT_START);
  fprintf (file, "\t%s# vars= %ld, regs= %d/%d, args= %d, extra= %ld\n",
           ASM_COMMENT_START,
           current_frame_info.var_size,
           current_frame_info.gp_reg_size / 4,
           current_frame_info.fp_reg_size / 4,
           current_function_outgoing_args_size,
           current_frame_info.extra_size);

  size = SPARC_STACK_ALIGN (size);
  size = (! current_frame_info.initialized
          ? sparc_flat_compute_frame_size (size)
          : current_frame_info.total_size);

  /* These cases shouldn't happen.  Catch them now.  */
  if (size == 0 && (gmask || current_frame_info.fmask))
    abort ();

  /* Allocate our stack frame by decrementing %sp.
     At present, the only algorithm gdb can use to determine if this is a
     flat frame is if we always set %i7 if we set %sp.  This can be optimized
     in the future by putting in some sort of debugging information that says
     this is a `flat' function.  However, there is still the case of debugging
     code without such debugging information (including cases where most fns
     have such info, but there is one that doesn't).  So, always do this now
     so we don't get a lot of code out there that gdb can't handle.
     If the frame pointer isn't needed then that's ok - gdb won't be able to
     distinguish us from a non-flat function but there won't (and shouldn't)
     be any differences anyway.  The return pc is saved (if necessary) right
     after %i7 so gdb won't have to look too far to find it.  */
  if (size > 0)
    {
      unsigned int reg_offset = current_frame_info.reg_offset;
      const char *const fp_str = reg_names[HARD_FRAME_POINTER_REGNUM];
      static const char *const t1_str = "%g1";

      /* Things get a little tricky if local variables take up more than ~4096
         bytes and outgoing arguments take up more than ~4096 bytes.  When that
         happens, the register save area can't be accessed from either end of
         the frame.  Handle this by decrementing %sp to the start of the gp
         register save area, save the regs, update %i7, and then set %sp to its
         final value.  Given that we only have one scratch register to play
         with it is the cheapest solution, and it helps gdb out as it won't
         slow down recognition of flat functions.
         Don't change the order of insns emitted here without checking with
         the gdb folk first.  */

      /* Is the entire register save area offsettable from %sp?  */
      if (reg_offset < 4096 - 64 * (unsigned) UNITS_PER_WORD)
        {
          if (size <= 4096)
            {
              fprintf (file, "\tadd\t%s, %d, %s\n",
                       sp_str, (int) -size, sp_str);
              if (gmask & HARD_FRAME_POINTER_MASK)
                {
                  fprintf (file, "\tst\t%s, [%s+%d]\n",
                           fp_str, sp_str, reg_offset);
                  fprintf (file, "\tsub\t%s, %d, %s\t%s# set up frame pointer\n",
                           sp_str, (int) -size, fp_str, ASM_COMMENT_START);
                  reg_offset += 4;
                }
            }
          else
            {
              fprintf (file, "\tset\t");
              fprintf (file, HOST_WIDE_INT_PRINT_DEC, size);
              fprintf (file, ", %s\n\tsub\t%s, %s, %s\n",
                       t1_str, sp_str, t1_str, sp_str);
              if (gmask & HARD_FRAME_POINTER_MASK)
                {
                  fprintf (file, "\tst\t%s, [%s+%d]\n",
                           fp_str, sp_str, reg_offset);
                  fprintf (file, "\tadd\t%s, %s, %s\t%s# set up frame pointer\n",
                           sp_str, t1_str, fp_str, ASM_COMMENT_START);
                  reg_offset += 4;
                }
            }
          if (dwarf2out_do_frame ())
            {
              char *l = dwarf2out_cfi_label ();
              if (gmask & HARD_FRAME_POINTER_MASK)
                {
                  dwarf2out_reg_save (l, HARD_FRAME_POINTER_REGNUM,
                                      reg_offset - 4 - size);
                  dwarf2out_def_cfa (l, HARD_FRAME_POINTER_REGNUM, 0);
                }
              else
                dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, size);
            }
          if (gmask & RETURN_ADDR_MASK)
            {
              fprintf (file, "\tst\t%s, [%s+%d]\n",
                       reg_names[RETURN_ADDR_REGNUM], sp_str, reg_offset);
              if (dwarf2out_do_frame ())
                dwarf2out_return_save ("", reg_offset - size);
              reg_offset += 4;
            }
          sparc_flat_save_restore (file, sp_str, reg_offset,
                                   gmask & ~(HARD_FRAME_POINTER_MASK | RETURN_ADDR_MASK),
                                   current_frame_info.fmask,
                                   "st", "std", -size);
        }
      else
        {
          /* Subtract %sp in two steps, but make sure there is always a
             64 byte register save area, and %sp is properly aligned.  */
          /* Amount to decrement %sp by, the first time.  */
          unsigned HOST_WIDE_INT size1 = ((size - reg_offset + 64) + 15) & -16;
          /* Offset to register save area from %sp.  */
          unsigned HOST_WIDE_INT offset = size1 - (size - reg_offset);

          if (size1 <= 4096)
            {
              fprintf (file, "\tadd\t%s, %d, %s\n",
                       sp_str, (int) -size1, sp_str);
              if (gmask & HARD_FRAME_POINTER_MASK)
                {
                  fprintf (file, "\tst\t%s, [%s+%d]\n\tsub\t%s, %d, %s\t%s# set up frame pointer\n",
                           fp_str, sp_str, (int) offset, sp_str, (int) -size1,
                           fp_str, ASM_COMMENT_START);
                  offset += 4;
                }
            }
          else
            {
              fprintf (file, "\tset\t");
              fprintf (file, HOST_WIDE_INT_PRINT_DEC, size1);
              fprintf (file, ", %s\n\tsub\t%s, %s, %s\n",
                       t1_str, sp_str, t1_str, sp_str);
              if (gmask & HARD_FRAME_POINTER_MASK)
                {
                  fprintf (file, "\tst\t%s, [%s+%d]\n\tadd\t%s, %s, %s\t%s# set up frame pointer\n",
                           fp_str, sp_str, (int) offset, sp_str, t1_str,
                           fp_str, ASM_COMMENT_START);
                  offset += 4;
                }
            }
          if (dwarf2out_do_frame ())
            {
              char *l = dwarf2out_cfi_label ();
              if (gmask & HARD_FRAME_POINTER_MASK)
                {
                  dwarf2out_reg_save (l, HARD_FRAME_POINTER_REGNUM,
                                      offset - 4 - size1);
                  dwarf2out_def_cfa (l, HARD_FRAME_POINTER_REGNUM, 0);
                }
              else
                dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, size1);
            }
          if (gmask & RETURN_ADDR_MASK)
            {
              fprintf (file, "\tst\t%s, [%s+%d]\n",
                       reg_names[RETURN_ADDR_REGNUM], sp_str, (int) offset);
              if (dwarf2out_do_frame ())
                /* offset - size1 == reg_offset - size
                   if reg_offset were updated above like offset.  */
                dwarf2out_return_save ("", offset - size1);
              offset += 4;
            }
          sparc_flat_save_restore (file, sp_str, offset,
                                   gmask & ~(HARD_FRAME_POINTER_MASK | RETURN_ADDR_MASK),
                                   current_frame_info.fmask,
                                   "st", "std", -size1);
          fprintf (file, "\tset\t");
          fprintf (file, HOST_WIDE_INT_PRINT_DEC, size - size1);
          fprintf (file, ", %s\n\tsub\t%s, %s, %s\n",
                   t1_str, sp_str, t1_str, sp_str);
          if (dwarf2out_do_frame ())
            if (! (gmask & HARD_FRAME_POINTER_MASK))
              dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, size);
        }
    }

  fprintf (file, "\t%s#PROLOGUE# 1\n", ASM_COMMENT_START);
}
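
/* Added illustration (not referenced by the compiler): the two-step frame
   allocation above splits SIZE so that the register save area is always
   within simm13 reach of the intermediate %sp.  The helper below only
   repeats that arithmetic; e.g. size = 12288 and reg_offset = 4000 give
   size1 = 8352 and a save-area offset of 64.  */

static unsigned long
example_flat_two_step_offset (size, reg_offset, size1_out)
     unsigned long size, reg_offset;
     unsigned long *size1_out;
{
  /* Amount to decrement %sp by, the first time (kept 16-byte aligned).  */
  unsigned long size1 = ((size - reg_offset + 64) + 15) & -16;
  /* Offset to the register save area from the intermediate %sp.  */
  unsigned long offset = size1 - (size - reg_offset);

  *size1_out = size1;
  return offset;
}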
/* Do any necessary cleanup after a function to restore stack, frame,
   and regs.  */

static void
sparc_flat_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size;
{
  rtx epilogue_delay = current_function_epilogue_delay_list;
  int noepilogue = FALSE;

  /* This is only for the human reader.  */
  fprintf (file, "\t%s#EPILOGUE#\n", ASM_COMMENT_START);

  /* The epilogue does not depend on any registers, but the stack
     registers, so we assume that if we have 1 pending nop, it can be
     ignored, and 2 it must be filled (2 nops occur for integer
     multiply and divide).  */

  size = SPARC_STACK_ALIGN (size);
  size = (!current_frame_info.initialized
           ? sparc_flat_compute_frame_size (size)
           : current_frame_info.total_size);

  if (size == 0 && epilogue_delay == 0)
    {
      rtx insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
         because a jump (aka return) was put there.  */
      if (GET_CODE (insn) == NOTE)
        insn = prev_nonnote_insn (insn);
      if (insn && GET_CODE (insn) == BARRIER)
        noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned HOST_WIDE_INT reg_offset = current_frame_info.reg_offset;
      unsigned HOST_WIDE_INT size1;
      const char *const sp_str = reg_names[STACK_POINTER_REGNUM];
      const char *const fp_str = reg_names[HARD_FRAME_POINTER_REGNUM];
      static const char *const t1_str = "%g1";

      /* In the reload sequence, we don't need to fill the load delay
         slots for most of the loads, also see if we can fill the final
         delay slot if not otherwise filled by the reload sequence.  */

      if (size > 4095)
        {
          fprintf (file, "\tset\t");
          fprintf (file, HOST_WIDE_INT_PRINT_DEC, size);
          fprintf (file, ", %s\n", t1_str);
        }

      if (frame_pointer_needed)
        {
          if (size > 4095)
            fprintf (file,"\tsub\t%s, %s, %s\t\t%s# sp not trusted here\n",
                     fp_str, t1_str, sp_str, ASM_COMMENT_START);
          else
            fprintf (file,"\tsub\t%s, %d, %s\t\t%s# sp not trusted here\n",
                     fp_str, (int) size, sp_str, ASM_COMMENT_START);
        }

      /* Is the entire register save area offsettable from %sp?  */
      if (reg_offset < 4096 - 64 * (unsigned) UNITS_PER_WORD)
        {
          size1 = 0;
        }
      else
        {
          /* Restore %sp in two steps, but make sure there is always a
             64 byte register save area, and %sp is properly aligned.  */
          /* Amount to increment %sp by, the first time.  */
          size1 = ((reg_offset - 64 - 16) + 15) & -16;
          /* Offset to register save area from %sp.  */
          reg_offset = size1 - reg_offset;

          fprintf (file, "\tset\t");
          fprintf (file, HOST_WIDE_INT_PRINT_DEC, size1);
          fprintf (file, ", %s\n\tadd\t%s, %s, %s\n",
                   t1_str, sp_str, t1_str, sp_str);
        }

      /* We must restore the frame pointer and return address reg first
         because they are treated specially by the prologue output code.  */
      if (current_frame_info.gmask & HARD_FRAME_POINTER_MASK)
        {
          fprintf (file, "\tld\t[%s+%d], %s\n",
                   sp_str, (int) reg_offset, fp_str);
          reg_offset += 4;
        }
      if (current_frame_info.gmask & RETURN_ADDR_MASK)
        {
          fprintf (file, "\tld\t[%s+%d], %s\n",
                   sp_str, (int) reg_offset, reg_names[RETURN_ADDR_REGNUM]);
          reg_offset += 4;
        }

      /* Restore any remaining saved registers.  */
      sparc_flat_save_restore (file, sp_str, reg_offset,
                               current_frame_info.gmask & ~(HARD_FRAME_POINTER_MASK | RETURN_ADDR_MASK),
                               current_frame_info.fmask,
                               "ld", "ldd", 0);

      /* If we had to increment %sp in two steps, record it so the second
         restoration in the epilogue finishes up.  */
      if (size1 > 0)
        {
          size -= size1;
          if (size > 4095)
            {
              fprintf (file, "\tset\t");
              fprintf (file, HOST_WIDE_INT_PRINT_DEC, size);
              fprintf (file, ", %s\n", t1_str);
            }
        }

      if (current_function_returns_struct)
        fprintf (file, "\tjmp\t%%o7+12\n");
      else
        fprintf (file, "\tretl\n");

      /* If the only register saved is the return address, we need a
         nop, unless we have an instruction to put into it.  Otherwise
         we don't since reloading multiple registers doesn't reference
         the register being loaded.  */

      if (epilogue_delay)
        {
          if (size)
            abort ();
          final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
        }

      else if (size > 4095)
        fprintf (file, "\tadd\t%s, %s, %s\n", sp_str, t1_str, sp_str);

      else if (size > 0)
        fprintf (file, "\tadd\t%s, %d, %s\n", sp_str, (int) size, sp_str);

      else
        fprintf (file, "\tnop\n");
    }

  /* Reset state info for each function.  */
  current_frame_info = zero_frame_info;

  sparc_output_deferred_case_vectors ();
}
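
/* Added illustration: for a frame that only allocates locals
   (0 < size <= 4095, nothing left in gmask/fmask, no frame pointer and an
   empty delay list), the code above reduces to just

	retl
	add	%sp, SIZE, %sp

   i.e. the %sp restore rides in the delay slot of the return.  */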
/* Define the number of delay slots needed for the function epilogue.

   On the sparc, we need a slot if either no stack has been allocated,
   or the only register saved is the return register.  */

int
sparc_flat_epilogue_delay_slots ()
{
  if (!current_frame_info.initialized)
    (void) sparc_flat_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size == 0)
    return 1;

  return 0;
}
/* Return true if TRIAL is a valid insn for the epilogue delay slot.
   Any single length instruction which doesn't reference the stack or frame
   pointer is OK.  */

int
sparc_flat_eligible_for_epilogue_delay (trial, slot)
     rtx trial;
     int slot ATTRIBUTE_UNUSED;
{
  rtx pat = PATTERN (trial);

  if (get_attr_length (trial) != 1)
    return 0;

  if (! reg_mentioned_p (stack_pointer_rtx, pat)
      && ! reg_mentioned_p (frame_pointer_rtx, pat))
    return 1;

  return 0;
}
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */

static int
supersparc_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  enum attr_type insn_type;

  if (! recog_memoized (insn))
    return 0;

  insn_type = get_attr_type (insn);

  if (REG_NOTE_KIND (link) == 0)
    {
      /* Data dependency; DEP_INSN writes a register that INSN reads some
         cycles later.  */

      /* if a load, then the dependence must be on the memory address;
         add an extra "cycle".  Note that the cost could be two cycles
         if the reg was written late in an instruction group; we cannot tell
         here.  */
      if (insn_type == TYPE_LOAD || insn_type == TYPE_FPLOAD)
        return cost + 3;

      /* Get the delay only if the address of the store is the dependence.  */
      if (insn_type == TYPE_STORE || insn_type == TYPE_FPSTORE)
        {
          rtx pat = PATTERN(insn);
          rtx dep_pat = PATTERN (dep_insn);

          if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
            return cost;  /* This should not happen!  */

          /* The dependency between the two instructions was on the data that
             is being stored.  Assume that this implies that the address of the
             store is not dependent.  */
          if (rtx_equal_p (SET_DEST (dep_pat), SET_SRC (pat)))
            return cost;

          return cost + 3;  /* An approximation.  */
        }

      /* A shift instruction cannot receive its data from an instruction
         in the same cycle; add a one cycle penalty.  */
      if (insn_type == TYPE_SHIFT)
        return cost + 3;   /* Split before cascade into shift.  */
    }
  else
    {
      /* Anti- or output- dependency; DEP_INSN reads/writes a register that
         INSN writes some cycles later.  */

      /* These are only significant for the fpu unit; writing a fp reg before
         the fpu has finished with it stalls the processor.  */

      /* Reusing an integer register causes no problems.  */
      if (insn_type == TYPE_IALU || insn_type == TYPE_SHIFT)
        return 0;
    }

  return cost;
}
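
/* Added illustration: the load/store distinction above means that in

	add	%o1, %o2, %o3
	ld	[%o3], %o4

   the load depends on its address register and is charged the extra
   latency, while in

	sub	%o1, %o2, %o3
	st	%o3, [%o5]

   the dependence is only on the data being stored, so the cost is left
   unchanged.  */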
static int
hypersparc_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  enum attr_type insn_type, dep_type;
  rtx pat = PATTERN(insn);
  rtx dep_pat = PATTERN (dep_insn);

  if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_type = get_attr_type (dep_insn);

  switch (REG_NOTE_KIND (link))
    {
    case 0:
      /* Data dependency; DEP_INSN writes a register that INSN reads some
         cycles later.  */

      switch (insn_type)
        {
        case TYPE_STORE:
        case TYPE_FPSTORE:
          /* Get the delay iff the address of the store is the dependence.  */
          if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
            return cost;

          if (rtx_equal_p (SET_DEST (dep_pat), SET_SRC (pat)))
            return cost;
          return cost + 3;

        case TYPE_LOAD:
        case TYPE_SLOAD:
        case TYPE_FPLOAD:
          /* If a load, then the dependence must be on the memory address.  If
             the addresses aren't equal, then it might be a false dependency.  */
          if (dep_type == TYPE_STORE || dep_type == TYPE_FPSTORE)
            {
              if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET
                  || GET_CODE (SET_DEST (dep_pat)) != MEM
                  || GET_CODE (SET_SRC (pat)) != MEM
                  || ! rtx_equal_p (XEXP (SET_DEST (dep_pat), 0),
                                    XEXP (SET_SRC (pat), 0)))
                return cost + 2;

              return cost + 8;
            }
          break;

        case TYPE_BRANCH:
          /* Compare to branch latency is 0.  There is no benefit from
             separating compare and branch.  */
          if (dep_type == TYPE_COMPARE)
            return 0;

          /* Floating point compare to branch latency is less than
             compare to conditional move.  */
          if (dep_type == TYPE_FPCMP)
            return cost - 1;
          break;

        default:
          break;
        }
      break;

    case REG_DEP_ANTI:
      /* Anti-dependencies only penalize the fpu unit.  */
      if (insn_type == TYPE_IALU || insn_type == TYPE_SHIFT)
        return 0;
      break;

    default:
      break;
    }

  return cost;
}
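
/* Added illustration: for the load cases above, a load right after a store
   to the very same address, e.g.

	st	%o2, [%o0]
	ld	[%o0], %o1

   pays the full penalty (cost + 8), whereas when the two addresses are not
   provably equal the dependence may be false and only a small penalty
   (cost + 2) is charged.  */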
static int
sparc_adjust_cost(insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  switch (sparc_cpu)
    {
    case PROCESSOR_SUPERSPARC:
      cost = supersparc_adjust_cost (insn, link, dep, cost);
      break;
    case PROCESSOR_HYPERSPARC:
    case PROCESSOR_SPARCLITE86X:
      cost = hypersparc_adjust_cost (insn, link, dep, cost);
      break;
    default:
      break;
    }
  return cost;
}
static void
sparc_sched_init (dump, sched_verbose, max_ready)
     FILE *dump ATTRIBUTE_UNUSED;
     int sched_verbose ATTRIBUTE_UNUSED;
     int max_ready ATTRIBUTE_UNUSED;
{
}

static int
sparc_use_dfa_pipeline_interface ()
{
  if ((1 << sparc_cpu) &
      ((1 << PROCESSOR_ULTRASPARC) | (1 << PROCESSOR_CYPRESS) |
       (1 << PROCESSOR_SUPERSPARC) | (1 << PROCESSOR_HYPERSPARC) |
       (1 << PROCESSOR_SPARCLITE86X) | (1 << PROCESSOR_TSC701) |
       (1 << PROCESSOR_ULTRASPARC3)))
    return 1;
  return 0;
}

static int
sparc_use_sched_lookahead ()
{
  if (sparc_cpu == PROCESSOR_ULTRASPARC
      || sparc_cpu == PROCESSOR_ULTRASPARC3)
    return 4;
  if ((1 << sparc_cpu) &
      ((1 << PROCESSOR_SUPERSPARC) | (1 << PROCESSOR_HYPERSPARC) |
       (1 << PROCESSOR_SPARCLITE86X)))
    return 3;
  return 0;
}

static int
sparc_issue_rate ()
{
  switch (sparc_cpu)
    {
    default:
      return 1;
    case PROCESSOR_V9:
      /* Assume V9 processors are capable of at least dual-issue.  */
      return 2;
    case PROCESSOR_SUPERSPARC:
      return 3;
    case PROCESSOR_HYPERSPARC:
    case PROCESSOR_SPARCLITE86X:
      return 2;
    case PROCESSOR_ULTRASPARC:
    case PROCESSOR_ULTRASPARC3:
      return 4;
    }
}

static int
set_extends (insn)
     rtx insn;
{
  register rtx pat = PATTERN (insn);

  switch (GET_CODE (SET_SRC (pat)))
    {
      /* Load and some shift instructions zero extend.  */
    case MEM:
    case ZERO_EXTEND:
      /* sethi clears the high bits */
    case HIGH:
      /* LO_SUM is used with sethi.  sethi cleared the high
         bits and the values used with lo_sum are positive */
    case LO_SUM:
      /* Store flag stores 0 or 1 */
    case LT: case LTU:
    case GT: case GTU:
    case LE: case LEU:
    case GE: case GEU:
    case EQ:
    case NE:
      return 1;
    case AND:
      {
        rtx op0 = XEXP (SET_SRC (pat), 0);
        rtx op1 = XEXP (SET_SRC (pat), 1);
        if (GET_CODE (op1) == CONST_INT)
          return INTVAL (op1) >= 0;
        if (GET_CODE (op0) != REG)
          return 0;
        if (sparc_check_64 (op0, insn) == 1)
          return 1;
        return (GET_CODE (op1) == REG && sparc_check_64 (op1, insn) == 1);
      }
    case IOR:
    case XOR:
      {
        rtx op0 = XEXP (SET_SRC (pat), 0);
        rtx op1 = XEXP (SET_SRC (pat), 1);
        if (GET_CODE (op0) != REG || sparc_check_64 (op0, insn) <= 0)
          return 0;
        if (GET_CODE (op1) == CONST_INT)
          return INTVAL (op1) >= 0;
        return (GET_CODE (op1) == REG && sparc_check_64 (op1, insn) == 1);
      }
    case LSHIFTRT:
      return GET_MODE (SET_SRC (pat)) == SImode;
      /* Positive integers leave the high bits zero.  */
    case CONST_DOUBLE:
      return ! (CONST_DOUBLE_LOW (SET_SRC (pat)) & 0x80000000);
    case CONST_INT:
      return ! (INTVAL (SET_SRC (pat)) & 0x80000000);
    case ASHIFTRT:
    case SIGN_EXTEND:
      return - (GET_MODE (SET_SRC (pat)) == SImode);
    case REG:
      return sparc_check_64 (SET_SRC (pat), insn);
    default:
      return 0;
    }
}
/* We _ought_ to have only one kind per function, but...  */
static rtx sparc_addr_diff_list;
static rtx sparc_addr_list;

void
sparc_defer_case_vector (lab, vec, diff)
     rtx lab, vec;
     int diff;
{
  vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
  if (diff)
    sparc_addr_diff_list
      = gen_rtx_EXPR_LIST (VOIDmode, vec, sparc_addr_diff_list);
  else
    sparc_addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec, sparc_addr_list);
}

static void
sparc_output_addr_vec (vec)
     rtx vec;
{
  rtx lab = XEXP (vec, 0), body = XEXP (vec, 1);
  int idx, vlen = XVECLEN (body, 0);

#ifdef ASM_OUTPUT_ADDR_VEC_START
  ASM_OUTPUT_ADDR_VEC_START (asm_out_file);
#endif

#ifdef ASM_OUTPUT_CASE_LABEL
  ASM_OUTPUT_CASE_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (lab),
                         NEXT_INSN (lab));
#else
  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (lab));
#endif

  for (idx = 0; idx < vlen; idx++)
    {
      ASM_OUTPUT_ADDR_VEC_ELT
        (asm_out_file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
    }

#ifdef ASM_OUTPUT_ADDR_VEC_END
  ASM_OUTPUT_ADDR_VEC_END (asm_out_file);
#endif
}

static void
sparc_output_addr_diff_vec (vec)
     rtx vec;
{
  rtx lab = XEXP (vec, 0), body = XEXP (vec, 1);
  rtx base = XEXP (XEXP (body, 0), 0);
  int idx, vlen = XVECLEN (body, 1);

#ifdef ASM_OUTPUT_ADDR_VEC_START
  ASM_OUTPUT_ADDR_VEC_START (asm_out_file);
#endif

#ifdef ASM_OUTPUT_CASE_LABEL
  ASM_OUTPUT_CASE_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (lab),
                         NEXT_INSN (lab));
#else
  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (lab));
#endif

  for (idx = 0; idx < vlen; idx++)
    {
      ASM_OUTPUT_ADDR_DIFF_ELT
        (asm_out_file,
         body,
         CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
         CODE_LABEL_NUMBER (base));
    }

#ifdef ASM_OUTPUT_ADDR_VEC_END
  ASM_OUTPUT_ADDR_VEC_END (asm_out_file);
#endif
}

void
sparc_output_deferred_case_vectors ()
{
  rtx t;
  int align;

  if (sparc_addr_list == NULL_RTX
      && sparc_addr_diff_list == NULL_RTX)
    return;

  /* Align to cache line in the function's code section.  */
  function_section (current_function_decl);

  align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
  if (align > 0)
    ASM_OUTPUT_ALIGN (asm_out_file, align);

  for (t = sparc_addr_list; t ; t = XEXP (t, 1))
    sparc_output_addr_vec (XEXP (t, 0));
  for (t = sparc_addr_diff_list; t ; t = XEXP (t, 1))
    sparc_output_addr_diff_vec (XEXP (t, 0));

  sparc_addr_list = sparc_addr_diff_list = NULL_RTX;
}
/* Return 0 if the high 32 bits of X (the low word of X, if DImode) are
   unknown.  Return 1 if the high bits are zero, -1 if the register is
   sign extended.  */
int
sparc_check_64 (x, insn)
     rtx x, insn;
{
  /* If a register is set only once it is safe to ignore insns this
     code does not know how to handle.  The loop will either recognize
     the single set and return the correct value or fail to recognize
     it and return 0.  */
  int set_once = 0;
  rtx y = x;

  if (GET_CODE (x) != REG)
    abort ();

  if (GET_MODE (x) == DImode)
    y = gen_rtx_REG (SImode, REGNO (x) + WORDS_BIG_ENDIAN);

  if (flag_expensive_optimizations
      && REG_N_SETS (REGNO (y)) == 1)
    set_once = 1;

  if (insn == 0)
    {
      if (set_once)
        insn = get_last_insn_anywhere ();
      else
        return 0;
    }

  while ((insn = PREV_INSN (insn)))
    {
      switch (GET_CODE (insn))
        {
        case JUMP_INSN:
        case NOTE:
          break;
        case CODE_LABEL:
        case CALL_INSN:
        default:
          if (! set_once)
            return 0;
          break;
        case INSN:
          {
            rtx pat = PATTERN (insn);
            if (GET_CODE (pat) != SET)
              return 0;
            if (rtx_equal_p (x, SET_DEST (pat)))
              return set_extends (insn);
            if (y && rtx_equal_p (y, SET_DEST (pat)))
              return set_extends (insn);
            if (reg_overlap_mentioned_p (SET_DEST (pat), y))
              return 0;
          }
        }
    }

  return 0;
}
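
/* Added illustration: if the defining insn found by the walk above is,
   say, "and %o1, 0xff, %o2", set_extends sees an AND with a non-negative
   constant and sparc_check_64 returns 1 (high 32 bits known zero); an
   SImode arithmetic right shift would instead yield -1 (known to be
   sign extended).  */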
const char *
sparc_v8plus_shift (operands, insn, opcode)
     rtx *operands;
     rtx insn;
     const char *opcode;
{
  static char asm_code[60];

  if (GET_CODE (operands[3]) == SCRATCH)
    operands[3] = operands[0];
  if (GET_CODE (operands[1]) == CONST_INT)
    {
      output_asm_insn ("mov\t%1, %3", operands);
    }
  else
    {
      output_asm_insn ("sllx\t%H1, 32, %3", operands);
      if (sparc_check_64 (operands[1], insn) <= 0)
        output_asm_insn ("srl\t%L1, 0, %L1", operands);
      output_asm_insn ("or\t%L1, %3, %3", operands);
    }

  strcpy(asm_code, opcode);
  if (which_alternative != 2)
    return strcat (asm_code, "\t%0, %2, %L0\n\tsrlx\t%L0, 32, %H0");
  else
    return strcat (asm_code, "\t%3, %2, %3\n\tsrlx\t%3, 32, %H0\n\tmov\t%3, %L0");
}
/* Output rtl to increment the profiler label LABELNO
   for profiling a function entry.  */

void
sparc_profile_hook (labelno)
     int labelno;
{
  char buf[32];
  rtx lab, fun;

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
  fun = gen_rtx_SYMBOL_REF (Pmode, MCOUNT_FUNCTION);

  emit_library_call (fun, LCT_NORMAL, VOIDmode, 1, lab, Pmode);
}
/* Called to register all of our global variables with the garbage
   collector.  */

static void
sparc_add_gc_roots ()
{
  ggc_add_rtx_root (&sparc_compare_op0, 1);
  ggc_add_rtx_root (&sparc_compare_op1, 1);
  ggc_add_rtx_root (&global_offset_table, 1);
  ggc_add_rtx_root (&get_pc_symbol, 1);
  ggc_add_rtx_root (&sparc_addr_diff_list, 1);
  ggc_add_rtx_root (&sparc_addr_list, 1);
}
#ifdef OBJECT_FORMAT_ELF
static void
sparc_elf_asm_named_section (name, flags)
     const char *name;
     unsigned int flags;
{
  if (flags & SECTION_MERGE)
    {
      /* entsize cannot be expressed in this section attributes
         encoding style.  */
      default_elf_asm_named_section (name, flags);
      return;
    }

  fprintf (asm_out_file, "\t.section\t\"%s\"", name);

  if (!(flags & SECTION_DEBUG))
    fputs (",#alloc", asm_out_file);
  if (flags & SECTION_WRITE)
    fputs (",#write", asm_out_file);
  if (flags & SECTION_CODE)
    fputs (",#execinstr", asm_out_file);

  /* ??? Handle SECTION_BSS.  */

  fputc ('\n', asm_out_file);
}
#endif /* OBJECT_FORMAT_ELF */
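
/* Added illustration: for a writable data section the function above emits
   a directive of the form

	.section "my_section",#alloc,#write

   (the name here is just an example); a code section additionally gets
   ",#execinstr" and a debug section drops the "#alloc" marker.  */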
int
sparc_extra_constraint_check (op, c, strict)
     rtx op;
     int c;
     int strict;
{
  int reload_ok_mem;

  if (TARGET_ARCH64
      && (c == 'T' || c == 'U'))
    return 0;

  switch (c)
    {
    case 'Q':
      return fp_sethi_p (op);

    case 'R':
      return fp_mov_p (op);

    case 'S':
      return fp_high_losum_p (op);

    case 'U':
      if (! strict
          || (GET_CODE (op) == REG
              && (REGNO (op) < FIRST_PSEUDO_REGISTER
                  || reg_renumber[REGNO (op)] >= 0)))
        return register_ok_for_ldd (op);

      return 0;

    case 'W':
    case 'T':
      break;

    default:
      return 0;
    }

  /* Our memory extra constraints have to emulate the
     behavior of 'm' and 'o' in order for reload to work
     correctly.  */
  if (GET_CODE (op) == MEM)
    {
      reload_ok_mem = 0;
      if ((TARGET_ARCH64 || mem_min_alignment (op, 8))
          && (! strict
              || strict_memory_address_p (Pmode, XEXP (op, 0))))
        reload_ok_mem = 1;
    }
  else
    {
      reload_ok_mem = (reload_in_progress
                       && GET_CODE (op) == REG
                       && REGNO (op) >= FIRST_PSEUDO_REGISTER
                       && reg_renumber [REGNO (op)] < 0);
    }

  return reload_ok_mem;
}
/* ??? This duplicates information provided to the compiler by the
   ??? scheduler description.  Some day, teach genautomata to output
   ??? the latencies and then CSE will just use that.  */

int
sparc_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  switch (code)
    {
    case PLUS: case MINUS: case ABS: case NEG:
    case FLOAT: case UNSIGNED_FLOAT:
    case FIX: case UNSIGNED_FIX:
    case FLOAT_EXTEND: case FLOAT_TRUNCATE:
      if (FLOAT_MODE_P (GET_MODE (x)))
        {
          switch (sparc_cpu)
            {
            case PROCESSOR_ULTRASPARC:
            case PROCESSOR_ULTRASPARC3:
              return COSTS_N_INSNS (4);

            case PROCESSOR_SUPERSPARC:
              return COSTS_N_INSNS (3);

            case PROCESSOR_CYPRESS:
              return COSTS_N_INSNS (5);

            case PROCESSOR_HYPERSPARC:
            case PROCESSOR_SPARCLITE86X:
            default:
              return COSTS_N_INSNS (1);
            }
        }

      return COSTS_N_INSNS (1);

    case SQRT:
      switch (sparc_cpu)
        {
        case PROCESSOR_ULTRASPARC:
          if (GET_MODE (x) == SFmode)
            return COSTS_N_INSNS (13);
          else
            return COSTS_N_INSNS (23);

        case PROCESSOR_ULTRASPARC3:
          if (GET_MODE (x) == SFmode)
            return COSTS_N_INSNS (20);
          else
            return COSTS_N_INSNS (29);

        case PROCESSOR_SUPERSPARC:
          return COSTS_N_INSNS (12);

        case PROCESSOR_CYPRESS:
          return COSTS_N_INSNS (63);

        case PROCESSOR_HYPERSPARC:
        case PROCESSOR_SPARCLITE86X:
          return COSTS_N_INSNS (17);

        default:
          return COSTS_N_INSNS (30);
        }

    case COMPARE:
      if (FLOAT_MODE_P (GET_MODE (x)))
        {
          switch (sparc_cpu)
            {
            case PROCESSOR_ULTRASPARC:
            case PROCESSOR_ULTRASPARC3:
              return COSTS_N_INSNS (1);

            case PROCESSOR_SUPERSPARC:
              return COSTS_N_INSNS (3);

            case PROCESSOR_CYPRESS:
              return COSTS_N_INSNS (5);

            case PROCESSOR_HYPERSPARC:
            case PROCESSOR_SPARCLITE86X:
            default:
              return COSTS_N_INSNS (1);
            }
        }

      /* ??? Maybe mark integer compares as zero cost on
         ??? all UltraSPARC processors because the result
         ??? can be bypassed to a branch in the same group.  */

      return COSTS_N_INSNS (1);

    case MULT:
      if (FLOAT_MODE_P (GET_MODE (x)))
        {
          switch (sparc_cpu)
            {
            case PROCESSOR_ULTRASPARC:
            case PROCESSOR_ULTRASPARC3:
              return COSTS_N_INSNS (4);

            case PROCESSOR_SUPERSPARC:
              return COSTS_N_INSNS (3);

            case PROCESSOR_CYPRESS:
              return COSTS_N_INSNS (7);

            case PROCESSOR_HYPERSPARC:
            case PROCESSOR_SPARCLITE86X:
              return COSTS_N_INSNS (1);

            default:
              return COSTS_N_INSNS (5);
            }
        }

      /* The latency is actually variable for Ultra-I/II
         And if one of the inputs have a known constant
         value, we could calculate this precisely.

         However, for that to be useful we would need to
         add some machine description changes which would
         make sure small constants ended up in rs1 of the
         multiply instruction.  This is because the multiply
         latency is determined by the number of clear (or
         set if the value is negative) bits starting from
         the most significant bit of the first input.

         The algorithm for computing num_cycles of a multiply
         on Ultra-I/II is:

                if (rs1 < 0)
                        highest_bit = highest_clear_bit(rs1);
                else
                        highest_bit = highest_set_bit(rs1);
                if (highest_bit < 3)
                        highest_bit = 3;
                num_cycles = 4 + ((highest_bit - 3) / 2);

         If we did that we would have to also consider register
         allocation issues that would result from forcing such
         a value into a register.

         There are other similar tricks we could play if we
         knew, for example, that one input was an array index.

         Since we do not play any such tricks currently the
         safest thing to do is report the worst case latency.  */
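      /* Worked example (added for illustration): taking highest_bit at its
         64-bit maximum of 63, the formula above gives
         num_cycles = 4 + ((63 - 3) / 2) = 34, which is exactly the DImode
         worst case reported below.  */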
      if (sparc_cpu == PROCESSOR_ULTRASPARC)
        return (GET_MODE (x) == DImode ?
                COSTS_N_INSNS (34) : COSTS_N_INSNS (19));

      /* Multiply latency on Ultra-III, fortunately, is constant.  */
      if (sparc_cpu == PROCESSOR_ULTRASPARC3)
        return COSTS_N_INSNS (6);

      if (sparc_cpu == PROCESSOR_HYPERSPARC
          || sparc_cpu == PROCESSOR_SPARCLITE86X)
        return COSTS_N_INSNS (17);

      return (TARGET_HARD_MUL
              ? COSTS_N_INSNS (5)
              : COSTS_N_INSNS (25));

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      if (FLOAT_MODE_P (GET_MODE (x)))
        {
          switch (sparc_cpu)
            {
            case PROCESSOR_ULTRASPARC:
              if (GET_MODE (x) == SFmode)
                return COSTS_N_INSNS (13);
              else
                return COSTS_N_INSNS (23);

            case PROCESSOR_ULTRASPARC3:
              if (GET_MODE (x) == SFmode)
                return COSTS_N_INSNS (17);
              else
                return COSTS_N_INSNS (20);

            case PROCESSOR_SUPERSPARC:
              if (GET_MODE (x) == SFmode)
                return COSTS_N_INSNS (6);
              else
                return COSTS_N_INSNS (9);

            case PROCESSOR_HYPERSPARC:
            case PROCESSOR_SPARCLITE86X:
              if (GET_MODE (x) == SFmode)
                return COSTS_N_INSNS (8);
              else
                return COSTS_N_INSNS (12);

            default:
              return COSTS_N_INSNS (7);
            }
        }

      if (sparc_cpu == PROCESSOR_ULTRASPARC)
        return (GET_MODE (x) == DImode ?
                COSTS_N_INSNS (68) : COSTS_N_INSNS (37));
      if (sparc_cpu == PROCESSOR_ULTRASPARC3)
        return (GET_MODE (x) == DImode ?
                COSTS_N_INSNS (71) : COSTS_N_INSNS (40));
      return COSTS_N_INSNS (25);

    case IF_THEN_ELSE:
      /* Conditional moves.  */
      switch (sparc_cpu)
        {
        case PROCESSOR_ULTRASPARC:
          return COSTS_N_INSNS (2);

        case PROCESSOR_ULTRASPARC3:
          if (FLOAT_MODE_P (GET_MODE (x)))
            return COSTS_N_INSNS (3);
          else
            return COSTS_N_INSNS (2);

        default:
          return COSTS_N_INSNS (1);
        }

    case MEM:
      /* If outer-code is SIGN/ZERO extension we have to subtract
         out COSTS_N_INSNS (1) from whatever we return in determining
         the cost.  */
      switch (sparc_cpu)
        {
        case PROCESSOR_ULTRASPARC:
          if (outer_code == ZERO_EXTEND)
            return COSTS_N_INSNS (1);
          else
            return COSTS_N_INSNS (2);

        case PROCESSOR_ULTRASPARC3:
          if (outer_code == ZERO_EXTEND)
            {
              if (GET_MODE (x) == QImode
                  || GET_MODE (x) == HImode
                  || outer_code == SIGN_EXTEND)
                return COSTS_N_INSNS (2);
              else
                return COSTS_N_INSNS (1);
            }
          else
            {
              /* This handles sign extension (3 cycles)
                 and everything else (2 cycles).  */
              return COSTS_N_INSNS (2);
            }

        case PROCESSOR_SUPERSPARC:
          if (FLOAT_MODE_P (GET_MODE (x))
              || outer_code == ZERO_EXTEND
              || outer_code == SIGN_EXTEND)
            return COSTS_N_INSNS (0);
          else
            return COSTS_N_INSNS (1);

        case PROCESSOR_TSC701:
          if (outer_code == ZERO_EXTEND
              || outer_code == SIGN_EXTEND)
            return COSTS_N_INSNS (2);
          else
            return COSTS_N_INSNS (3);

        case PROCESSOR_CYPRESS:
          if (outer_code == ZERO_EXTEND
              || outer_code == SIGN_EXTEND)
            return COSTS_N_INSNS (1);
          else
            return COSTS_N_INSNS (2);

        case PROCESSOR_HYPERSPARC:
        case PROCESSOR_SPARCLITE86X:
        default:
          if (outer_code == ZERO_EXTEND
              || outer_code == SIGN_EXTEND)
            return COSTS_N_INSNS (0);
          else
            return COSTS_N_INSNS (1);
        }

    case CONST_INT:
      if (INTVAL (x) < 0x1000 && INTVAL (x) >= -0x1000)
        return 0;

    /* fallthru */
    case HIGH:
      return 2;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 4;

    case CONST_DOUBLE:
      if (GET_MODE (x) == DImode)
        if ((XINT (x, 3) == 0
             && (unsigned) XINT (x, 2) < 0x1000)
            || (XINT (x, 3) == -1
                && XINT (x, 2) < 0
                && XINT (x, 2) >= -0x1000))