/* Subroutines for insn-output.c for SPARC.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)
   64-bit SPARC-V9 support by Michael Tiemann, Jim Wilson, and Doug Evans,
   at Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "toplev.h"
#include "ggc.h"
#include "tm_p.h"
#include "debug.h"
#include "target.h"
#include "target-def.h"
#include "cfglayout.h"
/* 1 if the caller has placed an "unimp" insn immediately after the call.
   This is used in v8 code when calling a function that returns a structure.
   v9 doesn't have this.  Be careful to have this test be the same as that
   used on the call.  */

#define SKIP_CALLERS_UNIMP_P  \
(!TARGET_ARCH64 && current_function_returns_struct			\
 && ! integer_zerop (DECL_SIZE (DECL_RESULT (current_function_decl)))	\
 && (TREE_CODE (DECL_SIZE (DECL_RESULT (current_function_decl)))	\
     == INTEGER_CST))
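
/* Note: when SKIP_CALLERS_UNIMP_P holds, the function was compiled for the
   v8 structure-return convention, in which the caller places an
   "unimp <size>" word right after the call; the return sequences elsewhere
   in this file then come back to %i7+12 instead of the usual %i7+8 so that
   word is skipped.  */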
/* Global variables for machine-dependent things.  */

/* Size of frame.  Need to know this to emit return insns from leaf procedures.
   ACTUAL_FSIZE is set by compute_frame_size() which is called during the
   reload pass.  This is important as the value is later used in insn
   scheduling (to see what can go in a delay slot).
   APPARENT_FSIZE is the size of the stack less the register save area and less
   the outgoing argument area.  It is used when saving call preserved regs.  */
static int apparent_fsize;
static int actual_fsize;

/* Number of live general or floating point registers needed to be
   saved (as 4-byte quantities).  */
static int num_gfregs;

/* Save the operands last given to a compare for use when we
   generate a scc or bcc insn.  */
rtx sparc_compare_op0, sparc_compare_op1;

/* Coordinate with the md file wrt special insns created by
   sparc_nonflat_function_epilogue.  */
bool sparc_emitting_epilogue;
/* Vector to say how input registers are mapped to output registers.
   HARD_FRAME_POINTER_REGNUM cannot be remapped by this function to
   eliminate it.  You must use -fomit-frame-pointer to get that.  */
char leaf_reg_remap[] =
{ 0, 1, 2, 3, 4, 5, 6, 7,
  -1, -1, -1, -1, -1, -1, 14, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,
  8, 9, 10, 11, 12, 13, -1, 15,

  32, 33, 34, 35, 36, 37, 38, 39,
  40, 41, 42, 43, 44, 45, 46, 47,
  48, 49, 50, 51, 52, 53, 54, 55,
  56, 57, 58, 59, 60, 61, 62, 63,
  64, 65, 66, 67, 68, 69, 70, 71,
  72, 73, 74, 75, 76, 77, 78, 79,
  80, 81, 82, 83, 84, 85, 86, 87,
  88, 89, 90, 91, 92, 93, 94, 95,
  96, 97, 98, 99, 100};
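
/* Reading the table above: a leaf function runs entirely in its caller's
   register window, so the incoming registers %i0-%i5 (hard regs 24-29) are
   renamed onto the outgoing registers %o0-%o5 (hard regs 8-13) and %i7 onto
   %o7; the globals, %sp and the floating point registers keep their own
   numbers, and a -1 entry marks a register that leaf functions never use
   (see sparc_leaf_regs below).  */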
/* Vector, indexed by hard register number, which contains 1
   for a register that is allowable in a candidate for leaf
   function treatment.  */
char sparc_leaf_regs[] =
{ 1, 1, 1, 1, 1, 1, 1, 1,
  0, 0, 0, 0, 0, 0, 1, 0,
  0, 0, 0, 0, 0, 0, 0, 0,
  1, 1, 1, 1, 1, 1, 0, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1};
/* Name of where we pretend to think the frame pointer points.
   Normally, this is "%fp", but if we are in a leaf procedure,
   this is "%sp+something".  We record "something" separately as it may be
   too big for reg+constant addressing.  */

static const char *frame_base_name;
static int frame_base_offset;
static void sparc_init_modes (void);
static int save_regs (FILE *, int, int, const char *, int, int, int);
static int restore_regs (FILE *, int, int, const char *, int, int);
static void build_big_number (FILE *, int, const char *);
static int function_arg_slotno (const CUMULATIVE_ARGS *, enum machine_mode,
				tree, int, int, int *, int *);

static int supersparc_adjust_cost (rtx, rtx, rtx, int);
static int hypersparc_adjust_cost (rtx, rtx, rtx, int);

static void sparc_output_addr_vec (rtx);
static void sparc_output_addr_diff_vec (rtx);
static void sparc_output_deferred_case_vectors (void);
static int check_return_regs (rtx);
static int epilogue_renumber (rtx *, int);
static bool sparc_assemble_integer (rtx, unsigned int, int);
static int set_extends (rtx);
static void output_restore_regs (FILE *, int);
static void sparc_output_function_prologue (FILE *, HOST_WIDE_INT);
static void sparc_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void sparc_flat_function_epilogue (FILE *, HOST_WIDE_INT);
static void sparc_flat_function_prologue (FILE *, HOST_WIDE_INT);
static void sparc_nonflat_function_epilogue (FILE *, HOST_WIDE_INT, int);
static void sparc_nonflat_function_prologue (FILE *, HOST_WIDE_INT, int);
#ifdef OBJECT_FORMAT_ELF
static void sparc_elf_asm_named_section (const char *, unsigned int);
#endif
static void sparc_aout_select_section (tree, int, unsigned HOST_WIDE_INT)
     ATTRIBUTE_UNUSED;
static void sparc_aout_select_rtx_section (enum machine_mode, rtx,
					   unsigned HOST_WIDE_INT)
     ATTRIBUTE_UNUSED;

static int sparc_adjust_cost (rtx, rtx, rtx, int);
static int sparc_issue_rate (void);
static void sparc_sched_init (FILE *, int, int);
static int sparc_use_dfa_pipeline_interface (void);
static int sparc_use_sched_lookahead (void);

static void emit_soft_tfmode_libcall (const char *, int, rtx *);
static void emit_soft_tfmode_binop (enum rtx_code, rtx *);
static void emit_soft_tfmode_unop (enum rtx_code, rtx *);
static void emit_soft_tfmode_cvt (enum rtx_code, rtx *);
static void emit_hard_tfmode_operation (enum rtx_code, rtx *);

static bool sparc_function_ok_for_sibcall (tree, tree);
static void sparc_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				   HOST_WIDE_INT, tree);
static bool sparc_rtx_costs (rtx, int, int, int *);
/* Option handling.  */

/* Code model option as passed by user.  */
const char *sparc_cmodel_string;
/* Parsed value.  */
enum cmodel sparc_cmodel;

char sparc_hard_reg_printed[8];

struct sparc_cpu_select sparc_select[] =
{
  /* switch	name,		tune	arch */
  { (char *)0,	"default",	1,	1 },
  { (char *)0,	"-mcpu=",	1,	1 },
  { (char *)0,	"-mtune=",	1,	0 },
  { 0, 0, 0, 0 }
};

/* CPU type.  This is set from TARGET_CPU_DEFAULT and -m{cpu,tune}=xxx.  */
enum processor_type sparc_cpu;
/* Initialize the GCC target structure.  */

/* The sparc default is to use .half rather than .short for aligned
   HI objects.  Use .word instead of .long on non-ELF systems.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
#ifndef OBJECT_FORMAT_ELF
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#endif

#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.uahalf\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.uaword\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.uaxword\t"

/* The target hook has to handle DI-mode values.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER sparc_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE sparc_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE sparc_output_function_epilogue

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST sparc_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE sparc_issue_rate
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT sparc_sched_init
#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE sparc_use_dfa_pipeline_interface
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD sparc_use_sched_lookahead

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL sparc_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK sparc_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS sparc_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

struct gcc_target targetm = TARGET_INITIALIZER;
/* Validate and override various options, and do some machine dependent
   initialization.  */

void
sparc_override_options (void)
{
  static struct code_model {
    const char *const name;
    const int value;
  } const cmodels[] = {
    { "32", CM_32 },
    { "medlow", CM_MEDLOW },
    { "medmid", CM_MEDMID },
    { "medany", CM_MEDANY },
    { "embmedany", CM_EMBMEDANY },
    { 0, 0 }
  };
  const struct code_model *cmodel;
  /* Map TARGET_CPU_DEFAULT to value for -m{arch,tune}=.  */
  static struct cpu_default {
    const int cpu;
    const char *const name;
  } const cpu_default[] = {
    /* There must be one entry here for each TARGET_CPU value.  */
    { TARGET_CPU_sparc, "cypress" },
    { TARGET_CPU_sparclet, "tsc701" },
    { TARGET_CPU_sparclite, "f930" },
    { TARGET_CPU_v8, "v8" },
    { TARGET_CPU_hypersparc, "hypersparc" },
    { TARGET_CPU_sparclite86x, "sparclite86x" },
    { TARGET_CPU_supersparc, "supersparc" },
    { TARGET_CPU_v9, "v9" },
    { TARGET_CPU_ultrasparc, "ultrasparc" },
    { TARGET_CPU_ultrasparc3, "ultrasparc3" },
    { 0, 0 }
  };
  const struct cpu_default *def;
  /* Table of values for -m{cpu,tune}=.  */
  static struct cpu_table {
    const char *const name;
    const enum processor_type processor;
    const int disable;
    const int enable;
  } const cpu_table[] = {
    { "v7",         PROCESSOR_V7, MASK_ISA, 0 },
    { "cypress",    PROCESSOR_CYPRESS, MASK_ISA, 0 },
    { "v8",         PROCESSOR_V8, MASK_ISA, MASK_V8 },
    /* TI TMS390Z55 supersparc */
    { "supersparc", PROCESSOR_SUPERSPARC, MASK_ISA, MASK_V8 },
    { "sparclite",  PROCESSOR_SPARCLITE, MASK_ISA, MASK_SPARCLITE },
    /* The Fujitsu MB86930 is the original sparclite chip, with no fpu.
       The Fujitsu MB86934 is the recent sparclite chip, with an fpu.  */
    { "f930",       PROCESSOR_F930, MASK_ISA|MASK_FPU, MASK_SPARCLITE },
    { "f934",       PROCESSOR_F934, MASK_ISA, MASK_SPARCLITE|MASK_FPU },
    { "hypersparc", PROCESSOR_HYPERSPARC, MASK_ISA, MASK_V8|MASK_FPU },
    { "sparclite86x", PROCESSOR_SPARCLITE86X, MASK_ISA|MASK_FPU,
      MASK_SPARCLITE },
    { "sparclet",   PROCESSOR_SPARCLET, MASK_ISA, MASK_SPARCLET },
    /* TEMIC sparclet */
    { "tsc701",     PROCESSOR_TSC701, MASK_ISA, MASK_SPARCLET },
    { "v9",         PROCESSOR_V9, MASK_ISA, MASK_V9 },
    /* TI ultrasparc I, II, IIi */
    { "ultrasparc", PROCESSOR_ULTRASPARC, MASK_ISA, MASK_V9
    /* Although insns using %y are deprecated, it is a clear win on current
       ultrasparcs.  */
						    |MASK_DEPRECATED_V8_INSNS},
    /* TI ultrasparc III */
    /* ??? Check if %y issue still holds true in ultra3.  */
    { "ultrasparc3", PROCESSOR_ULTRASPARC3, MASK_ISA, MASK_V9|MASK_DEPRECATED_V8_INSNS},
    { 0, 0, 0, 0 }
  };
  const struct cpu_table *cpu;
  const struct sparc_cpu_select *sel;
  int fpu;

#ifndef SPARC_BI_ARCH
  /* Check for unsupported architecture size.  */
  if (! TARGET_64BIT != DEFAULT_ARCH32_P)
    error ("%s is not supported by this configuration",
	   DEFAULT_ARCH32_P ? "-m64" : "-m32");
#endif

  /* We force all 64bit archs to use 128 bit long double.  */
  if (TARGET_64BIT && ! TARGET_LONG_DOUBLE_128)
    {
      error ("-mlong-double-64 not allowed with -m64");
      target_flags |= MASK_LONG_DOUBLE_128;
    }

  /* Code model selection.  */
  sparc_cmodel = SPARC_DEFAULT_CMODEL;

#ifdef SPARC_BI_ARCH
  if (TARGET_ARCH32)
    sparc_cmodel = CM_32;
#endif

  if (sparc_cmodel_string != NULL)
    {
      if (TARGET_ARCH64)
	{
	  for (cmodel = &cmodels[0]; cmodel->name; cmodel++)
	    if (strcmp (sparc_cmodel_string, cmodel->name) == 0)
	      break;
	  if (cmodel->name == NULL)
	    error ("bad value (%s) for -mcmodel= switch", sparc_cmodel_string);
	  else
	    sparc_cmodel = cmodel->value;
	}
      else
	error ("-mcmodel= is not supported on 32 bit systems");
    }

  fpu = TARGET_FPU; /* save current -mfpu status */

  /* Set the default CPU.  */
  for (def = &cpu_default[0]; def->name; ++def)
    if (def->cpu == TARGET_CPU_DEFAULT)
      break;
  if (! def->name)
    abort ();
  sparc_select[0].string = def->name;

  for (sel = &sparc_select[0]; sel->name; ++sel)
    {
      if (sel->string)
	{
	  for (cpu = &cpu_table[0]; cpu->name; ++cpu)
	    if (! strcmp (sel->string, cpu->name))
	      {
		if (sel->set_tune_p)
		  sparc_cpu = cpu->processor;

		if (sel->set_arch_p)
		  {
		    target_flags &= ~cpu->disable;
		    target_flags |= cpu->enable;
		  }
		break;
	      }

	  if (! cpu->name)
	    error ("bad value (%s) for %s switch", sel->string, sel->name);
	}
    }

  /* If -mfpu or -mno-fpu was explicitly used, don't override with
     the processor default.  Clear MASK_FPU_SET to avoid confusing
     the reverse mapping from switch values to names.  */
  if (TARGET_FPU_SET)
    {
      target_flags = (target_flags & ~MASK_FPU) | fpu;
      target_flags &= ~MASK_FPU_SET;
    }

  /* Don't allow -mvis if FPU is disabled.  */
  if (! TARGET_FPU)
    target_flags &= ~MASK_VIS;

  /* -mvis assumes UltraSPARC+, so we are sure v9 instructions
     are available.
     -m64 also implies v9.  */
  if (TARGET_VIS || TARGET_ARCH64)
    {
      target_flags |= MASK_V9;
      target_flags &= ~(MASK_V8 | MASK_SPARCLET | MASK_SPARCLITE);
    }

  /* Use the deprecated v8 insns for sparc64 in 32 bit mode.  */
  if (TARGET_V9 && TARGET_ARCH32)
    target_flags |= MASK_DEPRECATED_V8_INSNS;

  /* V8PLUS requires V9, makes no sense in 64 bit mode.  */
  if (! TARGET_V9 || TARGET_ARCH64)
    target_flags &= ~MASK_V8PLUS;

  /* Don't use stack biasing in 32 bit mode.  */
  if (TARGET_ARCH32)
    target_flags &= ~MASK_STACK_BIAS;

  /* Supply a default value for align_functions.  */
  if (align_functions == 0
      && (sparc_cpu == PROCESSOR_ULTRASPARC
	  || sparc_cpu == PROCESSOR_ULTRASPARC3))
    align_functions = 32;

  /* Validate PCC_STRUCT_RETURN.  */
  if (flag_pcc_struct_return == DEFAULT_PCC_STRUCT_RETURN)
    flag_pcc_struct_return = (TARGET_ARCH64 ? 0 : 1);

  /* Only use .uaxword when compiling for a 64-bit target.  */
  if (!TARGET_ARCH64)
    targetm.asm_out.unaligned_op.di = NULL;

  /* Do various machine dependent initializations.  */
  sparc_init_modes ();
}
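
/* For example, -mcpu=ultrasparc first clears the MASK_ISA bits and then sets
   MASK_V9|MASK_DEPRECATED_V8_INSNS, as listed in cpu_table above; since the
   -mcpu= entry in sparc_select has both set_tune_p and set_arch_p, it also
   makes sparc_cpu PROCESSOR_ULTRASPARC for scheduling purposes.  */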
/* Miscellaneous utilities.  */

/* Nonzero if CODE, a comparison, is suitable for use in v9 conditional move
   or branch on register contents instructions.  */

int
v9_regcmp_p (enum rtx_code code)
{
  return (code == EQ || code == NE || code == GE || code == LT
	  || code == LE || code == GT);
}
/* Operand constraints.  */

/* Return nonzero only if OP is a register of mode MODE,
   or const0_rtx.  */

int
reg_or_0_operand (rtx op, enum machine_mode mode)
{
  if (register_operand (op, mode))
    return 1;
  if (op == const0_rtx)
    return 1;
  if (GET_MODE (op) == VOIDmode && GET_CODE (op) == CONST_DOUBLE
      && CONST_DOUBLE_HIGH (op) == 0
      && CONST_DOUBLE_LOW (op) == 0)
    return 1;
  if (fp_zero_operand (op, mode))
    return 1;
  return 0;
}

/* Return nonzero only if OP is const1_rtx.  */

int
const1_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return op == const1_rtx;
}

/* Nonzero if OP is a floating point value with value 0.0.  */

int
fp_zero_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE_CLASS (GET_MODE (op)) != MODE_FLOAT)
    return 0;
  return op == CONST0_RTX (mode);
}
/* Nonzero if OP is a register operand in floating point register.  */

int
fp_register_operand (rtx op, enum machine_mode mode)
{
  if (! register_operand (op, mode))
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return GET_CODE (op) == REG && SPARC_FP_REG_P (REGNO (op));
}
/* Nonzero if OP is a floating point constant which can
   be loaded into an integer register using a single
   sethi instruction.  */

int
fp_sethi_p (rtx op)
{
  if (GET_CODE (op) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r;
      long i;

      REAL_VALUE_FROM_CONST_DOUBLE (r, op);
      if (REAL_VALUES_EQUAL (r, dconst0) &&
	  ! REAL_VALUE_MINUS_ZERO (r))
	return 0;
      REAL_VALUE_TO_TARGET_SINGLE (r, i);
      if (SPARC_SETHI_P (i))
	return 1;
    }

  return 0;
}

/* Nonzero if OP is a floating point constant which can
   be loaded into an integer register using a single
   mov instruction.  */

int
fp_mov_p (rtx op)
{
  if (GET_CODE (op) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r;
      long i;

      REAL_VALUE_FROM_CONST_DOUBLE (r, op);
      if (REAL_VALUES_EQUAL (r, dconst0) &&
	  ! REAL_VALUE_MINUS_ZERO (r))
	return 0;
      REAL_VALUE_TO_TARGET_SINGLE (r, i);
      if (SPARC_SIMM13_P (i))
	return 1;
    }

  return 0;
}

/* Nonzero if OP is a floating point constant which can
   be loaded into an integer register using a high/losum
   instruction sequence.  */

int
fp_high_losum_p (rtx op)
{
  /* The constraints calling this should only be in
     SFmode move insns, so any constant which cannot
     be moved using a single insn will do.  */
  if (GET_CODE (op) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r;
      long i;

      REAL_VALUE_FROM_CONST_DOUBLE (r, op);
      if (REAL_VALUES_EQUAL (r, dconst0) &&
	  ! REAL_VALUE_MINUS_ZERO (r))
	return 0;
      REAL_VALUE_TO_TARGET_SINGLE (r, i);
      if (! SPARC_SETHI_P (i)
	  && ! SPARC_SIMM13_P (i))
	return 1;
    }

  return 0;
}
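
/* The three predicates above rely on two macros from sparc.h: SPARC_SIMM13_P
   accepts the signed 13-bit range -4096..4095 (a single mov/or immediate),
   and SPARC_SETHI_P accepts 32-bit values whose low 10 bits are zero (a
   single sethi).  Anything else needs the two-insn sethi+or sequence, which
   is what fp_high_losum_p approves.  */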
/* Nonzero if OP is an integer register.  */

int
intreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (register_operand (op, SImode)
	  || (TARGET_ARCH64 && register_operand (op, DImode)));
}
602 /* Nonzero if OP is a floating point condition code register. */
605 fcc_reg_operand (rtx op, enum machine_mode mode)
607 /* This can happen when recog is called from combine. Op may be a MEM.
608 Fail instead of calling abort in this case. */
609 if (GET_CODE (op) != REG)
610 return 0;
612 if (mode != VOIDmode && mode != GET_MODE (op))
613 return 0;
614 if (mode == VOIDmode
615 && (GET_MODE (op) != CCFPmode && GET_MODE (op) != CCFPEmode))
616 return 0;
618 #if 0 /* ??? ==> 1 when %fcc0-3 are pseudos first. See gen_compare_reg(). */
619 if (reg_renumber == 0)
620 return REGNO (op) >= FIRST_PSEUDO_REGISTER;
621 return REGNO_OK_FOR_CCFP_P (REGNO (op));
622 #else
623 return (unsigned) REGNO (op) - SPARC_FIRST_V9_FCC_REG < 4;
624 #endif
627 /* Nonzero if OP is a floating point condition code fcc0 register. */
630 fcc0_reg_operand (rtx op, enum machine_mode mode)
632 /* This can happen when recog is called from combine. Op may be a MEM.
633 Fail instead of calling abort in this case. */
634 if (GET_CODE (op) != REG)
635 return 0;
637 if (mode != VOIDmode && mode != GET_MODE (op))
638 return 0;
639 if (mode == VOIDmode
640 && (GET_MODE (op) != CCFPmode && GET_MODE (op) != CCFPEmode))
641 return 0;
643 return REGNO (op) == SPARC_FCC_REG;
/* Nonzero if OP is an integer or floating point condition code register.  */

int
icc_or_fcc_reg_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == REG && REGNO (op) == SPARC_ICC_REG)
    {
      if (mode != VOIDmode && mode != GET_MODE (op))
	return 0;
      if (mode == VOIDmode
	  && GET_MODE (op) != CCmode && GET_MODE (op) != CCXmode)
	return 0;
      return 1;
    }

  return fcc_reg_operand (op, mode);
}

/* Nonzero if OP can appear as the dest of a RESTORE insn.  */
int
restore_operand (rtx op, enum machine_mode mode)
{
  return (GET_CODE (op) == REG && GET_MODE (op) == mode
	  && (REGNO (op) < 8 || (REGNO (op) >= 24 && REGNO (op) < 32)));
}
/* Call insn on SPARC can take a PC-relative constant address, or any regular
   memory address.  */

int
call_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != MEM)
    abort ();
  op = XEXP (op, 0);
  return (symbolic_operand (op, mode) || memory_address_p (Pmode, op));
}

int
call_operand_address (rtx op, enum machine_mode mode)
{
  return (symbolic_operand (op, mode) || memory_address_p (Pmode, op));
}
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  */

int
symbolic_operand (register rtx op, enum machine_mode mode)
{
  enum machine_mode omode = GET_MODE (op);

  if (omode != mode && omode != VOIDmode && mode != VOIDmode)
    return 0;

  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    case CONST:
      op = XEXP (op, 0);
      return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);

    default:
      return 0;
    }
}
/* Return truth value of statement that OP is a symbolic memory
   operand of mode MODE.  */

int
symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
	  || GET_CODE (op) == HIGH || GET_CODE (op) == LABEL_REF);
}

/* Return truth value of statement that OP is a LABEL_REF of mode MODE.  */

int
label_ref_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != LABEL_REF)
    return 0;
  if (GET_MODE (op) != mode)
    return 0;
  return 1;
}
/* Return 1 if the operand is an argument used in generating pic references
   in either the medium/low or medium/anywhere code models of sparc64.  */

int
sp64_medium_pic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Check for (const (minus (symbol_ref:GOT)
                             (const (minus (label) (pc))))).  */
  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);
  if (GET_CODE (op) != MINUS)
    return 0;
  if (GET_CODE (XEXP (op, 0)) != SYMBOL_REF)
    return 0;
  /* ??? Ensure symbol is GOT.  */
  if (GET_CODE (XEXP (op, 1)) != CONST)
    return 0;
  if (GET_CODE (XEXP (XEXP (op, 1), 0)) != MINUS)
    return 0;
  return 1;
}
/* Return 1 if the operand is a data segment reference.  This includes
   the readonly data segment, or in other words anything but the text segment.
   This is needed in the medium/anywhere code model on v9.  These values
   are accessed with EMBMEDANY_BASE_REG.  */

int
data_segment_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
      return ! SYMBOL_REF_FUNCTION_P (op);
    case PLUS :
      /* Assume canonical format of symbol + constant.
	 Fall through.  */
    case CONST :
      return data_segment_operand (XEXP (op, 0), VOIDmode);
    default :
      return 0;
    }
}

/* Return 1 if the operand is a text segment reference.
   This is needed in the medium/anywhere code model on v9.  */

int
text_segment_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (op))
    {
    case LABEL_REF :
      return 1;
    case SYMBOL_REF :
      return SYMBOL_REF_FUNCTION_P (op);
    case PLUS :
      /* Assume canonical format of symbol + constant.
	 Fall through.  */
    case CONST :
      return text_segment_operand (XEXP (op, 0), VOIDmode);
    default :
      return 0;
    }
}
/* Return 1 if the operand is either a register or a memory operand that is
   not symbolic.  */

int
reg_or_nonsymb_mem_operand (register rtx op, enum machine_mode mode)
{
  if (register_operand (op, mode))
    return 1;

  if (memory_operand (op, mode) && ! symbolic_memory_operand (op, mode))
    return 1;

  return 0;
}

int
splittable_symbolic_memory_operand (rtx op,
				    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) != MEM)
    return 0;
  if (! symbolic_operand (XEXP (op, 0), Pmode))
    return 0;
  return 1;
}

int
splittable_immediate_memory_operand (rtx op,
				     enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) != MEM)
    return 0;
  if (! immediate_operand (XEXP (op, 0), Pmode))
    return 0;
  return 1;
}
/* Return truth value of whether OP is EQ or NE.  */

int
eq_or_neq (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == EQ || GET_CODE (op) == NE);
}

/* Return 1 if this is a comparison operator, but not an EQ, NE, GEU,
   or LTU for non-floating-point.  We handle those specially.  */

int
normal_comp_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (op);

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  if (GET_MODE (XEXP (op, 0)) == CCFPmode
      || GET_MODE (XEXP (op, 0)) == CCFPEmode)
    return 1;

  return (code != NE && code != EQ && code != GEU && code != LTU);
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
noov_compare_op (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (op);

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  if (GET_MODE (XEXP (op, 0)) == CC_NOOVmode
      || GET_MODE (XEXP (op, 0)) == CCX_NOOVmode)
    /* These are the only branches which work with CC_NOOVmode.  */
    return (code == EQ || code == NE || code == GE || code == LT);
  return 1;
}

/* Return 1 if this is a 64-bit comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
noov_compare64_op (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (op);

  if (! TARGET_V9)
    return 0;

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  if (GET_MODE (XEXP (op, 0)) == CCX_NOOVmode)
    /* These are the only branches which work with CCX_NOOVmode.  */
    return (code == EQ || code == NE || code == GE || code == LT);
  return (GET_MODE (XEXP (op, 0)) == CCXmode);
}

/* Nonzero if OP is a comparison operator suitable for use in v9
   conditional move or branch on register contents instructions.  */

int
v9_regcmp_op (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (op);

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  return v9_regcmp_p (code);
}
/* Return 1 if this is a SIGN_EXTEND or ZERO_EXTEND operation.  */

int
extend_op (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return GET_CODE (op) == SIGN_EXTEND || GET_CODE (op) == ZERO_EXTEND;
}

/* Return nonzero if OP is an operator of mode MODE which can set
   the condition codes explicitly.  We do not include PLUS and MINUS
   because these require CC_NOOVmode, which we handle explicitly.  */

int
cc_arithop (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == AND
      || GET_CODE (op) == IOR
      || GET_CODE (op) == XOR)
    return 1;

  return 0;
}

/* Return nonzero if OP is an operator of mode MODE which can bitwise
   complement its second operand and set the condition codes explicitly.  */

int
cc_arithopn (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* XOR is not here because combine canonicalizes (xor (not ...) ...)
     and (xor ... (not ...)) to (not (xor ...)).  */
  return (GET_CODE (op) == AND
	  || GET_CODE (op) == IOR);
}
/* Return true if OP is a register, or is a CONST_INT that can fit in a
   signed 13 bit immediate field.  This is an acceptable SImode operand for
   most 3 address instructions.  */

int
arith_operand (rtx op, enum machine_mode mode)
{
  if (register_operand (op, mode))
    return 1;
  if (GET_CODE (op) != CONST_INT)
    return 0;
  return SMALL_INT32 (op);
}

/* Return true if OP is a constant 4096.  */

int
arith_4096_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;
  else
    return INTVAL (op) == 4096;
}

/* Return true if OP is suitable as second operand for add/sub.  */

int
arith_add_operand (rtx op, enum machine_mode mode)
{
  return arith_operand (op, mode) || arith_4096_operand (op, mode);
}
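
/* The special-casing of 4096 exists so the add/sub patterns can accept one
   value just past the simm13 limit and handle it by negating it: -4096 does
   fit, so e.g. an addition of 4096 can be emitted as a subtraction of
   -4096.  */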
/* Return true if OP is a CONST_INT or a CONST_DOUBLE which can fit in the
   immediate field of OR and XOR instructions.  Used for 64-bit
   constant formation patterns.  */
int
const64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return ((GET_CODE (op) == CONST_INT
	   && SPARC_SIMM13_P (INTVAL (op)))
#if HOST_BITS_PER_WIDE_INT != 64
	  || (GET_CODE (op) == CONST_DOUBLE
	      && SPARC_SIMM13_P (CONST_DOUBLE_LOW (op))
	      && (CONST_DOUBLE_HIGH (op) ==
		  ((CONST_DOUBLE_LOW (op) & 0x80000000) != 0 ?
		   (HOST_WIDE_INT)-1 : 0)))
#endif
	  );
}

/* The same, but only for sethi instructions.  */
int
const64_high_operand (rtx op, enum machine_mode mode)
{
  return ((GET_CODE (op) == CONST_INT
	   && (INTVAL (op) & ~(HOST_WIDE_INT)0x3ff) != 0
	   && SPARC_SETHI_P (INTVAL (op) & GET_MODE_MASK (mode))
	   )
	  || (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_HIGH (op) == 0
	      && (CONST_DOUBLE_LOW (op) & ~(HOST_WIDE_INT)0x3ff) != 0
	      && SPARC_SETHI_P (CONST_DOUBLE_LOW (op))));
}
/* Return true if OP is a register, or is a CONST_INT that can fit in a
   signed 11 bit immediate field.  This is an acceptable SImode operand for
   the movcc instructions.  */

int
arith11_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && SPARC_SIMM11_P (INTVAL (op))));
}

/* Return true if OP is a register, or is a CONST_INT that can fit in a
   signed 10 bit immediate field.  This is an acceptable SImode operand for
   the movrcc instructions.  */

int
arith10_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && SPARC_SIMM10_P (INTVAL (op))));
}
/* Return true if OP is a register, is a CONST_INT that fits in a 13 bit
   immediate field, or is a CONST_DOUBLE whose both parts fit in a 13 bit
   immediate field.
   v9: Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that
   can fit in a 13 bit immediate field.  This is an acceptable DImode operand
   for most 3 address instructions.  */

int
arith_double_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && SMALL_INT (op))
	  || (! TARGET_ARCH64
	      && GET_CODE (op) == CONST_DOUBLE
	      && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_LOW (op) + 0x1000) < 0x2000
	      && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_HIGH (op) + 0x1000) < 0x2000)
	  || (TARGET_ARCH64
	      && GET_CODE (op) == CONST_DOUBLE
	      && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_LOW (op) + 0x1000) < 0x2000
	      && ((CONST_DOUBLE_HIGH (op) == -1
		   && (CONST_DOUBLE_LOW (op) & 0x1000) == 0x1000)
		  || (CONST_DOUBLE_HIGH (op) == 0
		      && (CONST_DOUBLE_LOW (op) & 0x1000) == 0))));
}

/* Return true if OP is a constant 4096 for DImode on ARCH64.  */

int
arith_double_4096_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (TARGET_ARCH64 &&
	  ((GET_CODE (op) == CONST_INT && INTVAL (op) == 4096) ||
	   (GET_CODE (op) == CONST_DOUBLE &&
	    CONST_DOUBLE_LOW (op) == 4096 &&
	    CONST_DOUBLE_HIGH (op) == 0)));
}

/* Return true if OP is suitable as second operand for add/sub in DImode.  */

int
arith_double_add_operand (rtx op, enum machine_mode mode)
{
  return arith_double_operand (op, mode) || arith_double_4096_operand (op, mode);
}
/* Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that
   can fit in an 11 bit immediate field.  This is an acceptable DImode
   operand for the movcc instructions.  */
/* ??? Replace with arith11_operand?  */

int
arith11_double_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_DOUBLE
	      && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
	      && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_LOW (op) + 0x400) < 0x800
	      && ((CONST_DOUBLE_HIGH (op) == -1
		   && (CONST_DOUBLE_LOW (op) & 0x400) == 0x400)
		  || (CONST_DOUBLE_HIGH (op) == 0
		      && (CONST_DOUBLE_LOW (op) & 0x400) == 0)))
	  || (GET_CODE (op) == CONST_INT
	      && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
	      && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x400) < 0x800));
}

/* Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that
   can fit in a 10 bit immediate field.  This is an acceptable DImode
   operand for the movrcc instructions.  */
/* ??? Replace with arith10_operand?  */

int
arith10_double_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_DOUBLE
	      && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
	      && (unsigned) (CONST_DOUBLE_LOW (op) + 0x200) < 0x400
	      && ((CONST_DOUBLE_HIGH (op) == -1
		   && (CONST_DOUBLE_LOW (op) & 0x200) == 0x200)
		  || (CONST_DOUBLE_HIGH (op) == 0
		      && (CONST_DOUBLE_LOW (op) & 0x200) == 0)))
	  || (GET_CODE (op) == CONST_INT
	      && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
	      && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x200) < 0x400));
}
/* Return truth value of whether OP is an integer which fits the
   range constraining immediate operands in most three-address insns,
   which have a 13 bit immediate field.  */

int
small_int (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT && SMALL_INT (op));
}

int
small_int_or_double (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return ((GET_CODE (op) == CONST_INT && SMALL_INT (op))
	  || (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_HIGH (op) == 0
	      && SPARC_SIMM13_P (CONST_DOUBLE_LOW (op))));
}

/* Recognize operand values for the umul instruction.  That instruction sign
   extends immediate values just like all other sparc instructions, but
   interprets the extended result as an unsigned number.  */

int
uns_small_int (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* All allowed constants will fit a CONST_INT.  */
  return (GET_CODE (op) == CONST_INT
	  && ((INTVAL (op) >= 0 && INTVAL (op) < 0x1000)
	      || (INTVAL (op) >= 0xFFFFF000
		  && INTVAL (op) <= 0xFFFFFFFF)));
#else
  return ((GET_CODE (op) == CONST_INT && (unsigned) INTVAL (op) < 0x1000)
	  || (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_HIGH (op) == 0
	      && (unsigned) CONST_DOUBLE_LOW (op) - 0xFFFFF000 < 0x1000));
#endif
}

int
uns_arith_operand (rtx op, enum machine_mode mode)
{
  return register_operand (op, mode) || uns_small_int (op, mode);
}

/* Return truth value of statement that OP is a call-clobbered register.  */
int
clobbered_register (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == REG && call_used_regs[REGNO (op)]);
}
/* Return 1 if OP is a valid operand for the source of a move insn.  */

int
input_operand (rtx op, enum machine_mode mode)
{
  /* If both modes are non-void they must be the same.  */
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and result in 0/1.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* Allow any one instruction integer constant, and all CONST_INT
     variants when we are working in DImode and !arch64.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && ((GET_CODE (op) == CONST_INT
	   && (SPARC_SETHI_P (INTVAL (op) & GET_MODE_MASK (mode))
	       || SPARC_SIMM13_P (INTVAL (op))
	       || (mode == DImode
		   && ! TARGET_ARCH64)))
	  || (TARGET_ARCH64
	      && GET_CODE (op) == CONST_DOUBLE
	      && ((CONST_DOUBLE_HIGH (op) == 0
		   && SPARC_SETHI_P (CONST_DOUBLE_LOW (op)))
		  ||
#if HOST_BITS_PER_WIDE_INT == 64
		  (CONST_DOUBLE_HIGH (op) == 0
		   && SPARC_SIMM13_P (CONST_DOUBLE_LOW (op)))
#else
		  (SPARC_SIMM13_P (CONST_DOUBLE_LOW (op))
		   && (((CONST_DOUBLE_LOW (op) & 0x80000000) == 0
			&& CONST_DOUBLE_HIGH (op) == 0)
		       || (CONST_DOUBLE_HIGH (op) == -1
			   && CONST_DOUBLE_LOW (op) & 0x80000000) != 0))
#endif
		  ))))
    return 1;

  /* If !arch64 and this is a DImode const, allow it so that
     the splits can be generated.  */
  if (! TARGET_ARCH64
      && mode == DImode
      && GET_CODE (op) == CONST_DOUBLE)
    return 1;

  if (register_operand (op, mode))
    return 1;

  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_CODE (op) == CONST_DOUBLE)
    return 1;

  /* If this is a SUBREG, look inside so that we handle
     paradoxical ones.  */
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Check for valid MEM forms.  */
  if (GET_CODE (op) == MEM)
    {
      rtx inside = XEXP (op, 0);

      if (GET_CODE (inside) == LO_SUM)
	{
	  /* We can't allow these because all of the splits
	     (eventually as they trickle down into DFmode
	     splits) require offsettable memory references.  */
	  if (! TARGET_V9
	      && GET_MODE (op) == TFmode)
	    return 0;

	  return (register_operand (XEXP (inside, 0), Pmode)
		  && CONSTANT_P (XEXP (inside, 1)));
	}
      return memory_address_p (mode, inside);
    }

  return 0;
}
/* We know it can't be done in one insn when we get here,
   the movsi expander guarantees this.  */
void
sparc_emit_set_const32 (rtx op0, rtx op1)
{
  enum machine_mode mode = GET_MODE (op0);
  rtx temp;

  if (GET_CODE (op1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op1);

      if (SPARC_SETHI_P (value & GET_MODE_MASK (mode))
	  || SPARC_SIMM13_P (value))
	abort ();
    }

  /* Full 2-insn decomposition is needed.  */
  if (reload_in_progress || reload_completed)
    temp = op0;
  else
    temp = gen_reg_rtx (mode);

  if (GET_CODE (op1) == CONST_INT)
    {
      /* Emit them as real moves instead of a HIGH/LO_SUM,
	 this way CSE can see everything and reuse intermediate
	 values if it wants.  */
      if (TARGET_ARCH64
	  && HOST_BITS_PER_WIDE_INT != 64
	  && (INTVAL (op1) & 0x80000000) != 0)
	emit_insn (gen_rtx_SET
		   (VOIDmode, temp,
		    immed_double_const (INTVAL (op1) & ~(HOST_WIDE_INT)0x3ff,
					0, DImode)));
      else
	emit_insn (gen_rtx_SET (VOIDmode, temp,
				GEN_INT (INTVAL (op1)
					 & ~(HOST_WIDE_INT)0x3ff)));

      emit_insn (gen_rtx_SET (VOIDmode,
			      op0,
			      gen_rtx_IOR (mode, temp,
					   GEN_INT (INTVAL (op1) & 0x3ff))));
    }
  else
    {
      /* A symbol, emit in the traditional way.  */
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_HIGH (mode, op1)));
      emit_insn (gen_rtx_SET (VOIDmode,
			      op0, gen_rtx_LO_SUM (mode, temp, op1)));
    }
}
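
/* A concrete sketch of the CONST_INT path above (register names are only
   illustrative): loading 0x12345678 splits into its high 22 and low 10 bits,

	sethi	%hi(0x12345678), %tmp	! %tmp = 0x12345400
	or	%tmp, 0x278, %reg	! 0x12345400 | 0x278 = 0x12345678

   emitted here as a plain SET of the masked constant plus an IOR, so that
   CSE can see and reuse the intermediate value.  */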
/* SPARC-v9 code-model support.  */
void
sparc_emit_set_symbolic_const64 (rtx op0, rtx op1, rtx temp1)
{
  rtx ti_temp1 = 0;

  if (temp1 && GET_MODE (temp1) == TImode)
    {
      ti_temp1 = temp1;
      temp1 = gen_rtx_REG (DImode, REGNO (temp1));
    }

  switch (sparc_cmodel)
    {
    case CM_MEDLOW:
      /* The range spanned by all instructions in the object is less
	 than 2^31 bytes (2GB) and the distance from any instruction
	 to the location of the label _GLOBAL_OFFSET_TABLE_ is less
	 than 2^31 bytes (2GB).

	 The executable must be in the low 4TB of the virtual address
	 space.

	 sethi	%hi(symbol), %temp
	 or	%temp, %lo(symbol), %reg  */
      emit_insn (gen_rtx_SET (VOIDmode, temp1, gen_rtx_HIGH (DImode, op1)));
      emit_insn (gen_rtx_SET (VOIDmode, op0, gen_rtx_LO_SUM (DImode, temp1, op1)));
      break;

    case CM_MEDMID:
      /* The range spanned by all instructions in the object is less
	 than 2^31 bytes (2GB) and the distance from any instruction
	 to the location of the label _GLOBAL_OFFSET_TABLE_ is less
	 than 2^31 bytes (2GB).

	 The executable must be in the low 16TB of the virtual address
	 space.

	 sethi	%h44(symbol), %temp1
	 or	%temp1, %m44(symbol), %temp2
	 sllx	%temp2, 12, %temp3
	 or	%temp3, %l44(symbol), %reg  */
      emit_insn (gen_seth44 (op0, op1));
      emit_insn (gen_setm44 (op0, op0, op1));
      emit_insn (gen_rtx_SET (VOIDmode, temp1,
			      gen_rtx_ASHIFT (DImode, op0, GEN_INT (12))));
      emit_insn (gen_setl44 (op0, temp1, op1));
      break;

    case CM_MEDANY:
      /* The range spanned by all instructions in the object is less
	 than 2^31 bytes (2GB) and the distance from any instruction
	 to the location of the label _GLOBAL_OFFSET_TABLE_ is less
	 than 2^31 bytes (2GB).

	 The executable can be placed anywhere in the virtual address
	 space.

	 sethi	%hh(symbol), %temp1
	 sethi	%lm(symbol), %temp2
	 or	%temp1, %hm(symbol), %temp3
	 or	%temp2, %lo(symbol), %temp4
	 sllx	%temp3, 32, %temp5
	 or	%temp4, %temp5, %reg  */

      /* It is possible that one of the registers we got for operands[2]
	 might coincide with that of operands[0] (which is why we made
	 it TImode).  Pick the other one to use as our scratch.  */
      if (rtx_equal_p (temp1, op0))
	{
	  if (ti_temp1)
	    temp1 = gen_rtx_REG (DImode, REGNO (temp1) + 1);
	  else
	    abort();
	}

      emit_insn (gen_sethh (op0, op1));
      emit_insn (gen_setlm (temp1, op1));
      emit_insn (gen_sethm (op0, op0, op1));
      emit_insn (gen_rtx_SET (VOIDmode, op0,
			      gen_rtx_ASHIFT (DImode, op0, GEN_INT (32))));
      emit_insn (gen_rtx_SET (VOIDmode, op0,
			      gen_rtx_PLUS (DImode, op0, temp1)));
      emit_insn (gen_setlo (op0, op0, op1));
      break;

    case CM_EMBMEDANY:
      /* Old old old backwards compatibility kruft here.
	 Essentially it is MEDLOW with a fixed 64-bit
	 virtual base added to all data segment addresses.
	 Text-segment stuff is computed like MEDANY, we can't
	 reuse the code above because the relocation knobs
	 look different.

	 Data segment:	sethi	%hi(symbol), %temp1
			or	%temp1, %lo(symbol), %temp2
			add	%temp2, EMBMEDANY_BASE_REG, %reg

	 Text segment:	sethi	%uhi(symbol), %temp1
			sethi	%hi(symbol), %temp2
			or	%temp1, %ulo(symbol), %temp3
			or	%temp2, %lo(symbol), %temp4
			sllx	%temp3, 32, %temp5
			or	%temp4, %temp5, %reg  */
      if (data_segment_operand (op1, GET_MODE (op1)))
	{
	  emit_insn (gen_embmedany_sethi (temp1, op1));
	  emit_insn (gen_embmedany_brsum (op0, temp1));
	  emit_insn (gen_embmedany_losum (op0, op0, op1));
	}
      else
	{
	  /* It is possible that one of the registers we got for operands[2]
	     might coincide with that of operands[0] (which is why we made
	     it TImode).  Pick the other one to use as our scratch.  */
	  if (rtx_equal_p (temp1, op0))
	    {
	      if (ti_temp1)
		temp1 = gen_rtx_REG (DImode, REGNO (temp1) + 1);
	      else
		abort();
	    }

	  emit_insn (gen_embmedany_textuhi (op0, op1));
	  emit_insn (gen_embmedany_texthi  (temp1, op1));
	  emit_insn (gen_embmedany_textulo (op0, op0, op1));
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_rtx_ASHIFT (DImode, op0, GEN_INT (32))));
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_rtx_PLUS (DImode, op0, temp1)));
	  emit_insn (gen_embmedany_textlo  (op0, op0, op1));
	}
      break;

    default:
      abort();
    }
}
/* These avoid problems when cross compiling.  If we do not
   go through all this hair then the optimizer will see
   invalid REG_EQUAL notes or in some cases none at all.  */
static void sparc_emit_set_safe_HIGH64 (rtx, HOST_WIDE_INT);
static rtx gen_safe_SET64 (rtx, HOST_WIDE_INT);
static rtx gen_safe_OR64 (rtx, HOST_WIDE_INT);
static rtx gen_safe_XOR64 (rtx, HOST_WIDE_INT);

#if HOST_BITS_PER_WIDE_INT == 64
#define GEN_HIGHINT64(__x)		GEN_INT ((__x) & ~(HOST_WIDE_INT)0x3ff)
#define GEN_INT64(__x)			GEN_INT (__x)
#else
#define GEN_HIGHINT64(__x) \
	immed_double_const ((__x) & ~(HOST_WIDE_INT)0x3ff, 0, DImode)
#define GEN_INT64(__x) \
	immed_double_const ((__x) & 0xffffffff, \
			    ((__x) & 0x80000000 ? -1 : 0), DImode)
#endif
/* The optimizer is not to assume anything about exactly
   which bits are set for a HIGH, they are unspecified.
   Unfortunately this leads to many missed optimizations
   during CSE.  We mask out the non-HIGH bits, and match
   a plain movdi, to alleviate this problem.  */
static void
sparc_emit_set_safe_HIGH64 (rtx dest, HOST_WIDE_INT val)
{
  emit_insn (gen_rtx_SET (VOIDmode, dest, GEN_HIGHINT64 (val)));
}

static rtx
gen_safe_SET64 (rtx dest, HOST_WIDE_INT val)
{
  return gen_rtx_SET (VOIDmode, dest, GEN_INT64 (val));
}

static rtx
gen_safe_OR64 (rtx src, HOST_WIDE_INT val)
{
  return gen_rtx_IOR (DImode, src, GEN_INT64 (val));
}

static rtx
gen_safe_XOR64 (rtx src, HOST_WIDE_INT val)
{
  return gen_rtx_XOR (DImode, src, GEN_INT64 (val));
}
/* Worker routines for 64-bit constant formation on arch64.
   One of the key things to be doing in these emissions is
   to create as many temp REGs as possible.  This makes it
   possible for half-built constants to be used later when
   such values are similar to something required later on.
   Without doing this, the optimizer cannot see such
   opportunities.  */

static void sparc_emit_set_const64_quick1 (rtx, rtx,
					    unsigned HOST_WIDE_INT, int);

static void
sparc_emit_set_const64_quick1 (rtx op0, rtx temp,
			       unsigned HOST_WIDE_INT low_bits, int is_neg)
{
  unsigned HOST_WIDE_INT high_bits;

  if (is_neg)
    high_bits = (~low_bits) & 0xffffffff;
  else
    high_bits = low_bits;

  sparc_emit_set_safe_HIGH64 (temp, high_bits);
  if (!is_neg)
    {
      emit_insn (gen_rtx_SET (VOIDmode, op0,
			      gen_safe_OR64 (temp, (high_bits & 0x3ff))));
    }
  else
    {
      /* If we are XOR'ing with -1, then we should emit a one's complement
	 instead.  This way the combiner will notice logical operations
	 such as ANDN later on and substitute.  */
      if ((low_bits & 0x3ff) == 0x3ff)
	{
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_rtx_NOT (DImode, temp)));
	}
      else
	{
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_safe_XOR64 (temp,
						  (-(HOST_WIDE_INT)0x400
						   | (low_bits & 0x3ff)))));
	}
    }
}
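
/* quick1 is used when the upper 32 bits of the constant are all zeros or all
   ones (a sign-extended 32-bit value): the positive case is sethi+or on the
   low word, while the negative case builds the complemented value with sethi
   and then flips it back with xor (or a plain one's complement when the low
   10 bits are all ones).  */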
static void sparc_emit_set_const64_quick2 (rtx, rtx, unsigned HOST_WIDE_INT,
					    unsigned HOST_WIDE_INT, int);

static void
sparc_emit_set_const64_quick2 (rtx op0, rtx temp,
			       unsigned HOST_WIDE_INT high_bits,
			       unsigned HOST_WIDE_INT low_immediate,
			       int shift_count)
{
  rtx temp2 = op0;

  if ((high_bits & 0xfffffc00) != 0)
    {
      sparc_emit_set_safe_HIGH64 (temp, high_bits);
      if ((high_bits & ~0xfffffc00) != 0)
	emit_insn (gen_rtx_SET (VOIDmode, op0,
				gen_safe_OR64 (temp, (high_bits & 0x3ff))));
      else
	temp2 = temp;
    }
  else
    {
      emit_insn (gen_safe_SET64 (temp, high_bits));
      temp2 = temp;
    }

  /* Now shift it up into place.  */
  emit_insn (gen_rtx_SET (VOIDmode, op0,
			  gen_rtx_ASHIFT (DImode, temp2,
					  GEN_INT (shift_count))));

  /* If there is a low immediate part piece, finish up by
     putting that in as well.  */
  if (low_immediate != 0)
    emit_insn (gen_rtx_SET (VOIDmode, op0,
			    gen_safe_OR64 (op0, low_immediate)));
}
static void sparc_emit_set_const64_longway (rtx, rtx, unsigned HOST_WIDE_INT,
					     unsigned HOST_WIDE_INT);

/* Full 64-bit constant decomposition.  Even though this is the
   'worst' case, we still optimize a few things away.  */
static void
sparc_emit_set_const64_longway (rtx op0, rtx temp,
				unsigned HOST_WIDE_INT high_bits,
				unsigned HOST_WIDE_INT low_bits)
{
  rtx sub_temp;

  if (reload_in_progress || reload_completed)
    sub_temp = op0;
  else
    sub_temp = gen_reg_rtx (DImode);

  if ((high_bits & 0xfffffc00) != 0)
    {
      sparc_emit_set_safe_HIGH64 (temp, high_bits);
      if ((high_bits & ~0xfffffc00) != 0)
	emit_insn (gen_rtx_SET (VOIDmode,
				sub_temp,
				gen_safe_OR64 (temp, (high_bits & 0x3ff))));
      else
	sub_temp = temp;
    }
  else
    {
      emit_insn (gen_safe_SET64 (temp, high_bits));
      sub_temp = temp;
    }

  if (!reload_in_progress && !reload_completed)
    {
      rtx temp2 = gen_reg_rtx (DImode);
      rtx temp3 = gen_reg_rtx (DImode);
      rtx temp4 = gen_reg_rtx (DImode);

      emit_insn (gen_rtx_SET (VOIDmode, temp4,
			      gen_rtx_ASHIFT (DImode, sub_temp,
					      GEN_INT (32))));

      sparc_emit_set_safe_HIGH64 (temp2, low_bits);
      if ((low_bits & ~0xfffffc00) != 0)
	{
	  emit_insn (gen_rtx_SET (VOIDmode, temp3,
				  gen_safe_OR64 (temp2, (low_bits & 0x3ff))));
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_rtx_PLUS (DImode, temp4, temp3)));
	}
      else
	{
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_rtx_PLUS (DImode, temp4, temp2)));
	}
    }
  else
    {
      rtx low1 = GEN_INT ((low_bits >> (32 - 12)) & 0xfff);
      rtx low2 = GEN_INT ((low_bits >> (32 - 12 - 12)) & 0xfff);
      rtx low3 = GEN_INT ((low_bits >> (32 - 12 - 12 - 8)) & 0x0ff);
      int to_shift = 12;

      /* We are in the middle of reload, so this is really
	 painful.  However we do still make an attempt to
	 avoid emitting truly stupid code.  */
      if (low1 != const0_rtx)
	{
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_rtx_ASHIFT (DImode, sub_temp,
						  GEN_INT (to_shift))));
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_rtx_IOR (DImode, op0, low1)));
	  sub_temp = op0;
	  to_shift = 12;
	}
      else
	{
	  to_shift += 12;
	}
      if (low2 != const0_rtx)
	{
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_rtx_ASHIFT (DImode, sub_temp,
						  GEN_INT (to_shift))));
	  emit_insn (gen_rtx_SET (VOIDmode, op0,
				  gen_rtx_IOR (DImode, op0, low2)));
	  sub_temp = op0;
	  to_shift = 8;
	}
      else
	{
	  to_shift += 8;
	}
      emit_insn (gen_rtx_SET (VOIDmode, op0,
			      gen_rtx_ASHIFT (DImode, sub_temp,
					      GEN_INT (to_shift))));
      if (low3 != const0_rtx)
	emit_insn (gen_rtx_SET (VOIDmode, op0,
				gen_rtx_IOR (DImode, op0, low3)));
      /* phew...  */
    }
}
/* Analyze a 64-bit constant for certain properties.  */
static void analyze_64bit_constant (unsigned HOST_WIDE_INT,
				    unsigned HOST_WIDE_INT,
				    int *, int *, int *);

static void
analyze_64bit_constant (unsigned HOST_WIDE_INT high_bits,
			unsigned HOST_WIDE_INT low_bits,
			int *hbsp, int *lbsp, int *abbasp)
{
  int lowest_bit_set, highest_bit_set, all_bits_between_are_set;
  int i;

  lowest_bit_set = highest_bit_set = -1;
  i = 0;
  do
    {
      if ((lowest_bit_set == -1)
	  && ((low_bits >> i) & 1))
	lowest_bit_set = i;
      if ((highest_bit_set == -1)
	  && ((high_bits >> (32 - i - 1)) & 1))
	highest_bit_set = (64 - i - 1);
    }
  while (++i < 32
	 && ((highest_bit_set == -1)
	     || (lowest_bit_set == -1)));
  if (i == 32)
    {
      i = 0;
      do
	{
	  if ((lowest_bit_set == -1)
	      && ((high_bits >> i) & 1))
	    lowest_bit_set = i + 32;
	  if ((highest_bit_set == -1)
	      && ((low_bits >> (32 - i - 1)) & 1))
	    highest_bit_set = 32 - i - 1;
	}
      while (++i < 32
	     && ((highest_bit_set == -1)
		 || (lowest_bit_set == -1)));
    }
  /* If there are no bits set this should have gone out
     as one instruction!  */
  if (lowest_bit_set == -1
      || highest_bit_set == -1)
    abort ();
  all_bits_between_are_set = 1;
  for (i = lowest_bit_set; i <= highest_bit_set; i++)
    {
      if (i < 32)
	{
	  if ((low_bits & (1 << i)) != 0)
	    continue;
	}
      else
	{
	  if ((high_bits & (1 << (i - 32))) != 0)
	    continue;
	}
      all_bits_between_are_set = 0;
      break;
    }
  *hbsp = highest_bit_set;
  *lbsp = lowest_bit_set;
  *abbasp = all_bits_between_are_set;
}
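
/* Example: for 0x0000000ffff00000, the loops above find lowest_bit_set = 20,
   highest_bit_set = 35 and all_bits_between_are_set = 1, i.e. a single run
   of sixteen one bits straddling the 32-bit word boundary.  */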
static int const64_is_2insns (unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT);

static int
const64_is_2insns (unsigned HOST_WIDE_INT high_bits,
		   unsigned HOST_WIDE_INT low_bits)
{
  int highest_bit_set, lowest_bit_set, all_bits_between_are_set;

  if (high_bits == 0
      || high_bits == 0xffffffff)
    return 1;

  analyze_64bit_constant (high_bits, low_bits,
			  &highest_bit_set, &lowest_bit_set,
			  &all_bits_between_are_set);

  if ((highest_bit_set == 63
       || lowest_bit_set == 0)
      && all_bits_between_are_set != 0)
    return 1;

  if ((highest_bit_set - lowest_bit_set) < 21)
    return 1;

  return 0;
}
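
/* In other words, two insns suffice when the value is a sign-extended 32-bit
   constant, when a solid run of ones reaches bit 0 or bit 63 (mov -1 plus a
   shift), or when every set bit fits in a window narrow enough for a single
   sethi followed by a shift into place.  */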
static unsigned HOST_WIDE_INT create_simple_focus_bits (unsigned HOST_WIDE_INT,
							 unsigned HOST_WIDE_INT,
							 int, int);

static unsigned HOST_WIDE_INT
create_simple_focus_bits (unsigned HOST_WIDE_INT high_bits,
			  unsigned HOST_WIDE_INT low_bits,
			  int lowest_bit_set, int shift)
{
  HOST_WIDE_INT hi, lo;

  if (lowest_bit_set < 32)
    {
      lo = (low_bits >> lowest_bit_set) << shift;
      hi = ((high_bits << (32 - lowest_bit_set)) << shift);
    }
  else
    {
      lo = 0;
      hi = ((high_bits >> (lowest_bit_set - 32)) << shift);
    }
  if (hi & lo)
    abort ();
  return (hi | lo);
}
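
/* Continuing the example above: create_simple_focus_bits (0xf, 0xfff00000,
   20, 10) right-justifies the sixteen-bit run and applies the extra shift of
   10, giving 0xffff << 10 = 0x3fffc00, exactly the form a sethi operand
   needs.  */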
1828 /* Here we are sure to be arch64 and this is an integer constant
1829 being loaded into a register. Emit the most efficient
1830 insn sequence possible. Detection of all the 1-insn cases
1831 has been done already. */
1832 void
1833 sparc_emit_set_const64 (rtx op0, rtx op1)
1835 unsigned HOST_WIDE_INT high_bits, low_bits;
1836 int lowest_bit_set, highest_bit_set;
1837 int all_bits_between_are_set;
1838 rtx temp;
1840 /* Sanity check that we know what we are working with. */
1841 if (! TARGET_ARCH64)
1842 abort ();
1844 if (GET_CODE (op0) != SUBREG)
1846 if (GET_CODE (op0) != REG
1847 || (REGNO (op0) >= SPARC_FIRST_FP_REG
1848 && REGNO (op0) <= SPARC_LAST_V9_FP_REG))
1849 abort ();
1852 if (reload_in_progress || reload_completed)
1853 temp = op0;
1854 else
1855 temp = gen_reg_rtx (DImode);
1857 if (GET_CODE (op1) != CONST_DOUBLE
1858 && GET_CODE (op1) != CONST_INT)
1860 sparc_emit_set_symbolic_const64 (op0, op1, temp);
1861 return;
1864 if (GET_CODE (op1) == CONST_DOUBLE)
1866 #if HOST_BITS_PER_WIDE_INT == 64
1867 high_bits = (CONST_DOUBLE_LOW (op1) >> 32) & 0xffffffff;
1868 low_bits = CONST_DOUBLE_LOW (op1) & 0xffffffff;
1869 #else
1870 high_bits = CONST_DOUBLE_HIGH (op1);
1871 low_bits = CONST_DOUBLE_LOW (op1);
1872 #endif
1874 else
1876 #if HOST_BITS_PER_WIDE_INT == 64
1877 high_bits = ((INTVAL (op1) >> 32) & 0xffffffff);
1878 low_bits = (INTVAL (op1) & 0xffffffff);
1879 #else
1880 high_bits = ((INTVAL (op1) < 0) ?
1881 0xffffffff :
1882 0x00000000);
1883 low_bits = INTVAL (op1);
1884 #endif
1887 /* low_bits bits 0 --> 31
1888 high_bits bits 32 --> 63 */
1890 analyze_64bit_constant (high_bits, low_bits,
1891 &highest_bit_set, &lowest_bit_set,
1892 &all_bits_between_are_set);
1894 /* First try for a 2-insn sequence. */
1896 /* These situations are preferred because the optimizer can
1897 * do more things with them:
1898 * 1) mov -1, %reg
1899 * sllx %reg, shift, %reg
1900 * 2) mov -1, %reg
1901 * srlx %reg, shift, %reg
1902 * 3) mov some_small_const, %reg
1903 * sllx %reg, shift, %reg
1905 if (((highest_bit_set == 63
1906 || lowest_bit_set == 0)
1907 && all_bits_between_are_set != 0)
1908 || ((highest_bit_set - lowest_bit_set) < 12))
1910 HOST_WIDE_INT the_const = -1;
1911 int shift = lowest_bit_set;
1913 if ((highest_bit_set != 63
1914 && lowest_bit_set != 0)
1915 || all_bits_between_are_set == 0)
1917 the_const =
1918 create_simple_focus_bits (high_bits, low_bits,
1919 lowest_bit_set, 0);
1921 else if (lowest_bit_set == 0)
1922 shift = -(63 - highest_bit_set);
1924 if (! SPARC_SIMM13_P (the_const))
1925 abort ();
1927 emit_insn (gen_safe_SET64 (temp, the_const));
1928 if (shift > 0)
1929 emit_insn (gen_rtx_SET (VOIDmode,
1930 op0,
1931 gen_rtx_ASHIFT (DImode,
1932 temp,
1933 GEN_INT (shift))));
1934 else if (shift < 0)
1935 emit_insn (gen_rtx_SET (VOIDmode,
1936 op0,
1937 gen_rtx_LSHIFTRT (DImode,
1938 temp,
1939 GEN_INT (-shift))));
1940 else
1941 abort ();
1942 return;
1945 /* Now a range of 22 or less bits set somewhere.
1946 * 1) sethi %hi(focus_bits), %reg
1947 * sllx %reg, shift, %reg
1948 * 2) sethi %hi(focus_bits), %reg
1949 * srlx %reg, shift, %reg
1951 if ((highest_bit_set - lowest_bit_set) < 21)
1953 unsigned HOST_WIDE_INT focus_bits =
1954 create_simple_focus_bits (high_bits, low_bits,
1955 lowest_bit_set, 10);
1957 if (! SPARC_SETHI_P (focus_bits))
1958 abort ();
1960 sparc_emit_set_safe_HIGH64 (temp, focus_bits);
1962 /* If lowest_bit_set == 10 then a sethi alone could have done it. */
1963 if (lowest_bit_set < 10)
1964 emit_insn (gen_rtx_SET (VOIDmode,
1965 op0,
1966 gen_rtx_LSHIFTRT (DImode, temp,
1967 GEN_INT (10 - lowest_bit_set))));
1968 else if (lowest_bit_set > 10)
1969 emit_insn (gen_rtx_SET (VOIDmode,
1970 op0,
1971 gen_rtx_ASHIFT (DImode, temp,
1972 GEN_INT (lowest_bit_set - 10))));
1973 else
1974 abort ();
1975 return;
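/* Illustrative only: for 0x00003ffff8000000 the set bits are 27..45, so
   focus_bits is (value >> 27) << 10 == 0x1ffffc00, which sethi can load
   directly; since lowest_bit_set (27) > 10 we then shift left by 17:

	sethi	%hi(0x1ffffc00), %g1
	sllx	%g1, 17, %g1		! 0x1ffffc00 << 17 == 0x3ffff8000000  */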
1978 /* 1) sethi %hi(low_bits), %reg
1979 * or %reg, %lo(low_bits), %reg
1980 * 2) sethi %hi(~low_bits), %reg
1981 * xor %reg, %lo(-0x400 | (low_bits & 0x3ff)), %reg
1983 if (high_bits == 0
1984 || high_bits == 0xffffffff)
1986 sparc_emit_set_const64_quick1 (op0, temp, low_bits,
1987 (high_bits == 0xffffffff));
1988 return;
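/* Illustrative only (the quick1 case): the upper half is all zeros or all
   ones, so the value is determined by low_bits alone.  E.g.

	0x0000000012345678  ->  sethi %hi(0x12345678), %g1
				or    %g1, %lo(0x12345678), %g1

   and for something like 0xffffffff80001234 the negated form is used:
   sethi loads %hi(~low_bits) and the sign-extended xor immediate flips
   the upper 32 bits back on.  */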
1991 /* Now, try 3-insn sequences. */
1993 /* 1) sethi %hi(high_bits), %reg
1994 * or %reg, %lo(high_bits), %reg
1995 * sllx %reg, 32, %reg
1997 if (low_bits == 0)
1999 sparc_emit_set_const64_quick2 (op0, temp, high_bits, 0, 32);
2000 return;
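/* Illustrative only: e.g. 0x1234567800000000 becomes

	sethi	%hi(0x12345678), %g1
	or	%g1, %lo(0x12345678), %g1
	sllx	%g1, 32, %g1  */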
2003 /* We may be able to do something quick
2004 when the constant is negated, so try that. */
2005 if (const64_is_2insns ((~high_bits) & 0xffffffff,
2006 (~low_bits) & 0xfffffc00))
2008 /* NOTE: The trailing bits get XOR'd so we need the
2009 non-negated bits, not the negated ones. */
2010 unsigned HOST_WIDE_INT trailing_bits = low_bits & 0x3ff;
2012 if ((((~high_bits) & 0xffffffff) == 0
2013 && ((~low_bits) & 0x80000000) == 0)
2014 || (((~high_bits) & 0xffffffff) == 0xffffffff
2015 && ((~low_bits) & 0x80000000) != 0))
2017 int fast_int = (~low_bits & 0xffffffff);
2019 if ((SPARC_SETHI_P (fast_int)
2020 && (~high_bits & 0xffffffff) == 0)
2021 || SPARC_SIMM13_P (fast_int))
2022 emit_insn (gen_safe_SET64 (temp, fast_int));
2023 else
2024 sparc_emit_set_const64 (temp, GEN_INT64 (fast_int));
2026 else
2028 rtx negated_const;
2029 #if HOST_BITS_PER_WIDE_INT == 64
2030 negated_const = GEN_INT (((~low_bits) & 0xfffffc00) |
2031 (((HOST_WIDE_INT)((~high_bits) & 0xffffffff))<<32));
2032 #else
2033 negated_const = immed_double_const ((~low_bits) & 0xfffffc00,
2034 (~high_bits) & 0xffffffff,
2035 DImode);
2036 #endif
2037 sparc_emit_set_const64 (temp, negated_const);
2040 /* If we are XOR'ing with -1, then we should emit a one's complement
2041 instead. This way the combiner will notice logical operations
2042 such as ANDN later on and substitute. */
2043 if (trailing_bits == 0x3ff)
2045 emit_insn (gen_rtx_SET (VOIDmode, op0,
2046 gen_rtx_NOT (DImode, temp)));
2048 else
2050 emit_insn (gen_rtx_SET (VOIDmode,
2051 op0,
2052 gen_safe_XOR64 (temp,
2053 (-0x400 | trailing_bits))));
2055 return;
2058 /* 1) sethi %hi(xxx), %reg
2059 * or %reg, %lo(xxx), %reg
2060 * sllx %reg, yyy, %reg
2062 * ??? This is just a generalized version of the low_bits==0
2063 * thing above, FIXME...
2065 if ((highest_bit_set - lowest_bit_set) < 32)
2067 unsigned HOST_WIDE_INT focus_bits =
2068 create_simple_focus_bits (high_bits, low_bits,
2069 lowest_bit_set, 0);
2071 /* We can't get here in this state. */
2072 if (highest_bit_set < 32
2073 || lowest_bit_set >= 32)
2074 abort ();
2076 /* So what we know is that the set bits straddle the
2077 middle of the 64-bit word. */
2078 sparc_emit_set_const64_quick2 (op0, temp,
2079 focus_bits, 0,
2080 lowest_bit_set);
2081 return;
2084 /* 1) sethi %hi(high_bits), %reg
2085 * or %reg, %lo(high_bits), %reg
2086 * sllx %reg, 32, %reg
2087 * or %reg, low_bits, %reg
2089 if (SPARC_SIMM13_P(low_bits)
2090 && ((int)low_bits > 0))
2092 sparc_emit_set_const64_quick2 (op0, temp, high_bits, low_bits, 32);
2093 return;
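/* Illustrative only: e.g. 0x1234567800000abc (low_bits == 0xabc is a
   positive simm13) becomes

	sethi	%hi(0x12345678), %g1
	or	%g1, %lo(0x12345678), %g1
	sllx	%g1, 32, %g1
	or	%g1, 0xabc, %g1  */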
2096 /* The easiest way when all else fails, is full decomposition. */
2097 #if 0
2098 printf ("sparc_emit_set_const64: Hard constant [%08lx%08lx] neg[%08lx%08lx]\n",
2099 high_bits, low_bits, ~high_bits, ~low_bits);
2100 #endif
2101 sparc_emit_set_const64_longway (op0, temp, high_bits, low_bits);
2104 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
2105 return the mode to be used for the comparison. For floating-point,
2106 CCFP[E]mode is used. CC_NOOVmode should be used when the first operand
2107 is a PLUS, MINUS, NEG, or ASHIFT. CCmode should be used when no special
2108 processing is needed. */
2110 enum machine_mode
2111 select_cc_mode (enum rtx_code op, rtx x, rtx y ATTRIBUTE_UNUSED)
2113 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2115 switch (op)
2117 case EQ:
2118 case NE:
2119 case UNORDERED:
2120 case ORDERED:
2121 case UNLT:
2122 case UNLE:
2123 case UNGT:
2124 case UNGE:
2125 case UNEQ:
2126 case LTGT:
2127 return CCFPmode;
2129 case LT:
2130 case LE:
2131 case GT:
2132 case GE:
2133 return CCFPEmode;
2135 default:
2136 abort ();
2139 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
2140 || GET_CODE (x) == NEG || GET_CODE (x) == ASHIFT)
2142 if (TARGET_ARCH64 && GET_MODE (x) == DImode)
2143 return CCX_NOOVmode;
2144 else
2145 return CC_NOOVmode;
2147 else
2149 if (TARGET_ARCH64 && GET_MODE (x) == DImode)
2150 return CCXmode;
2151 else
2152 return CCmode;
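/* For instance (illustrative only): a compare of (plus:SI a b) against zero
   gets CC_NOOVmode (CCX_NOOVmode for DImode on arch64), telling later passes
   not to rely on the overflow bit, since addcc/subcc set it differently from
   a plain compare; an ordinary SImode compare gets CCmode (CCXmode for
   DImode); FP equality and ordered/unordered tests get CCFPmode, while
   FP <, <=, > and >= get CCFPEmode, the signaling (fcmpe) variant.  */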
2156 /* X and Y are two things to compare using CODE. Emit the compare insn and
2157 return the rtx for the cc reg in the proper mode. */
2160 gen_compare_reg (enum rtx_code code, rtx x, rtx y)
2162 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
2163 rtx cc_reg;
2165 /* ??? We don't have movcc patterns so we cannot generate pseudo regs for the
2166 fcc regs (cse can't tell they're really call clobbered regs and will
2167 remove a duplicate comparison even if there is an intervening function
2168 call - it will then try to reload the cc reg via an int reg which is why
2169 we need the movcc patterns). It is possible to provide the movcc
2170 patterns by using the ldxfsr/stxfsr v9 insns. I tried it: you need two
2171 registers (say %g1,%g5) and it takes about 6 insns. A better fix would be
2172 to tell cse that CCFPE mode registers (even pseudos) are call
2173 clobbered. */
2175 /* ??? This is an experiment. Rather than making changes to cse which may
2176 or may not be easy/clean, we do our own cse. This is possible because
2177 we will generate hard registers. Cse knows they're call clobbered (it
2178 doesn't know the same thing about pseudos). If we guess wrong, no big
2179 deal, but if we win, great! */
2181 if (TARGET_V9 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2182 #if 1 /* experiment */
2184 int reg;
2185 /* We cycle through the registers to ensure they're all exercised. */
2186 static int next_fcc_reg = 0;
2187 /* Previous x,y for each fcc reg. */
2188 static rtx prev_args[4][2];
2190 /* Scan prev_args for x,y. */
2191 for (reg = 0; reg < 4; reg++)
2192 if (prev_args[reg][0] == x && prev_args[reg][1] == y)
2193 break;
2194 if (reg == 4)
2196 reg = next_fcc_reg;
2197 prev_args[reg][0] = x;
2198 prev_args[reg][1] = y;
2199 next_fcc_reg = (next_fcc_reg + 1) & 3;
2201 cc_reg = gen_rtx_REG (mode, reg + SPARC_FIRST_V9_FCC_REG);
2203 #else
2204 cc_reg = gen_reg_rtx (mode);
2205 #endif /* ! experiment */
2206 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2207 cc_reg = gen_rtx_REG (mode, SPARC_FCC_REG);
2208 else
2209 cc_reg = gen_rtx_REG (mode, SPARC_ICC_REG);
2211 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
2212 gen_rtx_COMPARE (mode, x, y)));
2214 return cc_reg;
2217 /* This function is used for v9 only.
2218 CODE is the code for an Scc's comparison.
2219 OPERANDS[0] is the target of the Scc insn.
2220 OPERANDS[1] is the value we compare against const0_rtx (which hasn't
2221 been generated yet).
2223 This function is needed to turn
2225 (set (reg:SI 110)
2226 (gt (reg:CCX 100 %icc)
2227 (const_int 0)))
2228 into
2229 (set (reg:SI 110)
2230 (gt:DI (reg:CCX 100 %icc)
2231 (const_int 0)))
2233 I.e., the instruction recognizer needs to see the mode of the comparison to
2234 find the right instruction. We could use "gt:DI" right in the
2235 define_expand, but leaving it out allows us to handle DI, SI, etc.
2237 We refer to the global sparc compare operands sparc_compare_op0 and
2238 sparc_compare_op1. */
2241 gen_v9_scc (enum rtx_code compare_code, register rtx *operands)
2243 rtx temp, op0, op1;
2245 if (! TARGET_ARCH64
2246 && (GET_MODE (sparc_compare_op0) == DImode
2247 || GET_MODE (operands[0]) == DImode))
2248 return 0;
2250 op0 = sparc_compare_op0;
2251 op1 = sparc_compare_op1;
2253 /* Try to use the movrCC insns. */
2254 if (TARGET_ARCH64
2255 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
2256 && op1 == const0_rtx
2257 && v9_regcmp_p (compare_code))
2259 /* Special case for op0 != 0. This can be done with one instruction if
2260 operands[0] == sparc_compare_op0. */
2262 if (compare_code == NE
2263 && GET_MODE (operands[0]) == DImode
2264 && rtx_equal_p (op0, operands[0]))
2266 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2267 gen_rtx_IF_THEN_ELSE (DImode,
2268 gen_rtx_fmt_ee (compare_code, DImode,
2269 op0, const0_rtx),
2270 const1_rtx,
2271 operands[0])));
2272 return 1;
2275 if (reg_overlap_mentioned_p (operands[0], op0))
2277 /* Handle the case where operands[0] == sparc_compare_op0.
2278 We "early clobber" the result. */
2279 op0 = gen_reg_rtx (GET_MODE (sparc_compare_op0));
2280 emit_move_insn (op0, sparc_compare_op0);
2283 emit_insn (gen_rtx_SET (VOIDmode, operands[0], const0_rtx));
2284 if (GET_MODE (op0) != DImode)
2286 temp = gen_reg_rtx (DImode);
2287 convert_move (temp, op0, 0);
2289 else
2290 temp = op0;
2291 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2292 gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]),
2293 gen_rtx_fmt_ee (compare_code, DImode,
2294 temp, const0_rtx),
2295 const1_rtx,
2296 operands[0])));
2297 return 1;
2299 else
2301 operands[1] = gen_compare_reg (compare_code, op0, op1);
2303 switch (GET_MODE (operands[1]))
2305 case CCmode :
2306 case CCXmode :
2307 case CCFPEmode :
2308 case CCFPmode :
2309 break;
2310 default :
2311 abort ();
2313 emit_insn (gen_rtx_SET (VOIDmode, operands[0], const0_rtx));
2314 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2315 gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]),
2316 gen_rtx_fmt_ee (compare_code,
2317 GET_MODE (operands[1]),
2318 operands[1], const0_rtx),
2319 const1_rtx, operands[0])));
2320 return 1;
2324 /* Emit a conditional jump insn for the v9 architecture using comparison code
2325 CODE and jump target LABEL.
2326 This function exists to take advantage of the v9 brxx insns. */
2328 void
2329 emit_v9_brxx_insn (enum rtx_code code, rtx op0, rtx label)
2331 emit_jump_insn (gen_rtx_SET (VOIDmode,
2332 pc_rtx,
2333 gen_rtx_IF_THEN_ELSE (VOIDmode,
2334 gen_rtx_fmt_ee (code, GET_MODE (op0),
2335 op0, const0_rtx),
2336 gen_rtx_LABEL_REF (VOIDmode, label),
2337 pc_rtx)));
2340 /* Generate a DFmode part of a hard TFmode register.
2341 REG is the TFmode hard register, LOW is 1 for the
2342 low 64 bits of the register and 0 otherwise.
2345 gen_df_reg (rtx reg, int low)
2347 int regno = REGNO (reg);
2349 if ((WORDS_BIG_ENDIAN == 0) ^ (low != 0))
2350 regno += (TARGET_ARCH64 && regno < 32) ? 1 : 2;
2351 return gen_rtx_REG (DFmode, regno);
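/* E.g. (illustrative only): for a TFmode value living in %f4 (hard regno 36),
   gen_df_reg returns %f4 for LOW == 0 and %f6 for LOW == 1, since SPARC is
   big-endian (WORDS_BIG_ENDIAN) and FP hard regnos count 32-bit halves.  */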
2354 /* Generate a call to FUNC with OPERANDS. Operand 0 is the return value.
2355 Unlike normal calls, TFmode operands are passed by reference. It is
2356 assumed that no more than 3 operands are required. */
2358 static void
2359 emit_soft_tfmode_libcall (const char *func_name, int nargs, rtx *operands)
2361 rtx ret_slot = NULL, arg[3], func_sym;
2362 int i;
2364 /* We only expect to be called for conversions, unary, and binary ops. */
2365 if (nargs < 2 || nargs > 3)
2366 abort ();
2368 for (i = 0; i < nargs; ++i)
2370 rtx this_arg = operands[i];
2371 rtx this_slot;
2373 /* TFmode arguments and return values are passed by reference. */
2374 if (GET_MODE (this_arg) == TFmode)
2376 int force_stack_temp;
2378 force_stack_temp = 0;
2379 if (TARGET_BUGGY_QP_LIB && i == 0)
2380 force_stack_temp = 1;
2382 if (GET_CODE (this_arg) == MEM
2383 && ! force_stack_temp)
2384 this_arg = XEXP (this_arg, 0);
2385 else if (CONSTANT_P (this_arg)
2386 && ! force_stack_temp)
2388 this_slot = force_const_mem (TFmode, this_arg);
2389 this_arg = XEXP (this_slot, 0);
2391 else
2393 this_slot = assign_stack_temp (TFmode, GET_MODE_SIZE (TFmode), 0);
2395 /* Operand 0 is the return value. We'll copy it out later. */
2396 if (i > 0)
2397 emit_move_insn (this_slot, this_arg);
2398 else
2399 ret_slot = this_slot;
2401 this_arg = XEXP (this_slot, 0);
2405 arg[i] = this_arg;
2408 func_sym = gen_rtx_SYMBOL_REF (Pmode, func_name);
2410 if (GET_MODE (operands[0]) == TFmode)
2412 if (nargs == 2)
2413 emit_library_call (func_sym, LCT_NORMAL, VOIDmode, 2,
2414 arg[0], GET_MODE (arg[0]),
2415 arg[1], GET_MODE (arg[1]));
2416 else
2417 emit_library_call (func_sym, LCT_NORMAL, VOIDmode, 3,
2418 arg[0], GET_MODE (arg[0]),
2419 arg[1], GET_MODE (arg[1]),
2420 arg[2], GET_MODE (arg[2]));
2422 if (ret_slot)
2423 emit_move_insn (operands[0], ret_slot);
2425 else
2427 rtx ret;
2429 if (nargs != 2)
2430 abort ();
2432 ret = emit_library_call_value (func_sym, operands[0], LCT_NORMAL,
2433 GET_MODE (operands[0]), 1,
2434 arg[1], GET_MODE (arg[1]));
2436 if (ret != operands[0])
2437 emit_move_insn (operands[0], ret);
2441 /* Expand soft-float TFmode calls to sparc abi routines. */
2443 static void
2444 emit_soft_tfmode_binop (enum rtx_code code, rtx *operands)
2446 const char *func;
2448 switch (code)
2450 case PLUS:
2451 func = "_Qp_add";
2452 break;
2453 case MINUS:
2454 func = "_Qp_sub";
2455 break;
2456 case MULT:
2457 func = "_Qp_mul";
2458 break;
2459 case DIV:
2460 func = "_Qp_div";
2461 break;
2462 default:
2463 abort ();
2466 emit_soft_tfmode_libcall (func, 3, operands);
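/* Illustrative only: without TARGET_HARD_QUAD, a TFmode addition c = a + b
   comes out roughly as the library call

	_Qp_add (&c, &a, &b);

   i.e. all three TFmode operands are passed by reference, and if the
   destination is not already in memory the result is copied back out of
   a stack temporary afterwards.  */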
2469 static void
2470 emit_soft_tfmode_unop (enum rtx_code code, rtx *operands)
2472 const char *func;
2474 switch (code)
2476 case SQRT:
2477 func = "_Qp_sqrt";
2478 break;
2479 default:
2480 abort ();
2483 emit_soft_tfmode_libcall (func, 2, operands);
2486 static void
2487 emit_soft_tfmode_cvt (enum rtx_code code, rtx *operands)
2489 const char *func;
2491 switch (code)
2493 case FLOAT_EXTEND:
2494 switch (GET_MODE (operands[1]))
2496 case SFmode:
2497 func = "_Qp_stoq";
2498 break;
2499 case DFmode:
2500 func = "_Qp_dtoq";
2501 break;
2502 default:
2503 abort ();
2505 break;
2507 case FLOAT_TRUNCATE:
2508 switch (GET_MODE (operands[0]))
2510 case SFmode:
2511 func = "_Qp_qtos";
2512 break;
2513 case DFmode:
2514 func = "_Qp_qtod";
2515 break;
2516 default:
2517 abort ();
2519 break;
2521 case FLOAT:
2522 switch (GET_MODE (operands[1]))
2524 case SImode:
2525 func = "_Qp_itoq";
2526 break;
2527 case DImode:
2528 func = "_Qp_xtoq";
2529 break;
2530 default:
2531 abort ();
2533 break;
2535 case UNSIGNED_FLOAT:
2536 switch (GET_MODE (operands[1]))
2538 case SImode:
2539 func = "_Qp_uitoq";
2540 break;
2541 case DImode:
2542 func = "_Qp_uxtoq";
2543 break;
2544 default:
2545 abort ();
2547 break;
2549 case FIX:
2550 switch (GET_MODE (operands[0]))
2552 case SImode:
2553 func = "_Qp_qtoi";
2554 break;
2555 case DImode:
2556 func = "_Qp_qtox";
2557 break;
2558 default:
2559 abort ();
2561 break;
2563 case UNSIGNED_FIX:
2564 switch (GET_MODE (operands[0]))
2566 case SImode:
2567 func = "_Qp_qtoui";
2568 break;
2569 case DImode:
2570 func = "_Qp_qtoux";
2571 break;
2572 default:
2573 abort ();
2575 break;
2577 default:
2578 abort ();
2581 emit_soft_tfmode_libcall (func, 2, operands);
2584 /* Expand a hard-float tfmode operation. All arguments must be in
2585 registers. */
2587 static void
2588 emit_hard_tfmode_operation (enum rtx_code code, rtx *operands)
2590 rtx op, dest;
2592 if (GET_RTX_CLASS (code) == '1')
2594 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
2595 op = gen_rtx_fmt_e (code, GET_MODE (operands[0]), operands[1]);
2597 else
2599 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
2600 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
2601 op = gen_rtx_fmt_ee (code, GET_MODE (operands[0]),
2602 operands[1], operands[2]);
2605 if (register_operand (operands[0], VOIDmode))
2606 dest = operands[0];
2607 else
2608 dest = gen_reg_rtx (GET_MODE (operands[0]));
2610 emit_insn (gen_rtx_SET (VOIDmode, dest, op));
2612 if (dest != operands[0])
2613 emit_move_insn (operands[0], dest);
2616 void
2617 emit_tfmode_binop (enum rtx_code code, rtx *operands)
2619 if (TARGET_HARD_QUAD)
2620 emit_hard_tfmode_operation (code, operands);
2621 else
2622 emit_soft_tfmode_binop (code, operands);
2625 void
2626 emit_tfmode_unop (enum rtx_code code, rtx *operands)
2628 if (TARGET_HARD_QUAD)
2629 emit_hard_tfmode_operation (code, operands);
2630 else
2631 emit_soft_tfmode_unop (code, operands);
2634 void
2635 emit_tfmode_cvt (enum rtx_code code, rtx *operands)
2637 if (TARGET_HARD_QUAD)
2638 emit_hard_tfmode_operation (code, operands);
2639 else
2640 emit_soft_tfmode_cvt (code, operands);
2643 /* Return nonzero if a return peephole merging return with
2644 setting of output register is ok. */
2646 leaf_return_peephole_ok (void)
2648 return (actual_fsize == 0);
2651 /* Return nonzero if a branch/jump/call instruction will be emitting a
2652 nop into its delay slot. */
2655 empty_delay_slot (rtx insn)
2657 rtx seq;
2659 /* If no previous instruction (should not happen), return true. */
2660 if (PREV_INSN (insn) == NULL)
2661 return 1;
2663 seq = NEXT_INSN (PREV_INSN (insn));
2664 if (GET_CODE (PATTERN (seq)) == SEQUENCE)
2665 return 0;
2667 return 1;
2670 /* Return nonzero if TRIAL can go into the function epilogue's
2671 delay slot. SLOT is the slot we are trying to fill. */
2674 eligible_for_epilogue_delay (rtx trial, int slot)
2676 rtx pat, src;
2678 if (slot >= 1)
2679 return 0;
2681 if (GET_CODE (trial) != INSN || GET_CODE (PATTERN (trial)) != SET)
2682 return 0;
2684 if (get_attr_length (trial) != 1)
2685 return 0;
2687 /* If there are any call-saved registers, we should scan TRIAL to make
2688 sure it does not reference them. For now just make it easy. */
2689 if (num_gfregs)
2690 return 0;
2692 /* If the function uses __builtin_eh_return, the eh_return machinery
2693 occupies the delay slot. */
2694 if (current_function_calls_eh_return)
2695 return 0;
2697 /* In the case of a true leaf function, anything can go into the delay slot.
2698 A delay slot only exists however if the frame size is zero, otherwise
2699 we will put an insn to adjust the stack after the return. */
2700 if (current_function_uses_only_leaf_regs)
2702 if (leaf_return_peephole_ok ())
2703 return ((get_attr_in_uncond_branch_delay (trial)
2704 == IN_BRANCH_DELAY_TRUE));
2705 return 0;
2708 pat = PATTERN (trial);
2710 /* Otherwise, only operations which can be done in tandem with
2711 a `restore' or `return' insn can go into the delay slot. */
2712 if (GET_CODE (SET_DEST (pat)) != REG
2713 || REGNO (SET_DEST (pat)) < 24)
2714 return 0;
2716 /* If this instruction sets up a floating point register and we have a return
2717 instruction, it can probably go in. But restore will not work
2718 with FP_REGS. */
2719 if (REGNO (SET_DEST (pat)) >= 32)
2721 if (TARGET_V9 && ! epilogue_renumber (&pat, 1)
2722 && (get_attr_in_uncond_branch_delay (trial) == IN_BRANCH_DELAY_TRUE))
2723 return 1;
2724 return 0;
2727 /* The set of insns matched here must agree precisely with the set of
2728 patterns paired with a RETURN in sparc.md. */
2730 src = SET_SRC (pat);
2732 /* This matches "*return_[qhs]i" or even "*return_di" on TARGET_ARCH64. */
2733 if (GET_MODE_CLASS (GET_MODE (src)) != MODE_FLOAT
2734 && arith_operand (src, GET_MODE (src)))
2736 if (TARGET_ARCH64)
2737 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (DImode);
2738 else
2739 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (SImode);
2742 /* This matches "*return_di". */
2743 else if (GET_MODE_CLASS (GET_MODE (src)) != MODE_FLOAT
2744 && arith_double_operand (src, GET_MODE (src)))
2745 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (DImode);
2747 /* This matches "*return_sf_no_fpu". */
2748 else if (! TARGET_FPU && restore_operand (SET_DEST (pat), SFmode)
2749 && register_operand (src, SFmode))
2750 return 1;
2752 /* If we have a return instruction, anything that does not use
2753 local or output registers and can go into a delay slot wins. */
2754 else if (TARGET_V9 && ! epilogue_renumber (&pat, 1)
2755 && (get_attr_in_uncond_branch_delay (trial) == IN_BRANCH_DELAY_TRUE))
2756 return 1;
2758 /* This matches "*return_addsi". */
2759 else if (GET_CODE (src) == PLUS
2760 && arith_operand (XEXP (src, 0), SImode)
2761 && arith_operand (XEXP (src, 1), SImode)
2762 && (register_operand (XEXP (src, 0), SImode)
2763 || register_operand (XEXP (src, 1), SImode)))
2764 return 1;
2766 /* This matches "*return_adddi". */
2767 else if (GET_CODE (src) == PLUS
2768 && arith_double_operand (XEXP (src, 0), DImode)
2769 && arith_double_operand (XEXP (src, 1), DImode)
2770 && (register_operand (XEXP (src, 0), DImode)
2771 || register_operand (XEXP (src, 1), DImode)))
2772 return 1;
2774 /* This can match "*return_losum_[sd]i".
2775 Catch only some cases, so that return_losum* don't have
2776 to be too big. */
2777 else if (GET_CODE (src) == LO_SUM
2778 && ! TARGET_CM_MEDMID
2779 && ((register_operand (XEXP (src, 0), SImode)
2780 && immediate_operand (XEXP (src, 1), SImode))
2781 || (TARGET_ARCH64
2782 && register_operand (XEXP (src, 0), DImode)
2783 && immediate_operand (XEXP (src, 1), DImode))))
2784 return 1;
2786 /* sll{,x} reg,1,reg2 is add reg,reg,reg2 as well. */
2787 else if (GET_CODE (src) == ASHIFT
2788 && (register_operand (XEXP (src, 0), SImode)
2789 || register_operand (XEXP (src, 0), DImode))
2790 && XEXP (src, 1) == const1_rtx)
2791 return 1;
2793 return 0;
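/* Illustrative only: in a non-leaf frame something like "mov 5, %i0"
   (a SET of an %i register from a simple arith_operand) can be merged
   with the return/restore, whereas an insn that writes a %g, %o or %l
   register, or that references memory, cannot.  */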
2796 /* Return nonzero if TRIAL can go into the sibling call
2797 delay slot. */
2800 eligible_for_sibcall_delay (rtx trial)
2802 rtx pat, src;
2804 if (GET_CODE (trial) != INSN || GET_CODE (PATTERN (trial)) != SET)
2805 return 0;
2807 if (get_attr_length (trial) != 1)
2808 return 0;
2810 pat = PATTERN (trial);
2812 if (current_function_uses_only_leaf_regs)
2814 /* If the tail call is done using the call instruction,
2815 we have to restore %o7 in the delay slot. */
2816 if ((TARGET_ARCH64 && ! TARGET_CM_MEDLOW) || flag_pic)
2817 return 0;
2819 /* %g1 is used to build the function address */
2820 if (reg_mentioned_p (gen_rtx_REG (Pmode, 1), pat))
2821 return 0;
2823 return 1;
2826 /* Otherwise, only operations which can be done in tandem with
2827 a `restore' insn can go into the delay slot. */
2828 if (GET_CODE (SET_DEST (pat)) != REG
2829 || REGNO (SET_DEST (pat)) < 24
2830 || REGNO (SET_DEST (pat)) >= 32)
2831 return 0;
2833 /* If it mentions %o7, it can't go in, because sibcall will clobber it
2834 in most cases. */
2835 if (reg_mentioned_p (gen_rtx_REG (Pmode, 15), pat))
2836 return 0;
2838 src = SET_SRC (pat);
2840 if (GET_MODE_CLASS (GET_MODE (src)) != MODE_FLOAT
2841 && arith_operand (src, GET_MODE (src)))
2843 if (TARGET_ARCH64)
2844 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (DImode);
2845 else
2846 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (SImode);
2849 else if (GET_MODE_CLASS (GET_MODE (src)) != MODE_FLOAT
2850 && arith_double_operand (src, GET_MODE (src)))
2851 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (DImode);
2853 else if (! TARGET_FPU && restore_operand (SET_DEST (pat), SFmode)
2854 && register_operand (src, SFmode))
2855 return 1;
2857 else if (GET_CODE (src) == PLUS
2858 && arith_operand (XEXP (src, 0), SImode)
2859 && arith_operand (XEXP (src, 1), SImode)
2860 && (register_operand (XEXP (src, 0), SImode)
2861 || register_operand (XEXP (src, 1), SImode)))
2862 return 1;
2864 else if (GET_CODE (src) == PLUS
2865 && arith_double_operand (XEXP (src, 0), DImode)
2866 && arith_double_operand (XEXP (src, 1), DImode)
2867 && (register_operand (XEXP (src, 0), DImode)
2868 || register_operand (XEXP (src, 1), DImode)))
2869 return 1;
2871 else if (GET_CODE (src) == LO_SUM
2872 && ! TARGET_CM_MEDMID
2873 && ((register_operand (XEXP (src, 0), SImode)
2874 && immediate_operand (XEXP (src, 1), SImode))
2875 || (TARGET_ARCH64
2876 && register_operand (XEXP (src, 0), DImode)
2877 && immediate_operand (XEXP (src, 1), DImode))))
2878 return 1;
2880 else if (GET_CODE (src) == ASHIFT
2881 && (register_operand (XEXP (src, 0), SImode)
2882 || register_operand (XEXP (src, 0), DImode))
2883 && XEXP (src, 1) == const1_rtx)
2884 return 1;
2886 return 0;
2889 static int
2890 check_return_regs (rtx x)
2892 switch (GET_CODE (x))
2894 case REG:
2895 return IN_OR_GLOBAL_P (x);
2897 case CONST_INT:
2898 case CONST_DOUBLE:
2899 case CONST:
2900 case SYMBOL_REF:
2901 case LABEL_REF:
2902 return 1;
2904 case SET:
2905 case IOR:
2906 case AND:
2907 case XOR:
2908 case PLUS:
2909 case MINUS:
2910 if (check_return_regs (XEXP (x, 1)) == 0)
2911 return 0;
2912 case NOT:
2913 case NEG:
2914 case MEM:
2915 return check_return_regs (XEXP (x, 0));
2917 default:
2918 return 0;
2924 short_branch (int uid1, int uid2)
2926 int delta = INSN_ADDRESSES (uid1) - INSN_ADDRESSES (uid2);
2928 /* Leave a few words of "slop". */
2929 if (delta >= -1023 && delta <= 1022)
2930 return 1;
2932 return 0;
2935 /* Return nonzero if REG is not used after INSN.
2936 We assume REG is a reload reg, and therefore does
2937 not live past labels or calls or jumps. */
2939 reg_unused_after (rtx reg, rtx insn)
2941 enum rtx_code code, prev_code = UNKNOWN;
2943 while ((insn = NEXT_INSN (insn)))
2945 if (prev_code == CALL_INSN && call_used_regs[REGNO (reg)])
2946 return 1;
2948 code = GET_CODE (insn);
2949 if (GET_CODE (insn) == CODE_LABEL)
2950 return 1;
2952 if (GET_RTX_CLASS (code) == 'i')
2954 rtx set = single_set (insn);
2955 int in_src = set && reg_overlap_mentioned_p (reg, SET_SRC (set));
2956 if (set && in_src)
2957 return 0;
2958 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
2959 return 1;
2960 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
2961 return 0;
2963 prev_code = code;
2965 return 1;
2968 /* The table we use to reference PIC data. */
2969 static GTY(()) rtx global_offset_table;
2971 /* The function we use to get at it. */
2972 static GTY(()) rtx get_pc_symbol;
2973 static char get_pc_symbol_name[256];
2975 /* Ensure that we are not using patterns that are not OK with PIC. */
2978 check_pic (int i)
2980 switch (flag_pic)
2982 case 1:
2983 if (GET_CODE (recog_data.operand[i]) == SYMBOL_REF
2984 || (GET_CODE (recog_data.operand[i]) == CONST
2985 && ! (GET_CODE (XEXP (recog_data.operand[i], 0)) == MINUS
2986 && (XEXP (XEXP (recog_data.operand[i], 0), 0)
2987 == global_offset_table)
2988 && (GET_CODE (XEXP (XEXP (recog_data.operand[i], 0), 1))
2989 == CONST))))
2990 abort ();
2991 case 2:
2992 default:
2993 return 1;
2997 /* Return true if X is an address which needs a temporary register when
2998 reloaded while generating PIC code. */
3001 pic_address_needs_scratch (rtx x)
3003 /* An address which is a symbolic plus a non SMALL_INT needs a temp reg. */
3004 if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
3005 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3006 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3007 && ! SMALL_INT (XEXP (XEXP (x, 0), 1)))
3008 return 1;
3010 return 0;
3013 /* Legitimize PIC addresses. If the address is already position-independent,
3014 we return ORIG. Newly generated position-independent addresses go into a
3015 reg. This is REG if nonzero, otherwise we allocate register(s) as
3016 necessary. */
3019 legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
3020 rtx reg)
3022 if (GET_CODE (orig) == SYMBOL_REF)
3024 rtx pic_ref, address;
3025 rtx insn;
3027 if (reg == 0)
3029 if (reload_in_progress || reload_completed)
3030 abort ();
3031 else
3032 reg = gen_reg_rtx (Pmode);
3035 if (flag_pic == 2)
3037 /* If not during reload, allocate another temp reg here for loading
3038 in the address, so that these instructions can be optimized
3039 properly. */
3040 rtx temp_reg = ((reload_in_progress || reload_completed)
3041 ? reg : gen_reg_rtx (Pmode));
3043 /* Must put the SYMBOL_REF inside an UNSPEC here so that cse
3044 won't get confused into thinking that these two instructions
3045 are loading in the true address of the symbol. If in the
3046 future a PIC rtx exists, that should be used instead. */
3047 if (Pmode == SImode)
3049 emit_insn (gen_movsi_high_pic (temp_reg, orig));
3050 emit_insn (gen_movsi_lo_sum_pic (temp_reg, temp_reg, orig));
3052 else
3054 emit_insn (gen_movdi_high_pic (temp_reg, orig));
3055 emit_insn (gen_movdi_lo_sum_pic (temp_reg, temp_reg, orig));
3057 address = temp_reg;
3059 else
3060 address = orig;
3062 pic_ref = gen_rtx_MEM (Pmode,
3063 gen_rtx_PLUS (Pmode,
3064 pic_offset_table_rtx, address));
3065 current_function_uses_pic_offset_table = 1;
3066 RTX_UNCHANGING_P (pic_ref) = 1;
3067 insn = emit_move_insn (reg, pic_ref);
3068 /* Put a REG_EQUAL note on this insn, so that it can be optimized
3069 by loop. */
3070 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
3071 REG_NOTES (insn));
3072 return reg;
3074 else if (GET_CODE (orig) == CONST)
3076 rtx base, offset;
3078 if (GET_CODE (XEXP (orig, 0)) == PLUS
3079 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
3080 return orig;
3082 if (reg == 0)
3084 if (reload_in_progress || reload_completed)
3085 abort ();
3086 else
3087 reg = gen_reg_rtx (Pmode);
3090 if (GET_CODE (XEXP (orig, 0)) == PLUS)
3092 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
3093 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
3094 base == reg ? 0 : reg);
3096 else
3097 abort ();
3099 if (GET_CODE (offset) == CONST_INT)
3101 if (SMALL_INT (offset))
3102 return plus_constant (base, INTVAL (offset));
3103 else if (! reload_in_progress && ! reload_completed)
3104 offset = force_reg (Pmode, offset);
3105 else
3106 /* If we reach here, then something is seriously wrong. */
3107 abort ();
3109 return gen_rtx_PLUS (Pmode, base, offset);
3111 else if (GET_CODE (orig) == LABEL_REF)
3112 /* ??? Why do we do this? */
3113 /* Now movsi_pic_label_ref uses it, but we ought to be checking that
3114 the register is live instead, in case it is eliminated. */
3115 current_function_uses_pic_offset_table = 1;
3117 return orig;
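/* Illustrative only: with -fPIC (flag_pic == 2) a reference to global `x'
   becomes, roughly,

	temp = %hi/%lo pair for the GOT offset of `x' (wrapped in an UNSPEC)
	reg  = mem (pic_offset_table_rtx + temp)	! load x's address from the GOT

   while -fpic (flag_pic == 1) folds the small GOT offset directly into
   the address of the load.  */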
3120 /* Emit special PIC prologues. */
3122 void
3123 load_pic_register (void)
3125 /* Labels to get the PC in the prologue of this function. */
3126 int orig_flag_pic = flag_pic;
3128 if (! flag_pic)
3129 abort ();
3131 /* If we haven't emitted the special get_pc helper function, do so now. */
3132 if (get_pc_symbol_name[0] == 0)
3134 int align;
3136 ASM_GENERATE_INTERNAL_LABEL (get_pc_symbol_name, "LGETPC", 0);
3137 text_section ();
3139 align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
3140 if (align > 0)
3141 ASM_OUTPUT_ALIGN (asm_out_file, align);
3142 (*targetm.asm_out.internal_label) (asm_out_file, "LGETPC", 0);
3143 fputs ("\tretl\n\tadd\t%o7, %l7, %l7\n", asm_out_file);
3146 /* Initialize every time through, since we can't easily
3147 know this to be permanent. */
3148 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3149 get_pc_symbol = gen_rtx_SYMBOL_REF (Pmode, get_pc_symbol_name);
3150 flag_pic = 0;
3152 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
3153 get_pc_symbol));
3155 flag_pic = orig_flag_pic;
3157 /* Need to emit this whether or not we obey regdecls,
3158 since setjmp/longjmp can cause life info to screw up.
3159 ??? In the case where we don't obey regdecls, this is not sufficient
3160 since we may not fall out the bottom. */
3161 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
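/* A sketch of what the above achieves (illustrative only): the get_pc
   helper returns via "retl; add %o7, %l7, %l7", so provided the get_pc
   pattern loaded %l7 with the PC-relative offset of _GLOBAL_OFFSET_TABLE_
   before the call, %l7 holds the GOT address afterwards (%o7 being the
   address of the call itself).  */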
3164 /* Return 1 if RTX is a MEM which is known to be aligned to at
3165 least a DESIRED byte boundary. */
3168 mem_min_alignment (rtx mem, int desired)
3170 rtx addr, base, offset;
3172 /* If it's not a MEM we can't accept it. */
3173 if (GET_CODE (mem) != MEM)
3174 return 0;
3176 addr = XEXP (mem, 0);
3177 base = offset = NULL_RTX;
3178 if (GET_CODE (addr) == PLUS)
3180 if (GET_CODE (XEXP (addr, 0)) == REG)
3182 base = XEXP (addr, 0);
3184 /* What we are saying here is that if the base
3185 REG is aligned properly, the compiler will make
3186 sure any REG based index upon it will be so
3187 as well. */
3188 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3189 offset = XEXP (addr, 1);
3190 else
3191 offset = const0_rtx;
3194 else if (GET_CODE (addr) == REG)
3196 base = addr;
3197 offset = const0_rtx;
3200 if (base != NULL_RTX)
3202 int regno = REGNO (base);
3204 if (regno != HARD_FRAME_POINTER_REGNUM && regno != STACK_POINTER_REGNUM)
3206 /* Check if the compiler has recorded some information
3207 about the alignment of the base REG. If reload has
3208 completed, we already matched with proper alignments.
3209 If not running global_alloc, reload might give us an
3210 unaligned pointer to the local stack, though. */
3211 if (((cfun != 0
3212 && REGNO_POINTER_ALIGN (regno) >= desired * BITS_PER_UNIT)
3213 || (optimize && reload_completed))
3214 && (INTVAL (offset) & (desired - 1)) == 0)
3215 return 1;
3217 else
3219 if (((INTVAL (offset) - SPARC_STACK_BIAS) & (desired - 1)) == 0)
3220 return 1;
3223 else if (! TARGET_UNALIGNED_DOUBLES
3224 || CONSTANT_P (addr)
3225 || GET_CODE (addr) == LO_SUM)
3227 /* Anything else we know is properly aligned unless TARGET_UNALIGNED_DOUBLES
3228 is true, in which case we can only assume that an access is aligned if
3229 it is to a constant address, or the address involves a LO_SUM. */
3230 return 1;
3233 /* An obviously unaligned address. */
3234 return 0;
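/* E.g. (illustrative only, 32-bit, so SPARC_STACK_BIAS is 0):
   (mem:DF (plus (reg %fp) (const_int 24))) satisfies mem_min_alignment
   (mem, 8) because 24 is a multiple of 8, whereas an offset of 20 would
   not; a pseudo-based address is accepted only when REGNO_POINTER_ALIGN
   records at least the desired alignment (or after reload when
   optimizing).  */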
3238 /* Vectors to keep interesting information about registers where it can easily
3239 be got. We used to use the actual mode value as the bit number, but there
3240 are more than 32 modes now. Instead we use two tables: one indexed by
3241 hard register number, and one indexed by mode. */
3243 /* The purpose of sparc_mode_class is to shrink the range of modes so that
3244 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
3245 mapped into one sparc_mode_class mode. */
3247 enum sparc_mode_class {
3248 S_MODE, D_MODE, T_MODE, O_MODE,
3249 SF_MODE, DF_MODE, TF_MODE, OF_MODE,
3250 CC_MODE, CCFP_MODE
3253 /* Modes for single-word and smaller quantities. */
3254 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
3256 /* Modes for double-word and smaller quantities. */
3257 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
3259 /* Modes for quad-word and smaller quantities. */
3260 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
3262 /* Modes for 8-word and smaller quantities. */
3263 #define O_MODES (T_MODES | (1 << (int) O_MODE) | (1 << (int) OF_MODE))
3265 /* Modes for single-float quantities. We must allow any single word or
3266 smaller quantity. This is because the fix/float conversion instructions
3267 take integer inputs/outputs from the float registers. */
3268 #define SF_MODES (S_MODES)
3270 /* Modes for double-float and smaller quantities. */
3271 #define DF_MODES (S_MODES | D_MODES)
3273 /* Modes for double-float only quantities. */
3274 #define DF_MODES_NO_S ((1 << (int) D_MODE) | (1 << (int) DF_MODE))
3276 /* Modes for quad-float only quantities. */
3277 #define TF_ONLY_MODES (1 << (int) TF_MODE)
3279 /* Modes for quad-float and smaller quantities. */
3280 #define TF_MODES (DF_MODES | TF_ONLY_MODES)
3282 /* Modes for quad-float and double-float quantities. */
3283 #define TF_MODES_NO_S (DF_MODES_NO_S | TF_ONLY_MODES)
3285 /* Modes for quad-float pair only quantities. */
3286 #define OF_ONLY_MODES (1 << (int) OF_MODE)
3288 /* Modes for quad-float pairs and smaller quantities. */
3289 #define OF_MODES (TF_MODES | OF_ONLY_MODES)
3291 #define OF_MODES_NO_S (TF_MODES_NO_S | OF_ONLY_MODES)
3293 /* Modes for condition codes. */
3294 #define CC_MODES (1 << (int) CC_MODE)
3295 #define CCFP_MODES (1 << (int) CCFP_MODE)
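/* As a sanity check (illustrative only), with the enum ordering above these
   masks work out to: S_MODES == 0x011, D_MODES == 0x033, T_MODES == 0x077,
   O_MODES == 0x0ff, DF_MODES_NO_S == 0x022, TF_MODES == 0x073,
   CC_MODES == 0x100 and CCFP_MODES == 0x200.  */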
3297 /* Value is 1 if register/mode pair is acceptable on sparc.
3298 The funny mixture of D and T modes is because integer operations
3299 do not specially operate on tetra quantities, so non-quad-aligned
3300 registers can hold quadword quantities (except %o4 and %i4 because
3301 they cross fixed registers). */
3303 /* This points to either the 32 bit or the 64 bit version. */
3304 const int *hard_regno_mode_classes;
3306 static const int hard_32bit_mode_classes[] = {
3307 S_MODES, S_MODES, T_MODES, S_MODES, T_MODES, S_MODES, D_MODES, S_MODES,
3308 T_MODES, S_MODES, T_MODES, S_MODES, D_MODES, S_MODES, D_MODES, S_MODES,
3309 T_MODES, S_MODES, T_MODES, S_MODES, T_MODES, S_MODES, D_MODES, S_MODES,
3310 T_MODES, S_MODES, T_MODES, S_MODES, D_MODES, S_MODES, D_MODES, S_MODES,
3312 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3313 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3314 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3315 OF_MODES, SF_MODES, DF_MODES, SF_MODES, TF_MODES, SF_MODES, DF_MODES, SF_MODES,
3317 /* FP regs f32 to f63. Only the even numbered registers actually exist,
3318 and none can hold SFmode/SImode values. */
3319 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3320 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3321 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3322 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, TF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3324 /* %fcc[0123] */
3325 CCFP_MODES, CCFP_MODES, CCFP_MODES, CCFP_MODES,
3327 /* %icc */
3328 CC_MODES
3331 static const int hard_64bit_mode_classes[] = {
3332 D_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES,
3333 O_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES,
3334 T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES,
3335 O_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES,
3337 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3338 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3339 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3340 OF_MODES, SF_MODES, DF_MODES, SF_MODES, TF_MODES, SF_MODES, DF_MODES, SF_MODES,
3342 /* FP regs f32 to f63. Only the even numbered registers actually exist,
3343 and none can hold SFmode/SImode values. */
3344 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3345 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3346 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3347 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, TF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3349 /* %fcc[0123] */
3350 CCFP_MODES, CCFP_MODES, CCFP_MODES, CCFP_MODES,
3352 /* %icc */
3353 CC_MODES
3356 int sparc_mode_class [NUM_MACHINE_MODES];
3358 enum reg_class sparc_regno_reg_class[FIRST_PSEUDO_REGISTER];
3360 static void
3361 sparc_init_modes (void)
3363 int i;
3365 for (i = 0; i < NUM_MACHINE_MODES; i++)
3367 switch (GET_MODE_CLASS (i))
3369 case MODE_INT:
3370 case MODE_PARTIAL_INT:
3371 case MODE_COMPLEX_INT:
3372 if (GET_MODE_SIZE (i) <= 4)
3373 sparc_mode_class[i] = 1 << (int) S_MODE;
3374 else if (GET_MODE_SIZE (i) == 8)
3375 sparc_mode_class[i] = 1 << (int) D_MODE;
3376 else if (GET_MODE_SIZE (i) == 16)
3377 sparc_mode_class[i] = 1 << (int) T_MODE;
3378 else if (GET_MODE_SIZE (i) == 32)
3379 sparc_mode_class[i] = 1 << (int) O_MODE;
3380 else
3381 sparc_mode_class[i] = 0;
3382 break;
3383 case MODE_FLOAT:
3384 case MODE_COMPLEX_FLOAT:
3385 if (GET_MODE_SIZE (i) <= 4)
3386 sparc_mode_class[i] = 1 << (int) SF_MODE;
3387 else if (GET_MODE_SIZE (i) == 8)
3388 sparc_mode_class[i] = 1 << (int) DF_MODE;
3389 else if (GET_MODE_SIZE (i) == 16)
3390 sparc_mode_class[i] = 1 << (int) TF_MODE;
3391 else if (GET_MODE_SIZE (i) == 32)
3392 sparc_mode_class[i] = 1 << (int) OF_MODE;
3393 else
3394 sparc_mode_class[i] = 0;
3395 break;
3396 case MODE_CC:
3397 default:
3398 /* mode_class hasn't been initialized yet for EXTRA_CC_MODES, so
3399 we must explicitly check for them here. */
3400 if (i == (int) CCFPmode || i == (int) CCFPEmode)
3401 sparc_mode_class[i] = 1 << (int) CCFP_MODE;
3402 else if (i == (int) CCmode || i == (int) CC_NOOVmode
3403 || i == (int) CCXmode || i == (int) CCX_NOOVmode)
3404 sparc_mode_class[i] = 1 << (int) CC_MODE;
3405 else
3406 sparc_mode_class[i] = 0;
3407 break;
3411 if (TARGET_ARCH64)
3412 hard_regno_mode_classes = hard_64bit_mode_classes;
3413 else
3414 hard_regno_mode_classes = hard_32bit_mode_classes;
3416 /* Initialize the array used by REGNO_REG_CLASS. */
3417 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3419 if (i < 16 && TARGET_V8PLUS)
3420 sparc_regno_reg_class[i] = I64_REGS;
3421 else if (i < 32 || i == FRAME_POINTER_REGNUM)
3422 sparc_regno_reg_class[i] = GENERAL_REGS;
3423 else if (i < 64)
3424 sparc_regno_reg_class[i] = FP_REGS;
3425 else if (i < 96)
3426 sparc_regno_reg_class[i] = EXTRA_FP_REGS;
3427 else if (i < 100)
3428 sparc_regno_reg_class[i] = FPCC_REGS;
3429 else
3430 sparc_regno_reg_class[i] = NO_REGS;
3434 /* Save non call used registers from LOW to HIGH at BASE+OFFSET.
3435 N_REGS is the number of 4-byte regs saved thus far. This applies even to
3436 v9 int regs as it simplifies the code. */
3438 static int
3439 save_regs (FILE *file, int low, int high, const char *base,
3440 int offset, int n_regs, int real_offset)
3442 int i;
3444 if (TARGET_ARCH64 && high <= 32)
3446 for (i = low; i < high; i++)
3448 if (regs_ever_live[i] && ! call_used_regs[i])
3450 fprintf (file, "\tstx\t%s, [%s+%d]\n",
3451 reg_names[i], base, offset + 4 * n_regs);
3452 if (dwarf2out_do_frame ())
3453 dwarf2out_reg_save ("", i, real_offset + 4 * n_regs);
3454 n_regs += 2;
3458 else
3460 for (i = low; i < high; i += 2)
3462 if (regs_ever_live[i] && ! call_used_regs[i])
3464 if (regs_ever_live[i+1] && ! call_used_regs[i+1])
3466 fprintf (file, "\tstd\t%s, [%s+%d]\n",
3467 reg_names[i], base, offset + 4 * n_regs);
3468 if (dwarf2out_do_frame ())
3470 char *l = dwarf2out_cfi_label ();
3471 dwarf2out_reg_save (l, i, real_offset + 4 * n_regs);
3472 dwarf2out_reg_save (l, i+1, real_offset + 4 * n_regs + 4);
3474 n_regs += 2;
3476 else
3478 fprintf (file, "\tst\t%s, [%s+%d]\n",
3479 reg_names[i], base, offset + 4 * n_regs);
3480 if (dwarf2out_do_frame ())
3481 dwarf2out_reg_save ("", i, real_offset + 4 * n_regs);
3482 n_regs += 2;
3485 else
3487 if (regs_ever_live[i+1] && ! call_used_regs[i+1])
3489 fprintf (file, "\tst\t%s, [%s+%d]\n",
3490 reg_names[i+1], base, offset + 4 * n_regs + 4);
3491 if (dwarf2out_do_frame ())
3492 dwarf2out_reg_save ("", i + 1, real_offset + 4 * n_regs + 4);
3493 n_regs += 2;
3498 return n_regs;
3501 /* Restore non call used registers from LOW to HIGH at BASE+OFFSET.
3503 N_REGS is the number of 4-byte regs saved thus far. This applies even to
3504 v9 int regs as it simplifies the code. */
3506 static int
3507 restore_regs (FILE *file, int low, int high, const char *base,
3508 int offset, int n_regs)
3510 int i;
3512 if (TARGET_ARCH64 && high <= 32)
3514 for (i = low; i < high; i++)
3516 if (regs_ever_live[i] && ! call_used_regs[i])
3517 fprintf (file, "\tldx\t[%s+%d], %s\n",
3518 base, offset + 4 * n_regs, reg_names[i]),
3519 n_regs += 2;
3522 else
3524 for (i = low; i < high; i += 2)
3526 if (regs_ever_live[i] && ! call_used_regs[i])
3527 if (regs_ever_live[i+1] && ! call_used_regs[i+1])
3528 fprintf (file, "\tldd\t[%s+%d], %s\n",
3529 base, offset + 4 * n_regs, reg_names[i]),
3530 n_regs += 2;
3531 else
3532 fprintf (file, "\tld\t[%s+%d], %s\n",
3533 base, offset + 4 * n_regs, reg_names[i]),
3534 n_regs += 2;
3535 else if (regs_ever_live[i+1] && ! call_used_regs[i+1])
3536 fprintf (file, "\tld\t[%s+%d], %s\n",
3537 base, offset + 4 * n_regs + 4, reg_names[i+1]),
3538 n_regs += 2;
3541 return n_regs;
3544 /* Compute the frame size required by the function. This function is called
3545 during the reload pass and also by output_function_prologue(). */
3548 compute_frame_size (int size, int leaf_function)
3550 int n_regs = 0, i;
3551 int outgoing_args_size = (current_function_outgoing_args_size
3552 + REG_PARM_STACK_SPACE (current_function_decl));
3554 /* N_REGS is the number of 4-byte regs saved thus far. This applies
3555 even to v9 int regs to be consistent with save_regs/restore_regs. */
3557 if (TARGET_ARCH64)
3559 for (i = 0; i < 8; i++)
3560 if (regs_ever_live[i] && ! call_used_regs[i])
3561 n_regs += 2;
3563 else
3565 for (i = 0; i < 8; i += 2)
3566 if ((regs_ever_live[i] && ! call_used_regs[i])
3567 || (regs_ever_live[i+1] && ! call_used_regs[i+1]))
3568 n_regs += 2;
3571 for (i = 32; i < (TARGET_V9 ? 96 : 64); i += 2)
3572 if ((regs_ever_live[i] && ! call_used_regs[i])
3573 || (regs_ever_live[i+1] && ! call_used_regs[i+1]))
3574 n_regs += 2;
3576 /* Set up values for use in `function_epilogue'. */
3577 num_gfregs = n_regs;
3579 if (leaf_function && n_regs == 0
3580 && size == 0 && current_function_outgoing_args_size == 0)
3582 actual_fsize = apparent_fsize = 0;
3584 else
3586 /* We subtract STARTING_FRAME_OFFSET, remember it's negative. */
3587 apparent_fsize = (size - STARTING_FRAME_OFFSET + 7) & -8;
3588 apparent_fsize += n_regs * 4;
3589 actual_fsize = apparent_fsize + ((outgoing_args_size + 7) & -8);
3592 /* Make sure nothing can clobber our register windows.
3593 If a SAVE must be done, or there is a stack-local variable,
3594 the register window area must be allocated.
3595 ??? For v8 we apparently need an additional 8 bytes of reserved space. */
3596 if (leaf_function == 0 || size > 0)
3597 actual_fsize += (16 * UNITS_PER_WORD) + (TARGET_ARCH64 ? 0 : 8);
3599 return SPARC_STACK_ALIGN (actual_fsize);
3602 /* Build a (32 bit) big number in a register. */
3603 /* ??? We may be able to use the set macro here too. */
3605 static void
3606 build_big_number (FILE *file, int num, const char *reg)
3608 if (num >= 0 || ! TARGET_ARCH64)
3610 fprintf (file, "\tsethi\t%%hi(%d), %s\n", num, reg);
3611 if ((num & 0x3ff) != 0)
3612 fprintf (file, "\tor\t%s, %%lo(%d), %s\n", reg, num, reg);
3614 else /* num < 0 && TARGET_ARCH64 */
3616 /* Sethi does not sign extend, so we must use a little trickery
3617 to use it for negative numbers. Invert the constant before
3618 loading it in, then use xor immediate to invert the loaded bits
3619 (along with the upper 32 bits) to the desired constant. This
3620 works because the sethi and immediate fields overlap. */
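/* A rough worked example (assuming REG is %g1): for num == -0x12345678
   we get inv == 0x12345677 and low == -0x400 + 0x188 == -632, so we emit

	sethi	%hi(0x12345677), %g1	! %g1 = 0x0000000012345400
	xor	%g1, -632, %g1		! %g1 = 0xffffffffedcba988

   and 0xffffffffedcba988 is -0x12345678 as a 64-bit value.  */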
3621 int asize = num;
3622 int inv = ~asize;
3623 int low = -0x400 + (asize & 0x3FF);
3625 fprintf (file, "\tsethi\t%%hi(%d), %s\n\txor\t%s, %d, %s\n",
3626 inv, reg, reg, low, reg);
3630 /* Output any necessary .register pseudo-ops. */
3631 void
3632 sparc_output_scratch_registers (FILE *file ATTRIBUTE_UNUSED)
3634 #ifdef HAVE_AS_REGISTER_PSEUDO_OP
3635 int i;
3637 if (TARGET_ARCH32)
3638 return;
3640 /* Check if %g[2367] were used without
3641 .register being printed for them already. */
3642 for (i = 2; i < 8; i++)
3644 if (regs_ever_live [i]
3645 && ! sparc_hard_reg_printed [i])
3647 sparc_hard_reg_printed [i] = 1;
3648 fprintf (file, "\t.register\t%%g%d, #scratch\n", i);
3650 if (i == 3) i = 5;
3652 #endif
3655 /* This function generates the assembly code for function entry.
3656 FILE is a stdio stream to output the code to.
3657 SIZE is an int: how many units of temporary storage to allocate.
3658 Refer to the array `regs_ever_live' to determine which registers
3659 to save; `regs_ever_live[I]' is nonzero if register number I
3660 is ever used in the function. This macro is responsible for
3661 knowing which registers should not be saved even if used. */
3663 /* On SPARC, move-double insns between fpu and cpu need an 8-byte block
3664 of memory. If any fpu reg is used in the function, we allocate
3665 such a block here, at the bottom of the frame, just in case it's needed.
3667 If this function is a leaf procedure, then we may choose not
3668 to do a "save" insn. The decision about whether or not
3669 to do this is made in regclass.c. */
3671 static void
3672 sparc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
3674 if (TARGET_FLAT)
3675 sparc_flat_function_prologue (file, size);
3676 else
3677 sparc_nonflat_function_prologue (file, size,
3678 current_function_uses_only_leaf_regs);
3681 /* Output code for the function prologue. */
3683 static void
3684 sparc_nonflat_function_prologue (FILE *file, HOST_WIDE_INT size,
3685 int leaf_function)
3687 sparc_output_scratch_registers (file);
3689 /* Need to use actual_fsize, since we are also allocating
3690 space for our callee (and our own register save area). */
3691 actual_fsize = compute_frame_size (size, leaf_function);
3693 if (leaf_function)
3695 frame_base_name = "%sp";
3696 frame_base_offset = actual_fsize + SPARC_STACK_BIAS;
3698 else
3700 frame_base_name = "%fp";
3701 frame_base_offset = SPARC_STACK_BIAS;
3704 /* This is only for the human reader. */
3705 fprintf (file, "\t%s#PROLOGUE# 0\n", ASM_COMMENT_START);
3707 if (actual_fsize == 0)
3708 /* do nothing. */ ;
3709 else if (! leaf_function)
3711 if (actual_fsize <= 4096)
3712 fprintf (file, "\tsave\t%%sp, -%d, %%sp\n", actual_fsize);
3713 else if (actual_fsize <= 8192)
3715 fprintf (file, "\tsave\t%%sp, -4096, %%sp\n");
3716 fprintf (file, "\tadd\t%%sp, -%d, %%sp\n", actual_fsize - 4096);
3718 else
3720 build_big_number (file, -actual_fsize, "%g1");
3721 fprintf (file, "\tsave\t%%sp, %%g1, %%sp\n");
3724 else /* leaf function */
3726 if (actual_fsize <= 4096)
3727 fprintf (file, "\tadd\t%%sp, -%d, %%sp\n", actual_fsize);
3728 else if (actual_fsize <= 8192)
3730 fprintf (file, "\tadd\t%%sp, -4096, %%sp\n");
3731 fprintf (file, "\tadd\t%%sp, -%d, %%sp\n", actual_fsize - 4096);
3733 else
3735 build_big_number (file, -actual_fsize, "%g1");
3736 fprintf (file, "\tadd\t%%sp, %%g1, %%sp\n");
3740 if (dwarf2out_do_frame () && actual_fsize)
3742 char *label = dwarf2out_cfi_label ();
3744 /* The canonical frame address refers to the top of the frame. */
3745 dwarf2out_def_cfa (label, (leaf_function ? STACK_POINTER_REGNUM
3746 : HARD_FRAME_POINTER_REGNUM),
3747 frame_base_offset);
3749 if (! leaf_function)
3751 /* Note the register window save. This tells the unwinder that
3752 it needs to restore the window registers from the previous
3753 frame's window save area at 0(cfa). */
3754 dwarf2out_window_save (label);
3756 /* The return address (-8) is now in %i7. */
3757 dwarf2out_return_reg (label, 31);
3761 /* If doing anything with PIC, do it now. */
3762 if (! flag_pic)
3763 fprintf (file, "\t%s#PROLOGUE# 1\n", ASM_COMMENT_START);
3765 /* Call saved registers are saved just above the outgoing argument area. */
3766 if (num_gfregs)
3768 int offset, real_offset, n_regs;
3769 const char *base;
3771 real_offset = -apparent_fsize;
3772 offset = -apparent_fsize + frame_base_offset;
3773 if (offset < -4096 || offset + num_gfregs * 4 > 4096)
3775 /* ??? This might be optimized a little as %g1 might already have a
3776 value close enough that a single add insn will do. */
3777 /* ??? Although, all of this is probably only a temporary fix
3778 because if %g1 can hold a function result, then
3779 output_function_epilogue will lose (the result will get
3780 clobbered). */
3781 build_big_number (file, offset, "%g1");
3782 fprintf (file, "\tadd\t%s, %%g1, %%g1\n", frame_base_name);
3783 base = "%g1";
3784 offset = 0;
3786 else
3788 base = frame_base_name;
3791 n_regs = save_regs (file, 0, 8, base, offset, 0, real_offset);
3792 save_regs (file, 32, TARGET_V9 ? 96 : 64, base, offset, n_regs,
3793 real_offset);
3797 /* Output code to restore any call saved registers. */
3799 static void
3800 output_restore_regs (FILE *file, int leaf_function ATTRIBUTE_UNUSED)
3802 int offset, n_regs;
3803 const char *base;
3805 offset = -apparent_fsize + frame_base_offset;
3806 if (offset < -4096 || offset + num_gfregs * 4 > 4096 - 8 /*double*/)
3808 build_big_number (file, offset, "%g1");
3809 fprintf (file, "\tadd\t%s, %%g1, %%g1\n", frame_base_name);
3810 base = "%g1";
3811 offset = 0;
3813 else
3815 base = frame_base_name;
3818 n_regs = restore_regs (file, 0, 8, base, offset, 0);
3819 restore_regs (file, 32, TARGET_V9 ? 96 : 64, base, offset, n_regs);
3822 /* This function generates the assembly code for function exit,
3823 on machines that need it.
3825 The function epilogue should not depend on the current stack pointer!
3826 It should use the frame pointer only. This is mandatory because
3827 of alloca; we also take advantage of it to omit stack adjustments
3828 before returning. */
3830 static void
3831 sparc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
3833 if (TARGET_FLAT)
3834 sparc_flat_function_epilogue (file, size);
3835 else
3836 sparc_nonflat_function_epilogue (file, size,
3837 current_function_uses_only_leaf_regs);
3840 /* Output code for the function epilogue. */
3842 static void
3843 sparc_nonflat_function_epilogue (FILE *file,
3844 HOST_WIDE_INT size ATTRIBUTE_UNUSED,
3845 int leaf_function)
3847 const char *ret;
3849 if (current_function_epilogue_delay_list == 0)
3851 /* If code does not drop into the epilogue, we need
3852 do nothing except output pending case vectors.
3854 We still have to output a dummy nop for the sake of
3855 sane backtraces. Otherwise, if the last two instructions
3856 of a function are `call foo; dslot', the return
3857 PC of foo (i.e. the address of the call insn plus 8) can point to
3858 the first instruction in the next function. */
3859 rtx insn, last_real_insn;
3861 insn = get_last_insn ();
3863 last_real_insn = prev_real_insn (insn);
3864 if (last_real_insn
3865 && GET_CODE (last_real_insn) == INSN
3866 && GET_CODE (PATTERN (last_real_insn)) == SEQUENCE)
3867 last_real_insn = XVECEXP (PATTERN (last_real_insn), 0, 0);
3869 if (last_real_insn && GET_CODE (last_real_insn) == CALL_INSN)
3870 fputs ("\tnop\n", file);
3872 if (GET_CODE (insn) == NOTE)
3873 insn = prev_nonnote_insn (insn);
3874 if (insn && GET_CODE (insn) == BARRIER)
3875 goto output_vectors;
3878 if (num_gfregs)
3879 output_restore_regs (file, leaf_function);
3881 /* Work out how to skip the caller's unimp instruction if required. */
3882 if (leaf_function)
3883 ret = (SKIP_CALLERS_UNIMP_P ? "jmp\t%o7+12" : "retl");
3884 else
3885 ret = (SKIP_CALLERS_UNIMP_P ? "jmp\t%i7+12" : "ret");
3887 if (! leaf_function)
3889 if (current_function_calls_eh_return)
3891 if (current_function_epilogue_delay_list)
3892 abort ();
3893 if (SKIP_CALLERS_UNIMP_P)
3894 abort ();
3896 fputs ("\trestore\n\tretl\n\tadd\t%sp, %g1, %sp\n", file);
3898 /* If we wound up with things in our delay slot, flush them here. */
3899 else if (current_function_epilogue_delay_list)
3901 rtx delay = PATTERN (XEXP (current_function_epilogue_delay_list, 0));
3903 if (TARGET_V9 && ! epilogue_renumber (&delay, 1))
3905 epilogue_renumber (&delay, 0);
3906 fputs (SKIP_CALLERS_UNIMP_P
3907 ? "\treturn\t%i7+12\n"
3908 : "\treturn\t%i7+8\n", file);
3909 final_scan_insn (XEXP (current_function_epilogue_delay_list, 0),
3910 file, 1, 0, 0);
3912 else
3914 rtx insn, src;
3916 if (GET_CODE (delay) != SET)
3917 abort();
3919 src = SET_SRC (delay);
3920 if (GET_CODE (src) == ASHIFT)
3922 if (XEXP (src, 1) != const1_rtx)
3923 abort();
3924 SET_SRC (delay)
3925 = gen_rtx_PLUS (GET_MODE (src), XEXP (src, 0),
3926 XEXP (src, 0));
3929 insn = gen_rtx_PARALLEL (VOIDmode,
3930 gen_rtvec (2, delay,
3931 gen_rtx_RETURN (VOIDmode)));
3932 insn = emit_jump_insn (insn);
3934 sparc_emitting_epilogue = true;
3935 final_scan_insn (insn, file, 1, 0, 1);
3936 sparc_emitting_epilogue = false;
3939 else if (TARGET_V9 && ! SKIP_CALLERS_UNIMP_P)
3940 fputs ("\treturn\t%i7+8\n\tnop\n", file);
3941 else
3942 fprintf (file, "\t%s\n\trestore\n", ret);
3944 /* All of the following cases are for leaf functions. */
3945 else if (current_function_calls_eh_return)
3946 abort ();
3947 else if (current_function_epilogue_delay_list)
3949 /* eligible_for_epilogue_delay ensures that if this is a
3950 leaf function, then we will only have an insn in the delay slot
3951 if the frame size is zero, thus no adjust for the stack is
3952 needed here. */
3953 if (actual_fsize != 0)
3954 abort ();
3955 fprintf (file, "\t%s\n", ret);
3956 final_scan_insn (XEXP (current_function_epilogue_delay_list, 0),
3957 file, 1, 0, 1);
3959 /* Output 'nop' instead of 'sub %sp,-0,%sp' when no frame, so as to
3960 avoid generating confusing assembly language output. */
3961 else if (actual_fsize == 0)
3962 fprintf (file, "\t%s\n\tnop\n", ret);
3963 else if (actual_fsize <= 4096)
3964 fprintf (file, "\t%s\n\tsub\t%%sp, -%d, %%sp\n", ret, actual_fsize);
3965 else if (actual_fsize <= 8192)
3966 fprintf (file, "\tsub\t%%sp, -4096, %%sp\n\t%s\n\tsub\t%%sp, -%d, %%sp\n",
3967 ret, actual_fsize - 4096);
3968 else if ((actual_fsize & 0x3ff) == 0)
3969 fprintf (file, "\tsethi\t%%hi(%d), %%g1\n\t%s\n\tadd\t%%sp, %%g1, %%sp\n",
3970 actual_fsize, ret);
3971 else
3972 fprintf (file, "\tsethi\t%%hi(%d), %%g1\n\tor\t%%g1, %%lo(%d), %%g1\n\t%s\n\tadd\t%%sp, %%g1, %%sp\n",
3973 actual_fsize, actual_fsize, ret);
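/* The cases above are dictated by the 13-bit signed immediate field of the
   SPARC arithmetic instructions: a single sub covers frames up to 4096
   bytes, two subs cover up to 8192, and larger frames are built in %g1
   with sethi, plus an extra or %lo() when the size is not a multiple of
   1024.  */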
3975 output_vectors:
3976 sparc_output_deferred_case_vectors ();
3979 /* Output a sibling call. */
3981 const char *
3982 output_sibcall (rtx insn, rtx call_operand)
3984 int leaf_regs = current_function_uses_only_leaf_regs;
3985 rtx operands[3];
3986 int delay_slot = dbr_sequence_length () > 0;
3988 if (num_gfregs)
3990 /* The call to restore global regs might clobber
3991 the delay slot. Instead of checking for this,
3992 output the delay slot now. */
3993 if (delay_slot)
3995 rtx delay = NEXT_INSN (insn);
3997 if (! delay)
3998 abort ();
4000 final_scan_insn (delay, asm_out_file, 1, 0, 1);
4001 PATTERN (delay) = gen_blockage ();
4002 INSN_CODE (delay) = -1;
4003 delay_slot = 0;
4005 output_restore_regs (asm_out_file, leaf_regs);
4008 operands[0] = call_operand;
4010 if (leaf_regs)
4012 #ifdef HAVE_AS_RELAX_OPTION
4013 /* If as and ld are relaxing tail call insns into branch-always form,
4014 always use or %o7,%g0,X; call Y; or X,%g0,%o7, so that it can
4015 be optimized. With sethi/jmpl, neither as nor ld has an easy way
4016 to find out whether somebody branches between the sethi and the jmpl. */
4017 int spare_slot = 0;
4018 #else
4019 int spare_slot = ((TARGET_ARCH32 || TARGET_CM_MEDLOW) && ! flag_pic);
4020 #endif
4021 int size = 0;
4023 if ((actual_fsize || ! spare_slot) && delay_slot)
4025 rtx delay = NEXT_INSN (insn);
4027 if (! delay)
4028 abort ();
4030 final_scan_insn (delay, asm_out_file, 1, 0, 1);
4031 PATTERN (delay) = gen_blockage ();
4032 INSN_CODE (delay) = -1;
4033 delay_slot = 0;
4035 if (actual_fsize)
4037 if (actual_fsize <= 4096)
4038 size = actual_fsize;
4039 else if (actual_fsize <= 8192)
4041 fputs ("\tsub\t%sp, -4096, %sp\n", asm_out_file);
4042 size = actual_fsize - 4096;
4044 else if ((actual_fsize & 0x3ff) == 0)
4045 fprintf (asm_out_file,
4046 "\tsethi\t%%hi(%d), %%g1\n\tadd\t%%sp, %%g1, %%sp\n",
4047 actual_fsize);
4048 else
4050 fprintf (asm_out_file,
4051 "\tsethi\t%%hi(%d), %%g1\n\tor\t%%g1, %%lo(%d), %%g1\n",
4052 actual_fsize, actual_fsize);
4053 fputs ("\tadd\t%sp, %g1, %sp\n", asm_out_file);
4056 if (spare_slot)
4058 output_asm_insn ("sethi\t%%hi(%a0), %%g1", operands);
4059 output_asm_insn ("jmpl\t%%g1 + %%lo(%a0), %%g0", operands);
4060 if (size)
4061 fprintf (asm_out_file, "\t sub\t%%sp, -%d, %%sp\n", size);
4062 else if (! delay_slot)
4063 fputs ("\t nop\n", asm_out_file);
4065 else
4067 if (size)
4068 fprintf (asm_out_file, "\tsub\t%%sp, -%d, %%sp\n", size);
4069 /* Use or with rs2 %g0 instead of mov, so that as/ld can optimize
4070 it into a branch if possible. */
4071 output_asm_insn ("or\t%%o7, %%g0, %%g1", operands);
4072 output_asm_insn ("call\t%a0, 0", operands);
4073 output_asm_insn (" or\t%%g1, %%g0, %%o7", operands);
4075 return "";
4078 output_asm_insn ("call\t%a0, 0", operands);
4079 if (delay_slot)
4081 rtx delay = NEXT_INSN (insn), pat;
4083 if (! delay)
4084 abort ();
4086 pat = PATTERN (delay);
4087 if (GET_CODE (pat) != SET)
4088 abort ();
4090 operands[0] = SET_DEST (pat);
4091 pat = SET_SRC (pat);
4092 switch (GET_CODE (pat))
4094 case PLUS:
4095 operands[1] = XEXP (pat, 0);
4096 operands[2] = XEXP (pat, 1);
4097 output_asm_insn (" restore %r1, %2, %Y0", operands);
4098 break;
4099 case LO_SUM:
4100 operands[1] = XEXP (pat, 0);
4101 operands[2] = XEXP (pat, 1);
4102 output_asm_insn (" restore %r1, %%lo(%a2), %Y0", operands);
4103 break;
4104 case ASHIFT:
4105 operands[1] = XEXP (pat, 0);
4106 output_asm_insn (" restore %r1, %r1, %Y0", operands);
4107 break;
4108 default:
4109 operands[1] = pat;
4110 output_asm_insn (" restore %%g0, %1, %Y0", operands);
4111 break;
4113 PATTERN (delay) = gen_blockage ();
4114 INSN_CODE (delay) = -1;
4116 else
4117 fputs ("\t restore\n", asm_out_file);
4118 return "";
4121 /* Functions for handling argument passing.
4123 For v8 the first six args are normally in registers and the rest are
4124 pushed. Any arg that starts within the first 6 words is at least
4125 partially passed in a register unless its data type forbids it.
4127 For v9, the argument registers are laid out as an array of 16 elements
4128 and arguments are added sequentially. The first 6 int args and up to the
4129 first 16 fp args (depending on size) are passed in regs.
4131 Slot    Stack   Integral   Float   Float in structure   Double   Long Double
4132 ----    -----   --------   -----   ------------------   ------   -----------
4133  15   [SP+248]              %f31       %f30,%f31         %d30
4134  14   [SP+240]              %f29       %f28,%f29         %d28        %q28
4135  13   [SP+232]              %f27       %f26,%f27         %d26
4136  12   [SP+224]              %f25       %f24,%f25         %d24        %q24
4137  11   [SP+216]              %f23       %f22,%f23         %d22
4138  10   [SP+208]              %f21       %f20,%f21         %d20        %q20
4139   9   [SP+200]              %f19       %f18,%f19         %d18
4140   8   [SP+192]              %f17       %f16,%f17         %d16        %q16
4141   7   [SP+184]              %f15       %f14,%f15         %d14
4142   6   [SP+176]              %f13       %f12,%f13         %d12        %q12
4143   5   [SP+168]     %o5      %f11       %f10,%f11         %d10
4144   4   [SP+160]     %o4       %f9        %f8,%f9           %d8         %q8
4145   3   [SP+152]     %o3       %f7        %f6,%f7           %d6
4146   2   [SP+144]     %o2       %f5        %f4,%f5           %d4         %q4
4147   1   [SP+136]     %o1       %f3        %f2,%f3           %d2
4148   0   [SP+128]     %o0       %f1        %f0,%f1           %d0         %q0
4150 Here SP = %sp if -mno-stack-bias or %sp+stack_bias otherwise.
4152 Integral arguments are always passed as 64 bit quantities appropriately
4153 extended.
4155 Passing of floating point values is handled as follows.
4156 If a prototype is in scope:
4157 If the value is in a named argument (i.e. not a stdarg function or a
4158 value not part of the `...') then the value is passed in the appropriate
4159 fp reg.
4160 If the value is part of the `...' and is passed in one of the first 6
4161 slots then the value is passed in the appropriate int reg.
4162 If the value is part of the `...' and is not passed in one of the first 6
4163 slots then the value is passed in memory.
4164 If a prototype is not in scope:
4165 If the value is one of the first 6 arguments the value is passed in the
4166 appropriate integer reg and the appropriate fp reg.
4167 If the value is not one of the first 6 arguments the value is passed in
4168 the appropriate fp reg and in memory.
4171 /* Maximum number of int regs for args. */
4172 #define SPARC_INT_ARG_MAX 6
4173 /* Maximum number of fp regs for args. */
4174 #define SPARC_FP_ARG_MAX 16
4176 #define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
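/* A worked example of the convention above, assuming TARGET_ARCH64,
   TARGET_FPU and a prototype in scope (purely illustrative):

     extern void f (int a, double b, float c);

   a occupies slot 0 and is passed in %o0, b occupies slot 1 and is passed
   in %d2 (%f2/%f3), and c occupies slot 2 and is passed in %f5, the odd
   half of its slot (see the SFmode adjustment in function_arg_slotno).
   ROUND_ADVANCE merely converts a size in bytes into a number of such
   word-sized slots, e.g. with 8-byte words ROUND_ADVANCE (8) == 1 and
   ROUND_ADVANCE (9) == 2.  */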
4178 /* Handle the INIT_CUMULATIVE_ARGS macro.
4179 Initialize a variable CUM of type CUMULATIVE_ARGS
4180 for a call to a function whose data type is FNTYPE.
4181 For a library call, FNTYPE is 0. */
4183 void
4184 init_cumulative_args (struct sparc_args *cum, tree fntype,
4185 rtx libname ATTRIBUTE_UNUSED,
4186 tree fndecl ATTRIBUTE_UNUSED)
4188 cum->words = 0;
4189 cum->prototype_p = fntype && TYPE_ARG_TYPES (fntype);
4190 cum->libcall_p = fntype == 0;
4193 /* Compute the slot number to pass an argument in.
4194 Returns the slot number or -1 if passing on the stack.
4196 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4197 the preceding args and about the function being called.
4198 MODE is the argument's machine mode.
4199 TYPE is the data type of the argument (as a tree).
4200 This is null for libcalls where that information may
4201 not be available.
4202 NAMED is nonzero if this argument is a named parameter
4203 (otherwise it is an extra parameter matching an ellipsis).
4204 INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
4205 *PREGNO records the register number to use if scalar type.
4206 *PPADDING records the amount of padding needed in words. */
4208 static int
4209 function_arg_slotno (const struct sparc_args *cum, enum machine_mode mode,
4210 tree type, int named, int incoming_p,
4211 int *pregno, int *ppadding)
4213 int regbase = (incoming_p
4214 ? SPARC_INCOMING_INT_ARG_FIRST
4215 : SPARC_OUTGOING_INT_ARG_FIRST);
4216 int slotno = cum->words;
4217 int regno;
4219 *ppadding = 0;
4221 if (type != 0 && TREE_ADDRESSABLE (type))
4222 return -1;
4223 if (TARGET_ARCH32
4224 && type != 0 && mode == BLKmode
4225 && TYPE_ALIGN (type) % PARM_BOUNDARY != 0)
4226 return -1;
4228 switch (mode)
4230 case VOIDmode :
4231 /* MODE is VOIDmode when generating the actual call.
4232 See emit_call_1. */
4233 return -1;
4235 case QImode : case CQImode :
4236 case HImode : case CHImode :
4237 case SImode : case CSImode :
4238 case DImode : case CDImode :
4239 case TImode : case CTImode :
4240 if (slotno >= SPARC_INT_ARG_MAX)
4241 return -1;
4242 regno = regbase + slotno;
4243 break;
4245 case SFmode : case SCmode :
4246 case DFmode : case DCmode :
4247 case TFmode : case TCmode :
4248 if (TARGET_ARCH32)
4250 if (slotno >= SPARC_INT_ARG_MAX)
4251 return -1;
4252 regno = regbase + slotno;
4254 else
4256 if ((mode == TFmode || mode == TCmode)
4257 && (slotno & 1) != 0)
4258 slotno++, *ppadding = 1;
4259 if (TARGET_FPU && named)
4261 if (slotno >= SPARC_FP_ARG_MAX)
4262 return -1;
4263 regno = SPARC_FP_ARG_FIRST + slotno * 2;
4264 if (mode == SFmode)
4265 regno++;
4267 else
4269 if (slotno >= SPARC_INT_ARG_MAX)
4270 return -1;
4271 regno = regbase + slotno;
4274 break;
4276 case BLKmode :
4277 /* For sparc64, objects requiring 16 byte alignment get it. */
4278 if (TARGET_ARCH64)
4280 if (type && TYPE_ALIGN (type) == 128 && (slotno & 1) != 0)
4281 slotno++, *ppadding = 1;
4284 if (TARGET_ARCH32
4285 || (type && TREE_CODE (type) == UNION_TYPE))
4287 if (slotno >= SPARC_INT_ARG_MAX)
4288 return -1;
4289 regno = regbase + slotno;
4291 else
4293 tree field;
4294 int intregs_p = 0, fpregs_p = 0;
4295 /* The ABI obviously doesn't specify how packed
4296 structures are passed. These are defined to be passed
4297 in int regs if possible, otherwise memory. */
4298 int packed_p = 0;
4300 /* First see what kinds of registers we need. */
4301 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4303 if (TREE_CODE (field) == FIELD_DECL)
4305 if (TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4306 && TARGET_FPU)
4307 fpregs_p = 1;
4308 else
4309 intregs_p = 1;
4310 if (DECL_PACKED (field))
4311 packed_p = 1;
4314 if (packed_p || !named)
4315 fpregs_p = 0, intregs_p = 1;
4317 /* If all arg slots are filled, then must pass on stack. */
4318 if (fpregs_p && slotno >= SPARC_FP_ARG_MAX)
4319 return -1;
4320 /* If there are only int args and all int arg slots are filled,
4321 then must pass on stack. */
4322 if (!fpregs_p && intregs_p && slotno >= SPARC_INT_ARG_MAX)
4323 return -1;
4324 /* Note that even if all int arg slots are filled, fp members may
4325 still be passed in regs if such regs are available.
4326 *PREGNO isn't set because there may be more than one; it's up
4327 to the caller to compute them. */
4328 return slotno;
4330 break;
4332 default :
4333 abort ();
4336 *pregno = regno;
4337 return slotno;
4340 /* Handle recursive register counting for structure field layout. */
4342 struct function_arg_record_value_parms
4344 rtx ret; /* return expression being built. */
4345 int slotno; /* slot number of the argument. */
4346 int named; /* whether the argument is named. */
4347 int regbase; /* regno of the base register. */
4348 int stack; /* 1 if part of the argument is on the stack. */
4349 int intoffset; /* offset of the pending integer field. */
4350 unsigned int nregs; /* number of words passed in registers. */
4353 static void function_arg_record_value_3
4354 (HOST_WIDE_INT, struct function_arg_record_value_parms *);
4355 static void function_arg_record_value_2
4356 (tree, HOST_WIDE_INT, struct function_arg_record_value_parms *);
4357 static void function_arg_record_value_1
4358 (tree, HOST_WIDE_INT, struct function_arg_record_value_parms *);
4359 static rtx function_arg_record_value (tree, enum machine_mode, int, int, int);
4361 /* A subroutine of function_arg_record_value. Traverse the structure
4362 recursively and determine how many registers will be required. */
4364 static void
4365 function_arg_record_value_1 (tree type, HOST_WIDE_INT startbitpos,
4366 struct function_arg_record_value_parms *parms)
4368 tree field;
4370 /* The ABI obviously doesn't specify how packed structures are
4371 passed. These are defined to be passed in int regs if possible,
4372 otherwise memory. */
4373 int packed_p = 0;
4375 /* We need to compute how many registers are needed so we can
4376 allocate the PARALLEL, but before we can do that we need to know
4377 whether there are any packed fields. If there are, int regs are
4378 used regardless of whether any fp values are present. */
4379 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4381 if (TREE_CODE (field) == FIELD_DECL && DECL_PACKED (field))
4383 packed_p = 1;
4384 break;
4388 /* Compute how many registers we need. */
4389 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4391 if (TREE_CODE (field) == FIELD_DECL)
4393 HOST_WIDE_INT bitpos = startbitpos;
4395 if (DECL_SIZE (field) != 0
4396 && host_integerp (bit_position (field), 1))
4397 bitpos += int_bit_position (field);
4399 /* ??? FIXME: else assume zero offset. */
4401 if (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE)
4402 function_arg_record_value_1 (TREE_TYPE (field), bitpos, parms);
4403 else if ((TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4404 || (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE
4405 && (TREE_CODE (TREE_TYPE (TREE_TYPE (field)))
4406 == REAL_TYPE)))
4407 && TARGET_FPU
4408 && ! packed_p
4409 && parms->named)
4411 if (parms->intoffset != -1)
4413 int intslots, this_slotno;
4415 intslots = (bitpos - parms->intoffset + BITS_PER_WORD - 1)
4416 / BITS_PER_WORD;
4417 this_slotno = parms->slotno + parms->intoffset
4418 / BITS_PER_WORD;
4420 if (intslots > 0 && intslots > SPARC_INT_ARG_MAX - this_slotno)
4422 intslots = MAX (0, SPARC_INT_ARG_MAX - this_slotno);
4423 /* We need to pass this field on the stack. */
4424 parms->stack = 1;
4427 parms->nregs += intslots;
4428 parms->intoffset = -1;
4431 /* There's no need to check this_slotno < SPARC_FP_ARG_MAX.
4432 If it weren't true we wouldn't be here. */
4433 parms->nregs += 1;
4434 if (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE)
4435 parms->nregs += 1;
4437 else
4439 if (parms->intoffset == -1)
4440 parms->intoffset = bitpos;
4446 /* A subroutine of function_arg_record_value. Assign the bits of the
4447 structure between parms->intoffset and bitpos to integer registers. */
4449 static void
4450 function_arg_record_value_3 (HOST_WIDE_INT bitpos,
4451 struct function_arg_record_value_parms *parms)
4453 enum machine_mode mode;
4454 unsigned int regno;
4455 unsigned int startbit, endbit;
4456 int this_slotno, intslots, intoffset;
4457 rtx reg;
4459 if (parms->intoffset == -1)
4460 return;
4462 intoffset = parms->intoffset;
4463 parms->intoffset = -1;
4465 startbit = intoffset & -BITS_PER_WORD;
4466 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4467 intslots = (endbit - startbit) / BITS_PER_WORD;
4468 this_slotno = parms->slotno + intoffset / BITS_PER_WORD;
4470 intslots = MIN (intslots, SPARC_INT_ARG_MAX - this_slotno);
4471 if (intslots <= 0)
4472 return;
4474 /* If this is the trailing part of a word, only load that much into
4475 the register. Otherwise load the whole register. Note that in
4476 the latter case we may pick up unwanted bits. It's not a problem
4477 at the moment, but we may wish to revisit this. */
4479 if (intoffset % BITS_PER_WORD != 0)
4480 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4481 MODE_INT, 0);
4482 else
4483 mode = word_mode;
4485 intoffset /= BITS_PER_UNIT;
4488 regno = parms->regbase + this_slotno;
4489 reg = gen_rtx_REG (mode, regno);
4490 XVECEXP (parms->ret, 0, parms->stack + parms->nregs)
4491 = gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4493 this_slotno += 1;
4494 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4495 parms->nregs += 1;
4496 intslots -= 1;
4498 while (intslots > 0);
4501 /* A subroutine of function_arg_record_value. Traverse the structure
4502 recursively and assign bits to floating point registers. Track which
4503 bits in between need integer registers; invoke function_arg_record_value_3
4504 to make that happen. */
4506 static void
4507 function_arg_record_value_2 (tree type, HOST_WIDE_INT startbitpos,
4508 struct function_arg_record_value_parms *parms)
4510 tree field;
4511 int packed_p = 0;
4513 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4515 if (TREE_CODE (field) == FIELD_DECL && DECL_PACKED (field))
4517 packed_p = 1;
4518 break;
4522 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4524 if (TREE_CODE (field) == FIELD_DECL)
4526 HOST_WIDE_INT bitpos = startbitpos;
4528 if (DECL_SIZE (field) != 0
4529 && host_integerp (bit_position (field), 1))
4530 bitpos += int_bit_position (field);
4532 /* ??? FIXME: else assume zero offset. */
4534 if (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE)
4535 function_arg_record_value_2 (TREE_TYPE (field), bitpos, parms);
4536 else if ((TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4537 || (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE
4538 && (TREE_CODE (TREE_TYPE (TREE_TYPE (field)))
4539 == REAL_TYPE)))
4540 && TARGET_FPU
4541 && ! packed_p
4542 && parms->named)
4544 int this_slotno = parms->slotno + bitpos / BITS_PER_WORD;
4545 int regno;
4546 enum machine_mode mode = DECL_MODE (field);
4547 rtx reg;
4549 function_arg_record_value_3 (bitpos, parms);
4550 regno = SPARC_FP_ARG_FIRST + this_slotno * 2
4551 + ((mode == SFmode || mode == SCmode)
4552 && (bitpos & 32) != 0);
4553 switch (mode)
4555 case SCmode: mode = SFmode; break;
4556 case DCmode: mode = DFmode; break;
4557 case TCmode: mode = TFmode; break;
4558 default: break;
4560 reg = gen_rtx_REG (mode, regno);
4561 XVECEXP (parms->ret, 0, parms->stack + parms->nregs)
4562 = gen_rtx_EXPR_LIST (VOIDmode, reg,
4563 GEN_INT (bitpos / BITS_PER_UNIT));
4564 parms->nregs += 1;
4565 if (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE)
4567 regno += GET_MODE_SIZE (mode) / 4;
4568 reg = gen_rtx_REG (mode, regno);
4569 XVECEXP (parms->ret, 0, parms->stack + parms->nregs)
4570 = gen_rtx_EXPR_LIST (VOIDmode, reg,
4571 GEN_INT ((bitpos + GET_MODE_BITSIZE (mode))
4572 / BITS_PER_UNIT));
4573 parms->nregs += 1;
4576 else
4578 if (parms->intoffset == -1)
4579 parms->intoffset = bitpos;
4585 /* Used by function_arg and function_value to implement the complex
4586 conventions of the 64-bit ABI for passing and returning structures.
4587 Return an expression valid as a return value for the two macros
4588 FUNCTION_ARG and FUNCTION_VALUE.
4590 TYPE is the data type of the argument (as a tree).
4591 This is null for libcalls where that information may
4592 not be available.
4593 MODE is the argument's machine mode.
4594 SLOTNO is the index number of the argument's slot in the parameter array.
4595 NAMED is nonzero if this argument is a named parameter
4596 (otherwise it is an extra parameter matching an ellipsis).
4597 REGBASE is the regno of the base register for the parameter array. */
4599 static rtx
4600 function_arg_record_value (tree type, enum machine_mode mode,
4601 int slotno, int named, int regbase)
4603 HOST_WIDE_INT typesize = int_size_in_bytes (type);
4604 struct function_arg_record_value_parms parms;
4605 unsigned int nregs;
4607 parms.ret = NULL_RTX;
4608 parms.slotno = slotno;
4609 parms.named = named;
4610 parms.regbase = regbase;
4611 parms.stack = 0;
4613 /* Compute how many registers we need. */
4614 parms.nregs = 0;
4615 parms.intoffset = 0;
4616 function_arg_record_value_1 (type, 0, &parms);
4618 if (parms.intoffset != -1)
4620 unsigned int startbit, endbit;
4621 int intslots, this_slotno;
4623 startbit = parms.intoffset & -BITS_PER_WORD;
4624 endbit = (typesize*BITS_PER_UNIT + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4625 intslots = (endbit - startbit) / BITS_PER_WORD;
4626 this_slotno = slotno + parms.intoffset / BITS_PER_WORD;
4628 if (intslots > 0 && intslots > SPARC_INT_ARG_MAX - this_slotno)
4630 intslots = MAX (0, SPARC_INT_ARG_MAX - this_slotno);
4631 /* We need to pass this field on the stack. */
4632 parms.stack = 1;
4635 parms.nregs += intslots;
4637 nregs = parms.nregs;
4639 /* Allocate the vector and handle some annoying special cases. */
4640 if (nregs == 0)
4642 /* ??? Empty structure has no value? Duh? */
4643 if (typesize <= 0)
4645 /* Though there's nothing really to store, return a word register
4646 anyway so the rest of gcc doesn't go nuts. Returning a PARALLEL
4647 leads to breakage because there are zero bytes to
4648 load. */
4649 return gen_rtx_REG (mode, regbase);
4651 else
4653 /* ??? C++ has structures with no fields, and yet a size. Give up
4654 for now and pass everything back in integer registers. */
4655 nregs = (typesize + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4657 if (nregs + slotno > SPARC_INT_ARG_MAX)
4658 nregs = SPARC_INT_ARG_MAX - slotno;
4660 if (nregs == 0)
4661 abort ();
4663 parms.ret = gen_rtx_PARALLEL (mode, rtvec_alloc (parms.stack + nregs));
4665 /* If at least one field must be passed on the stack, generate
4666 (parallel [(expr_list (nil) ...) ...]) so that all fields will
4667 also be passed on the stack. We can't do much better because the
4668 semantics of FUNCTION_ARG_PARTIAL_NREGS doesn't handle the case
4669 of structures for which the fields passed exclusively in registers
4670 are not at the beginning of the structure. */
4671 if (parms.stack)
4672 XVECEXP (parms.ret, 0, 0)
4673 = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4675 /* Fill in the entries. */
4676 parms.nregs = 0;
4677 parms.intoffset = 0;
4678 function_arg_record_value_2 (type, 0, &parms);
4679 function_arg_record_value_3 (typesize * BITS_PER_UNIT, &parms);
4681 if (parms.nregs != nregs)
4682 abort ();
4684 return parms.ret;
4687 /* Handle the FUNCTION_ARG macro.
4688 Determine where to put an argument to a function.
4689 Value is zero to push the argument on the stack,
4690 or a hard register in which to store the argument.
4692 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4693 the preceding args and about the function being called.
4694 MODE is the argument's machine mode.
4695 TYPE is the data type of the argument (as a tree).
4696 This is null for libcalls where that information may
4697 not be available.
4698 NAMED is nonzero if this argument is a named parameter
4699 (otherwise it is an extra parameter matching an ellipsis).
4700 INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG. */
4703 function_arg (const struct sparc_args *cum, enum machine_mode mode,
4704 tree type, int named, int incoming_p)
4706 int regbase = (incoming_p
4707 ? SPARC_INCOMING_INT_ARG_FIRST
4708 : SPARC_OUTGOING_INT_ARG_FIRST);
4709 int slotno, regno, padding;
4710 rtx reg;
4712 slotno = function_arg_slotno (cum, mode, type, named, incoming_p,
4713 &regno, &padding);
4715 if (slotno == -1)
4716 return 0;
4718 if (TARGET_ARCH32)
4720 reg = gen_rtx_REG (mode, regno);
4721 return reg;
4724 /* v9 fp args in reg slots beyond the int reg slots get passed in regs
4725 but also have the slot allocated for them.
4726 If no prototype is in scope, fp values in register slots get passed
4727 in two places: either fp regs and int regs, or fp regs and memory. */
4728 if ((GET_MODE_CLASS (mode) == MODE_FLOAT
4729 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
4730 && SPARC_FP_REG_P (regno))
4732 reg = gen_rtx_REG (mode, regno);
4733 if (cum->prototype_p || cum->libcall_p)
4735 /* "* 2" because fp reg numbers are recorded in 4 byte
4736 quantities. */
4737 #if 0
4738 /* ??? This will cause the value to be passed in the fp reg and
4739 in the stack. When a prototype exists we want to pass the
4740 value in the reg but reserve space on the stack. That's an
4741 optimization, and is deferred [for a bit]. */
4742 if ((regno - SPARC_FP_ARG_FIRST) >= SPARC_INT_ARG_MAX * 2)
4743 return gen_rtx_PARALLEL (mode,
4744 gen_rtvec (2,
4745 gen_rtx_EXPR_LIST (VOIDmode,
4746 NULL_RTX, const0_rtx),
4747 gen_rtx_EXPR_LIST (VOIDmode,
4748 reg, const0_rtx)));
4749 else
4750 #else
4751 /* ??? It seems that passing back a register even when past
4752 the area declared by REG_PARM_STACK_SPACE will allocate
4753 space appropriately, and will not copy the data onto the
4754 stack, exactly as we desire.
4756 This is due to locate_and_pad_parm being called in
4757 expand_call whenever reg_parm_stack_space > 0, which
4758 while beneficial to our example here, would seem to be
4759 in error from what had been intended. Ho hum... -- r~ */
4760 #endif
4761 return reg;
4763 else
4765 rtx v0, v1;
4767 if ((regno - SPARC_FP_ARG_FIRST) < SPARC_INT_ARG_MAX * 2)
4769 int intreg;
4771 /* On incoming, we don't need to know that the value
4772 is passed in %f0 and %i0, and it confuses other parts
4773 causing needless spillage even on the simplest cases. */
4774 if (incoming_p)
4775 return reg;
4777 intreg = (SPARC_OUTGOING_INT_ARG_FIRST
4778 + (regno - SPARC_FP_ARG_FIRST) / 2);
4780 v0 = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
4781 v1 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (mode, intreg),
4782 const0_rtx);
4783 return gen_rtx_PARALLEL (mode, gen_rtvec (2, v0, v1));
4785 else
4787 v0 = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4788 v1 = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
4789 return gen_rtx_PARALLEL (mode, gen_rtvec (2, v0, v1));
4793 else if (type && TREE_CODE (type) == RECORD_TYPE)
4795 /* Structures up to 16 bytes in size are passed in arg slots on the
4796 stack and are promoted to registers where possible. */
4798 if (int_size_in_bytes (type) > 16)
4799 abort (); /* shouldn't get here */
4801 return function_arg_record_value (type, mode, slotno, named, regbase);
4803 else if (type && TREE_CODE (type) == UNION_TYPE)
4805 enum machine_mode mode;
4806 int bytes = int_size_in_bytes (type);
4808 if (bytes > 16)
4809 abort ();
4811 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 0);
4812 reg = gen_rtx_REG (mode, regno);
4814 else
4816 /* Scalar or complex int. */
4817 reg = gen_rtx_REG (mode, regno);
4820 return reg;
4823 /* Handle the FUNCTION_ARG_PARTIAL_NREGS macro.
4824 For an arg passed partly in registers and partly in memory,
4825 this is the number of registers used.
4826 For args passed entirely in registers or entirely in memory, zero.
4828 Any arg that starts in the first 6 regs but won't entirely fit in them
4829 needs partial registers on v8. On v9, structures with integer
4830 values in arg slots 5,6 will be passed in %o5 and SP+176, and complex fp
4831 values that begin in the last fp reg [where "last fp reg" varies with the
4832 mode] will be split between that reg and memory. */
4835 function_arg_partial_nregs (const struct sparc_args *cum,
4836 enum machine_mode mode, tree type, int named)
4838 int slotno, regno, padding;
4840 /* We pass 0 for incoming_p here; it doesn't matter. */
4841 slotno = function_arg_slotno (cum, mode, type, named, 0, &regno, &padding);
4843 if (slotno == -1)
4844 return 0;
4846 if (TARGET_ARCH32)
4848 if ((slotno + (mode == BLKmode
4849 ? ROUND_ADVANCE (int_size_in_bytes (type))
4850 : ROUND_ADVANCE (GET_MODE_SIZE (mode))))
4851 > NPARM_REGS (SImode))
4852 return NPARM_REGS (SImode) - slotno;
4853 return 0;
4855 else
4857 if (type && AGGREGATE_TYPE_P (type))
4859 int size = int_size_in_bytes (type);
4860 int align = TYPE_ALIGN (type);
4862 if (align == 16)
4863 slotno += slotno & 1;
4864 if (size > 8 && size <= 16
4865 && slotno == SPARC_INT_ARG_MAX - 1)
4866 return 1;
4868 else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
4869 || (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
4870 && ! (TARGET_FPU && named)))
4872 if (GET_MODE_ALIGNMENT (mode) == 128)
4874 slotno += slotno & 1;
4875 if (slotno == SPARC_INT_ARG_MAX - 2)
4876 return 1;
4878 else
4880 if (slotno == SPARC_INT_ARG_MAX - 1)
4881 return 1;
4884 else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
4886 if (GET_MODE_ALIGNMENT (mode) == 128)
4887 slotno += slotno & 1;
4888 if ((slotno + GET_MODE_SIZE (mode) / UNITS_PER_WORD)
4889 > SPARC_FP_ARG_MAX)
4890 return 1;
4892 return 0;
4896 /* Handle the FUNCTION_ARG_PASS_BY_REFERENCE macro.
4897 !v9: The SPARC ABI stipulates passing struct arguments (of any size) and
4898 quad-precision floats by invisible reference.
4899 v9: Aggregates greater than 16 bytes are passed by reference.
4900 For Pascal, also pass arrays by reference. */
4903 function_arg_pass_by_reference (const struct sparc_args *cum ATTRIBUTE_UNUSED,
4904 enum machine_mode mode, tree type,
4905 int named ATTRIBUTE_UNUSED)
4907 if (TARGET_ARCH32)
4909 return ((type && AGGREGATE_TYPE_P (type))
4910 || mode == TFmode || mode == TCmode);
4912 else
4914 return ((type && TREE_CODE (type) == ARRAY_TYPE)
4915 /* Consider complex values as aggregates, so this also takes care of TCmode. */
4916 || GET_MODE_SIZE (mode) > 16
4917 || (type
4918 && AGGREGATE_TYPE_P (type)
4919 && (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 16));
4923 /* Handle the FUNCTION_ARG_ADVANCE macro.
4924 Update the data in CUM to advance over an argument
4925 of mode MODE and data type TYPE.
4926 TYPE is null for libcalls where that information may not be available. */
4928 void
4929 function_arg_advance (struct sparc_args *cum, enum machine_mode mode,
4930 tree type, int named)
4932 int slotno, regno, padding;
4934 /* We pass 0 for incoming_p here; it doesn't matter. */
4935 slotno = function_arg_slotno (cum, mode, type, named, 0, &regno, &padding);
4937 /* If the register required leading padding, add it. */
4938 if (slotno != -1)
4939 cum->words += padding;
4941 if (TARGET_ARCH32)
4943 cum->words += (mode != BLKmode
4944 ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
4945 : ROUND_ADVANCE (int_size_in_bytes (type)));
4947 else
4949 if (type && AGGREGATE_TYPE_P (type))
4951 int size = int_size_in_bytes (type);
4953 if (size <= 8)
4954 ++cum->words;
4955 else if (size <= 16)
4956 cum->words += 2;
4957 else /* passed by reference */
4958 ++cum->words;
4960 else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
4962 cum->words += 2;
4964 else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
4966 cum->words += GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4968 else
4970 cum->words += (mode != BLKmode
4971 ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
4972 : ROUND_ADVANCE (int_size_in_bytes (type)));
4977 /* Handle the FUNCTION_ARG_PADDING macro.
4978 For the 64-bit ABI, structs are always stored left shifted in their
4979 argument slot. */
4981 enum direction
4982 function_arg_padding (enum machine_mode mode, tree type)
4984 if (TARGET_ARCH64 && type != 0 && AGGREGATE_TYPE_P (type))
4985 return upward;
4987 /* This is the default definition. */
4988 return (! BYTES_BIG_ENDIAN
4989 ? upward
4990 : ((mode == BLKmode
4991 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4992 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
4993 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
4994 ? downward : upward));
4997 /* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE macros.
4998 For v9, function return values are subject to the same rules as arguments,
4999 except that up to 32 bytes may be returned in registers. */
5002 function_value (tree type, enum machine_mode mode, int incoming_p)
5004 int regno;
5005 int regbase = (incoming_p
5006 ? SPARC_OUTGOING_INT_ARG_FIRST
5007 : SPARC_INCOMING_INT_ARG_FIRST);
5009 if (TARGET_ARCH64 && type)
5011 if (TREE_CODE (type) == RECORD_TYPE)
5013 /* Structures up to 32 bytes in size are passed in registers,
5014 promoted to fp registers where possible. */
5016 if (int_size_in_bytes (type) > 32)
5017 abort (); /* shouldn't get here */
5019 return function_arg_record_value (type, mode, 0, 1, regbase);
5021 else if (AGGREGATE_TYPE_P (type))
5023 /* All other aggregate types are passed in an integer register
5024 in a mode corresponding to the size of the type. */
5025 HOST_WIDE_INT bytes = int_size_in_bytes (type);
5027 if (bytes > 32)
5028 abort ();
5030 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 0);
5034 if (TARGET_ARCH64
5035 && GET_MODE_CLASS (mode) == MODE_INT
5036 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
5037 && type && ! AGGREGATE_TYPE_P (type))
5038 mode = DImode;
5040 if (incoming_p)
5041 regno = BASE_RETURN_VALUE_REG (mode);
5042 else
5043 regno = BASE_OUTGOING_VALUE_REG (mode);
5045 return gen_rtx_REG (mode, regno);
5048 /* Do what is necessary for `va_start'. We look at the current function
5049 to determine if stdarg or varargs is used and return the address of
5050 the first unnamed parameter. */
5053 sparc_builtin_saveregs (void)
5055 int first_reg = current_function_args_info.words;
5056 rtx address;
5057 int regno;
5059 for (regno = first_reg; regno < NPARM_REGS (word_mode); regno++)
5060 emit_move_insn (gen_rtx_MEM (word_mode,
5061 gen_rtx_PLUS (Pmode,
5062 frame_pointer_rtx,
5063 GEN_INT (FIRST_PARM_OFFSET (0)
5064 + (UNITS_PER_WORD
5065 * regno)))),
5066 gen_rtx_REG (word_mode,
5067 BASE_INCOMING_ARG_REG (word_mode) + regno));
5069 address = gen_rtx_PLUS (Pmode,
5070 frame_pointer_rtx,
5071 GEN_INT (FIRST_PARM_OFFSET (0)
5072 + UNITS_PER_WORD * first_reg));
5074 return address;
5077 /* Implement `va_start' for varargs and stdarg. */
5079 void
5080 sparc_va_start (tree valist, rtx nextarg)
5082 nextarg = expand_builtin_saveregs ();
5083 std_expand_builtin_va_start (valist, nextarg);
5086 /* Implement `va_arg'. */
5089 sparc_va_arg (tree valist, tree type)
5091 HOST_WIDE_INT size, rsize, align;
5092 tree addr, incr;
5093 rtx addr_rtx;
5094 int indirect = 0;
5096 /* Round up sizeof(type) to a word. */
5097 size = int_size_in_bytes (type);
5098 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
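/* Since UNITS_PER_WORD is a power of two, this mask trick rounds up to a
   whole word, e.g. with 8-byte words sizes 1..8 become 8 and 9..16
   become 16.  */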
5099 align = 0;
5101 if (TARGET_ARCH64)
5103 if (TYPE_ALIGN (type) >= 2 * (unsigned) BITS_PER_WORD)
5104 align = 2 * UNITS_PER_WORD;
5106 if (AGGREGATE_TYPE_P (type))
5108 if ((unsigned HOST_WIDE_INT) size > 16)
5110 indirect = 1;
5111 size = rsize = UNITS_PER_WORD;
5112 align = 0;
5114 /* SPARC v9 ABI states that structures up to 8 bytes in size are
5115 given one 8 byte slot. */
5116 else if (size == 0)
5117 size = rsize = UNITS_PER_WORD;
5118 else
5119 size = rsize;
5122 else
5124 if (AGGREGATE_TYPE_P (type)
5125 || TYPE_MODE (type) == TFmode
5126 || TYPE_MODE (type) == TCmode)
5128 indirect = 1;
5129 size = rsize = UNITS_PER_WORD;
5133 incr = valist;
5134 if (align)
5136 incr = fold (build (PLUS_EXPR, ptr_type_node, incr,
5137 build_int_2 (align - 1, 0)));
5138 incr = fold (build (BIT_AND_EXPR, ptr_type_node, incr,
5139 build_int_2 (-align, -1)));
5142 addr = incr = save_expr (incr);
5143 if (BYTES_BIG_ENDIAN && size < rsize)
5145 addr = fold (build (PLUS_EXPR, ptr_type_node, incr,
5146 build_int_2 (rsize - size, 0)));
5148 incr = fold (build (PLUS_EXPR, ptr_type_node, incr,
5149 build_int_2 (rsize, 0)));
5151 incr = build (MODIFY_EXPR, ptr_type_node, valist, incr);
5152 TREE_SIDE_EFFECTS (incr) = 1;
5153 expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);
5155 addr_rtx = expand_expr (addr, NULL, Pmode, EXPAND_NORMAL);
5157 /* If the address isn't aligned properly for the type,
5158 we may need to copy to a temporary.
5159 FIXME: This is inefficient. Usually we can do this
5160 in registers. */
5161 if (align == 0
5162 && TYPE_ALIGN (type) > BITS_PER_WORD
5163 && !indirect)
5165 /* FIXME: We really need to specify that the temporary is live
5166 for the whole function because expand_builtin_va_arg wants
5167 the alias set to be get_varargs_alias_set (), but in this
5168 case the alias set is that for TYPE and if the memory gets
5169 reused it will be reused with alias set TYPE. */
5170 rtx tmp = assign_temp (type, 0, 1, 0);
5171 rtx dest_addr;
5173 addr_rtx = force_reg (Pmode, addr_rtx);
5174 addr_rtx = gen_rtx_MEM (BLKmode, addr_rtx);
5175 set_mem_alias_set (addr_rtx, get_varargs_alias_set ());
5176 set_mem_align (addr_rtx, BITS_PER_WORD);
5177 tmp = shallow_copy_rtx (tmp);
5178 PUT_MODE (tmp, BLKmode);
5179 set_mem_alias_set (tmp, 0);
5181 dest_addr = emit_block_move (tmp, addr_rtx, GEN_INT (rsize),
5182 BLOCK_OP_NORMAL);
5183 if (dest_addr != NULL_RTX)
5184 addr_rtx = dest_addr;
5185 else
5186 addr_rtx = XCEXP (tmp, 0, MEM);
5189 if (indirect)
5191 addr_rtx = force_reg (Pmode, addr_rtx);
5192 addr_rtx = gen_rtx_MEM (Pmode, addr_rtx);
5193 set_mem_alias_set (addr_rtx, get_varargs_alias_set ());
5196 return addr_rtx;
5199 /* Return the string to output a conditional branch to LABEL, which is
5200 the operand number of the label. OP is the conditional expression.
5201 XEXP (OP, 0) is assumed to be a condition code register (integer or
5202 floating point) and its mode specifies what kind of comparison we made.
5204 REVERSED is nonzero if we should reverse the sense of the comparison.
5206 ANNUL is nonzero if we should generate an annulling branch.
5208 NOOP is nonzero if we have to follow this branch by a noop.
5210 INSN, if set, is the insn. */
5212 char *
5213 output_cbranch (rtx op, rtx dest, int label, int reversed, int annul,
5214 int noop, rtx insn)
5216 static char string[50];
5217 enum rtx_code code = GET_CODE (op);
5218 rtx cc_reg = XEXP (op, 0);
5219 enum machine_mode mode = GET_MODE (cc_reg);
5220 const char *labelno, *branch;
5221 int spaces = 8, far;
5222 char *p;
5224 /* v9 branches are limited to +-1MB.  If it is too far away,
5225 change
5227 bne,pt %xcc, .LC30
5229 to
5231 be,pn %xcc, .+12
5232  nop
5233 ba .LC30
5235 and
5237 fbne,a,pn %fcc2, .LC29
5239 to
5241 fbe,pt %fcc2, .+16
5242  nop
5243 ba .LC29  */
5245 far = get_attr_length (insn) >= 3;
5246 if (reversed ^ far)
5248 /* Reversal of FP compares takes care -- an ordered compare
5249 becomes an unordered compare and vice versa. */
5250 if (mode == CCFPmode || mode == CCFPEmode)
5251 code = reverse_condition_maybe_unordered (code);
5252 else
5253 code = reverse_condition (code);
5256 /* Start by writing the branch condition. */
5257 if (mode == CCFPmode || mode == CCFPEmode)
5259 switch (code)
5261 case NE:
5262 branch = "fbne";
5263 break;
5264 case EQ:
5265 branch = "fbe";
5266 break;
5267 case GE:
5268 branch = "fbge";
5269 break;
5270 case GT:
5271 branch = "fbg";
5272 break;
5273 case LE:
5274 branch = "fble";
5275 break;
5276 case LT:
5277 branch = "fbl";
5278 break;
5279 case UNORDERED:
5280 branch = "fbu";
5281 break;
5282 case ORDERED:
5283 branch = "fbo";
5284 break;
5285 case UNGT:
5286 branch = "fbug";
5287 break;
5288 case UNLT:
5289 branch = "fbul";
5290 break;
5291 case UNEQ:
5292 branch = "fbue";
5293 break;
5294 case UNGE:
5295 branch = "fbuge";
5296 break;
5297 case UNLE:
5298 branch = "fbule";
5299 break;
5300 case LTGT:
5301 branch = "fblg";
5302 break;
5304 default:
5305 abort ();
5308 /* ??? !v9: FP branches cannot be preceded by another floating point
5309 insn. Because there is currently no concept of pre-delay slots,
5310 we can fix this only by always emitting a nop before a floating
5311 point branch. */
5313 string[0] = '\0';
5314 if (! TARGET_V9)
5315 strcpy (string, "nop\n\t");
5316 strcat (string, branch);
5318 else
5320 switch (code)
5322 case NE:
5323 branch = "bne";
5324 break;
5325 case EQ:
5326 branch = "be";
5327 break;
5328 case GE:
5329 if (mode == CC_NOOVmode || mode == CCX_NOOVmode)
5330 branch = "bpos";
5331 else
5332 branch = "bge";
5333 break;
5334 case GT:
5335 branch = "bg";
5336 break;
5337 case LE:
5338 branch = "ble";
5339 break;
5340 case LT:
5341 if (mode == CC_NOOVmode || mode == CCX_NOOVmode)
5342 branch = "bneg";
5343 else
5344 branch = "bl";
5345 break;
5346 case GEU:
5347 branch = "bgeu";
5348 break;
5349 case GTU:
5350 branch = "bgu";
5351 break;
5352 case LEU:
5353 branch = "bleu";
5354 break;
5355 case LTU:
5356 branch = "blu";
5357 break;
5359 default:
5360 abort ();
5362 strcpy (string, branch);
5364 spaces -= strlen (branch);
5365 p = strchr (string, '\0');
5367 /* Now add the annulling, the label, and a possible noop. */
5368 if (annul && ! far)
5370 strcpy (p, ",a");
5371 p += 2;
5372 spaces -= 2;
5375 if (! TARGET_V9)
5376 labelno = "";
5377 else
5379 rtx note;
5380 int v8 = 0;
5382 if (! far && insn && INSN_ADDRESSES_SET_P ())
5384 int delta = (INSN_ADDRESSES (INSN_UID (dest))
5385 - INSN_ADDRESSES (INSN_UID (insn)));
5386 /* Leave some instructions for "slop". */
5387 if (delta < -260000 || delta >= 260000)
5388 v8 = 1;
5391 if (mode == CCFPmode || mode == CCFPEmode)
5393 static char v9_fcc_labelno[] = "%%fccX, ";
5394 /* Set the char indicating the number of the fcc reg to use. */
5395 v9_fcc_labelno[5] = REGNO (cc_reg) - SPARC_FIRST_V9_FCC_REG + '0';
5396 labelno = v9_fcc_labelno;
5397 if (v8)
5399 if (REGNO (cc_reg) == SPARC_FCC_REG)
5400 labelno = "";
5401 else
5402 abort ();
5405 else if (mode == CCXmode || mode == CCX_NOOVmode)
5407 labelno = "%%xcc, ";
5408 if (v8)
5409 abort ();
5411 else
5413 labelno = "%%icc, ";
5414 if (v8)
5415 labelno = "";
5418 if (*labelno && insn && (note = find_reg_note (insn, REG_BR_PROB, NULL_RTX)))
5420 strcpy (p,
5421 ((INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2) ^ far)
5422 ? ",pt" : ",pn");
5423 p += 3;
5424 spaces -= 3;
5427 if (spaces > 0)
5428 *p++ = '\t';
5429 else
5430 *p++ = ' ';
5431 strcpy (p, labelno);
5432 p = strchr (p, '\0');
5433 if (far)
5435 strcpy (p, ".+12\n\tnop\n\tb\t");
5436 if (annul || noop)
5437 p[3] = '6';
5438 p += 13;
5440 *p++ = '%';
5441 *p++ = 'l';
5442 /* Set the char indicating the number of the operand containing the
5443 label_ref. */
5444 *p++ = label + '0';
5445 *p = '\0';
5446 if (noop)
5447 strcpy (p, "\n\tnop");
5449 return string;
5452 /* Emit a library call comparison between floating point X and Y.
5453 COMPARISON is the rtl operator to compare with (EQ, NE, GT, etc.).
5454 TARGET_ARCH64 uses _Qp_* functions, which take pointers to TFmode
5455 values as arguments instead of the TFmode registers themselves;
5456 that is why we cannot call emit_float_lib_cmp. */
5457 void
5458 sparc_emit_float_lib_cmp (rtx x, rtx y, enum rtx_code comparison)
5460 const char *qpfunc;
5461 rtx slot0, slot1, result, tem, tem2;
5462 enum machine_mode mode;
5464 switch (comparison)
5466 case EQ:
5467 qpfunc = (TARGET_ARCH64) ? "_Qp_feq" : "_Q_feq";
5468 break;
5470 case NE:
5471 qpfunc = (TARGET_ARCH64) ? "_Qp_fne" : "_Q_fne";
5472 break;
5474 case GT:
5475 qpfunc = (TARGET_ARCH64) ? "_Qp_fgt" : "_Q_fgt";
5476 break;
5478 case GE:
5479 qpfunc = (TARGET_ARCH64) ? "_Qp_fge" : "_Q_fge";
5480 break;
5482 case LT:
5483 qpfunc = (TARGET_ARCH64) ? "_Qp_flt" : "_Q_flt";
5484 break;
5486 case LE:
5487 qpfunc = (TARGET_ARCH64) ? "_Qp_fle" : "_Q_fle";
5488 break;
5490 case ORDERED:
5491 case UNORDERED:
5492 case UNGT:
5493 case UNLT:
5494 case UNEQ:
5495 case UNGE:
5496 case UNLE:
5497 case LTGT:
5498 qpfunc = (TARGET_ARCH64) ? "_Qp_cmp" : "_Q_cmp";
5499 break;
5501 default:
5502 abort();
5503 break;
5506 if (TARGET_ARCH64)
5508 if (GET_CODE (x) != MEM)
5510 slot0 = assign_stack_temp (TFmode, GET_MODE_SIZE(TFmode), 0);
5511 emit_insn (gen_rtx_SET (VOIDmode, slot0, x));
5513 else
5514 slot0 = x;
5516 if (GET_CODE (y) != MEM)
5518 slot1 = assign_stack_temp (TFmode, GET_MODE_SIZE(TFmode), 0);
5519 emit_insn (gen_rtx_SET (VOIDmode, slot1, y));
5521 else
5522 slot1 = y;
5524 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, qpfunc), LCT_NORMAL,
5525 DImode, 2,
5526 XEXP (slot0, 0), Pmode,
5527 XEXP (slot1, 0), Pmode);
5529 mode = DImode;
5531 else
5533 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, qpfunc), LCT_NORMAL,
5534 SImode, 2,
5535 x, TFmode, y, TFmode);
5537 mode = SImode;
5541 /* Immediately move the result of the libcall into a pseudo
5542 register so reload doesn't clobber the value if it needs
5543 the return register for a spill reg. */
5544 result = gen_reg_rtx (mode);
5545 emit_move_insn (result, hard_libcall_value (mode));
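/* The tests below rely on the result convention of the _Q_cmp/_Qp_cmp
   routines -- 0 for equal, 1 for less, 2 for greater, 3 for unordered --
   while the direct predicates (_Qp_feq and friends) simply return nonzero
   when the relation holds and are covered by the default case.  */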
5547 switch (comparison)
5549 default:
5550 emit_cmp_insn (result, const0_rtx, NE, NULL_RTX, mode, 0);
5551 break;
5552 case ORDERED:
5553 case UNORDERED:
5554 emit_cmp_insn (result, GEN_INT(3), comparison == UNORDERED ? EQ : NE,
5555 NULL_RTX, mode, 0);
5556 break;
5557 case UNGT:
5558 case UNGE:
5559 emit_cmp_insn (result, const1_rtx,
5560 comparison == UNGT ? GT : NE, NULL_RTX, mode, 0);
5561 break;
5562 case UNLE:
5563 emit_cmp_insn (result, const2_rtx, NE, NULL_RTX, mode, 0);
5564 break;
5565 case UNLT:
5566 tem = gen_reg_rtx (mode);
5567 if (TARGET_ARCH32)
5568 emit_insn (gen_andsi3 (tem, result, const1_rtx));
5569 else
5570 emit_insn (gen_anddi3 (tem, result, const1_rtx));
5571 emit_cmp_insn (tem, const0_rtx, NE, NULL_RTX, mode, 0);
5572 break;
5573 case UNEQ:
5574 case LTGT:
5575 tem = gen_reg_rtx (mode);
5576 if (TARGET_ARCH32)
5577 emit_insn (gen_addsi3 (tem, result, const1_rtx));
5578 else
5579 emit_insn (gen_adddi3 (tem, result, const1_rtx));
5580 tem2 = gen_reg_rtx (mode);
5581 if (TARGET_ARCH32)
5582 emit_insn (gen_andsi3 (tem2, tem, const2_rtx));
5583 else
5584 emit_insn (gen_anddi3 (tem2, tem, const2_rtx));
5585 emit_cmp_insn (tem2, const0_rtx, comparison == UNEQ ? EQ : NE,
5586 NULL_RTX, mode, 0);
5587 break;
5591 /* Generate an unsigned DImode to FP conversion. This is the same code
5592 optabs would emit if we didn't have TFmode patterns. */
5594 void
5595 sparc_emit_floatunsdi (rtx *operands)
5597 rtx neglab, donelab, i0, i1, f0, in, out;
5598 enum machine_mode mode;
5600 out = operands[0];
5601 in = force_reg (DImode, operands[1]);
5602 mode = GET_MODE (out);
5603 neglab = gen_label_rtx ();
5604 donelab = gen_label_rtx ();
5605 i0 = gen_reg_rtx (DImode);
5606 i1 = gen_reg_rtx (DImode);
5607 f0 = gen_reg_rtx (mode);
5609 emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);
5611 emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in)));
5612 emit_jump_insn (gen_jump (donelab));
5613 emit_barrier ();
5615 emit_label (neglab);
5617 emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
5618 emit_insn (gen_anddi3 (i1, in, const1_rtx));
5619 emit_insn (gen_iordi3 (i0, i0, i1));
5620 emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0)));
5621 emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0)));
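/* The sequence just emitted computes 2.0 * (FP) ((in >> 1) | (in & 1)) for
   an IN with the sign bit set; ORing the discarded low bit back in keeps
   the halved value's rounding sticky, so the doubled result is still
   correctly rounded.  */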
5623 emit_label (donelab);
5626 /* Return the string to output a conditional branch to LABEL, testing
5627 register REG. LABEL is the operand number of the label; REG is the
5628 operand number of the reg. OP is the conditional expression. The mode
5629 of REG says what kind of comparison we made.
5631 REVERSED is nonzero if we should reverse the sense of the comparison.
5633 ANNUL is nonzero if we should generate an annulling branch.
5635 NOOP is nonzero if we have to follow this branch by a noop. */
5637 char *
5638 output_v9branch (rtx op, rtx dest, int reg, int label, int reversed,
5639 int annul, int noop, rtx insn)
5641 static char string[50];
5642 enum rtx_code code = GET_CODE (op);
5643 enum machine_mode mode = GET_MODE (XEXP (op, 0));
5644 rtx note;
5645 int far;
5646 char *p;
5648 /* Branches on register are limited to +-128KB.  If it is too far away,
5649 change
5651 brnz,pt %g1, .LC30
5653 to
5655 brz,pn %g1, .+12
5656  nop
5657 ba,pt %xcc, .LC30
5659 and
5661 brgez,a,pn %o1, .LC29
5663 to
5665 brlz,pt %o1, .+16
5666  nop
5667 ba,pt %xcc, .LC29  */
5669 far = get_attr_length (insn) >= 3;
5671 /* If not floating-point or if EQ or NE, we can just reverse the code. */
5672 if (reversed ^ far)
5673 code = reverse_condition (code);
5675 /* Only 64 bit versions of these instructions exist. */
5676 if (mode != DImode)
5677 abort ();
5679 /* Start by writing the branch condition. */
5681 switch (code)
5683 case NE:
5684 strcpy (string, "brnz");
5685 break;
5687 case EQ:
5688 strcpy (string, "brz");
5689 break;
5691 case GE:
5692 strcpy (string, "brgez");
5693 break;
5695 case LT:
5696 strcpy (string, "brlz");
5697 break;
5699 case LE:
5700 strcpy (string, "brlez");
5701 break;
5703 case GT:
5704 strcpy (string, "brgz");
5705 break;
5707 default:
5708 abort ();
5711 p = strchr (string, '\0');
5713 /* Now add the annulling, reg, label, and nop. */
5714 if (annul && ! far)
5716 strcpy (p, ",a");
5717 p += 2;
5720 if (insn && (note = find_reg_note (insn, REG_BR_PROB, NULL_RTX)))
5722 strcpy (p,
5723 ((INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2) ^ far)
5724 ? ",pt" : ",pn");
5725 p += 3;
5728 *p = p < string + 8 ? '\t' : ' ';
5729 p++;
5730 *p++ = '%';
5731 *p++ = '0' + reg;
5732 *p++ = ',';
5733 *p++ = ' ';
5734 if (far)
5736 int veryfar = 1, delta;
5738 if (INSN_ADDRESSES_SET_P ())
5740 delta = (INSN_ADDRESSES (INSN_UID (dest))
5741 - INSN_ADDRESSES (INSN_UID (insn)));
5742 /* Leave some instructions for "slop". */
5743 if (delta >= -260000 && delta < 260000)
5744 veryfar = 0;
5747 strcpy (p, ".+12\n\tnop\n\t");
5748 if (annul || noop)
5749 p[3] = '6';
5750 p += 11;
5751 if (veryfar)
5753 strcpy (p, "b\t");
5754 p += 2;
5756 else
5758 strcpy (p, "ba,pt\t%%xcc, ");
5759 p += 13;
5762 *p++ = '%';
5763 *p++ = 'l';
5764 *p++ = '0' + label;
5765 *p = '\0';
5767 if (noop)
5768 strcpy (p, "\n\tnop");
5770 return string;
5773 /* Return 1 if any of the registers of the instruction are %l[0-7] or %o[0-7].
5774 Such instructions cannot be used in the delay slot of a return insn on v9.
5775 If TEST is 0, also rename all %i[0-7] registers to their %o[0-7] counterparts.
5778 static int
5779 epilogue_renumber (register rtx *where, int test)
5781 register const char *fmt;
5782 register int i;
5783 register enum rtx_code code;
5785 if (*where == 0)
5786 return 0;
5788 code = GET_CODE (*where);
5790 switch (code)
5792 case REG:
5793 if (REGNO (*where) >= 8 && REGNO (*where) < 24) /* oX or lX */
5794 return 1;
5795 if (! test && REGNO (*where) >= 24 && REGNO (*where) < 32)
5796 *where = gen_rtx (REG, GET_MODE (*where), OUTGOING_REGNO (REGNO(*where)));
5797 case SCRATCH:
5798 case CC0:
5799 case PC:
5800 case CONST_INT:
5801 case CONST_DOUBLE:
5802 return 0;
5804 /* Do not replace the frame pointer with the stack pointer because
5805 it can cause the delayed instruction to load below the stack.
5806 This occurs when instructions like:
5808 (set (reg/i:SI 24 %i0)
5809 (mem/f:SI (plus:SI (reg/f:SI 30 %fp)
5810 (const_int -20 [0xffffffec])) 0))
5812 are in the return delay slot. */
5813 case PLUS:
5814 if (GET_CODE (XEXP (*where, 0)) == REG
5815 && REGNO (XEXP (*where, 0)) == HARD_FRAME_POINTER_REGNUM
5816 && (GET_CODE (XEXP (*where, 1)) != CONST_INT
5817 || INTVAL (XEXP (*where, 1)) < SPARC_STACK_BIAS))
5818 return 1;
5819 break;
5821 case MEM:
5822 if (SPARC_STACK_BIAS
5823 && GET_CODE (XEXP (*where, 0)) == REG
5824 && REGNO (XEXP (*where, 0)) == HARD_FRAME_POINTER_REGNUM)
5825 return 1;
5826 break;
5828 default:
5829 break;
5832 fmt = GET_RTX_FORMAT (code);
5834 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5836 if (fmt[i] == 'E')
5838 register int j;
5839 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5840 if (epilogue_renumber (&(XVECEXP (*where, i, j)), test))
5841 return 1;
5843 else if (fmt[i] == 'e'
5844 && epilogue_renumber (&(XEXP (*where, i)), test))
5845 return 1;
5847 return 0;
5850 /* Leaf functions and non-leaf functions have different needs. */
5852 static const int
5853 reg_leaf_alloc_order[] = REG_LEAF_ALLOC_ORDER;
5855 static const int
5856 reg_nonleaf_alloc_order[] = REG_ALLOC_ORDER;
5858 static const int *const reg_alloc_orders[] = {
5859 reg_leaf_alloc_order,
5860 reg_nonleaf_alloc_order};
5862 void
5863 order_regs_for_local_alloc (void)
5865 static int last_order_nonleaf = 1;
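/* Register 15 is %o7, which every call clobbers, so regs_ever_live[15] is
   used here as a cheap test for a function that makes calls.  */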
5867 if (regs_ever_live[15] != last_order_nonleaf)
5869 last_order_nonleaf = !last_order_nonleaf;
5870 memcpy ((char *) reg_alloc_order,
5871 (const char *) reg_alloc_orders[last_order_nonleaf],
5872 FIRST_PSEUDO_REGISTER * sizeof (int));
5876 /* Return 1 if REG and MEM are legitimate enough to allow the various
5877 mem<-->reg splits to be run. */
5880 sparc_splitdi_legitimate (rtx reg, rtx mem)
5882 /* Punt if we are here by mistake. */
5883 if (! reload_completed)
5884 abort ();
5886 /* We must have an offsettable memory reference. */
5887 if (! offsettable_memref_p (mem))
5888 return 0;
5890 /* If we have legitimate args for ldd/std, we do not want
5891 the split to happen. */
5892 if ((REGNO (reg) % 2) == 0
5893 && mem_min_alignment (mem, 8))
5894 return 0;
5896 /* Success. */
5897 return 1;
5900 /* Return 1 if x and y are some kind of REG and they refer to
5901 different hard registers. This test is guaranteed to be
5902 run after reload. */
5905 sparc_absnegfloat_split_legitimate (rtx x, rtx y)
5907 if (GET_CODE (x) != REG)
5908 return 0;
5909 if (GET_CODE (y) != REG)
5910 return 0;
5911 if (REGNO (x) == REGNO (y))
5912 return 0;
5913 return 1;
5916 /* Return 1 if REGNO (reg1) is even and REGNO (reg1) == REGNO (reg2) - 1.
5917 This makes them candidates for using ldd and std insns.
5919 Note reg1 and reg2 *must* be hard registers. */
5922 registers_ok_for_ldd_peep (rtx reg1, rtx reg2)
5924 /* We might have been passed a SUBREG. */
5925 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5926 return 0;
5928 if (REGNO (reg1) % 2 != 0)
5929 return 0;
5931 /* Integer ldd is deprecated in SPARC V9 */
5932 if (TARGET_V9 && REGNO (reg1) < 32)
5933 return 0;
5935 return (REGNO (reg1) == REGNO (reg2) - 1);
5938 /* Return 1 if the addresses in mem1 and mem2 are suitable for use in
5939 an ldd or std insn.
5941 This can only happen when addr1 and addr2, the addresses in mem1
5942 and mem2, are consecutive memory locations (addr1 + 4 == addr2).
5943 addr1 must also be aligned on a 64-bit boundary.
5945 Also, if dependent_reg_rtx is not null, it should not be used to
5946 compute the address for mem1, i.e. we cannot optimize a sequence
5947 like:
5948 ld [%o0], %o0
5949 ld [%o0 + 4], %o1
5950 to
5951 ldd [%o0], %o0
5952 nor:
5953 ld [%g3 + 4], %g3
5954 ld [%g3], %g2
5955 to
5956 ldd [%g3], %g2
5958 But, note that the transformation from:
5959 ld [%g2 + 4], %g3
5960 ld [%g2], %g2
5961 to
5962 ldd [%g2], %g2
5963 is perfectly fine. Thus, the peephole2 patterns always pass us
5964 the destination register of the first load, never the second one.
5966 For stores we don't have a similar problem, so dependent_reg_rtx is
5967 NULL_RTX. */
5970 mems_ok_for_ldd_peep (rtx mem1, rtx mem2, rtx dependent_reg_rtx)
5972 rtx addr1, addr2;
5973 unsigned int reg1;
5974 int offset1;
5976 /* The mems cannot be volatile. */
5977 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
5978 return 0;
5980 /* MEM1 should be aligned on a 64-bit boundary. */
5981 if (MEM_ALIGN (mem1) < 64)
5982 return 0;
5984 addr1 = XEXP (mem1, 0);
5985 addr2 = XEXP (mem2, 0);
5987 /* Extract a register number and offset (if used) from the first addr. */
5988 if (GET_CODE (addr1) == PLUS)
5990 /* If not a REG, return zero. */
5991 if (GET_CODE (XEXP (addr1, 0)) != REG)
5992 return 0;
5993 else
5995 reg1 = REGNO (XEXP (addr1, 0));
5996 /* The offset must be constant! */
5997 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5998 return 0;
5999 offset1 = INTVAL (XEXP (addr1, 1));
6002 else if (GET_CODE (addr1) != REG)
6003 return 0;
6004 else
6006 reg1 = REGNO (addr1);
6007 /* This was a simple (mem (reg)) expression. Offset is 0. */
6008 offset1 = 0;
6011 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
6012 if (GET_CODE (addr2) != PLUS)
6013 return 0;
6015 if (GET_CODE (XEXP (addr2, 0)) != REG
6016 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
6017 return 0;
6019 if (reg1 != REGNO (XEXP (addr2, 0)))
6020 return 0;
6022 if (dependent_reg_rtx != NULL_RTX && reg1 == REGNO (dependent_reg_rtx))
6023 return 0;
6025 /* The first offset must be evenly divisible by 8 to ensure the
6026 address is 64 bit aligned. */
6027 if (offset1 % 8 != 0)
6028 return 0;
6030 /* The offset for the second addr must be 4 more than the first addr. */
6031 if (INTVAL (XEXP (addr2, 1)) != offset1 + 4)
6032 return 0;
6034 /* All the tests passed. addr1 and addr2 are valid for ldd and std
6035 instructions. */
6036 return 1;
6039 /* Return 1 if reg is a pseudo, or is the first register in
6040 a hard register pair. This makes it a candidate for use in
6041 ldd and std insns. */
6044 register_ok_for_ldd (rtx reg)
6046 /* We might have been passed a SUBREG. */
6047 if (GET_CODE (reg) != REG)
6048 return 0;
6050 if (REGNO (reg) < FIRST_PSEUDO_REGISTER)
6051 return (REGNO (reg) % 2 == 0);
6052 else
6053 return 1;
6056 /* Print operand X (an rtx) in assembler syntax to file FILE.
6057 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
6058 For `%' followed by punctuation, CODE is the punctuation and X is null. */
6060 void
6061 print_operand (FILE *file, rtx x, int code)
6063 switch (code)
6065 case '#':
6066 /* Output a 'nop' if there's nothing for the delay slot. */
6067 if (dbr_sequence_length () == 0)
6068 fputs ("\n\t nop", file);
6069 return;
6070 case '*':
6071 /* Output an annul flag if there's nothing for the delay slot and we
6072 are optimizing. This is always used with '(' below. */
6073 /* Sun OS 4.1.1 dbx can't handle an annulled unconditional branch;
6074 this is a dbx bug. So, we only do this when optimizing. */
6075 /* On UltraSPARC, a branch in a delay slot causes a pipeline flush.
6076 Always emit a nop in case the next instruction is a branch. */
6077 if (dbr_sequence_length () == 0
6078 && (optimize && (int)sparc_cpu < PROCESSOR_V9))
6079 fputs (",a", file);
6080 return;
6081 case '(':
6082 /* Output a 'nop' if there's nothing for the delay slot and we are
6083 not optimizing. This is always used with '*' above. */
6084 if (dbr_sequence_length () == 0
6085 && ! (optimize && (int)sparc_cpu < PROCESSOR_V9))
6086 fputs ("\n\t nop", file);
6087 return;
6088 case '_':
6089 /* Output the Embedded Medium/Anywhere code model base register. */
6090 fputs (EMBMEDANY_BASE_REG, file);
6091 return;
6092 case '@':
6093 /* Print out what we are using as the frame pointer. This might
6094 be %fp, or might be %sp+offset. */
6095 /* ??? What if offset is too big? Perhaps the caller knows it isn't? */
6096 fprintf (file, "%s+%d", frame_base_name, frame_base_offset);
6097 return;
6098 case 'Y':
6099 /* Adjust the operand to take into account a RESTORE operation. */
6100 if (GET_CODE (x) == CONST_INT)
6101 break;
6102 else if (GET_CODE (x) != REG)
6103 output_operand_lossage ("invalid %%Y operand");
6104 else if (REGNO (x) < 8)
6105 fputs (reg_names[REGNO (x)], file);
6106 else if (REGNO (x) >= 24 && REGNO (x) < 32)
6107 fputs (reg_names[REGNO (x)-16], file);
6108 else
6109 output_operand_lossage ("invalid %%Y operand");
6110 return;
6111 case 'L':
6112 /* Print out the low order register name of a register pair. */
6113 if (WORDS_BIG_ENDIAN)
6114 fputs (reg_names[REGNO (x)+1], file);
6115 else
6116 fputs (reg_names[REGNO (x)], file);
6117 return;
6118 case 'H':
6119 /* Print out the high order register name of a register pair. */
6120 if (WORDS_BIG_ENDIAN)
6121 fputs (reg_names[REGNO (x)], file);
6122 else
6123 fputs (reg_names[REGNO (x)+1], file);
6124 return;
6125 case 'R':
6126 /* Print out the second register name of a register pair or quad.
6127 I.e., R (%o0) => %o1. */
6128 fputs (reg_names[REGNO (x)+1], file);
6129 return;
6130 case 'S':
6131 /* Print out the third register name of a register quad.
6132 I.e., S (%o0) => %o2. */
6133 fputs (reg_names[REGNO (x)+2], file);
6134 return;
6135 case 'T':
6136 /* Print out the fourth register name of a register quad.
6137 I.e., T (%o0) => %o3. */
6138 fputs (reg_names[REGNO (x)+3], file);
6139 return;
6140 case 'x':
6141 /* Print a condition code register. */
6142 if (REGNO (x) == SPARC_ICC_REG)
6144 /* We don't handle CC[X]_NOOVmode because they're not supposed
6145 to occur here. */
6146 if (GET_MODE (x) == CCmode)
6147 fputs ("%icc", file);
6148 else if (GET_MODE (x) == CCXmode)
6149 fputs ("%xcc", file);
6150 else
6151 abort ();
6153 else
6154 /* %fccN register */
6155 fputs (reg_names[REGNO (x)], file);
6156 return;
6157 case 'm':
6158 /* Print the operand's address only. */
6159 output_address (XEXP (x, 0));
6160 return;
6161 case 'r':
6162 /* In this case we need a register. Use %g0 if the
6163 operand is const0_rtx. */
6164 if (x == const0_rtx
6165 || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
6167 fputs ("%g0", file);
6168 return;
6170 else
6171 break;
6173 case 'A':
6174 switch (GET_CODE (x))
6176 case IOR: fputs ("or", file); break;
6177 case AND: fputs ("and", file); break;
6178 case XOR: fputs ("xor", file); break;
6179 default: output_operand_lossage ("invalid %%A operand");
6181 return;
6183 case 'B':
6184 switch (GET_CODE (x))
6186 case IOR: fputs ("orn", file); break;
6187 case AND: fputs ("andn", file); break;
6188 case XOR: fputs ("xnor", file); break;
6189 default: output_operand_lossage ("invalid %%B operand");
6191 return;
6193 /* These are used by the conditional move instructions. */
6194 case 'c' :
6195 case 'C':
6197 enum rtx_code rc = GET_CODE (x);
6199 if (code == 'c')
6201 enum machine_mode mode = GET_MODE (XEXP (x, 0));
6202 if (mode == CCFPmode || mode == CCFPEmode)
6203 rc = reverse_condition_maybe_unordered (GET_CODE (x));
6204 else
6205 rc = reverse_condition (GET_CODE (x));
6207 switch (rc)
6209 case NE: fputs ("ne", file); break;
6210 case EQ: fputs ("e", file); break;
6211 case GE: fputs ("ge", file); break;
6212 case GT: fputs ("g", file); break;
6213 case LE: fputs ("le", file); break;
6214 case LT: fputs ("l", file); break;
6215 case GEU: fputs ("geu", file); break;
6216 case GTU: fputs ("gu", file); break;
6217 case LEU: fputs ("leu", file); break;
6218 case LTU: fputs ("lu", file); break;
6219 case LTGT: fputs ("lg", file); break;
6220 case UNORDERED: fputs ("u", file); break;
6221 case ORDERED: fputs ("o", file); break;
6222 case UNLT: fputs ("ul", file); break;
6223 case UNLE: fputs ("ule", file); break;
6224 case UNGT: fputs ("ug", file); break;
6225 case UNGE: fputs ("uge", file); break;
6226 case UNEQ: fputs ("ue", file); break;
6227 default: output_operand_lossage (code == 'c'
6228 ? "invalid %%c operand"
6229 : "invalid %%C operand");
6231 return;
6234 /* These are used by the movr instruction pattern. */
6235 case 'd':
6236 case 'D':
6238 enum rtx_code rc = (code == 'd'
6239 ? reverse_condition (GET_CODE (x))
6240 : GET_CODE (x));
6241 switch (rc)
6243 case NE: fputs ("ne", file); break;
6244 case EQ: fputs ("e", file); break;
6245 case GE: fputs ("gez", file); break;
6246 case LT: fputs ("lz", file); break;
6247 case LE: fputs ("lez", file); break;
6248 case GT: fputs ("gz", file); break;
6249 default: output_operand_lossage (code == 'd'
6250 ? "invalid %%d operand"
6251 : "invalid %%D operand");
6253 return;
6256 case 'b':
6258 /* Print a sign-extended character. */
6259 int i = trunc_int_for_mode (INTVAL (x), QImode);
6260 fprintf (file, "%d", i);
6261 return;
6264 case 'f':
6265 /* Operand must be a MEM; write its address. */
6266 if (GET_CODE (x) != MEM)
6267 output_operand_lossage ("invalid %%f operand");
6268 output_address (XEXP (x, 0));
6269 return;
6271 case 's':
6273 /* Print a sign-extended 32-bit value. */
6274 HOST_WIDE_INT i;
6275 if (GET_CODE(x) == CONST_INT)
6276 i = INTVAL (x);
6277 else if (GET_CODE(x) == CONST_DOUBLE)
6278 i = CONST_DOUBLE_LOW (x);
6279 else
6281 output_operand_lossage ("invalid %%s operand");
6282 return;
6284 i = trunc_int_for_mode (i, SImode);
6285 fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
6286 return;
6289 case 0:
6290 /* Do nothing special. */
6291 break;
6293 default:
6294 /* Undocumented flag. */
6295 output_operand_lossage ("invalid operand output code");
6298 if (GET_CODE (x) == REG)
6299 fputs (reg_names[REGNO (x)], file);
6300 else if (GET_CODE (x) == MEM)
6302 fputc ('[', file);
6303 /* Poor Sun assembler doesn't understand absolute addressing. */
6304 if (CONSTANT_P (XEXP (x, 0)))
6305 fputs ("%g0+", file);
6306 output_address (XEXP (x, 0));
6307 fputc (']', file);
6309 else if (GET_CODE (x) == HIGH)
6311 fputs ("%hi(", file);
6312 output_addr_const (file, XEXP (x, 0));
6313 fputc (')', file);
6315 else if (GET_CODE (x) == LO_SUM)
6317 print_operand (file, XEXP (x, 0), 0);
6318 if (TARGET_CM_MEDMID)
6319 fputs ("+%l44(", file);
6320 else
6321 fputs ("+%lo(", file);
6322 output_addr_const (file, XEXP (x, 1));
6323 fputc (')', file);
6325 else if (GET_CODE (x) == CONST_DOUBLE
6326 && (GET_MODE (x) == VOIDmode
6327 || GET_MODE_CLASS (GET_MODE (x)) == MODE_INT))
6329 if (CONST_DOUBLE_HIGH (x) == 0)
6330 fprintf (file, "%u", (unsigned int) CONST_DOUBLE_LOW (x));
6331 else if (CONST_DOUBLE_HIGH (x) == -1
6332 && CONST_DOUBLE_LOW (x) < 0)
6333 fprintf (file, "%d", (int) CONST_DOUBLE_LOW (x));
6334 else
6335 output_operand_lossage ("long long constant not a valid immediate operand");
6337 else if (GET_CODE (x) == CONST_DOUBLE)
6338 output_operand_lossage ("floating point constant not a valid immediate operand");
6339 else { output_addr_const (file, x); }
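/* A rough illustration of the register-pair codes above, assuming the
   usual big-endian word order: with %o0 as the operand, %H prints %o0,
   %L prints %o1, and %R, %S, %T print %o1, %o2, %o3 respectively.
   (Informal summary only.)  */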
6342 /* Target hook for assembling integer objects. The sparc version has
6343 special handling for aligned DI-mode objects. */
6345 static bool
6346 sparc_assemble_integer (rtx x, unsigned int size, int aligned_p)
6348 /* ??? We only output .xword's for symbols and only then in environments
6349 where the assembler can handle them. */
6350 if (aligned_p && size == 8
6351 && (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_DOUBLE))
6353 if (TARGET_V9)
6355 assemble_integer_with_op ("\t.xword\t", x);
6356 return true;
6358 else
6360 assemble_aligned_integer (4, const0_rtx);
6361 assemble_aligned_integer (4, x);
6362 return true;
6365 return default_assemble_integer (x, size, aligned_p);
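/* As a rough sketch of the effect: an aligned 8-byte reference to a
   symbol `sym' is emitted as "\t.xword\tsym" when TARGET_V9, and is
   otherwise split into an aligned zero word followed by a 4-byte
   reference to `sym'.  (Illustrative; `sym' is a placeholder.)  */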
6368 /* Return the value of a code used in the .proc pseudo-op that says
6369 what kind of result this function returns. For non-C types, we pick
6370 the closest C type. */
6372 #ifndef SHORT_TYPE_SIZE
6373 #define SHORT_TYPE_SIZE (BITS_PER_UNIT * 2)
6374 #endif
6376 #ifndef INT_TYPE_SIZE
6377 #define INT_TYPE_SIZE BITS_PER_WORD
6378 #endif
6380 #ifndef LONG_TYPE_SIZE
6381 #define LONG_TYPE_SIZE BITS_PER_WORD
6382 #endif
6384 #ifndef LONG_LONG_TYPE_SIZE
6385 #define LONG_LONG_TYPE_SIZE (BITS_PER_WORD * 2)
6386 #endif
6388 #ifndef FLOAT_TYPE_SIZE
6389 #define FLOAT_TYPE_SIZE BITS_PER_WORD
6390 #endif
6392 #ifndef DOUBLE_TYPE_SIZE
6393 #define DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
6394 #endif
6396 #ifndef LONG_DOUBLE_TYPE_SIZE
6397 #define LONG_DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
6398 #endif
6400 unsigned long
6401 sparc_type_code (register tree type)
6403 register unsigned long qualifiers = 0;
6404 register unsigned shift;
6406 /* Only the first 30 bits of the qualifier are valid. We must refrain from
6407 setting more, since some assemblers will give an error for this. Also,
6408 we must be careful to avoid shifts of 32 bits or more to avoid getting
6409 unpredictable results. */
6411 for (shift = 6; shift < 30; shift += 2, type = TREE_TYPE (type))
6413 switch (TREE_CODE (type))
6415 case ERROR_MARK:
6416 return qualifiers;
6418 case ARRAY_TYPE:
6419 qualifiers |= (3 << shift);
6420 break;
6422 case FUNCTION_TYPE:
6423 case METHOD_TYPE:
6424 qualifiers |= (2 << shift);
6425 break;
6427 case POINTER_TYPE:
6428 case REFERENCE_TYPE:
6429 case OFFSET_TYPE:
6430 qualifiers |= (1 << shift);
6431 break;
6433 case RECORD_TYPE:
6434 return (qualifiers | 8);
6436 case UNION_TYPE:
6437 case QUAL_UNION_TYPE:
6438 return (qualifiers | 9);
6440 case ENUMERAL_TYPE:
6441 return (qualifiers | 10);
6443 case VOID_TYPE:
6444 return (qualifiers | 16);
6446 case INTEGER_TYPE:
6447 /* If this is a range type, consider it to be the underlying
6448 type. */
6449 if (TREE_TYPE (type) != 0)
6450 break;
6452 /* Carefully distinguish all the standard types of C,
6453 without messing up if the language is not C. We do this by
6454 testing TYPE_PRECISION and TREE_UNSIGNED. The old code used to
6455 look at both the names and the above fields, but that's redundant.
6456 Any type whose size is between two C types will be considered
6457 to be the wider of the two types. Also, we do not have a
6458 special code to use for "long long", so anything wider than
6459 long is treated the same. Note that we can't distinguish
6460 between "int" and "long" in this code if they are the same
6461 size, but that's fine, since neither can the assembler. */
6463 if (TYPE_PRECISION (type) <= CHAR_TYPE_SIZE)
6464 return (qualifiers | (TREE_UNSIGNED (type) ? 12 : 2));
6466 else if (TYPE_PRECISION (type) <= SHORT_TYPE_SIZE)
6467 return (qualifiers | (TREE_UNSIGNED (type) ? 13 : 3));
6469 else if (TYPE_PRECISION (type) <= INT_TYPE_SIZE)
6470 return (qualifiers | (TREE_UNSIGNED (type) ? 14 : 4));
6472 else
6473 return (qualifiers | (TREE_UNSIGNED (type) ? 15 : 5));
6475 case REAL_TYPE:
6476 /* If this is a range type, consider it to be the underlying
6477 type. */
6478 if (TREE_TYPE (type) != 0)
6479 break;
6481 /* Carefully distinguish all the standard types of C,
6482 without messing up if the language is not C. */
6484 if (TYPE_PRECISION (type) == FLOAT_TYPE_SIZE)
6485 return (qualifiers | 6);
6487 else
6488 return (qualifiers | 7);
6490 case COMPLEX_TYPE: /* GNU Fortran COMPLEX type. */
6491 /* ??? We need to distinguish between double and float complex types,
6492 but I don't know how yet because I can't reach this code from
6493 existing front-ends. */
6494 return (qualifiers | 7); /* Who knows? */
6496 case CHAR_TYPE: /* GNU Pascal CHAR type. Not used in C. */
6497 case BOOLEAN_TYPE: /* GNU Fortran BOOLEAN type. */
6498 case FILE_TYPE: /* GNU Pascal FILE type. */
6499 case SET_TYPE: /* GNU Pascal SET type. */
6500 case LANG_TYPE: /* ? */
6501 return qualifiers;
6503 default:
6504 abort (); /* Not a type! */
6508 return qualifiers;
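/* Worked example (informal): for a C `int *', the first loop
   iteration sees the POINTER_TYPE and ors in 1 << 6; the second sees
   the signed int and returns (1 << 6) | 4, i.e. 0x44.  A plain
   `float' whose precision equals FLOAT_TYPE_SIZE simply yields 6.  */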
6511 /* Nested function support. */
6513 /* Emit RTL insns to initialize the variable parts of a trampoline.
6514 FNADDR is an RTX for the address of the function's pure code.
6515 CXT is an RTX for the static chain value for the function.
6517 This takes 16 insns: 2 shifts & 2 ands (to split up addresses), 4 sethi
6518 (to load in opcodes), 4 iors (to merge address and opcodes), and 4 writes
6519 (to store insns). This is a bit excessive. Perhaps a different
6520 mechanism would be better here.
6522 Emit enough FLUSH insns to synchronize the data and instruction caches. */
6524 void
6525 sparc_initialize_trampoline (rtx tramp, rtx fnaddr, rtx cxt)
6527 /* SPARC 32 bit trampoline:
6529 sethi %hi(fn), %g1
6530 sethi %hi(static), %g2
6531 jmp %g1+%lo(fn)
6532 or %g2, %lo(static), %g2
6534 SETHI i,r = 00rr rrr1 00ii iiii iiii iiii iiii iiii
6535 JMPL r+i,d = 10dd ddd1 1100 0rrr rr1i iiii iiii iiii
6537 #ifdef TRANSFER_FROM_TRAMPOLINE
6538 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__enable_execute_stack"),
6539 LCT_NORMAL, VOIDmode, 1, tramp, Pmode);
6540 #endif
6542 emit_move_insn
6543 (gen_rtx_MEM (SImode, plus_constant (tramp, 0)),
6544 expand_binop (SImode, ior_optab,
6545 expand_shift (RSHIFT_EXPR, SImode, fnaddr,
6546 size_int (10), 0, 1),
6547 GEN_INT (trunc_int_for_mode (0x03000000, SImode)),
6548 NULL_RTX, 1, OPTAB_DIRECT));
6550 emit_move_insn
6551 (gen_rtx_MEM (SImode, plus_constant (tramp, 4)),
6552 expand_binop (SImode, ior_optab,
6553 expand_shift (RSHIFT_EXPR, SImode, cxt,
6554 size_int (10), 0, 1),
6555 GEN_INT (trunc_int_for_mode (0x05000000, SImode)),
6556 NULL_RTX, 1, OPTAB_DIRECT));
6558 emit_move_insn
6559 (gen_rtx_MEM (SImode, plus_constant (tramp, 8)),
6560 expand_binop (SImode, ior_optab,
6561 expand_and (SImode, fnaddr, GEN_INT (0x3ff), NULL_RTX),
6562 GEN_INT (trunc_int_for_mode (0x81c06000, SImode)),
6563 NULL_RTX, 1, OPTAB_DIRECT));
6565 emit_move_insn
6566 (gen_rtx_MEM (SImode, plus_constant (tramp, 12)),
6567 expand_binop (SImode, ior_optab,
6568 expand_and (SImode, cxt, GEN_INT (0x3ff), NULL_RTX),
6569 GEN_INT (trunc_int_for_mode (0x8410a000, SImode)),
6570 NULL_RTX, 1, OPTAB_DIRECT));
6572 /* On UltraSPARC a flush flushes an entire cache line. The trampoline is
6573 aligned on a 16 byte boundary so one flush clears it all. */
6574 emit_insn (gen_flush (validize_mem (gen_rtx_MEM (SImode, tramp))));
6575 if (sparc_cpu != PROCESSOR_ULTRASPARC
6576 && sparc_cpu != PROCESSOR_ULTRASPARC3)
6577 emit_insn (gen_flush (validize_mem (gen_rtx_MEM (SImode,
6578 plus_constant (tramp, 8)))));
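/* A sketch of how the instruction words above are formed (for
   reference only): the word at offset 0 is (fnaddr >> 10) | 0x03000000,
   i.e. "sethi %hi(fn), %g1"; the word at offset 8 is
   (fnaddr & 0x3ff) | 0x81c06000, i.e. "jmp %g1+%lo(fn)".  The words at
   offsets 4 and 12 similarly encode the sethi and or for the static
   chain in %g2.  */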
6581 /* The 64 bit version is simpler because it makes more sense to load the
6582 values as "immediate" data out of the trampoline. It's also easier since
6583 we can read the PC without clobbering a register. */
6585 void
6586 sparc64_initialize_trampoline (rtx tramp, rtx fnaddr, rtx cxt)
6588 #ifdef TRANSFER_FROM_TRAMPOLINE
6589 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__enable_execute_stack"),
6590 LCT_NORMAL, VOIDmode, 1, tramp, Pmode);
6591 #endif
6594 rd %pc, %g1
6595 ldx [%g1+24], %g5
6596 jmp %g5
6597 ldx [%g1+16], %g5
6598 +16 bytes data
6601 emit_move_insn (gen_rtx_MEM (SImode, tramp),
6602 GEN_INT (trunc_int_for_mode (0x83414000, SImode)));
6603 emit_move_insn (gen_rtx_MEM (SImode, plus_constant (tramp, 4)),
6604 GEN_INT (trunc_int_for_mode (0xca586018, SImode)));
6605 emit_move_insn (gen_rtx_MEM (SImode, plus_constant (tramp, 8)),
6606 GEN_INT (trunc_int_for_mode (0x81c14000, SImode)));
6607 emit_move_insn (gen_rtx_MEM (SImode, plus_constant (tramp, 12)),
6608 GEN_INT (trunc_int_for_mode (0xca586010, SImode)));
6609 emit_move_insn (gen_rtx_MEM (DImode, plus_constant (tramp, 16)), cxt);
6610 emit_move_insn (gen_rtx_MEM (DImode, plus_constant (tramp, 24)), fnaddr);
6611 emit_insn (gen_flushdi (validize_mem (gen_rtx_MEM (DImode, tramp))));
6613 if (sparc_cpu != PROCESSOR_ULTRASPARC
6614 && sparc_cpu != PROCESSOR_ULTRASPARC3)
6615 emit_insn (gen_flushdi (validize_mem (gen_rtx_MEM (DImode, plus_constant (tramp, 8)))));
6618 /* Subroutines to support a flat (single) register window calling
6619 convention. */
6621 /* Single-register window sparc stack frames look like:
6623 Before call After call
6624 +-----------------------+ +-----------------------+
6625 high | | | |
6626 mem | caller's temps. | | caller's temps. |
6627 | | | |
6628 +-----------------------+ +-----------------------+
6629 | | | |
6630 | arguments on stack. | | arguments on stack. |
6631 | | | |
6632 +-----------------------+FP+92->+-----------------------+
6633 | 6 words to save | | 6 words to save |
6634 | arguments passed | | arguments passed |
6635 | in registers, even | | in registers, even |
6636 | if not passed. | | if not passed. |
6637 SP+68->+-----------------------+FP+68->+-----------------------+
6638 | 1 word struct addr | | 1 word struct addr |
6639 +-----------------------+FP+64->+-----------------------+
6640 | | | |
6641 | 16 word reg save area | | 16 word reg save area |
6642 | | | |
6643 SP->+-----------------------+ FP->+-----------------------+
6644 | 4 word area for |
6645 | fp/alu reg moves |
6646 FP-16->+-----------------------+
6648 | local variables |
6650 +-----------------------+
6652 | fp register save |
6654 +-----------------------+
6656 | gp register save |
6658 +-----------------------+
6660 | alloca allocations |
6662 +-----------------------+
6664 | arguments on stack |
6666 SP+92->+-----------------------+
6667 | 6 words to save |
6668 | arguments passed |
6669 | in registers, even |
6670 low | if not passed. |
6671 memory SP+68->+-----------------------+
6672 | 1 word struct addr |
6673 SP+64->+-----------------------+
6675 | 16 word reg save area |
6677 SP->+-----------------------+ */
6679 /* Structure to be filled in by sparc_flat_compute_frame_size with register
6680 save masks, and offsets for the current function. */
6682 struct sparc_frame_info
6684 unsigned long total_size; /* # bytes that the entire frame takes up. */
6685 unsigned long var_size; /* # bytes that variables take up. */
6686 unsigned long args_size; /* # bytes that outgoing arguments take up. */
6687 unsigned long extra_size; /* # bytes of extra gunk. */
6688 unsigned int gp_reg_size; /* # bytes needed to store gp regs. */
6689 unsigned int fp_reg_size; /* # bytes needed to store fp regs. */
6690 unsigned long gmask; /* Mask of saved gp registers. */
6691 unsigned long fmask; /* Mask of saved fp registers. */
6692 unsigned long reg_offset; /* Offset from new sp to store regs. */
6693 int initialized; /* Nonzero if frame size already calculated. */
6696 /* Current frame information calculated by sparc_flat_compute_frame_size. */
6697 struct sparc_frame_info current_frame_info;
6699 /* Zero structure to initialize current_frame_info. */
6700 struct sparc_frame_info zero_frame_info;
6702 /* Tell prologue and epilogue if register REGNO should be saved / restored. */
6704 #define RETURN_ADDR_REGNUM 15
6705 #define HARD_FRAME_POINTER_MASK (1 << (HARD_FRAME_POINTER_REGNUM))
6706 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
6708 #define MUST_SAVE_REGISTER(regno) \
6709 ((regs_ever_live[regno] && !call_used_regs[regno]) \
6710 || (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed) \
6711 || (regno == RETURN_ADDR_REGNUM && regs_ever_live[RETURN_ADDR_REGNUM]))
6713 /* Return the bytes needed to compute the frame pointer from the current
6714 stack pointer. */
6716 unsigned long
6717 sparc_flat_compute_frame_size (int size)
6718 /* # of var. bytes allocated. */
6720 int regno;
6721 unsigned long total_size; /* # bytes that the entire frame takes up. */
6722 unsigned long var_size; /* # bytes that variables take up. */
6723 unsigned long args_size; /* # bytes that outgoing arguments take up. */
6724 unsigned long extra_size; /* # extra bytes. */
6725 unsigned int gp_reg_size; /* # bytes needed to store gp regs. */
6726 unsigned int fp_reg_size; /* # bytes needed to store fp regs. */
6727 unsigned long gmask; /* Mask of saved gp registers. */
6728 unsigned long fmask; /* Mask of saved fp registers. */
6729 unsigned long reg_offset; /* Offset to register save area. */
6730 int need_aligned_p; /* 1 if need the save area 8 byte aligned. */
6732 /* This is the size of the 16 word reg save area, 1 word struct addr
6733 area, and 4 word fp/alu register copy area. */
6734 extra_size = -STARTING_FRAME_OFFSET + FIRST_PARM_OFFSET(0);
6735 var_size = size;
6736 gp_reg_size = 0;
6737 fp_reg_size = 0;
6738 gmask = 0;
6739 fmask = 0;
6740 reg_offset = 0;
6741 need_aligned_p = 0;
6743 args_size = 0;
6744 if (!leaf_function_p ())
6746 /* Also include the size needed for the 6 parameter registers. */
6747 args_size = current_function_outgoing_args_size + 24;
6749 total_size = var_size + args_size;
6751 /* Calculate space needed for gp registers. */
6752 for (regno = 1; regno <= 31; regno++)
6754 if (MUST_SAVE_REGISTER (regno))
6756 /* If we need to save two regs in a row, ensure there's room to bump
6757 up the address to align it to a doubleword boundary. */
6758 if ((regno & 0x1) == 0 && MUST_SAVE_REGISTER (regno+1))
6760 if (gp_reg_size % 8 != 0)
6761 gp_reg_size += 4;
6762 gp_reg_size += 2 * UNITS_PER_WORD;
6763 gmask |= 3 << regno;
6764 regno++;
6765 need_aligned_p = 1;
6767 else
6769 gp_reg_size += UNITS_PER_WORD;
6770 gmask |= 1 << regno;
6775 /* Calculate space needed for fp registers. */
6776 for (regno = 32; regno <= 63; regno++)
6778 if (regs_ever_live[regno] && !call_used_regs[regno])
6780 fp_reg_size += UNITS_PER_WORD;
6781 fmask |= 1 << (regno - 32);
6785 if (gmask || fmask)
6787 int n;
6788 reg_offset = FIRST_PARM_OFFSET(0) + args_size;
6789 /* Ensure save area is 8 byte aligned if we need it. */
6790 n = reg_offset % 8;
6791 if (need_aligned_p && n != 0)
6793 total_size += 8 - n;
6794 reg_offset += 8 - n;
6796 total_size += gp_reg_size + fp_reg_size;
6799 /* If we must allocate a stack frame at all, we must also allocate
6800 room for register window spillage, so as to be binary compatible
6801 with libraries and operating systems that do not use -mflat. */
6802 if (total_size > 0)
6803 total_size += extra_size;
6804 else
6805 extra_size = 0;
6807 total_size = SPARC_STACK_ALIGN (total_size);
6809 /* Save other computed information. */
6810 current_frame_info.total_size = total_size;
6811 current_frame_info.var_size = var_size;
6812 current_frame_info.args_size = args_size;
6813 current_frame_info.extra_size = extra_size;
6814 current_frame_info.gp_reg_size = gp_reg_size;
6815 current_frame_info.fp_reg_size = fp_reg_size;
6816 current_frame_info.gmask = gmask;
6817 current_frame_info.fmask = fmask;
6818 current_frame_info.reg_offset = reg_offset;
6819 current_frame_info.initialized = reload_completed;
6821 /* Ok, we're done. */
6822 return total_size;
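/* Informal example (exact numbers depend on the target macros): a
   leaf function with 8 bytes of locals and no call-saved registers
   gets var_size = 8, args_size = 0 and empty masks, so total_size is
   8 plus extra_size, rounded up by SPARC_STACK_ALIGN.  A non-leaf
   function additionally reserves current_function_outgoing_args_size
   + 24 bytes for outgoing arguments.  */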
6825 /* Save/restore registers in GMASK and FMASK at register BASE_REG plus offset
6826 OFFSET.
6828 BASE_REG must be 8 byte aligned. This allows us to test OFFSET for
6829 appropriate alignment and use DOUBLEWORD_OP when we can. We assume
6830 [BASE_REG+OFFSET] will always be a valid address.
6832 WORD_OP is either "st" for save, "ld" for restore.
6833 DOUBLEWORD_OP is either "std" for save, "ldd" for restore. */
6835 void
6836 sparc_flat_save_restore (FILE *file, const char *base_reg,
6837 unsigned int offset, long unsigned int gmask,
6838 long unsigned int fmask, const char *word_op,
6839 const char *doubleword_op,
6840 long unsigned int base_offset)
6842 int regno;
6844 if (gmask == 0 && fmask == 0)
6845 return;
6847 /* Save registers starting from high to low. We've already saved the
6848 previous frame pointer and previous return address for the debugger's
6849 sake. The debugger allows us to not need a nop in the epilogue if at least
6850 one register is reloaded in addition to the return address. */
6852 if (gmask)
6854 for (regno = 1; regno <= 31; regno++)
6856 if ((gmask & (1L << regno)) != 0)
6858 if ((regno & 0x1) == 0 && ((gmask & (1L << (regno+1))) != 0))
6860 /* We can save two registers in a row. If we're not at a
6861 double word boundary, move to one.
6862 sparc_flat_compute_frame_size ensures there's room to do
6863 this. */
6864 if (offset % 8 != 0)
6865 offset += UNITS_PER_WORD;
6867 if (word_op[0] == 's')
6869 fprintf (file, "\t%s\t%s, [%s+%d]\n",
6870 doubleword_op, reg_names[regno],
6871 base_reg, offset);
6872 if (dwarf2out_do_frame ())
6874 char *l = dwarf2out_cfi_label ();
6875 dwarf2out_reg_save (l, regno, offset + base_offset);
6876 dwarf2out_reg_save
6877 (l, regno+1, offset+base_offset + UNITS_PER_WORD);
6880 else
6881 fprintf (file, "\t%s\t[%s+%d], %s\n",
6882 doubleword_op, base_reg, offset,
6883 reg_names[regno]);
6885 offset += 2 * UNITS_PER_WORD;
6886 regno++;
6888 else
6890 if (word_op[0] == 's')
6892 fprintf (file, "\t%s\t%s, [%s+%d]\n",
6893 word_op, reg_names[regno],
6894 base_reg, offset);
6895 if (dwarf2out_do_frame ())
6896 dwarf2out_reg_save ("", regno, offset + base_offset);
6898 else
6899 fprintf (file, "\t%s\t[%s+%d], %s\n",
6900 word_op, base_reg, offset, reg_names[regno]);
6902 offset += UNITS_PER_WORD;
6908 if (fmask)
6910 for (regno = 32; regno <= 63; regno++)
6912 if ((fmask & (1L << (regno - 32))) != 0)
6914 if (word_op[0] == 's')
6916 fprintf (file, "\t%s\t%s, [%s+%d]\n",
6917 word_op, reg_names[regno],
6918 base_reg, offset);
6919 if (dwarf2out_do_frame ())
6920 dwarf2out_reg_save ("", regno, offset + base_offset);
6922 else
6923 fprintf (file, "\t%s\t[%s+%d], %s\n",
6924 word_op, base_reg, offset, reg_names[regno]);
6926 offset += UNITS_PER_WORD;
6932 /* Set up the stack and frame (if desired) for the function. */
6934 static void
6935 sparc_flat_function_prologue (FILE *file, HOST_WIDE_INT size)
6937 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
6938 unsigned long gmask = current_frame_info.gmask;
6940 sparc_output_scratch_registers (file);
6942 /* This is only for the human reader. */
6943 fprintf (file, "\t%s#PROLOGUE# 0\n", ASM_COMMENT_START);
6944 fprintf (file, "\t%s# vars= %ld, regs= %d/%d, args= %d, extra= %ld\n",
6945 ASM_COMMENT_START,
6946 current_frame_info.var_size,
6947 current_frame_info.gp_reg_size / 4,
6948 current_frame_info.fp_reg_size / 4,
6949 current_function_outgoing_args_size,
6950 current_frame_info.extra_size);
6952 size = SPARC_STACK_ALIGN (size);
6953 size = (! current_frame_info.initialized
6954 ? sparc_flat_compute_frame_size (size)
6955 : current_frame_info.total_size);
6957 /* These cases shouldn't happen. Catch them now. */
6958 if (size == 0 && (gmask || current_frame_info.fmask))
6959 abort ();
6961 /* Allocate our stack frame by decrementing %sp.
6962 At present, the only algorithm gdb can use to determine if this is a
6963 flat frame is if we always set %i7 if we set %sp. This can be optimized
6964 in the future by putting in some sort of debugging information that says
6965 this is a `flat' function. However, there is still the case of debugging
6966 code without such debugging information (including cases where most fns
6967 have such info, but there is one that doesn't). So, always do this now
6968 so we don't get a lot of code out there that gdb can't handle.
6969 If the frame pointer isn't needed then that's ok - gdb won't be able to
6970 distinguish us from a non-flat function but there won't (and shouldn't)
6971 be any differences anyway. The return pc is saved (if necessary) right
6972 after %i7 so gdb won't have to look too far to find it. */
6973 if (size > 0)
6975 unsigned int reg_offset = current_frame_info.reg_offset;
6976 const char *const fp_str = reg_names[HARD_FRAME_POINTER_REGNUM];
6977 static const char *const t1_str = "%g1";
6979 /* Things get a little tricky if local variables take up more than ~4096
6980 bytes and outgoing arguments take up more than ~4096 bytes. When that
6981 happens, the register save area can't be accessed from either end of
6982 the frame. Handle this by decrementing %sp to the start of the gp
6983 register save area, save the regs, update %i7, and then set %sp to its
6984 final value. Given that we only have one scratch register to play
6985 with, this is the cheapest solution, and it helps gdb out as it won't
6986 slow down recognition of flat functions.
6987 Don't change the order of insns emitted here without checking with
6988 the gdb folk first. */
6990 /* Is the entire register save area offsettable from %sp? */
6991 if (reg_offset < 4096 - 64 * (unsigned) UNITS_PER_WORD)
6993 if (size <= 4096)
6995 fprintf (file, "\tadd\t%s, %d, %s\n",
6996 sp_str, (int) -size, sp_str);
6997 if (gmask & HARD_FRAME_POINTER_MASK)
6999 fprintf (file, "\tst\t%s, [%s+%d]\n",
7000 fp_str, sp_str, reg_offset);
7001 fprintf (file, "\tsub\t%s, %d, %s\t%s# set up frame pointer\n",
7002 sp_str, (int) -size, fp_str, ASM_COMMENT_START);
7003 reg_offset += 4;
7006 else
7008 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC
7009 ", %s\n\tsub\t%s, %s, %s\n",
7010 size, t1_str, sp_str, t1_str, sp_str);
7011 if (gmask & HARD_FRAME_POINTER_MASK)
7013 fprintf (file, "\tst\t%s, [%s+%d]\n",
7014 fp_str, sp_str, reg_offset);
7015 fprintf (file, "\tadd\t%s, %s, %s\t%s# set up frame pointer\n",
7016 sp_str, t1_str, fp_str, ASM_COMMENT_START);
7017 reg_offset += 4;
7020 if (dwarf2out_do_frame ())
7022 char *l = dwarf2out_cfi_label ();
7023 if (gmask & HARD_FRAME_POINTER_MASK)
7025 dwarf2out_reg_save (l, HARD_FRAME_POINTER_REGNUM,
7026 reg_offset - 4 - size);
7027 dwarf2out_def_cfa (l, HARD_FRAME_POINTER_REGNUM, 0);
7029 else
7030 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, size);
7032 if (gmask & RETURN_ADDR_MASK)
7034 fprintf (file, "\tst\t%s, [%s+%d]\n",
7035 reg_names[RETURN_ADDR_REGNUM], sp_str, reg_offset);
7036 if (dwarf2out_do_frame ())
7037 dwarf2out_return_save ("", reg_offset - size);
7038 reg_offset += 4;
7040 sparc_flat_save_restore (file, sp_str, reg_offset,
7041 gmask & ~(HARD_FRAME_POINTER_MASK | RETURN_ADDR_MASK),
7042 current_frame_info.fmask,
7043 "st", "std", -size);
7045 else
7047 /* Subtract %sp in two steps, but make sure there is always a
7048 64 byte register save area, and %sp is properly aligned. */
7049 /* Amount to decrement %sp by, the first time. */
7050 unsigned HOST_WIDE_INT size1 = ((size - reg_offset + 64) + 15) & -16;
7051 /* Offset to register save area from %sp. */
7052 unsigned HOST_WIDE_INT offset = size1 - (size - reg_offset);
7054 if (size1 <= 4096)
7056 fprintf (file, "\tadd\t%s, %d, %s\n",
7057 sp_str, (int) -size1, sp_str);
7058 if (gmask & HARD_FRAME_POINTER_MASK)
7060 fprintf (file, "\tst\t%s, [%s+%d]\n\tsub\t%s, %d, %s\t%s# set up frame pointer\n",
7061 fp_str, sp_str, (int) offset, sp_str, (int) -size1,
7062 fp_str, ASM_COMMENT_START);
7063 offset += 4;
7066 else
7068 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC
7069 ", %s\n\tsub\t%s, %s, %s\n",
7070 size1, t1_str, sp_str, t1_str, sp_str);
7071 if (gmask & HARD_FRAME_POINTER_MASK)
7073 fprintf (file, "\tst\t%s, [%s+%d]\n\tadd\t%s, %s, %s\t%s# set up frame pointer\n",
7074 fp_str, sp_str, (int) offset, sp_str, t1_str,
7075 fp_str, ASM_COMMENT_START);
7076 offset += 4;
7079 if (dwarf2out_do_frame ())
7081 char *l = dwarf2out_cfi_label ();
7082 if (gmask & HARD_FRAME_POINTER_MASK)
7084 dwarf2out_reg_save (l, HARD_FRAME_POINTER_REGNUM,
7085 offset - 4 - size1);
7086 dwarf2out_def_cfa (l, HARD_FRAME_POINTER_REGNUM, 0);
7088 else
7089 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, size1);
7091 if (gmask & RETURN_ADDR_MASK)
7093 fprintf (file, "\tst\t%s, [%s+%d]\n",
7094 reg_names[RETURN_ADDR_REGNUM], sp_str, (int) offset);
7095 if (dwarf2out_do_frame ())
7096 /* offset - size1 == reg_offset - size
7097 if reg_offset were updated above like offset. */
7098 dwarf2out_return_save ("", offset - size1);
7099 offset += 4;
7101 sparc_flat_save_restore (file, sp_str, offset,
7102 gmask & ~(HARD_FRAME_POINTER_MASK | RETURN_ADDR_MASK),
7103 current_frame_info.fmask,
7104 "st", "std", -size1);
7105 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC
7106 ", %s\n\tsub\t%s, %s, %s\n",
7107 size - size1, t1_str, sp_str, t1_str, sp_str);
7108 if (dwarf2out_do_frame ())
7109 if (! (gmask & HARD_FRAME_POINTER_MASK))
7110 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, size);
7114 fprintf (file, "\t%s#PROLOGUE# 1\n", ASM_COMMENT_START);
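/* For a small frame (size <= 4096 with the register save area
   reachable from %sp) the sequence emitted above is roughly:

	add	%sp, -SIZE, %sp
	st	%fp, [%sp+REG_OFFSET]		! if %fp must be saved
	sub	%sp, -SIZE, %fp			! set up frame pointer
	st	%o7, [%sp+REG_OFFSET+4]		! if the return address is saved

   followed by sparc_flat_save_restore for the remaining registers.
   SIZE and REG_OFFSET are placeholders for the computed values; the
   %o7 slot follows the %fp slot only when both are saved.  */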
7117 /* Do any necessary cleanup after a function to restore stack, frame,
7118 and regs. */
7120 static void
7121 sparc_flat_function_epilogue (FILE *file, HOST_WIDE_INT size)
7123 rtx epilogue_delay = current_function_epilogue_delay_list;
7124 int noepilogue = FALSE;
7126 /* This is only for the human reader. */
7127 fprintf (file, "\t%s#EPILOGUE#\n", ASM_COMMENT_START);
7129 /* The epilogue does not depend on any registers other than the stack
7130 registers, so we assume that if there is 1 pending nop it can be
7131 ignored, and if there are 2 they must be filled (2 nops occur for
7132 integer multiply and divide). */
7134 size = SPARC_STACK_ALIGN (size);
7135 size = (!current_frame_info.initialized
7136 ? sparc_flat_compute_frame_size (size)
7137 : current_frame_info.total_size);
7139 if (size == 0 && epilogue_delay == 0)
7141 rtx insn = get_last_insn ();
7143 /* If the last insn was a BARRIER, we don't have to write any code
7144 because a jump (aka return) was put there. */
7145 if (GET_CODE (insn) == NOTE)
7146 insn = prev_nonnote_insn (insn);
7147 if (insn && GET_CODE (insn) == BARRIER)
7148 noepilogue = TRUE;
7151 if (!noepilogue)
7153 unsigned HOST_WIDE_INT reg_offset = current_frame_info.reg_offset;
7154 unsigned HOST_WIDE_INT size1;
7155 const char *const sp_str = reg_names[STACK_POINTER_REGNUM];
7156 const char *const fp_str = reg_names[HARD_FRAME_POINTER_REGNUM];
7157 static const char *const t1_str = "%g1";
7159 /* In the reload sequence, we don't need to fill the load delay
7160 slots for most of the loads; also see if we can fill the final
7161 delay slot if it is not otherwise filled by the reload sequence. */
7163 if (size > 4095)
7164 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC ", %s\n",
7165 size, t1_str);
7167 if (frame_pointer_needed)
7169 if (size > 4095)
7170 fprintf (file,"\tsub\t%s, %s, %s\t\t%s# sp not trusted here\n",
7171 fp_str, t1_str, sp_str, ASM_COMMENT_START);
7172 else
7173 fprintf (file,"\tsub\t%s, %d, %s\t\t%s# sp not trusted here\n",
7174 fp_str, (int) size, sp_str, ASM_COMMENT_START);
7177 /* Is the entire register save area offsettable from %sp? */
7178 if (reg_offset < 4096 - 64 * (unsigned) UNITS_PER_WORD)
7180 size1 = 0;
7182 else
7184 /* Restore %sp in two steps, but make sure there is always a
7185 64 byte register save area, and %sp is properly aligned. */
7186 /* Amount to increment %sp by, the first time. */
7187 size1 = ((reg_offset - 64 - 16) + 15) & -16;
7188 /* Offset to register save area from %sp. */
7189 reg_offset = size1 - reg_offset;
7191 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC
7192 ", %s\n\tadd\t%s, %s, %s\n",
7193 size1, t1_str, sp_str, t1_str, sp_str);
7196 /* We must restore the frame pointer and return address reg first
7197 because they are treated specially by the prologue output code. */
7198 if (current_frame_info.gmask & HARD_FRAME_POINTER_MASK)
7200 fprintf (file, "\tld\t[%s+%d], %s\n",
7201 sp_str, (int) reg_offset, fp_str);
7202 reg_offset += 4;
7204 if (current_frame_info.gmask & RETURN_ADDR_MASK)
7206 fprintf (file, "\tld\t[%s+%d], %s\n",
7207 sp_str, (int) reg_offset, reg_names[RETURN_ADDR_REGNUM]);
7208 reg_offset += 4;
7211 /* Restore any remaining saved registers. */
7212 sparc_flat_save_restore (file, sp_str, reg_offset,
7213 current_frame_info.gmask & ~(HARD_FRAME_POINTER_MASK | RETURN_ADDR_MASK),
7214 current_frame_info.fmask,
7215 "ld", "ldd", 0);
7217 /* If we had to increment %sp in two steps, record it so the second
7218 restoration in the epilogue finishes up. */
7219 if (size1 > 0)
7221 size -= size1;
7222 if (size > 4095)
7223 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC ", %s\n",
7224 size, t1_str);
7227 if (current_function_returns_struct)
7228 fprintf (file, "\tjmp\t%%o7+12\n");
7229 else
7230 fprintf (file, "\tretl\n");
7232 /* If the only register saved is the return address, we need a
7233 nop, unless we have an instruction to put into it. Otherwise
7234 we don't since reloading multiple registers doesn't reference
7235 the register being loaded. */
7237 if (epilogue_delay)
7239 if (size)
7240 abort ();
7241 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
7244 else if (size > 4095)
7245 fprintf (file, "\tadd\t%s, %s, %s\n", sp_str, t1_str, sp_str);
7247 else if (size > 0)
7248 fprintf (file, "\tadd\t%s, %d, %s\n", sp_str, (int) size, sp_str);
7250 else
7251 fprintf (file, "\tnop\n");
7254 /* Reset state info for each function. */
7255 current_frame_info = zero_frame_info;
7257 sparc_output_deferred_case_vectors ();
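/* The common small-frame path above thus boils down to something
   like (informally):

	ld	[%sp+REG_OFFSET], %fp		! if %fp was saved
	ld	[%sp+REG_OFFSET+4], %o7		! if the return address was saved
						! (loads for the remaining registers)
	retl					! or "jmp %o7+12" for struct returns
	add	%sp, SIZE, %sp			! delay slot restores %sp

   with the delay slot instead holding an epilogue delay insn or a nop
   when no stack adjustment remains.  */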
7260 /* Define the number of delay slots needed for the function epilogue.
7262 On the sparc, we need a slot if either no stack has been allocated,
7263 or the only register saved is the return register. */
7266 sparc_flat_epilogue_delay_slots (void)
7268 if (!current_frame_info.initialized)
7269 (void) sparc_flat_compute_frame_size (get_frame_size ());
7271 if (current_frame_info.total_size == 0)
7272 return 1;
7274 return 0;
7277 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
7278 Any single length instruction which doesn't reference the stack or frame
7279 pointer is OK. */
7282 sparc_flat_eligible_for_epilogue_delay (rtx trial, int slot ATTRIBUTE_UNUSED)
7284 rtx pat = PATTERN (trial);
7286 if (get_attr_length (trial) != 1)
7287 return 0;
7289 if (! reg_mentioned_p (stack_pointer_rtx, pat)
7290 && ! reg_mentioned_p (frame_pointer_rtx, pat))
7291 return 1;
7293 return 0;
7296 /* Adjust the cost of a scheduling dependency. Return the new cost of
7297 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
7299 static int
7300 supersparc_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
7302 enum attr_type insn_type;
7304 if (! recog_memoized (insn))
7305 return 0;
7307 insn_type = get_attr_type (insn);
7309 if (REG_NOTE_KIND (link) == 0)
7311 /* Data dependency; DEP_INSN writes a register that INSN reads some
7312 cycles later. */
7314 /* if a load, then the dependence must be on the memory address;
7315 add an extra "cycle". Note that the cost could be two cycles
7316 if the reg was written late in an instruction group; we cannot tell
7317 here. */
7318 if (insn_type == TYPE_LOAD || insn_type == TYPE_FPLOAD)
7319 return cost + 3;
7321 /* Get the delay only if the address of the store is the dependence. */
7322 if (insn_type == TYPE_STORE || insn_type == TYPE_FPSTORE)
7324 rtx pat = PATTERN(insn);
7325 rtx dep_pat = PATTERN (dep_insn);
7327 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
7328 return cost; /* This should not happen! */
7330 /* The dependency between the two instructions was on the data that
7331 is being stored. Assume that this implies that the address of the
7332 store is not dependent. */
7333 if (rtx_equal_p (SET_DEST (dep_pat), SET_SRC (pat)))
7334 return cost;
7336 return cost + 3; /* An approximation. */
7339 /* A shift instruction cannot receive its data from an instruction
7340 in the same cycle; add a one cycle penalty. */
7341 if (insn_type == TYPE_SHIFT)
7342 return cost + 3; /* Split before cascade into shift. */
7344 else
7346 /* Anti- or output- dependency; DEP_INSN reads/writes a register that
7347 INSN writes some cycles later. */
7349 /* These are only significant for the fpu unit; writing a fp reg before
7350 the fpu has finished with it stalls the processor. */
7352 /* Reusing an integer register causes no problems. */
7353 if (insn_type == TYPE_IALU || insn_type == TYPE_SHIFT)
7354 return 0;
7357 return cost;
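/* Informal example of the data-dependency case: in a sequence like

	ld	[%o1], %o2
	ld	[%o2], %o3	! address depends on the previous load

   the second load is charged cost + 3, whereas a store whose stored
   data (rather than its address) comes from the dependence keeps the
   original cost.  */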
7360 static int
7361 hypersparc_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
7363 enum attr_type insn_type, dep_type;
7364 rtx pat = PATTERN(insn);
7365 rtx dep_pat = PATTERN (dep_insn);
7367 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
7368 return cost;
7370 insn_type = get_attr_type (insn);
7371 dep_type = get_attr_type (dep_insn);
7373 switch (REG_NOTE_KIND (link))
7375 case 0:
7376 /* Data dependency; DEP_INSN writes a register that INSN reads some
7377 cycles later. */
7379 switch (insn_type)
7381 case TYPE_STORE:
7382 case TYPE_FPSTORE:
7383 /* Get the delay iff the address of the store is the dependence. */
7384 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
7385 return cost;
7387 if (rtx_equal_p (SET_DEST (dep_pat), SET_SRC (pat)))
7388 return cost;
7389 return cost + 3;
7391 case TYPE_LOAD:
7392 case TYPE_SLOAD:
7393 case TYPE_FPLOAD:
7394 /* If a load, then the dependence must be on the memory address. If
7395 the addresses aren't equal, then it might be a false dependency */
7396 if (dep_type == TYPE_STORE || dep_type == TYPE_FPSTORE)
7398 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET
7399 || GET_CODE (SET_DEST (dep_pat)) != MEM
7400 || GET_CODE (SET_SRC (pat)) != MEM
7401 || ! rtx_equal_p (XEXP (SET_DEST (dep_pat), 0),
7402 XEXP (SET_SRC (pat), 0)))
7403 return cost + 2;
7405 return cost + 8;
7407 break;
7409 case TYPE_BRANCH:
7410 /* Compare to branch latency is 0. There is no benefit from
7411 separating compare and branch. */
7412 if (dep_type == TYPE_COMPARE)
7413 return 0;
7414 /* Floating point compare to branch latency is less than
7415 compare to conditional move. */
7416 if (dep_type == TYPE_FPCMP)
7417 return cost - 1;
7418 break;
7419 default:
7420 break;
7422 break;
7424 case REG_DEP_ANTI:
7425 /* Anti-dependencies only penalize the fpu unit. */
7426 if (insn_type == TYPE_IALU || insn_type == TYPE_SHIFT)
7427 return 0;
7428 break;
7430 default:
7431 break;
7434 return cost;
7437 static int
7438 sparc_adjust_cost(rtx insn, rtx link, rtx dep, int cost)
7440 switch (sparc_cpu)
7442 case PROCESSOR_SUPERSPARC:
7443 cost = supersparc_adjust_cost (insn, link, dep, cost);
7444 break;
7445 case PROCESSOR_HYPERSPARC:
7446 case PROCESSOR_SPARCLITE86X:
7447 cost = hypersparc_adjust_cost (insn, link, dep, cost);
7448 break;
7449 default:
7450 break;
7452 return cost;
7455 static void
7456 sparc_sched_init (FILE *dump ATTRIBUTE_UNUSED,
7457 int sched_verbose ATTRIBUTE_UNUSED,
7458 int max_ready ATTRIBUTE_UNUSED)
7462 static int
7463 sparc_use_dfa_pipeline_interface (void)
7465 if ((1 << sparc_cpu) &
7466 ((1 << PROCESSOR_ULTRASPARC) | (1 << PROCESSOR_CYPRESS) |
7467 (1 << PROCESSOR_SUPERSPARC) | (1 << PROCESSOR_HYPERSPARC) |
7468 (1 << PROCESSOR_SPARCLITE86X) | (1 << PROCESSOR_TSC701) |
7469 (1 << PROCESSOR_ULTRASPARC3)))
7470 return 1;
7471 return 0;
7474 static int
7475 sparc_use_sched_lookahead (void)
7477 if (sparc_cpu == PROCESSOR_ULTRASPARC
7478 || sparc_cpu == PROCESSOR_ULTRASPARC3)
7479 return 4;
7480 if ((1 << sparc_cpu) &
7481 ((1 << PROCESSOR_SUPERSPARC) | (1 << PROCESSOR_HYPERSPARC) |
7482 (1 << PROCESSOR_SPARCLITE86X)))
7483 return 3;
7484 return 0;
7487 static int
7488 sparc_issue_rate (void)
7490 switch (sparc_cpu)
7492 default:
7493 return 1;
7494 case PROCESSOR_V9:
7495 /* Assume V9 processors are capable of at least dual-issue. */
7496 return 2;
7497 case PROCESSOR_SUPERSPARC:
7498 return 3;
7499 case PROCESSOR_HYPERSPARC:
7500 case PROCESSOR_SPARCLITE86X:
7501 return 2;
7502 case PROCESSOR_ULTRASPARC:
7503 case PROCESSOR_ULTRASPARC3:
7504 return 4;
7508 static int
7509 set_extends (rtx insn)
7511 register rtx pat = PATTERN (insn);
7513 switch (GET_CODE (SET_SRC (pat)))
7515 /* Load and some shift instructions zero extend. */
7516 case MEM:
7517 case ZERO_EXTEND:
7518 /* sethi clears the high bits */
7519 case HIGH:
7520 /* LO_SUM is used with sethi. sethi cleared the high
7521 bits and the values used with lo_sum are positive */
7522 case LO_SUM:
7523 /* Store flag stores 0 or 1 */
7524 case LT: case LTU:
7525 case GT: case GTU:
7526 case LE: case LEU:
7527 case GE: case GEU:
7528 case EQ:
7529 case NE:
7530 return 1;
7531 case AND:
7533 rtx op0 = XEXP (SET_SRC (pat), 0);
7534 rtx op1 = XEXP (SET_SRC (pat), 1);
7535 if (GET_CODE (op1) == CONST_INT)
7536 return INTVAL (op1) >= 0;
7537 if (GET_CODE (op0) != REG)
7538 return 0;
7539 if (sparc_check_64 (op0, insn) == 1)
7540 return 1;
7541 return (GET_CODE (op1) == REG && sparc_check_64 (op1, insn) == 1);
7543 case IOR:
7544 case XOR:
7546 rtx op0 = XEXP (SET_SRC (pat), 0);
7547 rtx op1 = XEXP (SET_SRC (pat), 1);
7548 if (GET_CODE (op0) != REG || sparc_check_64 (op0, insn) <= 0)
7549 return 0;
7550 if (GET_CODE (op1) == CONST_INT)
7551 return INTVAL (op1) >= 0;
7552 return (GET_CODE (op1) == REG && sparc_check_64 (op1, insn) == 1);
7554 case LSHIFTRT:
7555 return GET_MODE (SET_SRC (pat)) == SImode;
7556 /* Positive integers leave the high bits zero. */
7557 case CONST_DOUBLE:
7558 return ! (CONST_DOUBLE_LOW (SET_SRC (pat)) & 0x80000000);
7559 case CONST_INT:
7560 return ! (INTVAL (SET_SRC (pat)) & 0x80000000);
7561 case ASHIFTRT:
7562 case SIGN_EXTEND:
7563 return - (GET_MODE (SET_SRC (pat)) == SImode);
7564 case REG:
7565 return sparc_check_64 (SET_SRC (pat), insn);
7566 default:
7567 return 0;
7571 /* We _ought_ to have only one kind per function, but... */
7572 static GTY(()) rtx sparc_addr_diff_list;
7573 static GTY(()) rtx sparc_addr_list;
7575 void
7576 sparc_defer_case_vector (rtx lab, rtx vec, int diff)
7578 vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
7579 if (diff)
7580 sparc_addr_diff_list
7581 = gen_rtx_EXPR_LIST (VOIDmode, vec, sparc_addr_diff_list);
7582 else
7583 sparc_addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec, sparc_addr_list);
7586 static void
7587 sparc_output_addr_vec (rtx vec)
7589 rtx lab = XEXP (vec, 0), body = XEXP (vec, 1);
7590 int idx, vlen = XVECLEN (body, 0);
7592 #ifdef ASM_OUTPUT_ADDR_VEC_START
7593 ASM_OUTPUT_ADDR_VEC_START (asm_out_file);
7594 #endif
7596 #ifdef ASM_OUTPUT_CASE_LABEL
7597 ASM_OUTPUT_CASE_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (lab),
7598 NEXT_INSN (lab));
7599 #else
7600 (*targetm.asm_out.internal_label) (asm_out_file, "L", CODE_LABEL_NUMBER (lab));
7601 #endif
7603 for (idx = 0; idx < vlen; idx++)
7605 ASM_OUTPUT_ADDR_VEC_ELT
7606 (asm_out_file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
7609 #ifdef ASM_OUTPUT_ADDR_VEC_END
7610 ASM_OUTPUT_ADDR_VEC_END (asm_out_file);
7611 #endif
7614 static void
7615 sparc_output_addr_diff_vec (rtx vec)
7617 rtx lab = XEXP (vec, 0), body = XEXP (vec, 1);
7618 rtx base = XEXP (XEXP (body, 0), 0);
7619 int idx, vlen = XVECLEN (body, 1);
7621 #ifdef ASM_OUTPUT_ADDR_VEC_START
7622 ASM_OUTPUT_ADDR_VEC_START (asm_out_file);
7623 #endif
7625 #ifdef ASM_OUTPUT_CASE_LABEL
7626 ASM_OUTPUT_CASE_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (lab),
7627 NEXT_INSN (lab));
7628 #else
7629 (*targetm.asm_out.internal_label) (asm_out_file, "L", CODE_LABEL_NUMBER (lab));
7630 #endif
7632 for (idx = 0; idx < vlen; idx++)
7634 ASM_OUTPUT_ADDR_DIFF_ELT
7635 (asm_out_file,
7636 body,
7637 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
7638 CODE_LABEL_NUMBER (base));
7641 #ifdef ASM_OUTPUT_ADDR_VEC_END
7642 ASM_OUTPUT_ADDR_VEC_END (asm_out_file);
7643 #endif
7646 static void
7647 sparc_output_deferred_case_vectors (void)
7649 rtx t;
7650 int align;
7652 if (sparc_addr_list == NULL_RTX
7653 && sparc_addr_diff_list == NULL_RTX)
7654 return;
7656 /* Align to cache line in the function's code section. */
7657 function_section (current_function_decl);
7659 align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
7660 if (align > 0)
7661 ASM_OUTPUT_ALIGN (asm_out_file, align);
7663 for (t = sparc_addr_list; t ; t = XEXP (t, 1))
7664 sparc_output_addr_vec (XEXP (t, 0));
7665 for (t = sparc_addr_diff_list; t ; t = XEXP (t, 1))
7666 sparc_output_addr_diff_vec (XEXP (t, 0));
7668 sparc_addr_list = sparc_addr_diff_list = NULL_RTX;
7671 /* Return 0 if the high 32 bits of X (the low word of X, if DImode) are
7672 unknown. Return 1 if the high bits are zero, -1 if the register is
7673 sign extended. */
7675 sparc_check_64 (rtx x, rtx insn)
7677 /* If a register is set only once it is safe to ignore insns this
7678 code does not know how to handle. The loop will either recognize
7679 the single set and return the correct value or fail to recognize
7680 it and return 0. */
7681 int set_once = 0;
7682 rtx y = x;
7684 if (GET_CODE (x) != REG)
7685 abort ();
7687 if (GET_MODE (x) == DImode)
7688 y = gen_rtx_REG (SImode, REGNO (x) + WORDS_BIG_ENDIAN);
7690 if (flag_expensive_optimizations
7691 && REG_N_SETS (REGNO (y)) == 1)
7692 set_once = 1;
7694 if (insn == 0)
7696 if (set_once)
7697 insn = get_last_insn_anywhere ();
7698 else
7699 return 0;
7702 while ((insn = PREV_INSN (insn)))
7704 switch (GET_CODE (insn))
7706 case JUMP_INSN:
7707 case NOTE:
7708 break;
7709 case CODE_LABEL:
7710 case CALL_INSN:
7711 default:
7712 if (! set_once)
7713 return 0;
7714 break;
7715 case INSN:
7717 rtx pat = PATTERN (insn);
7718 if (GET_CODE (pat) != SET)
7719 return 0;
7720 if (rtx_equal_p (x, SET_DEST (pat)))
7721 return set_extends (insn);
7722 if (y && rtx_equal_p (y, SET_DEST (pat)))
7723 return set_extends (insn);
7724 if (reg_overlap_mentioned_p (SET_DEST (pat), y))
7725 return 0;
7729 return 0;
7732 /* Returns assembly code to perform a DImode shift using
7733 a 64-bit global or out register on SPARC-V8+. */
7734 char *
7735 sparc_v8plus_shift (rtx *operands, rtx insn, const char *opcode)
7737 static char asm_code[60];
7739 /* The scratch register is only required when the destination
7740 register is not a 64-bit global or out register. */
7741 if (which_alternative != 2)
7742 operands[3] = operands[0];
7744 if (GET_CODE (operands[1]) == CONST_INT)
7746 output_asm_insn ("mov\t%1, %3", operands);
7748 else
7750 output_asm_insn ("sllx\t%H1, 32, %3", operands);
7751 if (sparc_check_64 (operands[1], insn) <= 0)
7752 output_asm_insn ("srl\t%L1, 0, %L1", operands);
7753 output_asm_insn ("or\t%L1, %3, %3", operands);
7756 strcpy(asm_code, opcode);
7758 if (which_alternative != 2)
7759 return strcat (asm_code, "\t%0, %2, %L0\n\tsrlx\t%L0, 32, %H0");
7760 else
7761 return strcat (asm_code, "\t%3, %2, %3\n\tsrlx\t%3, 32, %H0\n\tmov\t%3, %L0");
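/* Illustrative expansion when the destination doubles as the scratch
   (which_alternative != 2) and operand 1 is a register whose upper
   32 bits are not known to be clean:

	sllx	%H1, 32, %3
	srl	%L1, 0, %L1
	or	%L1, %3, %3
	OPCODE	%0, %2, %L0
	srlx	%L0, 32, %H0

   where %3 is operand 0 itself and OPCODE is the caller's opcode.  */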
7764 /* Output rtl to increment the profiler label LABELNO
7765 for profiling a function entry. */
7767 void
7768 sparc_profile_hook (int labelno)
7770 char buf[32];
7771 rtx lab, fun;
7773 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
7774 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7775 fun = gen_rtx_SYMBOL_REF (Pmode, MCOUNT_FUNCTION);
7777 emit_library_call (fun, LCT_NORMAL, VOIDmode, 1, lab, Pmode);
7780 #ifdef OBJECT_FORMAT_ELF
7781 static void
7782 sparc_elf_asm_named_section (const char *name, unsigned int flags)
7784 if (flags & SECTION_MERGE)
7786 /* entsize cannot be expressed in this section attributes
7787 encoding style. */
7788 default_elf_asm_named_section (name, flags);
7789 return;
7792 fprintf (asm_out_file, "\t.section\t\"%s\"", name);
7794 if (!(flags & SECTION_DEBUG))
7795 fputs (",#alloc", asm_out_file);
7796 if (flags & SECTION_WRITE)
7797 fputs (",#write", asm_out_file);
7798 if (flags & SECTION_CODE)
7799 fputs (",#execinstr", asm_out_file);
7801 /* ??? Handle SECTION_BSS. */
7803 fputc ('\n', asm_out_file);
7805 #endif /* OBJECT_FORMAT_ELF */
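/* For example, the ELF section hook above would emit a writable data
   section named "my_sect" (a made-up name) as

	.section	"my_sect",#alloc,#write

   while a code section gets ",#execinstr" and a debug section omits
   ",#alloc" entirely.  */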
7807 /* We do not allow sibling calls if -mflat, nor do
7808 we allow indirect calls to be optimized into sibling calls.
7810 Also, on sparc 32-bit we cannot emit a sibling call when the
7811 current function returns a structure. This is because the "unimp
7812 after call" convention would cause the callee to return to the
7813 wrong place. The generic code already disallows cases where the
7814 function being called returns a structure.
7816 It may seem strange how this last case could occur. Usually there
7817 is code after the call which jumps to epilogue code which dumps the
7818 return value into the struct return area. That ought to invalidate
7819 the sibling call right? Well, in the C++ case we can end up passing
7820 the pointer to the struct return area to a constructor (which returns
7821 void) and then nothing else happens. Such a sibling call would look
7822 valid without the added check here. */
7823 static bool
7824 sparc_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
7826 return (decl
7827 && ! TARGET_FLAT
7828 && (TARGET_ARCH64 || ! current_function_returns_struct));
7831 /* ??? Similar to the standard section selection, but force reloc-y-ness
7832 if SUNOS4_SHARED_LIBRARIES. Unclear why this helps (as opposed to
7833 pretending PIC is always on), but that's what the old code did. */
7835 static void
7836 sparc_aout_select_section (tree t, int reloc, unsigned HOST_WIDE_INT align)
7838 default_select_section (t, reloc | SUNOS4_SHARED_LIBRARIES, align);
7841 /* Use text section for a constant unless we need more alignment than
7842 that offers. */
7844 static void
7845 sparc_aout_select_rtx_section (enum machine_mode mode, rtx x,
7846 unsigned HOST_WIDE_INT align)
7848 if (align <= MAX_TEXT_ALIGN
7849 && ! (flag_pic && (symbolic_operand (x, mode)
7850 || SUNOS4_SHARED_LIBRARIES)))
7851 readonly_data_section ();
7852 else
7853 data_section ();
7857 sparc_extra_constraint_check (rtx op, int c, int strict)
7859 int reload_ok_mem;
7861 if (TARGET_ARCH64
7862 && (c == 'T' || c == 'U'))
7863 return 0;
7865 switch (c)
7867 case 'Q':
7868 return fp_sethi_p (op);
7870 case 'R':
7871 return fp_mov_p (op);
7873 case 'S':
7874 return fp_high_losum_p (op);
7876 case 'U':
7877 if (! strict
7878 || (GET_CODE (op) == REG
7879 && (REGNO (op) < FIRST_PSEUDO_REGISTER
7880 || reg_renumber[REGNO (op)] >= 0)))
7881 return register_ok_for_ldd (op);
7883 return 0;
7885 case 'W':
7886 case 'T':
7887 break;
7889 default:
7890 return 0;
7893 /* Our memory extra constraints have to emulate the
7894 behavior of 'm' and 'o' in order for reload to work
7895 correctly. */
7896 if (GET_CODE (op) == MEM)
7898 reload_ok_mem = 0;
7899 if ((TARGET_ARCH64 || mem_min_alignment (op, 8))
7900 && (! strict
7901 || strict_memory_address_p (Pmode, XEXP (op, 0))))
7902 reload_ok_mem = 1;
7904 else
7906 reload_ok_mem = (reload_in_progress
7907 && GET_CODE (op) == REG
7908 && REGNO (op) >= FIRST_PSEUDO_REGISTER
7909 && reg_renumber [REGNO (op)] < 0);
7912 return reload_ok_mem;
7915 /* ??? This duplicates information provided to the compiler by the
7916 ??? scheduler description. Some day, teach genautomata to output
7917 ??? the latencies and then CSE will just use that. */
7919 static bool
7920 sparc_rtx_costs (rtx x, int code, int outer_code, int *total)
7922 switch (code)
7924 case PLUS: case MINUS: case ABS: case NEG:
7925 case FLOAT: case UNSIGNED_FLOAT:
7926 case FIX: case UNSIGNED_FIX:
7927 case FLOAT_EXTEND: case FLOAT_TRUNCATE:
7928 if (FLOAT_MODE_P (GET_MODE (x)))
7930 switch (sparc_cpu)
7932 case PROCESSOR_ULTRASPARC:
7933 case PROCESSOR_ULTRASPARC3:
7934 *total = COSTS_N_INSNS (4);
7935 return true;
7937 case PROCESSOR_SUPERSPARC:
7938 *total = COSTS_N_INSNS (3);
7939 return true;
7941 case PROCESSOR_CYPRESS:
7942 *total = COSTS_N_INSNS (5);
7943 return true;
7945 case PROCESSOR_HYPERSPARC:
7946 case PROCESSOR_SPARCLITE86X:
7947 default:
7948 *total = COSTS_N_INSNS (1);
7949 return true;
7953 *total = COSTS_N_INSNS (1);
7954 return true;
7956 case SQRT:
7957 switch (sparc_cpu)
7959 case PROCESSOR_ULTRASPARC:
7960 if (GET_MODE (x) == SFmode)
7961 *total = COSTS_N_INSNS (13);
7962 else
7963 *total = COSTS_N_INSNS (23);
7964 return true;
7966 case PROCESSOR_ULTRASPARC3:
7967 if (GET_MODE (x) == SFmode)
7968 *total = COSTS_N_INSNS (20);
7969 else
7970 *total = COSTS_N_INSNS (29);
7971 return true;
7973 case PROCESSOR_SUPERSPARC:
7974 *total = COSTS_N_INSNS (12);
7975 return true;
7977 case PROCESSOR_CYPRESS:
7978 *total = COSTS_N_INSNS (63);
7979 return true;
7981 case PROCESSOR_HYPERSPARC:
7982 case PROCESSOR_SPARCLITE86X:
7983 *total = COSTS_N_INSNS (17);
7984 return true;
7986 default:
7987 *total = COSTS_N_INSNS (30);
7988 return true;
7991 case COMPARE:
7992 if (FLOAT_MODE_P (GET_MODE (x)))
7994 switch (sparc_cpu)
7996 case PROCESSOR_ULTRASPARC:
7997 case PROCESSOR_ULTRASPARC3:
7998 *total = COSTS_N_INSNS (1);
7999 return true;
8001 case PROCESSOR_SUPERSPARC:
8002 *total = COSTS_N_INSNS (3);
8003 return true;
8005 case PROCESSOR_CYPRESS:
8006 *total = COSTS_N_INSNS (5);
8007 return true;
8009 case PROCESSOR_HYPERSPARC:
8010 case PROCESSOR_SPARCLITE86X:
8011 default:
8012 *total = COSTS_N_INSNS (1);
8013 return true;
8017 /* ??? Maybe mark integer compares as zero cost on
8018 ??? all UltraSPARC processors because the result
8019 ??? can be bypassed to a branch in the same group. */
8021 *total = COSTS_N_INSNS (1);
8022 return true;
8024 case MULT:
8025 if (FLOAT_MODE_P (GET_MODE (x)))
8027 switch (sparc_cpu)
8029 case PROCESSOR_ULTRASPARC:
8030 case PROCESSOR_ULTRASPARC3:
8031 *total = COSTS_N_INSNS (4);
8032 return true;
8034 case PROCESSOR_SUPERSPARC:
8035 *total = COSTS_N_INSNS (3);
8036 return true;
8038 case PROCESSOR_CYPRESS:
8039 *total = COSTS_N_INSNS (7);
8040 return true;
8042 case PROCESSOR_HYPERSPARC:
8043 case PROCESSOR_SPARCLITE86X:
8044 *total = COSTS_N_INSNS (1);
8045 return true;
8047 default:
8048 *total = COSTS_N_INSNS (5);
8049 return true;
8053 /* The latency is actually variable for Ultra-I/II,
8054 and if one of the inputs has a known constant
8055 value, we could calculate this precisely.
8057 However, for that to be useful we would need to
8058 add some machine description changes which would
8059 make sure small constants ended up in rs1 of the
8060 multiply instruction. This is because the multiply
8061 latency is determined by the number of clear (or
8062 set if the value is negative) bits starting from
8063 the most significant bit of the first input.
8065 The algorithm for computing num_cycles of a multiply
8066 on Ultra-I/II is:
8068 if (rs1 < 0)
8069 highest_bit = highest_clear_bit(rs1);
8070 else
8071 highest_bit = highest_set_bit(rs1);
8072 if (highest_bit < 3)
8073 highest_bit = 3;
8074 num_cycles = 4 + ((highest_bit - 3) / 2);
8076 If we did that we would have to also consider register
8077 allocation issues that would result from forcing such
8078 a value into a register.
8080 There are other similar tricks we could play if we
8081 knew, for example, that one input was an array index.
8083 Since we do not play any such tricks currently the
8084 safest thing to do is report the worst case latency. */
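/* Worked example of the formula above (illustrative only): rs1 == 5 has
   its highest set bit at position 2, which is clamped up to 3, giving
   num_cycles = 4 + (3 - 3) / 2 = 4; rs1 == 0x1000 has its highest set
   bit at position 12, giving num_cycles = 4 + (12 - 3) / 2 = 8 with
   integer division. */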
8085 if (sparc_cpu == PROCESSOR_ULTRASPARC)
8087 *total = (GET_MODE (x) == DImode
8088 ? COSTS_N_INSNS (34) : COSTS_N_INSNS (19));
8089 return true;
8092 /* Multiply latency on Ultra-III, fortunately, is constant. */
8093 if (sparc_cpu == PROCESSOR_ULTRASPARC3)
8095 *total = COSTS_N_INSNS (6);
8096 return true;
8099 if (sparc_cpu == PROCESSOR_HYPERSPARC
8100 || sparc_cpu == PROCESSOR_SPARCLITE86X)
8102 *total = COSTS_N_INSNS (17);
8103 return true;
8106 *total = (TARGET_HARD_MUL ? COSTS_N_INSNS (5) : COSTS_N_INSNS (25));
8107 return true;
8109 case DIV:
8110 case UDIV:
8111 case MOD:
8112 case UMOD:
8113 if (FLOAT_MODE_P (GET_MODE (x)))
8115 switch (sparc_cpu)
8117 case PROCESSOR_ULTRASPARC:
8118 if (GET_MODE (x) == SFmode)
8119 *total = COSTS_N_INSNS (13);
8120 else
8121 *total = COSTS_N_INSNS (23);
8122 return true;
8124 case PROCESSOR_ULTRASPARC3:
8125 if (GET_MODE (x) == SFmode)
8126 *total = COSTS_N_INSNS (17);
8127 else
8128 *total = COSTS_N_INSNS (20);
8129 return true;
8131 case PROCESSOR_SUPERSPARC:
8132 if (GET_MODE (x) == SFmode)
8133 *total = COSTS_N_INSNS (6);
8134 else
8135 *total = COSTS_N_INSNS (9);
8136 return true;
8138 case PROCESSOR_HYPERSPARC:
8139 case PROCESSOR_SPARCLITE86X:
8140 if (GET_MODE (x) == SFmode)
8141 *total = COSTS_N_INSNS (8);
8142 else
8143 *total = COSTS_N_INSNS (12);
8144 return true;
8146 default:
8147 *total = COSTS_N_INSNS (7);
8148 return true;
8152 if (sparc_cpu == PROCESSOR_ULTRASPARC)
8153 *total = (GET_MODE (x) == DImode
8154 ? COSTS_N_INSNS (68) : COSTS_N_INSNS (37));
8155 else if (sparc_cpu == PROCESSOR_ULTRASPARC3)
8156 *total = (GET_MODE (x) == DImode
8157 ? COSTS_N_INSNS (71) : COSTS_N_INSNS (40));
8158 else
8159 *total = COSTS_N_INSNS (25);
8160 return true;
8162 case IF_THEN_ELSE:
8163 /* Conditional moves. */
8164 switch (sparc_cpu)
8166 case PROCESSOR_ULTRASPARC:
8167 *total = COSTS_N_INSNS (2);
8168 return true;
8170 case PROCESSOR_ULTRASPARC3:
8171 if (FLOAT_MODE_P (GET_MODE (x)))
8172 *total = COSTS_N_INSNS (3);
8173 else
8174 *total = COSTS_N_INSNS (2);
8175 return true;
8177 default:
8178 *total = COSTS_N_INSNS (1);
8179 return true;
8182 case MEM:
8183 /* If the outer code is a SIGN/ZERO extension, we have to subtract
8184 out COSTS_N_INSNS (1) from whatever we would otherwise return
8185 when determining the cost. */
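/* For instance, reading the UltraSPARC entry below under that rule: a
   plain load is reported as COSTS_N_INSNS (2), while the same MEM
   wrapped in a ZERO_EXTEND is reported as COSTS_N_INSNS (1), leaving
   one unit to be charged to the extension itself. */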
8186 switch (sparc_cpu)
8188 case PROCESSOR_ULTRASPARC:
8189 if (outer_code == ZERO_EXTEND)
8190 *total = COSTS_N_INSNS (1);
8191 else
8192 *total = COSTS_N_INSNS (2);
8193 return true;
8195 case PROCESSOR_ULTRASPARC3:
8196 if (outer_code == ZERO_EXTEND)
8198 if (GET_MODE (x) == QImode
8199 || GET_MODE (x) == HImode
8200 || outer_code == SIGN_EXTEND)
8201 *total = COSTS_N_INSNS (2);
8202 else
8203 *total = COSTS_N_INSNS (1);
8205 else
8207 /* This handles sign extension (3 cycles)
8208 and everything else (2 cycles). */
8209 *total = COSTS_N_INSNS (2);
8211 return true;
8213 case PROCESSOR_SUPERSPARC:
8214 if (FLOAT_MODE_P (GET_MODE (x))
8215 || outer_code == ZERO_EXTEND
8216 || outer_code == SIGN_EXTEND)
8217 *total = COSTS_N_INSNS (0);
8218 else
8219 *total = COSTS_N_INSNS (1);
8220 return true;
8222 case PROCESSOR_TSC701:
8223 if (outer_code == ZERO_EXTEND
8224 || outer_code == SIGN_EXTEND)
8225 *total = COSTS_N_INSNS (2);
8226 else
8227 *total = COSTS_N_INSNS (3);
8228 return true;
8230 case PROCESSOR_CYPRESS:
8231 if (outer_code == ZERO_EXTEND
8232 || outer_code == SIGN_EXTEND)
8233 *total = COSTS_N_INSNS (1);
8234 else
8235 *total = COSTS_N_INSNS (2);
8236 return true;
8238 case PROCESSOR_HYPERSPARC:
8239 case PROCESSOR_SPARCLITE86X:
8240 default:
8241 if (outer_code == ZERO_EXTEND
8242 || outer_code == SIGN_EXTEND)
8243 *total = COSTS_N_INSNS (0);
8244 else
8245 *total = COSTS_N_INSNS (1);
8246 return true;
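/* 0x1000 below is 4096: constants in the range [-4096, 4095] fit the
   signed 13-bit immediate field of a single SPARC instruction and are
   therefore treated as free. */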
8249 case CONST_INT:
8250 if (INTVAL (x) < 0x1000 && INTVAL (x) >= -0x1000)
8252 *total = 0;
8253 return true;
8255 /* FALLTHRU */
8257 case HIGH:
8258 *total = 2;
8259 return true;
8261 case CONST:
8262 case LABEL_REF:
8263 case SYMBOL_REF:
8264 *total = 4;
8265 return true;
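/* Likewise, a DImode CONST_DOUBLE that is just a sign-extended 13-bit
   value (high word all zeros with a small non-negative low word, or all
   ones with a small negative low word) is treated as free. */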
8267 case CONST_DOUBLE:
8268 if (GET_MODE (x) == DImode
8269 && ((XINT (x, 3) == 0
8270 && (unsigned HOST_WIDE_INT) XINT (x, 2) < 0x1000)
8271 || (XINT (x, 3) == -1
8272 && XINT (x, 2) < 0
8273 && XINT (x, 2) >= -0x1000)))
8274 *total = 0;
8275 else
8276 *total = 8;
8277 return true;
8279 default:
8280 return false;
8284 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
8285 Used for C++ multiple inheritance. */
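/* Conceptually, the thunk emitted below behaves like this C sketch,
   where THIS, DELTA and FUNCTION are placeholders (the real thunk is a
   frameless tail call, not an ordinary call):

     return_type thunk (char *this, ...)
     {
       return FUNCTION (this + DELTA, ...);
     }
*/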
8287 static void
8288 sparc_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
8289 HOST_WIDE_INT delta,
8290 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
8291 tree function)
8293 rtx this, insn, funexp, delta_rtx, tmp;
8295 reload_completed = 1;
8296 epilogue_completed = 1;
8297 no_new_pseudos = 1;
8298 current_function_uses_only_leaf_regs = 1;
8300 emit_note (NOTE_INSN_PROLOGUE_END);
8302 /* Find the "this" pointer. Normally in %o0, but in ARCH64, if the function
8303 returns a structure, the structure return pointer is there and "this" is in the next register. */
8304 if (TARGET_ARCH64 && aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8305 this = gen_rtx_REG (Pmode, SPARC_INCOMING_INT_ARG_FIRST + 1);
8306 else
8307 this = gen_rtx_REG (Pmode, SPARC_INCOMING_INT_ARG_FIRST);
8309 /* Add DELTA. When possible use a plain add, otherwise load it into
8310 a register first. */
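/* For example, a DELTA of 8 satisfies SPARC_SIMM13_P and is added with a
   single add, whereas a DELTA of 100000 does not fit in a signed 13-bit
   immediate and is first materialized into the scratch register %g1
   (hard register 1) below. */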
8311 delta_rtx = GEN_INT (delta);
8312 if (!SPARC_SIMM13_P (delta))
8314 rtx scratch = gen_rtx_REG (Pmode, 1);
8315 if (TARGET_ARCH64)
8316 sparc_emit_set_const64 (scratch, delta_rtx);
8317 else
8318 sparc_emit_set_const32 (scratch, delta_rtx);
8319 delta_rtx = scratch;
8322 tmp = gen_rtx_PLUS (Pmode, this, delta_rtx);
8323 emit_insn (gen_rtx_SET (VOIDmode, this, tmp));
8325 /* Generate a tail call to the target function. */
8326 if (! TREE_USED (function))
8328 assemble_external (function);
8329 TREE_USED (function) = 1;
8331 funexp = XEXP (DECL_RTL (function), 0);
8332 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
8333 insn = emit_call_insn (gen_sibcall (funexp));
8334 SIBLING_CALL_P (insn) = 1;
8335 emit_barrier ();
8337 /* Run just enough of rest_of_compilation to get the insns emitted.
8338 There's not really enough bulk here to make other passes such as
8339 instruction scheduling worthwhile. Note that use_thunk calls
8340 assemble_start_function and assemble_end_function. */
8341 insn = get_insns ();
8342 insn_locators_initialize ();
8343 shorten_branches (insn);
8344 final_start_function (insn, file, 1);
8345 final (insn, file, 1, 0);
8346 final_end_function ();
8348 reload_completed = 0;
8349 epilogue_completed = 0;
8350 no_new_pseudos = 0;
8353 #include "gt-sparc.h"