gcc/config/sparc/sparc.c  (official-gcc.git, last change 2003-07-15, David S. Miller <davem@redhat.com>)
1 /* Subroutines for insn-output.c for SPARC.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 64-bit SPARC-V9 support by Michael Tiemann, Jim Wilson, and Doug Evans,
6 at Cygnus Support.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
15 GCC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "rtl.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "conditions.h"
36 #include "output.h"
37 #include "insn-attr.h"
38 #include "flags.h"
39 #include "function.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "libfuncs.h"
43 #include "recog.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "tm_p.h"
47 #include "debug.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "cfglayout.h"
52 /* 1 if the caller has placed an "unimp" insn immediately after the call.
53 This is used in v8 code when calling a function that returns a structure.
54 v9 doesn't have this. Be careful to have this test be the same as that
55 used on the call. */
57 #define SKIP_CALLERS_UNIMP_P \
58 (!TARGET_ARCH64 && current_function_returns_struct \
59 && ! integer_zerop (DECL_SIZE (DECL_RESULT (current_function_decl))) \
60 && (TREE_CODE (DECL_SIZE (DECL_RESULT (current_function_decl))) \
61 == INTEGER_CST))
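/* For illustration, a v8 call to a struct-returning function typically
   looks roughly like

       call    foo
        nop                ! delay slot
       unimp   12          ! low bits encode the size of the returned struct

   and such a callee returns with "jmp %i7+12" instead of the usual
   "jmp %i7+8" so that the unimp word is skipped.  */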
63 /* Global variables for machine-dependent things. */
65 /* Size of frame. Need to know this to emit return insns from leaf procedures.
66 ACTUAL_FSIZE is set by compute_frame_size() which is called during the
67 reload pass. This is important as the value is later used in insn
68 scheduling (to see what can go in a delay slot).
69 APPARENT_FSIZE is the size of the stack less the register save area and less
70 the outgoing argument area. It is used when saving call preserved regs. */
71 static int apparent_fsize;
72 static int actual_fsize;
 74 /* Number of live general or floating point registers that need to be
75 saved (as 4-byte quantities). */
76 static int num_gfregs;
78 /* Save the operands last given to a compare for use when we
79 generate a scc or bcc insn. */
80 rtx sparc_compare_op0, sparc_compare_op1;
82 /* Coordinate with the md file wrt special insns created by
83 sparc_nonflat_function_epilogue. */
84 bool sparc_emitting_epilogue;
86 /* Vector to say how input registers are mapped to output registers.
87 HARD_FRAME_POINTER_REGNUM cannot be remapped by this function to
88 eliminate it. You must use -fomit-frame-pointer to get that. */
89 char leaf_reg_remap[] =
90 { 0, 1, 2, 3, 4, 5, 6, 7,
91 -1, -1, -1, -1, -1, -1, 14, -1,
92 -1, -1, -1, -1, -1, -1, -1, -1,
93 8, 9, 10, 11, 12, 13, -1, 15,
95 32, 33, 34, 35, 36, 37, 38, 39,
96 40, 41, 42, 43, 44, 45, 46, 47,
97 48, 49, 50, 51, 52, 53, 54, 55,
98 56, 57, 58, 59, 60, 61, 62, 63,
99 64, 65, 66, 67, 68, 69, 70, 71,
100 72, 73, 74, 75, 76, 77, 78, 79,
101 80, 81, 82, 83, 84, 85, 86, 87,
102 88, 89, 90, 91, 92, 93, 94, 95,
103 96, 97, 98, 99, 100};
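/* For example, in a function given leaf treatment the incoming argument
   register %i0 (hard reg 24) is rewritten as %o0 (hard reg 8) and the
   return-address register %i7 (31) as %o7 (15); registers that a leaf
   candidate is not allowed to use map to -1.  */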
105 /* Vector, indexed by hard register number, which contains 1
106 for a register that is allowable in a candidate for leaf
107 function treatment. */
108 char sparc_leaf_regs[] =
109 { 1, 1, 1, 1, 1, 1, 1, 1,
110 0, 0, 0, 0, 0, 0, 1, 0,
111 0, 0, 0, 0, 0, 0, 0, 0,
112 1, 1, 1, 1, 1, 1, 0, 1,
113 1, 1, 1, 1, 1, 1, 1, 1,
114 1, 1, 1, 1, 1, 1, 1, 1,
115 1, 1, 1, 1, 1, 1, 1, 1,
116 1, 1, 1, 1, 1, 1, 1, 1,
117 1, 1, 1, 1, 1, 1, 1, 1,
118 1, 1, 1, 1, 1, 1, 1, 1,
119 1, 1, 1, 1, 1, 1, 1, 1,
120 1, 1, 1, 1, 1, 1, 1, 1,
121 1, 1, 1, 1, 1};
123 /* Name of the place where we pretend the frame pointer points.
124 Normally, this is "%fp", but if we are in a leaf procedure,
125 this is "%sp+something". We record "something" separately as it may be
126 too big for reg+constant addressing. */
128 static const char *frame_base_name;
129 static int frame_base_offset;
131 static void sparc_init_modes PARAMS ((void));
132 static int save_regs PARAMS ((FILE *, int, int, const char *,
133 int, int, int));
134 static int restore_regs PARAMS ((FILE *, int, int, const char *, int, int));
135 static void build_big_number PARAMS ((FILE *, int, const char *));
136 static int function_arg_slotno PARAMS ((const CUMULATIVE_ARGS *,
137 enum machine_mode, tree, int, int,
138 int *, int *));
140 static int supersparc_adjust_cost PARAMS ((rtx, rtx, rtx, int));
141 static int hypersparc_adjust_cost PARAMS ((rtx, rtx, rtx, int));
143 static void sparc_output_addr_vec PARAMS ((rtx));
144 static void sparc_output_addr_diff_vec PARAMS ((rtx));
145 static void sparc_output_deferred_case_vectors PARAMS ((void));
146 static int check_return_regs PARAMS ((rtx));
147 static int epilogue_renumber PARAMS ((rtx *, int));
148 static bool sparc_assemble_integer PARAMS ((rtx, unsigned int, int));
149 static int set_extends PARAMS ((rtx));
150 static void output_restore_regs PARAMS ((FILE *, int));
151 static void sparc_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
152 static void sparc_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
153 static void sparc_flat_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
154 static void sparc_flat_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
155 static void sparc_nonflat_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT,
156 int));
157 static void sparc_nonflat_function_prologue PARAMS ((FILE *, HOST_WIDE_INT,
158 int));
159 #ifdef OBJECT_FORMAT_ELF
160 static void sparc_elf_asm_named_section PARAMS ((const char *, unsigned int));
161 #endif
162 static void sparc_aout_select_section PARAMS ((tree, int,
163 unsigned HOST_WIDE_INT))
164 ATTRIBUTE_UNUSED;
165 static void sparc_aout_select_rtx_section PARAMS ((enum machine_mode, rtx,
166 unsigned HOST_WIDE_INT))
167 ATTRIBUTE_UNUSED;
169 static int sparc_adjust_cost PARAMS ((rtx, rtx, rtx, int));
170 static int sparc_issue_rate PARAMS ((void));
171 static void sparc_sched_init PARAMS ((FILE *, int, int));
172 static int sparc_use_dfa_pipeline_interface PARAMS ((void));
173 static int sparc_use_sched_lookahead PARAMS ((void));
175 static void emit_soft_tfmode_libcall PARAMS ((const char *, int, rtx *));
176 static void emit_soft_tfmode_binop PARAMS ((enum rtx_code, rtx *));
177 static void emit_soft_tfmode_unop PARAMS ((enum rtx_code, rtx *));
178 static void emit_soft_tfmode_cvt PARAMS ((enum rtx_code, rtx *));
179 static void emit_hard_tfmode_operation PARAMS ((enum rtx_code, rtx *));
181 static bool sparc_function_ok_for_sibcall PARAMS ((tree, tree));
182 static void sparc_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
183 HOST_WIDE_INT, tree));
184 static bool sparc_rtx_costs PARAMS ((rtx, int, int, int *));
186 /* Option handling. */
188 /* Code model option as passed by user. */
189 const char *sparc_cmodel_string;
190 /* Parsed value. */
191 enum cmodel sparc_cmodel;
193 char sparc_hard_reg_printed[8];
195 struct sparc_cpu_select sparc_select[] =
197 /* switch name, tune arch */
198 { (char *)0, "default", 1, 1 },
199 { (char *)0, "-mcpu=", 1, 1 },
200 { (char *)0, "-mtune=", 1, 0 },
201 { 0, 0, 0, 0 }
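/* In other words, "default" and "-mcpu=" select both the scheduling model
   and the instruction set (set_tune_p and set_arch_p are both 1), while
   "-mtune=" only changes the scheduling model.  */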
204 /* CPU type. This is set from TARGET_CPU_DEFAULT and -m{cpu,tune}=xxx. */
205 enum processor_type sparc_cpu;
207 /* Initialize the GCC target structure. */
209 /* The sparc default is to use .half rather than .short for aligned
210 HI objects. Use .word instead of .long on non-ELF systems. */
211 #undef TARGET_ASM_ALIGNED_HI_OP
212 #define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
213 #ifndef OBJECT_FORMAT_ELF
214 #undef TARGET_ASM_ALIGNED_SI_OP
215 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
216 #endif
218 #undef TARGET_ASM_UNALIGNED_HI_OP
219 #define TARGET_ASM_UNALIGNED_HI_OP "\t.uahalf\t"
220 #undef TARGET_ASM_UNALIGNED_SI_OP
221 #define TARGET_ASM_UNALIGNED_SI_OP "\t.uaword\t"
222 #undef TARGET_ASM_UNALIGNED_DI_OP
223 #define TARGET_ASM_UNALIGNED_DI_OP "\t.uaxword\t"
225 /* The target hook has to handle DI-mode values. */
226 #undef TARGET_ASM_INTEGER
227 #define TARGET_ASM_INTEGER sparc_assemble_integer
229 #undef TARGET_ASM_FUNCTION_PROLOGUE
230 #define TARGET_ASM_FUNCTION_PROLOGUE sparc_output_function_prologue
231 #undef TARGET_ASM_FUNCTION_EPILOGUE
232 #define TARGET_ASM_FUNCTION_EPILOGUE sparc_output_function_epilogue
234 #undef TARGET_SCHED_ADJUST_COST
235 #define TARGET_SCHED_ADJUST_COST sparc_adjust_cost
236 #undef TARGET_SCHED_ISSUE_RATE
237 #define TARGET_SCHED_ISSUE_RATE sparc_issue_rate
238 #undef TARGET_SCHED_INIT
239 #define TARGET_SCHED_INIT sparc_sched_init
240 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
241 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE sparc_use_dfa_pipeline_interface
242 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
243 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD sparc_use_sched_lookahead
245 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
246 #define TARGET_FUNCTION_OK_FOR_SIBCALL sparc_function_ok_for_sibcall
248 #undef TARGET_ASM_OUTPUT_MI_THUNK
249 #define TARGET_ASM_OUTPUT_MI_THUNK sparc_output_mi_thunk
250 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
251 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
253 #undef TARGET_RTX_COSTS
254 #define TARGET_RTX_COSTS sparc_rtx_costs
255 #undef TARGET_ADDRESS_COST
256 #define TARGET_ADDRESS_COST hook_int_rtx_0
258 struct gcc_target targetm = TARGET_INITIALIZER;
260 /* Validate and override various options, and do some machine dependent
261 initialization. */
263 void
264 sparc_override_options ()
266 static struct code_model {
267 const char *const name;
268 const int value;
269 } const cmodels[] = {
270 { "32", CM_32 },
271 { "medlow", CM_MEDLOW },
272 { "medmid", CM_MEDMID },
273 { "medany", CM_MEDANY },
274 { "embmedany", CM_EMBMEDANY },
275 { 0, 0 }
277 const struct code_model *cmodel;
278 /* Map TARGET_CPU_DEFAULT to value for -m{arch,tune}=. */
279 static struct cpu_default {
280 const int cpu;
281 const char *const name;
282 } const cpu_default[] = {
283 /* There must be one entry here for each TARGET_CPU value. */
284 { TARGET_CPU_sparc, "cypress" },
285 { TARGET_CPU_sparclet, "tsc701" },
286 { TARGET_CPU_sparclite, "f930" },
287 { TARGET_CPU_v8, "v8" },
288 { TARGET_CPU_hypersparc, "hypersparc" },
289 { TARGET_CPU_sparclite86x, "sparclite86x" },
290 { TARGET_CPU_supersparc, "supersparc" },
291 { TARGET_CPU_v9, "v9" },
292 { TARGET_CPU_ultrasparc, "ultrasparc" },
293 { TARGET_CPU_ultrasparc3, "ultrasparc3" },
294 { 0, 0 }
296 const struct cpu_default *def;
297 /* Table of values for -m{cpu,tune}=. */
298 static struct cpu_table {
299 const char *const name;
300 const enum processor_type processor;
301 const int disable;
302 const int enable;
303 } const cpu_table[] = {
304 { "v7", PROCESSOR_V7, MASK_ISA, 0 },
305 { "cypress", PROCESSOR_CYPRESS, MASK_ISA, 0 },
306 { "v8", PROCESSOR_V8, MASK_ISA, MASK_V8 },
307 /* TI TMS390Z55 supersparc */
308 { "supersparc", PROCESSOR_SUPERSPARC, MASK_ISA, MASK_V8 },
309 { "sparclite", PROCESSOR_SPARCLITE, MASK_ISA, MASK_SPARCLITE },
310 /* The Fujitsu MB86930 is the original sparclite chip, with no fpu.
311 The Fujitsu MB86934 is the recent sparclite chip, with an fpu. */
312 { "f930", PROCESSOR_F930, MASK_ISA|MASK_FPU, MASK_SPARCLITE },
313 { "f934", PROCESSOR_F934, MASK_ISA, MASK_SPARCLITE|MASK_FPU },
314 { "hypersparc", PROCESSOR_HYPERSPARC, MASK_ISA, MASK_V8|MASK_FPU },
315 { "sparclite86x", PROCESSOR_SPARCLITE86X, MASK_ISA|MASK_FPU,
316 MASK_SPARCLITE },
317 { "sparclet", PROCESSOR_SPARCLET, MASK_ISA, MASK_SPARCLET },
318 /* TEMIC sparclet */
319 { "tsc701", PROCESSOR_TSC701, MASK_ISA, MASK_SPARCLET },
320 { "v9", PROCESSOR_V9, MASK_ISA, MASK_V9 },
321 /* TI ultrasparc I, II, IIi */
322 { "ultrasparc", PROCESSOR_ULTRASPARC, MASK_ISA, MASK_V9
323 /* Although insns using %y are deprecated, it is a clear win on current
324 ultrasparcs. */
325 |MASK_DEPRECATED_V8_INSNS},
326 /* TI ultrasparc III */
327 /* ??? Check if %y issue still holds true in ultra3. */
328 { "ultrasparc3", PROCESSOR_ULTRASPARC3, MASK_ISA, MASK_V9|MASK_DEPRECATED_V8_INSNS},
329 { 0, 0, 0, 0 }
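/* Reading the table: for instance -mcpu=ultrasparc clears all of MASK_ISA
   and then sets MASK_V9|MASK_DEPRECATED_V8_INSNS, while -mcpu=f930 also
   clears MASK_FPU because that chip has no FPU.  */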
331 const struct cpu_table *cpu;
332 const struct sparc_cpu_select *sel;
333 int fpu;
335 #ifndef SPARC_BI_ARCH
336 /* Check for unsupported architecture size. */
337 if (! TARGET_64BIT != DEFAULT_ARCH32_P)
338 error ("%s is not supported by this configuration",
339 DEFAULT_ARCH32_P ? "-m64" : "-m32");
340 #endif
342 /* We force all 64-bit archs to use a 128-bit long double. */
343 if (TARGET_64BIT && ! TARGET_LONG_DOUBLE_128)
345 error ("-mlong-double-64 not allowed with -m64");
346 target_flags |= MASK_LONG_DOUBLE_128;
349 /* Code model selection. */
350 sparc_cmodel = SPARC_DEFAULT_CMODEL;
352 #ifdef SPARC_BI_ARCH
353 if (TARGET_ARCH32)
354 sparc_cmodel = CM_32;
355 #endif
357 if (sparc_cmodel_string != NULL)
359 if (TARGET_ARCH64)
361 for (cmodel = &cmodels[0]; cmodel->name; cmodel++)
362 if (strcmp (sparc_cmodel_string, cmodel->name) == 0)
363 break;
364 if (cmodel->name == NULL)
365 error ("bad value (%s) for -mcmodel= switch", sparc_cmodel_string);
366 else
367 sparc_cmodel = cmodel->value;
369 else
370 error ("-mcmodel= is not supported on 32 bit systems");
373 fpu = TARGET_FPU; /* save current -mfpu status */
375 /* Set the default CPU. */
376 for (def = &cpu_default[0]; def->name; ++def)
377 if (def->cpu == TARGET_CPU_DEFAULT)
378 break;
379 if (! def->name)
380 abort ();
381 sparc_select[0].string = def->name;
383 for (sel = &sparc_select[0]; sel->name; ++sel)
385 if (sel->string)
387 for (cpu = &cpu_table[0]; cpu->name; ++cpu)
388 if (! strcmp (sel->string, cpu->name))
390 if (sel->set_tune_p)
391 sparc_cpu = cpu->processor;
393 if (sel->set_arch_p)
395 target_flags &= ~cpu->disable;
396 target_flags |= cpu->enable;
398 break;
401 if (! cpu->name)
402 error ("bad value (%s) for %s switch", sel->string, sel->name);
406 /* If -mfpu or -mno-fpu was explicitly used, don't override with
407 the processor default. Clear MASK_FPU_SET to avoid confusing
408 the reverse mapping from switch values to names. */
409 if (TARGET_FPU_SET)
411 target_flags = (target_flags & ~MASK_FPU) | fpu;
412 target_flags &= ~MASK_FPU_SET;
415 /* Don't allow -mvis if FPU is disabled. */
416 if (! TARGET_FPU)
417 target_flags &= ~MASK_VIS;
419 /* -mvis assumes UltraSPARC+, so we are sure v9 instructions
420 are available.
421 -m64 also implies v9. */
422 if (TARGET_VIS || TARGET_ARCH64)
424 target_flags |= MASK_V9;
425 target_flags &= ~(MASK_V8 | MASK_SPARCLET | MASK_SPARCLITE);
428 /* Use the deprecated v8 insns for sparc64 in 32 bit mode. */
429 if (TARGET_V9 && TARGET_ARCH32)
430 target_flags |= MASK_DEPRECATED_V8_INSNS;
432 /* V8PLUS requires V9, makes no sense in 64 bit mode. */
433 if (! TARGET_V9 || TARGET_ARCH64)
434 target_flags &= ~MASK_V8PLUS;
436 /* Don't use stack biasing in 32 bit mode. */
437 if (TARGET_ARCH32)
438 target_flags &= ~MASK_STACK_BIAS;
440 /* Supply a default value for align_functions. */
441 if (align_functions == 0
442 && (sparc_cpu == PROCESSOR_ULTRASPARC
443 || sparc_cpu == PROCESSOR_ULTRASPARC3))
444 align_functions = 32;
446 /* Validate PCC_STRUCT_RETURN. */
447 if (flag_pcc_struct_return == DEFAULT_PCC_STRUCT_RETURN)
448 flag_pcc_struct_return = (TARGET_ARCH64 ? 0 : 1);
450 /* Only use .uaxword when compiling for a 64-bit target. */
451 if (!TARGET_ARCH64)
452 targetm.asm_out.unaligned_op.di = NULL;
454 /* Do various machine dependent initializations. */
455 sparc_init_modes ();
458 /* Miscellaneous utilities. */
460 /* Nonzero if CODE, a comparison, is suitable for use in v9 conditional move
461 or branch on register contents instructions. */
464 v9_regcmp_p (code)
465 enum rtx_code code;
467 return (code == EQ || code == NE || code == GE || code == LT
468 || code == LE || code == GT);
472 /* Operand constraints. */
474 /* Return nonzero only if OP is a register of mode MODE,
475 or const0_rtx. */
478 reg_or_0_operand (op, mode)
479 rtx op;
480 enum machine_mode mode;
482 if (register_operand (op, mode))
483 return 1;
484 if (op == const0_rtx)
485 return 1;
486 if (GET_MODE (op) == VOIDmode && GET_CODE (op) == CONST_DOUBLE
487 && CONST_DOUBLE_HIGH (op) == 0
488 && CONST_DOUBLE_LOW (op) == 0)
489 return 1;
490 if (fp_zero_operand (op, mode))
491 return 1;
492 return 0;
495 /* Return nonzero only if OP is const1_rtx. */
498 const1_operand (op, mode)
499 rtx op;
500 enum machine_mode mode ATTRIBUTE_UNUSED;
502 return op == const1_rtx;
505 /* Nonzero if OP is a floating point value with value 0.0. */
508 fp_zero_operand (op, mode)
509 rtx op;
510 enum machine_mode mode;
512 if (GET_MODE_CLASS (GET_MODE (op)) != MODE_FLOAT)
513 return 0;
514 return op == CONST0_RTX (mode);
517 /* Nonzero if OP is a register operand in a floating point register. */
520 fp_register_operand (op, mode)
521 rtx op;
522 enum machine_mode mode;
524 if (! register_operand (op, mode))
525 return 0;
526 if (GET_CODE (op) == SUBREG)
527 op = SUBREG_REG (op);
528 return GET_CODE (op) == REG && SPARC_FP_REG_P (REGNO (op));
531 /* Nonzero if OP is a floating point constant which can
532 be loaded into an integer register using a single
533 sethi instruction. */
536 fp_sethi_p (op)
537 rtx op;
539 if (GET_CODE (op) == CONST_DOUBLE)
541 REAL_VALUE_TYPE r;
542 long i;
544 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
545 if (REAL_VALUES_EQUAL (r, dconst0) &&
546 ! REAL_VALUE_MINUS_ZERO (r))
547 return 0;
548 REAL_VALUE_TO_TARGET_SINGLE (r, i);
549 if (SPARC_SETHI_P (i))
550 return 1;
553 return 0;
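/* For instance, the SFmode constant 1.0f has the bit image 0x3f800000;
   its low 10 bits are zero, so SPARC_SETHI_P accepts it and one sethi
   loads it, whereas an image such as 0x3f800001 does not qualify.  */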
556 /* Nonzero if OP is a floating point constant which can
557 be loaded into an integer register using a single
558 mov instruction. */
561 fp_mov_p (op)
562 rtx op;
564 if (GET_CODE (op) == CONST_DOUBLE)
566 REAL_VALUE_TYPE r;
567 long i;
569 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
570 if (REAL_VALUES_EQUAL (r, dconst0) &&
571 ! REAL_VALUE_MINUS_ZERO (r))
572 return 0;
573 REAL_VALUE_TO_TARGET_SINGLE (r, i);
574 if (SPARC_SIMM13_P (i))
575 return 1;
578 return 0;
581 /* Nonzero if OP is a floating point constant which can
582 be loaded into an integer register using a high/losum
583 instruction sequence. */
586 fp_high_losum_p (op)
587 rtx op;
589 /* The constraints calling this should only be in
590 SFmode move insns, so any constant which cannot
591 be moved using a single insn will do. */
592 if (GET_CODE (op) == CONST_DOUBLE)
594 REAL_VALUE_TYPE r;
595 long i;
597 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
598 if (REAL_VALUES_EQUAL (r, dconst0) &&
599 ! REAL_VALUE_MINUS_ZERO (r))
600 return 0;
601 REAL_VALUE_TO_TARGET_SINGLE (r, i);
602 if (! SPARC_SETHI_P (i)
603 && ! SPARC_SIMM13_P (i))
604 return 1;
607 return 0;
610 /* Nonzero if OP is an integer register. */
613 intreg_operand (op, mode)
614 rtx op;
615 enum machine_mode mode ATTRIBUTE_UNUSED;
617 return (register_operand (op, SImode)
618 || (TARGET_ARCH64 && register_operand (op, DImode)));
621 /* Nonzero if OP is a floating point condition code register. */
624 fcc_reg_operand (op, mode)
625 rtx op;
626 enum machine_mode mode;
628 /* This can happen when recog is called from combine. Op may be a MEM.
629 Fail instead of calling abort in this case. */
630 if (GET_CODE (op) != REG)
631 return 0;
633 if (mode != VOIDmode && mode != GET_MODE (op))
634 return 0;
635 if (mode == VOIDmode
636 && (GET_MODE (op) != CCFPmode && GET_MODE (op) != CCFPEmode))
637 return 0;
639 #if 0 /* ??? ==> 1 when %fcc0-3 are pseudos first. See gen_compare_reg(). */
640 if (reg_renumber == 0)
641 return REGNO (op) >= FIRST_PSEUDO_REGISTER;
642 return REGNO_OK_FOR_CCFP_P (REGNO (op));
643 #else
644 return (unsigned) REGNO (op) - SPARC_FIRST_V9_FCC_REG < 4;
645 #endif
648 /* Nonzero if OP is a floating point condition code fcc0 register. */
651 fcc0_reg_operand (op, mode)
652 rtx op;
653 enum machine_mode mode;
655 /* This can happen when recog is called from combine. Op may be a MEM.
656 Fail instead of calling abort in this case. */
657 if (GET_CODE (op) != REG)
658 return 0;
660 if (mode != VOIDmode && mode != GET_MODE (op))
661 return 0;
662 if (mode == VOIDmode
663 && (GET_MODE (op) != CCFPmode && GET_MODE (op) != CCFPEmode))
664 return 0;
666 return REGNO (op) == SPARC_FCC_REG;
669 /* Nonzero if OP is an integer or floating point condition code register. */
672 icc_or_fcc_reg_operand (op, mode)
673 rtx op;
674 enum machine_mode mode;
676 if (GET_CODE (op) == REG && REGNO (op) == SPARC_ICC_REG)
678 if (mode != VOIDmode && mode != GET_MODE (op))
679 return 0;
680 if (mode == VOIDmode
681 && GET_MODE (op) != CCmode && GET_MODE (op) != CCXmode)
682 return 0;
683 return 1;
686 return fcc_reg_operand (op, mode);
689 /* Nonzero if OP can appear as the dest of a RESTORE insn. */
691 restore_operand (op, mode)
692 rtx op;
693 enum machine_mode mode;
695 return (GET_CODE (op) == REG && GET_MODE (op) == mode
696 && (REGNO (op) < 8 || (REGNO (op) >= 24 && REGNO (op) < 32)));
699 /* Call insn on SPARC can take a PC-relative constant address, or any regular
700 memory address. */
703 call_operand (op, mode)
704 rtx op;
705 enum machine_mode mode;
707 if (GET_CODE (op) != MEM)
708 abort ();
709 op = XEXP (op, 0);
710 return (symbolic_operand (op, mode) || memory_address_p (Pmode, op));
714 call_operand_address (op, mode)
715 rtx op;
716 enum machine_mode mode;
718 return (symbolic_operand (op, mode) || memory_address_p (Pmode, op));
721 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
722 reference and a constant. */
725 symbolic_operand (op, mode)
726 register rtx op;
727 enum machine_mode mode;
729 enum machine_mode omode = GET_MODE (op);
731 if (omode != mode && omode != VOIDmode && mode != VOIDmode)
732 return 0;
734 switch (GET_CODE (op))
736 case SYMBOL_REF:
737 case LABEL_REF:
738 return 1;
740 case CONST:
741 op = XEXP (op, 0);
742 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
743 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
744 && GET_CODE (XEXP (op, 1)) == CONST_INT);
746 default:
747 return 0;
751 /* Return truth value of statement that OP is a symbolic memory
752 operand of mode MODE. */
755 symbolic_memory_operand (op, mode)
756 rtx op;
757 enum machine_mode mode ATTRIBUTE_UNUSED;
759 if (GET_CODE (op) == SUBREG)
760 op = SUBREG_REG (op);
761 if (GET_CODE (op) != MEM)
762 return 0;
763 op = XEXP (op, 0);
764 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
765 || GET_CODE (op) == HIGH || GET_CODE (op) == LABEL_REF);
768 /* Return truth value of statement that OP is a LABEL_REF of mode MODE. */
771 label_ref_operand (op, mode)
772 rtx op;
773 enum machine_mode mode;
775 if (GET_CODE (op) != LABEL_REF)
776 return 0;
777 if (GET_MODE (op) != mode)
778 return 0;
779 return 1;
782 /* Return 1 if the operand is an argument used in generating pic references
783 in either the medium/low or medium/anywhere code models of sparc64. */
786 sp64_medium_pic_operand (op, mode)
787 rtx op;
788 enum machine_mode mode ATTRIBUTE_UNUSED;
790 /* Check for (const (minus (symbol_ref:GOT)
791 (const (minus (label) (pc))))). */
792 if (GET_CODE (op) != CONST)
793 return 0;
794 op = XEXP (op, 0);
795 if (GET_CODE (op) != MINUS)
796 return 0;
797 if (GET_CODE (XEXP (op, 0)) != SYMBOL_REF)
798 return 0;
799 /* ??? Ensure symbol is GOT. */
800 if (GET_CODE (XEXP (op, 1)) != CONST)
801 return 0;
802 if (GET_CODE (XEXP (XEXP (op, 1), 0)) != MINUS)
803 return 0;
804 return 1;
807 /* Return 1 if the operand is a data segment reference. This includes
808 the readonly data segment, or in other words anything but the text segment.
809 This is needed in the medium/anywhere code model on v9. These values
810 are accessed with EMBMEDANY_BASE_REG. */
813 data_segment_operand (op, mode)
814 rtx op;
815 enum machine_mode mode ATTRIBUTE_UNUSED;
817 switch (GET_CODE (op))
819 case SYMBOL_REF :
820 return ! SYMBOL_REF_FUNCTION_P (op);
821 case PLUS :
822 /* Assume canonical format of symbol + constant.
823 Fall through. */
824 case CONST :
825 return data_segment_operand (XEXP (op, 0), VOIDmode);
826 default :
827 return 0;
831 /* Return 1 if the operand is a text segment reference.
832 This is needed in the medium/anywhere code model on v9. */
835 text_segment_operand (op, mode)
836 rtx op;
837 enum machine_mode mode ATTRIBUTE_UNUSED;
839 switch (GET_CODE (op))
841 case LABEL_REF :
842 return 1;
843 case SYMBOL_REF :
844 return SYMBOL_REF_FUNCTION_P (op);
845 case PLUS :
846 /* Assume canonical format of symbol + constant.
847 Fall through. */
848 case CONST :
849 return text_segment_operand (XEXP (op, 0), VOIDmode);
850 default :
851 return 0;
855 /* Return 1 if the operand is either a register or a memory operand that is
856 not symbolic. */
859 reg_or_nonsymb_mem_operand (op, mode)
860 register rtx op;
861 enum machine_mode mode;
863 if (register_operand (op, mode))
864 return 1;
866 if (memory_operand (op, mode) && ! symbolic_memory_operand (op, mode))
867 return 1;
869 return 0;
873 splittable_symbolic_memory_operand (op, mode)
874 rtx op;
875 enum machine_mode mode ATTRIBUTE_UNUSED;
877 if (GET_CODE (op) != MEM)
878 return 0;
879 if (! symbolic_operand (XEXP (op, 0), Pmode))
880 return 0;
881 return 1;
885 splittable_immediate_memory_operand (op, mode)
886 rtx op;
887 enum machine_mode mode ATTRIBUTE_UNUSED;
889 if (GET_CODE (op) != MEM)
890 return 0;
891 if (! immediate_operand (XEXP (op, 0), Pmode))
892 return 0;
893 return 1;
896 /* Return truth value of whether OP is EQ or NE. */
899 eq_or_neq (op, mode)
900 rtx op;
901 enum machine_mode mode ATTRIBUTE_UNUSED;
903 return (GET_CODE (op) == EQ || GET_CODE (op) == NE);
906 /* Return 1 if this is a comparison operator, but not an EQ, NE, GEU,
907 or LTU for non-floating-point. We handle those specially. */
910 normal_comp_operator (op, mode)
911 rtx op;
912 enum machine_mode mode ATTRIBUTE_UNUSED;
914 enum rtx_code code = GET_CODE (op);
916 if (GET_RTX_CLASS (code) != '<')
917 return 0;
919 if (GET_MODE (XEXP (op, 0)) == CCFPmode
920 || GET_MODE (XEXP (op, 0)) == CCFPEmode)
921 return 1;
923 return (code != NE && code != EQ && code != GEU && code != LTU);
926 /* Return 1 if this is a comparison operator. This allows the use of
927 MATCH_OPERATOR to recognize all the branch insns. */
930 noov_compare_op (op, mode)
931 register rtx op;
932 enum machine_mode mode ATTRIBUTE_UNUSED;
934 enum rtx_code code = GET_CODE (op);
936 if (GET_RTX_CLASS (code) != '<')
937 return 0;
939 if (GET_MODE (XEXP (op, 0)) == CC_NOOVmode
940 || GET_MODE (XEXP (op, 0)) == CCX_NOOVmode)
941 /* These are the only branches which work with CC_NOOVmode. */
942 return (code == EQ || code == NE || code == GE || code == LT);
943 return 1;
946 /* Return 1 if this is a 64-bit comparison operator. This allows the use of
947 MATCH_OPERATOR to recognize all the branch insns. */
950 noov_compare64_op (op, mode)
951 register rtx op;
952 enum machine_mode mode ATTRIBUTE_UNUSED;
954 enum rtx_code code = GET_CODE (op);
956 if (! TARGET_V9)
957 return 0;
959 if (GET_RTX_CLASS (code) != '<')
960 return 0;
962 if (GET_MODE (XEXP (op, 0)) == CCX_NOOVmode)
963 /* These are the only branches which work with CCX_NOOVmode. */
964 return (code == EQ || code == NE || code == GE || code == LT);
965 return (GET_MODE (XEXP (op, 0)) == CCXmode);
968 /* Nonzero if OP is a comparison operator suitable for use in v9
969 conditional move or branch on register contents instructions. */
972 v9_regcmp_op (op, mode)
973 register rtx op;
974 enum machine_mode mode ATTRIBUTE_UNUSED;
976 enum rtx_code code = GET_CODE (op);
978 if (GET_RTX_CLASS (code) != '<')
979 return 0;
981 return v9_regcmp_p (code);
984 /* Return 1 if this is a SIGN_EXTEND or ZERO_EXTEND operation. */
987 extend_op (op, mode)
988 rtx op;
989 enum machine_mode mode ATTRIBUTE_UNUSED;
991 return GET_CODE (op) == SIGN_EXTEND || GET_CODE (op) == ZERO_EXTEND;
994 /* Return nonzero if OP is an operator of mode MODE which can set
995 the condition codes explicitly. We do not include PLUS and MINUS
996 because these require CC_NOOVmode, which we handle explicitly. */
999 cc_arithop (op, mode)
1000 rtx op;
1001 enum machine_mode mode ATTRIBUTE_UNUSED;
1003 if (GET_CODE (op) == AND
1004 || GET_CODE (op) == IOR
1005 || GET_CODE (op) == XOR)
1006 return 1;
1008 return 0;
1011 /* Return nonzero if OP is an operator of mode MODE which can bitwise
1012 complement its second operand and set the condition codes explicitly. */
1015 cc_arithopn (op, mode)
1016 rtx op;
1017 enum machine_mode mode ATTRIBUTE_UNUSED;
1019 /* XOR is not here because combine canonicalizes (xor (not ...) ...)
1020 and (xor ... (not ...)) to (not (xor ...)). */
1021 return (GET_CODE (op) == AND
1022 || GET_CODE (op) == IOR);
1025 /* Return true if OP is a register, or is a CONST_INT that can fit in a
1026 signed 13 bit immediate field. This is an acceptable SImode operand for
1027 most 3 address instructions. */
1030 arith_operand (op, mode)
1031 rtx op;
1032 enum machine_mode mode;
1034 if (register_operand (op, mode))
1035 return 1;
1036 if (GET_CODE (op) != CONST_INT)
1037 return 0;
1038 return SMALL_INT32 (op);
1041 /* Return true if OP is a constant 4096 */
1044 arith_4096_operand (op, mode)
1045 rtx op;
1046 enum machine_mode mode ATTRIBUTE_UNUSED;
1048 if (GET_CODE (op) != CONST_INT)
1049 return 0;
1050 else
1051 return INTVAL (op) == 4096;
1054 /* Return true if OP is suitable as second operand for add/sub */
1057 arith_add_operand (op, mode)
1058 rtx op;
1059 enum machine_mode mode;
1061 return arith_operand (op, mode) || arith_4096_operand (op, mode);
1064 /* Return true if OP is a CONST_INT or a CONST_DOUBLE which can fit in the
1065 immediate field of OR and XOR instructions. Used for 64-bit
1066 constant formation patterns. */
1068 const64_operand (op, mode)
1069 rtx op;
1070 enum machine_mode mode ATTRIBUTE_UNUSED;
1072 return ((GET_CODE (op) == CONST_INT
1073 && SPARC_SIMM13_P (INTVAL (op)))
1074 #if HOST_BITS_PER_WIDE_INT != 64
1075 || (GET_CODE (op) == CONST_DOUBLE
1076 && SPARC_SIMM13_P (CONST_DOUBLE_LOW (op))
1077 && (CONST_DOUBLE_HIGH (op) ==
1078 ((CONST_DOUBLE_LOW (op) & 0x80000000) != 0 ?
1079 (HOST_WIDE_INT)-1 : 0)))
1080 #endif
1084 /* The same, but only for sethi instructions. */
1086 const64_high_operand (op, mode)
1087 rtx op;
1088 enum machine_mode mode;
1090 return ((GET_CODE (op) == CONST_INT
1091 && (INTVAL (op) & ~(HOST_WIDE_INT)0x3ff) != 0
1092 && SPARC_SETHI_P (INTVAL (op) & GET_MODE_MASK (mode))
1094 || (GET_CODE (op) == CONST_DOUBLE
1095 && CONST_DOUBLE_HIGH (op) == 0
1096 && (CONST_DOUBLE_LOW (op) & ~(HOST_WIDE_INT)0x3ff) != 0
1097 && SPARC_SETHI_P (CONST_DOUBLE_LOW (op))));
1100 /* Return true if OP is a register, or is a CONST_INT that can fit in a
1101 signed 11 bit immediate field. This is an acceptable SImode operand for
1102 the movcc instructions. */
1105 arith11_operand (op, mode)
1106 rtx op;
1107 enum machine_mode mode;
1109 return (register_operand (op, mode)
1110 || (GET_CODE (op) == CONST_INT && SPARC_SIMM11_P (INTVAL (op))));
1113 /* Return true if OP is a register, or is a CONST_INT that can fit in a
1114 signed 10 bit immediate field. This is an acceptable SImode operand for
1115 the movrcc instructions. */
1118 arith10_operand (op, mode)
1119 rtx op;
1120 enum machine_mode mode;
1122 return (register_operand (op, mode)
1123 || (GET_CODE (op) == CONST_INT && SPARC_SIMM10_P (INTVAL (op))));
1126 /* Return true if OP is a register, is a CONST_INT that fits in a 13 bit
1127 immediate field, or is a CONST_DOUBLE whose both parts fit in a 13 bit
1128 immediate field.
1129 v9: Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that
1130 can fit in a 13 bit immediate field. This is an acceptable DImode operand
1131 for most 3 address instructions. */
1134 arith_double_operand (op, mode)
1135 rtx op;
1136 enum machine_mode mode;
1138 return (register_operand (op, mode)
1139 || (GET_CODE (op) == CONST_INT && SMALL_INT (op))
1140 || (! TARGET_ARCH64
1141 && GET_CODE (op) == CONST_DOUBLE
1142 && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_LOW (op) + 0x1000) < 0x2000
1143 && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_HIGH (op) + 0x1000) < 0x2000)
1144 || (TARGET_ARCH64
1145 && GET_CODE (op) == CONST_DOUBLE
1146 && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_LOW (op) + 0x1000) < 0x2000
1147 && ((CONST_DOUBLE_HIGH (op) == -1
1148 && (CONST_DOUBLE_LOW (op) & 0x1000) == 0x1000)
1149 || (CONST_DOUBLE_HIGH (op) == 0
1150 && (CONST_DOUBLE_LOW (op) & 0x1000) == 0))));
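/* The (x + 0x1000) < 0x2000 test above is the usual unsigned trick for
   checking that a signed value lies in the 13-bit range -4096 .. 4095;
   the 11-bit and 10-bit predicates below use 0x400/0x800 and
   0x200/0x400 in the same way.  */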
1153 /* Return true if OP is a constant 4096 for DImode on ARCH64 */
1156 arith_double_4096_operand (op, mode)
1157 rtx op;
1158 enum machine_mode mode ATTRIBUTE_UNUSED;
1160 return (TARGET_ARCH64 &&
1161 ((GET_CODE (op) == CONST_INT && INTVAL (op) == 4096) ||
1162 (GET_CODE (op) == CONST_DOUBLE &&
1163 CONST_DOUBLE_LOW (op) == 4096 &&
1164 CONST_DOUBLE_HIGH (op) == 0)));
1167 /* Return true if OP is suitable as second operand for add/sub in DImode */
1170 arith_double_add_operand (op, mode)
1171 rtx op;
1172 enum machine_mode mode;
1174 return arith_double_operand (op, mode) || arith_double_4096_operand (op, mode);
1177 /* Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that
1178 can fit in an 11 bit immediate field. This is an acceptable DImode
1179 operand for the movcc instructions. */
1180 /* ??? Replace with arith11_operand? */
1183 arith11_double_operand (op, mode)
1184 rtx op;
1185 enum machine_mode mode;
1187 return (register_operand (op, mode)
1188 || (GET_CODE (op) == CONST_DOUBLE
1189 && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
1190 && (unsigned HOST_WIDE_INT) (CONST_DOUBLE_LOW (op) + 0x400) < 0x800
1191 && ((CONST_DOUBLE_HIGH (op) == -1
1192 && (CONST_DOUBLE_LOW (op) & 0x400) == 0x400)
1193 || (CONST_DOUBLE_HIGH (op) == 0
1194 && (CONST_DOUBLE_LOW (op) & 0x400) == 0)))
1195 || (GET_CODE (op) == CONST_INT
1196 && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
1197 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x400) < 0x800));
1200 /* Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that
1201 can fit in a 10 bit immediate field. This is an acceptable DImode
1202 operand for the movrcc instructions. */
1203 /* ??? Replace with arith10_operand? */
1206 arith10_double_operand (op, mode)
1207 rtx op;
1208 enum machine_mode mode;
1210 return (register_operand (op, mode)
1211 || (GET_CODE (op) == CONST_DOUBLE
1212 && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
1213 && (unsigned) (CONST_DOUBLE_LOW (op) + 0x200) < 0x400
1214 && ((CONST_DOUBLE_HIGH (op) == -1
1215 && (CONST_DOUBLE_LOW (op) & 0x200) == 0x200)
1216 || (CONST_DOUBLE_HIGH (op) == 0
1217 && (CONST_DOUBLE_LOW (op) & 0x200) == 0)))
1218 || (GET_CODE (op) == CONST_INT
1219 && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
1220 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x200) < 0x400));
1223 /* Return truth value of whether OP is an integer which fits the
1224 range constraining immediate operands in most three-address insns,
1225 which have a 13 bit immediate field. */
1228 small_int (op, mode)
1229 rtx op;
1230 enum machine_mode mode ATTRIBUTE_UNUSED;
1232 return (GET_CODE (op) == CONST_INT && SMALL_INT (op));
1236 small_int_or_double (op, mode)
1237 rtx op;
1238 enum machine_mode mode ATTRIBUTE_UNUSED;
1240 return ((GET_CODE (op) == CONST_INT && SMALL_INT (op))
1241 || (GET_CODE (op) == CONST_DOUBLE
1242 && CONST_DOUBLE_HIGH (op) == 0
1243 && SPARC_SIMM13_P (CONST_DOUBLE_LOW (op))));
1246 /* Recognize operand values for the umul instruction. That instruction sign
1247 extends immediate values just like all other sparc instructions, but
1248 interprets the extended result as an unsigned number. */
1251 uns_small_int (op, mode)
1252 rtx op;
1253 enum machine_mode mode ATTRIBUTE_UNUSED;
1255 #if HOST_BITS_PER_WIDE_INT > 32
1256 /* All allowed constants will fit a CONST_INT. */
1257 return (GET_CODE (op) == CONST_INT
1258 && ((INTVAL (op) >= 0 && INTVAL (op) < 0x1000)
1259 || (INTVAL (op) >= 0xFFFFF000
1260 && INTVAL (op) <= 0xFFFFFFFF)));
1261 #else
1262 return ((GET_CODE (op) == CONST_INT && (unsigned) INTVAL (op) < 0x1000)
1263 || (GET_CODE (op) == CONST_DOUBLE
1264 && CONST_DOUBLE_HIGH (op) == 0
1265 && (unsigned) CONST_DOUBLE_LOW (op) - 0xFFFFF000 < 0x1000));
1266 #endif
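/* Example: the immediate 0xFFFFF234 is accepted here because the 13-bit
   constant -0xDCC sign-extends to that bit pattern, which umul then
   treats as a large unsigned number.  */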
1270 uns_arith_operand (op, mode)
1271 rtx op;
1272 enum machine_mode mode;
1274 return register_operand (op, mode) || uns_small_int (op, mode);
1277 /* Return truth value of statement that OP is a call-clobbered register. */
1279 clobbered_register (op, mode)
1280 rtx op;
1281 enum machine_mode mode ATTRIBUTE_UNUSED;
1283 return (GET_CODE (op) == REG && call_used_regs[REGNO (op)]);
1286 /* Return 1 if OP is a valid operand for the source of a move insn. */
1289 input_operand (op, mode)
1290 rtx op;
1291 enum machine_mode mode;
1293 /* If both modes are non-void they must be the same. */
1294 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1295 return 0;
1297 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and result in 0/1. */
1298 if (GET_CODE (op) == CONSTANT_P_RTX)
1299 return 1;
1301 /* Allow any one instruction integer constant, and all CONST_INT
1302 variants when we are working in DImode and !arch64. */
1303 if (GET_MODE_CLASS (mode) == MODE_INT
1304 && ((GET_CODE (op) == CONST_INT
1305 && (SPARC_SETHI_P (INTVAL (op) & GET_MODE_MASK (mode))
1306 || SPARC_SIMM13_P (INTVAL (op))
1307 || (mode == DImode
1308 && ! TARGET_ARCH64)))
1309 || (TARGET_ARCH64
1310 && GET_CODE (op) == CONST_DOUBLE
1311 && ((CONST_DOUBLE_HIGH (op) == 0
1312 && SPARC_SETHI_P (CONST_DOUBLE_LOW (op)))
1314 #if HOST_BITS_PER_WIDE_INT == 64
1315 (CONST_DOUBLE_HIGH (op) == 0
1316 && SPARC_SIMM13_P (CONST_DOUBLE_LOW (op)))
1317 #else
1318 (SPARC_SIMM13_P (CONST_DOUBLE_LOW (op))
1319 && (((CONST_DOUBLE_LOW (op) & 0x80000000) == 0
1320 && CONST_DOUBLE_HIGH (op) == 0)
1321 || (CONST_DOUBLE_HIGH (op) == -1
1322 && CONST_DOUBLE_LOW (op) & 0x80000000) != 0))
1323 #endif
1324 ))))
1325 return 1;
1327 /* If !arch64 and this is a DImode const, allow it so that
1328 the splits can be generated. */
1329 if (! TARGET_ARCH64
1330 && mode == DImode
1331 && GET_CODE (op) == CONST_DOUBLE)
1332 return 1;
1334 if (register_operand (op, mode))
1335 return 1;
1337 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1338 && GET_CODE (op) == CONST_DOUBLE)
1339 return 1;
1341 /* If this is a SUBREG, look inside so that we handle
1342 paradoxical ones. */
1343 if (GET_CODE (op) == SUBREG)
1344 op = SUBREG_REG (op);
1346 /* Check for valid MEM forms. */
1347 if (GET_CODE (op) == MEM)
1349 rtx inside = XEXP (op, 0);
1351 if (GET_CODE (inside) == LO_SUM)
1353 /* We can't allow these because all of the splits
1354 (eventually as they trickle down into DFmode
1355 splits) require offsettable memory references. */
1356 if (! TARGET_V9
1357 && GET_MODE (op) == TFmode)
1358 return 0;
1360 return (register_operand (XEXP (inside, 0), Pmode)
1361 && CONSTANT_P (XEXP (inside, 1)));
1363 return memory_address_p (mode, inside);
1366 return 0;
1370 /* We know it can't be done in one insn when we get here;
1371 the movsi expander guarantees this. */
1372 void
1373 sparc_emit_set_const32 (op0, op1)
1374 rtx op0;
1375 rtx op1;
1377 enum machine_mode mode = GET_MODE (op0);
1378 rtx temp;
1380 if (GET_CODE (op1) == CONST_INT)
1382 HOST_WIDE_INT value = INTVAL (op1);
1384 if (SPARC_SETHI_P (value & GET_MODE_MASK (mode))
1385 || SPARC_SIMM13_P (value))
1386 abort ();
1389 /* Full 2-insn decomposition is needed. */
1390 if (reload_in_progress || reload_completed)
1391 temp = op0;
1392 else
1393 temp = gen_reg_rtx (mode);
1395 if (GET_CODE (op1) == CONST_INT)
1397 /* Emit them as real moves instead of a HIGH/LO_SUM;
1398 this way CSE can see everything and reuse intermediate
1399 values if it wants. */
1400 if (TARGET_ARCH64
1401 && HOST_BITS_PER_WIDE_INT != 64
1402 && (INTVAL (op1) & 0x80000000) != 0)
1403 emit_insn (gen_rtx_SET
1404 (VOIDmode, temp,
1405 immed_double_const (INTVAL (op1) & ~(HOST_WIDE_INT)0x3ff,
1406 0, DImode)));
1407 else
1408 emit_insn (gen_rtx_SET (VOIDmode, temp,
1409 GEN_INT (INTVAL (op1)
1410 & ~(HOST_WIDE_INT)0x3ff)));
1412 emit_insn (gen_rtx_SET (VOIDmode,
1413 op0,
1414 gen_rtx_IOR (mode, temp,
1415 GEN_INT (INTVAL (op1) & 0x3ff))));
1417 else
1419 /* A symbol, emit in the traditional way. */
1420 emit_insn (gen_rtx_SET (VOIDmode, temp,
1421 gen_rtx_HIGH (mode, op1)));
1422 emit_insn (gen_rtx_SET (VOIDmode,
1423 op0, gen_rtx_LO_SUM (mode, temp, op1)));
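/* Worked example of the CONST_INT path above: for the value 0x12345678
   the two sets emitted are equivalent to

       sethi   %hi(0x12345678), %temp   ! %temp = 0x12345400
       or      %temp, 0x278, %reg       ! 0x12345400 | 0x278 = 0x12345678  */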
1429 /* SPARC-v9 code-model support. */
1430 void
1431 sparc_emit_set_symbolic_const64 (op0, op1, temp1)
1432 rtx op0;
1433 rtx op1;
1434 rtx temp1;
1436 rtx ti_temp1 = 0;
1438 if (temp1 && GET_MODE (temp1) == TImode)
1440 ti_temp1 = temp1;
1441 temp1 = gen_rtx_REG (DImode, REGNO (temp1));
1444 switch (sparc_cmodel)
1446 case CM_MEDLOW:
1447 /* The range spanned by all instructions in the object is less
1448 than 2^31 bytes (2GB) and the distance from any instruction
1449 to the location of the label _GLOBAL_OFFSET_TABLE_ is less
1450 than 2^31 bytes (2GB).
1452 The executable must be in the low 4GB of the virtual address
1453 space.
1455 sethi %hi(symbol), %temp
1456 or %temp, %lo(symbol), %reg */
1457 emit_insn (gen_rtx_SET (VOIDmode, temp1, gen_rtx_HIGH (DImode, op1)));
1458 emit_insn (gen_rtx_SET (VOIDmode, op0, gen_rtx_LO_SUM (DImode, temp1, op1)));
1459 break;
1461 case CM_MEDMID:
1462 /* The range spanned by all instructions in the object is less
1463 than 2^31 bytes (2GB) and the distance from any instruction
1464 to the location of the label _GLOBAL_OFFSET_TABLE_ is less
1465 than 2^31 bytes (2GB).
1467 The executable must be in the low 16TB of the virtual address
1468 space.
1470 sethi %h44(symbol), %temp1
1471 or %temp1, %m44(symbol), %temp2
1472 sllx %temp2, 12, %temp3
1473 or %temp3, %l44(symbol), %reg */
1474 emit_insn (gen_seth44 (op0, op1));
1475 emit_insn (gen_setm44 (op0, op0, op1));
1476 emit_insn (gen_rtx_SET (VOIDmode, temp1,
1477 gen_rtx_ASHIFT (DImode, op0, GEN_INT (12))));
1478 emit_insn (gen_setl44 (op0, temp1, op1));
1479 break;
1481 case CM_MEDANY:
1482 /* The range spanned by all instructions in the object is less
1483 than 2^31 bytes (2GB) and the distance from any instruction
1484 to the location of the label _GLOBAL_OFFSET_TABLE_ is less
1485 than 2^31 bytes (2GB).
1487 The executable can be placed anywhere in the virtual address
1488 space.
1490 sethi %hh(symbol), %temp1
1491 sethi %lm(symbol), %temp2
1492 or %temp1, %hm(symbol), %temp3
1493 or %temp2, %lo(symbol), %temp4
1494 sllx %temp3, 32, %temp5
1495 or %temp4, %temp5, %reg */
1497 /* It is possible that one of the registers we got for operands[2]
1498 might coincide with that of operands[0] (which is why we made
1499 it TImode). Pick the other one to use as our scratch. */
1500 if (rtx_equal_p (temp1, op0))
1502 if (ti_temp1)
1503 temp1 = gen_rtx_REG (DImode, REGNO (temp1) + 1);
1504 else
1505 abort();
1508 emit_insn (gen_sethh (op0, op1));
1509 emit_insn (gen_setlm (temp1, op1));
1510 emit_insn (gen_sethm (op0, op0, op1));
1511 emit_insn (gen_rtx_SET (VOIDmode, op0,
1512 gen_rtx_ASHIFT (DImode, op0, GEN_INT (32))));
1513 emit_insn (gen_rtx_SET (VOIDmode, op0,
1514 gen_rtx_PLUS (DImode, op0, temp1)));
1515 emit_insn (gen_setlo (op0, op0, op1));
1516 break;
1518 case CM_EMBMEDANY:
1519 /* Old old old backwards compatibility kruft here.
1520 Essentially it is MEDLOW with a fixed 64-bit
1521 virtual base added to all data segment addresses.
1522 Text-segment stuff is computed like MEDANY, we can't
1523 reuse the code above because the relocation knobs
1524 look different.
1526 Data segment: sethi %hi(symbol), %temp1
1527 or %temp1, %lo(symbol), %temp2
1528 add %temp2, EMBMEDANY_BASE_REG, %reg
1530 Text segment: sethi %uhi(symbol), %temp1
1531 sethi %hi(symbol), %temp2
1532 or %temp1, %ulo(symbol), %temp3
1533 or %temp2, %lo(symbol), %temp4
1534 sllx %temp3, 32, %temp5
1535 or %temp4, %temp5, %reg */
1536 if (data_segment_operand (op1, GET_MODE (op1)))
1538 emit_insn (gen_embmedany_sethi (temp1, op1));
1539 emit_insn (gen_embmedany_brsum (op0, temp1));
1540 emit_insn (gen_embmedany_losum (op0, op0, op1));
1542 else
1544 /* It is possible that one of the registers we got for operands[2]
1545 might coincide with that of operands[0] (which is why we made
1546 it TImode). Pick the other one to use as our scratch. */
1547 if (rtx_equal_p (temp1, op0))
1549 if (ti_temp1)
1550 temp1 = gen_rtx_REG (DImode, REGNO (temp1) + 1);
1551 else
1552 abort();
1555 emit_insn (gen_embmedany_textuhi (op0, op1));
1556 emit_insn (gen_embmedany_texthi (temp1, op1));
1557 emit_insn (gen_embmedany_textulo (op0, op0, op1));
1558 emit_insn (gen_rtx_SET (VOIDmode, op0,
1559 gen_rtx_ASHIFT (DImode, op0, GEN_INT (32))));
1560 emit_insn (gen_rtx_SET (VOIDmode, op0,
1561 gen_rtx_PLUS (DImode, op0, temp1)));
1562 emit_insn (gen_embmedany_textlo (op0, op0, op1));
1564 break;
1566 default:
1567 abort();
1571 /* These avoid problems when cross compiling. If we do not
1572 go through all this hair then the optimizer will see
1573 invalid REG_EQUAL notes or in some cases none at all. */
1574 static void sparc_emit_set_safe_HIGH64 PARAMS ((rtx, HOST_WIDE_INT));
1575 static rtx gen_safe_SET64 PARAMS ((rtx, HOST_WIDE_INT));
1576 static rtx gen_safe_OR64 PARAMS ((rtx, HOST_WIDE_INT));
1577 static rtx gen_safe_XOR64 PARAMS ((rtx, HOST_WIDE_INT));
1579 #if HOST_BITS_PER_WIDE_INT == 64
1580 #define GEN_HIGHINT64(__x) GEN_INT ((__x) & ~(HOST_WIDE_INT)0x3ff)
1581 #define GEN_INT64(__x) GEN_INT (__x)
1582 #else
1583 #define GEN_HIGHINT64(__x) \
1584 immed_double_const ((__x) & ~(HOST_WIDE_INT)0x3ff, 0, DImode)
1585 #define GEN_INT64(__x) \
1586 immed_double_const ((__x) & 0xffffffff, \
1587 ((__x) & 0x80000000 ? -1 : 0), DImode)
1588 #endif
1590 /* The optimizer is not to assume anything about exactly
1591 which bits are set for a HIGH; they are unspecified.
1592 Unfortunately this leads to many missed optimizations
1593 during CSE. We mask out the non-HIGH bits, so the result matches
1594 a plain movdi, to alleviate this problem. */
1595 static void
1596 sparc_emit_set_safe_HIGH64 (dest, val)
1597 rtx dest;
1598 HOST_WIDE_INT val;
1600 emit_insn (gen_rtx_SET (VOIDmode, dest, GEN_HIGHINT64 (val)));
1603 static rtx
1604 gen_safe_SET64 (dest, val)
1605 rtx dest;
1606 HOST_WIDE_INT val;
1608 return gen_rtx_SET (VOIDmode, dest, GEN_INT64 (val));
1611 static rtx
1612 gen_safe_OR64 (src, val)
1613 rtx src;
1614 HOST_WIDE_INT val;
1616 return gen_rtx_IOR (DImode, src, GEN_INT64 (val));
1619 static rtx
1620 gen_safe_XOR64 (src, val)
1621 rtx src;
1622 HOST_WIDE_INT val;
1624 return gen_rtx_XOR (DImode, src, GEN_INT64 (val));
1627 /* Worker routines for 64-bit constant formation on arch64.
1628 One of the key things to do in these emissions is
1629 to create as many temp REGs as possible. This makes it
1630 possible for half-built constants to be used later when
1631 such values are similar to something required later on.
1632 Without doing this, the optimizer cannot see such
1633 opportunities. */
1635 static void sparc_emit_set_const64_quick1
1636 PARAMS ((rtx, rtx, unsigned HOST_WIDE_INT, int));
1638 static void
1639 sparc_emit_set_const64_quick1 (op0, temp, low_bits, is_neg)
1640 rtx op0;
1641 rtx temp;
1642 unsigned HOST_WIDE_INT low_bits;
1643 int is_neg;
1645 unsigned HOST_WIDE_INT high_bits;
1647 if (is_neg)
1648 high_bits = (~low_bits) & 0xffffffff;
1649 else
1650 high_bits = low_bits;
1652 sparc_emit_set_safe_HIGH64 (temp, high_bits);
1653 if (!is_neg)
1655 emit_insn (gen_rtx_SET (VOIDmode, op0,
1656 gen_safe_OR64 (temp, (high_bits & 0x3ff))));
1658 else
1660 /* If we are XOR'ing with -1, then we should emit a one's complement
1661 instead. This way the combiner will notice logical operations
1662 such as ANDN later on and substitute. */
1663 if ((low_bits & 0x3ff) == 0x3ff)
1665 emit_insn (gen_rtx_SET (VOIDmode, op0,
1666 gen_rtx_NOT (DImode, temp)));
1668 else
1670 emit_insn (gen_rtx_SET (VOIDmode, op0,
1671 gen_safe_XOR64 (temp,
1672 (-(HOST_WIDE_INT)0x400
1673 | (low_bits & 0x3ff)))));
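/* Illustration of the negated case above: for 0xffffffff82345678 we
   sethi the complement 0x7dcba987 (giving 0x7dcba800 in %temp) and then
   xor with (-0x400 | 0x278), which recreates the sign-extended value.  */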
1678 static void sparc_emit_set_const64_quick2
1679 PARAMS ((rtx, rtx, unsigned HOST_WIDE_INT,
1680 unsigned HOST_WIDE_INT, int));
1682 static void
1683 sparc_emit_set_const64_quick2 (op0, temp, high_bits, low_immediate, shift_count)
1684 rtx op0;
1685 rtx temp;
1686 unsigned HOST_WIDE_INT high_bits;
1687 unsigned HOST_WIDE_INT low_immediate;
1688 int shift_count;
1690 rtx temp2 = op0;
1692 if ((high_bits & 0xfffffc00) != 0)
1694 sparc_emit_set_safe_HIGH64 (temp, high_bits);
1695 if ((high_bits & ~0xfffffc00) != 0)
1696 emit_insn (gen_rtx_SET (VOIDmode, op0,
1697 gen_safe_OR64 (temp, (high_bits & 0x3ff))));
1698 else
1699 temp2 = temp;
1701 else
1703 emit_insn (gen_safe_SET64 (temp, high_bits));
1704 temp2 = temp;
1707 /* Now shift it up into place. */
1708 emit_insn (gen_rtx_SET (VOIDmode, op0,
1709 gen_rtx_ASHIFT (DImode, temp2,
1710 GEN_INT (shift_count))));
1712 /* If there is a low immediate piece, finish up by
1713 putting that in as well. */
1714 if (low_immediate != 0)
1715 emit_insn (gen_rtx_SET (VOIDmode, op0,
1716 gen_safe_OR64 (op0, low_immediate)));
1719 static void sparc_emit_set_const64_longway
1720 PARAMS ((rtx, rtx, unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT));
1722 /* Full 64-bit constant decomposition. Even though this is the
1723 'worst' case, we still optimize a few things away. */
1724 static void
1725 sparc_emit_set_const64_longway (op0, temp, high_bits, low_bits)
1726 rtx op0;
1727 rtx temp;
1728 unsigned HOST_WIDE_INT high_bits;
1729 unsigned HOST_WIDE_INT low_bits;
1731 rtx sub_temp;
1733 if (reload_in_progress || reload_completed)
1734 sub_temp = op0;
1735 else
1736 sub_temp = gen_reg_rtx (DImode);
1738 if ((high_bits & 0xfffffc00) != 0)
1740 sparc_emit_set_safe_HIGH64 (temp, high_bits);
1741 if ((high_bits & ~0xfffffc00) != 0)
1742 emit_insn (gen_rtx_SET (VOIDmode,
1743 sub_temp,
1744 gen_safe_OR64 (temp, (high_bits & 0x3ff))));
1745 else
1746 sub_temp = temp;
1748 else
1750 emit_insn (gen_safe_SET64 (temp, high_bits));
1751 sub_temp = temp;
1754 if (!reload_in_progress && !reload_completed)
1756 rtx temp2 = gen_reg_rtx (DImode);
1757 rtx temp3 = gen_reg_rtx (DImode);
1758 rtx temp4 = gen_reg_rtx (DImode);
1760 emit_insn (gen_rtx_SET (VOIDmode, temp4,
1761 gen_rtx_ASHIFT (DImode, sub_temp,
1762 GEN_INT (32))));
1764 sparc_emit_set_safe_HIGH64 (temp2, low_bits);
1765 if ((low_bits & ~0xfffffc00) != 0)
1767 emit_insn (gen_rtx_SET (VOIDmode, temp3,
1768 gen_safe_OR64 (temp2, (low_bits & 0x3ff))));
1769 emit_insn (gen_rtx_SET (VOIDmode, op0,
1770 gen_rtx_PLUS (DImode, temp4, temp3)));
1772 else
1774 emit_insn (gen_rtx_SET (VOIDmode, op0,
1775 gen_rtx_PLUS (DImode, temp4, temp2)));
1778 else
1780 rtx low1 = GEN_INT ((low_bits >> (32 - 12)) & 0xfff);
1781 rtx low2 = GEN_INT ((low_bits >> (32 - 12 - 12)) & 0xfff);
1782 rtx low3 = GEN_INT ((low_bits >> (32 - 12 - 12 - 8)) & 0x0ff);
1783 int to_shift = 12;
1785 /* We are in the middle of reload, so this is really
1786 painful. However we do still make an attempt to
1787 avoid emitting truly stupid code. */
1788 if (low1 != const0_rtx)
1790 emit_insn (gen_rtx_SET (VOIDmode, op0,
1791 gen_rtx_ASHIFT (DImode, sub_temp,
1792 GEN_INT (to_shift))));
1793 emit_insn (gen_rtx_SET (VOIDmode, op0,
1794 gen_rtx_IOR (DImode, op0, low1)));
1795 sub_temp = op0;
1796 to_shift = 12;
1798 else
1800 to_shift += 12;
1802 if (low2 != const0_rtx)
1804 emit_insn (gen_rtx_SET (VOIDmode, op0,
1805 gen_rtx_ASHIFT (DImode, sub_temp,
1806 GEN_INT (to_shift))));
1807 emit_insn (gen_rtx_SET (VOIDmode, op0,
1808 gen_rtx_IOR (DImode, op0, low2)));
1809 sub_temp = op0;
1810 to_shift = 8;
1812 else
1814 to_shift += 8;
1816 emit_insn (gen_rtx_SET (VOIDmode, op0,
1817 gen_rtx_ASHIFT (DImode, sub_temp,
1818 GEN_INT (to_shift))));
1819 if (low3 != const0_rtx)
1820 emit_insn (gen_rtx_SET (VOIDmode, op0,
1821 gen_rtx_IOR (DImode, op0, low3)));
1822 /* phew... */
1826 /* Analyze a 64-bit constant for certain properties. */
1827 static void analyze_64bit_constant
1828 PARAMS ((unsigned HOST_WIDE_INT,
1829 unsigned HOST_WIDE_INT,
1830 int *, int *, int *));
1832 static void
1833 analyze_64bit_constant (high_bits, low_bits, hbsp, lbsp, abbasp)
1834 unsigned HOST_WIDE_INT high_bits, low_bits;
1835 int *hbsp, *lbsp, *abbasp;
1837 int lowest_bit_set, highest_bit_set, all_bits_between_are_set;
1838 int i;
1840 lowest_bit_set = highest_bit_set = -1;
1841 i = 0;
1844 if ((lowest_bit_set == -1)
1845 && ((low_bits >> i) & 1))
1846 lowest_bit_set = i;
1847 if ((highest_bit_set == -1)
1848 && ((high_bits >> (32 - i - 1)) & 1))
1849 highest_bit_set = (64 - i - 1);
1851 while (++i < 32
1852 && ((highest_bit_set == -1)
1853 || (lowest_bit_set == -1)));
1854 if (i == 32)
1856 i = 0;
1859 if ((lowest_bit_set == -1)
1860 && ((high_bits >> i) & 1))
1861 lowest_bit_set = i + 32;
1862 if ((highest_bit_set == -1)
1863 && ((low_bits >> (32 - i - 1)) & 1))
1864 highest_bit_set = 32 - i - 1;
1866 while (++i < 32
1867 && ((highest_bit_set == -1)
1868 || (lowest_bit_set == -1)));
1870 /* If there are no bits set this should have gone out
1871 as one instruction! */
1872 if (lowest_bit_set == -1
1873 || highest_bit_set == -1)
1874 abort ();
1875 all_bits_between_are_set = 1;
1876 for (i = lowest_bit_set; i <= highest_bit_set; i++)
1878 if (i < 32)
1880 if ((low_bits & (1 << i)) != 0)
1881 continue;
1883 else
1885 if ((high_bits & (1 << (i - 32))) != 0)
1886 continue;
1888 all_bits_between_are_set = 0;
1889 break;
1891 *hbsp = highest_bit_set;
1892 *lbsp = lowest_bit_set;
1893 *abbasp = all_bits_between_are_set;
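/* Two quick examples of the analysis: high_bits = 0, low_bits = 0x3fc00
   gives lowest_bit_set = 10, highest_bit_set = 17 and
   all_bits_between_are_set = 1; low_bits = 0x80000001 gives 0, 31 and 0.  */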
1896 static int const64_is_2insns
1897 PARAMS ((unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT));
1899 static int
1900 const64_is_2insns (high_bits, low_bits)
1901 unsigned HOST_WIDE_INT high_bits, low_bits;
1903 int highest_bit_set, lowest_bit_set, all_bits_between_are_set;
1905 if (high_bits == 0
1906 || high_bits == 0xffffffff)
1907 return 1;
1909 analyze_64bit_constant (high_bits, low_bits,
1910 &highest_bit_set, &lowest_bit_set,
1911 &all_bits_between_are_set);
1913 if ((highest_bit_set == 63
1914 || lowest_bit_set == 0)
1915 && all_bits_between_are_set != 0)
1916 return 1;
1918 if ((highest_bit_set - lowest_bit_set) < 21)
1919 return 1;
1921 return 0;
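/* E.g. 0x0000ffffc0000000 (an 18-bit run spanning bits 30-47) is a
   2-insn constant: a sethi of the focus bits 0xffffc00 followed by an
   sllx by 20 rebuilds it.  */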
1924 static unsigned HOST_WIDE_INT create_simple_focus_bits
1925 PARAMS ((unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
1926 int, int));
1928 static unsigned HOST_WIDE_INT
1929 create_simple_focus_bits (high_bits, low_bits, lowest_bit_set, shift)
1930 unsigned HOST_WIDE_INT high_bits, low_bits;
1931 int lowest_bit_set, shift;
1933 HOST_WIDE_INT hi, lo;
1935 if (lowest_bit_set < 32)
1937 lo = (low_bits >> lowest_bit_set) << shift;
1938 hi = ((high_bits << (32 - lowest_bit_set)) << shift);
1940 else
1942 lo = 0;
1943 hi = ((high_bits >> (lowest_bit_set - 32)) << shift);
1945 if (hi & lo)
1946 abort ();
1947 return (hi | lo);
1950 /* Here we are sure to be arch64 and this is an integer constant
1951 being loaded into a register. Emit the most efficient
1952 insn sequence possible. Detection of all the 1-insn cases
1953 has been done already. */
1954 void
1955 sparc_emit_set_const64 (op0, op1)
1956 rtx op0;
1957 rtx op1;
1959 unsigned HOST_WIDE_INT high_bits, low_bits;
1960 int lowest_bit_set, highest_bit_set;
1961 int all_bits_between_are_set;
1962 rtx temp;
1964 /* Sanity check that we know what we are working with. */
1965 if (! TARGET_ARCH64)
1966 abort ();
1968 if (GET_CODE (op0) != SUBREG)
1970 if (GET_CODE (op0) != REG
1971 || (REGNO (op0) >= SPARC_FIRST_FP_REG
1972 && REGNO (op0) <= SPARC_LAST_V9_FP_REG))
1973 abort ();
1976 if (reload_in_progress || reload_completed)
1977 temp = op0;
1978 else
1979 temp = gen_reg_rtx (DImode);
1981 if (GET_CODE (op1) != CONST_DOUBLE
1982 && GET_CODE (op1) != CONST_INT)
1984 sparc_emit_set_symbolic_const64 (op0, op1, temp);
1985 return;
1988 if (GET_CODE (op1) == CONST_DOUBLE)
1990 #if HOST_BITS_PER_WIDE_INT == 64
1991 high_bits = (CONST_DOUBLE_LOW (op1) >> 32) & 0xffffffff;
1992 low_bits = CONST_DOUBLE_LOW (op1) & 0xffffffff;
1993 #else
1994 high_bits = CONST_DOUBLE_HIGH (op1);
1995 low_bits = CONST_DOUBLE_LOW (op1);
1996 #endif
1998 else
2000 #if HOST_BITS_PER_WIDE_INT == 64
2001 high_bits = ((INTVAL (op1) >> 32) & 0xffffffff);
2002 low_bits = (INTVAL (op1) & 0xffffffff);
2003 #else
2004 high_bits = ((INTVAL (op1) < 0) ?
2005 0xffffffff :
2006 0x00000000);
2007 low_bits = INTVAL (op1);
2008 #endif
2011 /* low_bits bits 0 --> 31
2012 high_bits bits 32 --> 63 */
2014 analyze_64bit_constant (high_bits, low_bits,
2015 &highest_bit_set, &lowest_bit_set,
2016 &all_bits_between_are_set);
2018 /* First try for a 2-insn sequence. */
2020 /* These situations are preferred because the optimizer can
2021 * do more things with them:
2022 * 1) mov -1, %reg
2023 * sllx %reg, shift, %reg
2024 * 2) mov -1, %reg
2025 * srlx %reg, shift, %reg
2026 * 3) mov some_small_const, %reg
2027 * sllx %reg, shift, %reg
2029 if (((highest_bit_set == 63
2030 || lowest_bit_set == 0)
2031 && all_bits_between_are_set != 0)
2032 || ((highest_bit_set - lowest_bit_set) < 12))
2034 HOST_WIDE_INT the_const = -1;
2035 int shift = lowest_bit_set;
2037 if ((highest_bit_set != 63
2038 && lowest_bit_set != 0)
2039 || all_bits_between_are_set == 0)
2041 the_const =
2042 create_simple_focus_bits (high_bits, low_bits,
2043 lowest_bit_set, 0);
2045 else if (lowest_bit_set == 0)
2046 shift = -(63 - highest_bit_set);
2048 if (! SPARC_SIMM13_P (the_const))
2049 abort ();
2051 emit_insn (gen_safe_SET64 (temp, the_const));
2052 if (shift > 0)
2053 emit_insn (gen_rtx_SET (VOIDmode,
2054 op0,
2055 gen_rtx_ASHIFT (DImode,
2056 temp,
2057 GEN_INT (shift))));
2058 else if (shift < 0)
2059 emit_insn (gen_rtx_SET (VOIDmode,
2060 op0,
2061 gen_rtx_LSHIFTRT (DImode,
2062 temp,
2063 GEN_INT (-shift))));
2064 else
2065 abort ();
2066 return;
2069 /* Now a range of 22 or fewer bits set somewhere.
2070 * 1) sethi %hi(focus_bits), %reg
2071 * sllx %reg, shift, %reg
2072 * 2) sethi %hi(focus_bits), %reg
2073 * srlx %reg, shift, %reg
2075 if ((highest_bit_set - lowest_bit_set) < 21)
2077 unsigned HOST_WIDE_INT focus_bits =
2078 create_simple_focus_bits (high_bits, low_bits,
2079 lowest_bit_set, 10);
2081 if (! SPARC_SETHI_P (focus_bits))
2082 abort ();
2084 sparc_emit_set_safe_HIGH64 (temp, focus_bits);
2086 /* If lowest_bit_set == 10 then a sethi alone could have done it. */
2087 if (lowest_bit_set < 10)
2088 emit_insn (gen_rtx_SET (VOIDmode,
2089 op0,
2090 gen_rtx_LSHIFTRT (DImode, temp,
2091 GEN_INT (10 - lowest_bit_set))));
2092 else if (lowest_bit_set > 10)
2093 emit_insn (gen_rtx_SET (VOIDmode,
2094 op0,
2095 gen_rtx_ASHIFT (DImode, temp,
2096 GEN_INT (lowest_bit_set - 10))));
2097 else
2098 abort ();
2099 return;
2102 /* 1) sethi %hi(low_bits), %reg
2103 * or %reg, %lo(low_bits), %reg
2104 * 2) sethi %hi(~low_bits), %reg
2105 * xor %reg, %lo(-0x400 | (low_bits & 0x3ff)), %reg
2107 if (high_bits == 0
2108 || high_bits == 0xffffffff)
2110 sparc_emit_set_const64_quick1 (op0, temp, low_bits,
2111 (high_bits == 0xffffffff));
2112 return;
2115 /* Now, try 3-insn sequences. */
2117 /* 1) sethi %hi(high_bits), %reg
2118 * or %reg, %lo(high_bits), %reg
2119 * sllx %reg, 32, %reg
2121 if (low_bits == 0)
2123 sparc_emit_set_const64_quick2 (op0, temp, high_bits, 0, 32);
2124 return;
2127 /* We may be able to do something quick
2128 when the constant is negated, so try that. */
2129 if (const64_is_2insns ((~high_bits) & 0xffffffff,
2130 (~low_bits) & 0xfffffc00))
2132 /* NOTE: The trailing bits get XOR'd so we need the
2133 non-negated bits, not the negated ones. */
2134 unsigned HOST_WIDE_INT trailing_bits = low_bits & 0x3ff;
2136 if ((((~high_bits) & 0xffffffff) == 0
2137 && ((~low_bits) & 0x80000000) == 0)
2138 || (((~high_bits) & 0xffffffff) == 0xffffffff
2139 && ((~low_bits) & 0x80000000) != 0))
2141 int fast_int = (~low_bits & 0xffffffff);
2143 if ((SPARC_SETHI_P (fast_int)
2144 && (~high_bits & 0xffffffff) == 0)
2145 || SPARC_SIMM13_P (fast_int))
2146 emit_insn (gen_safe_SET64 (temp, fast_int));
2147 else
2148 sparc_emit_set_const64 (temp, GEN_INT64 (fast_int));
2150 else
2152 rtx negated_const;
2153 #if HOST_BITS_PER_WIDE_INT == 64
2154 negated_const = GEN_INT (((~low_bits) & 0xfffffc00) |
2155 (((HOST_WIDE_INT)((~high_bits) & 0xffffffff))<<32));
2156 #else
2157 negated_const = immed_double_const ((~low_bits) & 0xfffffc00,
2158 (~high_bits) & 0xffffffff,
2159 DImode);
2160 #endif
2161 sparc_emit_set_const64 (temp, negated_const);
2164 /* If we are XOR'ing with -1, then we should emit a one's complement
2165 instead. This way the combiner will notice logical operations
2166 such as ANDN later on and substitute. */
2167 if (trailing_bits == 0x3ff)
2169 emit_insn (gen_rtx_SET (VOIDmode, op0,
2170 gen_rtx_NOT (DImode, temp)));
2172 else
2174 emit_insn (gen_rtx_SET (VOIDmode,
2175 op0,
2176 gen_safe_XOR64 (temp,
2177 (-0x400 | trailing_bits))));
2179 return;
2182 /* 1) sethi %hi(xxx), %reg
2183 * or %reg, %lo(xxx), %reg
2184 * sllx %reg, yyy, %reg
2186 * ??? This is just a generalized version of the low_bits==0
2187 * thing above, FIXME...
2189 if ((highest_bit_set - lowest_bit_set) < 32)
2191 unsigned HOST_WIDE_INT focus_bits =
2192 create_simple_focus_bits (high_bits, low_bits,
2193 lowest_bit_set, 0);
2195 /* We can't get here in this state. */
2196 if (highest_bit_set < 32
2197 || lowest_bit_set >= 32)
2198 abort ();
2200 /* So what we know is that the set bits straddle the
2201 middle of the 64-bit word. */
2202 sparc_emit_set_const64_quick2 (op0, temp,
2203 focus_bits, 0,
2204 lowest_bit_set);
2205 return;
2208 /* 1) sethi %hi(high_bits), %reg
2209 * or %reg, %lo(high_bits), %reg
2210 * sllx %reg, 32, %reg
2211 * or %reg, low_bits, %reg
2213 if (SPARC_SIMM13_P(low_bits)
2214 && ((int)low_bits > 0))
2216 sparc_emit_set_const64_quick2 (op0, temp, high_bits, low_bits, 32);
2217 return;
2220 /* The easiest way, when all else fails, is full decomposition. */
2221 #if 0
2222 printf ("sparc_emit_set_const64: Hard constant [%08lx%08lx] neg[%08lx%08lx]\n",
2223 high_bits, low_bits, ~high_bits, ~low_bits);
2224 #endif
2225 sparc_emit_set_const64_longway (op0, temp, high_bits, low_bits);
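/* Two hand-checked examples of the case analysis above (illustrative, not
   compiler output):
     0x00000000abcd1234  high_bits == 0, so the quick1 path gives
                           sethi  %hi(0xabcd1234), %reg
                           or     %reg, %lo(0xabcd1234), %reg
     0x1234567800000000  low_bits == 0, so the quick2 path gives
                           sethi  %hi(0x12345678), %reg
                           or     %reg, %lo(0x12345678), %reg
                           sllx   %reg, 32, %reg
   Anything that falls through every special case above ends up in
   sparc_emit_set_const64_longway.  */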
2228 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
2229 return the mode to be used for the comparison. For floating-point,
2230 CCFP[E]mode is used. CC_NOOVmode should be used when the first operand
2231 is a PLUS, MINUS, NEG, or ASHIFT. CCmode should be used when no special
2232 processing is needed. */
2234 enum machine_mode
2235 select_cc_mode (op, x, y)
2236 enum rtx_code op;
2237 rtx x;
2238 rtx y ATTRIBUTE_UNUSED;
2240 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2242 switch (op)
2244 case EQ:
2245 case NE:
2246 case UNORDERED:
2247 case ORDERED:
2248 case UNLT:
2249 case UNLE:
2250 case UNGT:
2251 case UNGE:
2252 case UNEQ:
2253 case LTGT:
2254 return CCFPmode;
2256 case LT:
2257 case LE:
2258 case GT:
2259 case GE:
2260 return CCFPEmode;
2262 default:
2263 abort ();
2266 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
2267 || GET_CODE (x) == NEG || GET_CODE (x) == ASHIFT)
2269 if (TARGET_ARCH64 && GET_MODE (x) == DImode)
2270 return CCX_NOOVmode;
2271 else
2272 return CC_NOOVmode;
2274 else
2276 if (TARGET_ARCH64 && GET_MODE (x) == DImode)
2277 return CCXmode;
2278 else
2279 return CCmode;
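/* Illustrative mapping of the cases above: an integer comparison of DImode
   values on TARGET_ARCH64 yields CCXmode (%xcc), the SImode equivalent
   yields CCmode (%icc); if the first operand is a PLUS, MINUS, NEG or
   ASHIFT the _NOOV variant is chosen instead; for floats, LT/LE/GT/GE get
   CCFPEmode while EQ/NE and the unordered comparisons get CCFPmode.  */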
2283 /* X and Y are two things to compare using CODE. Emit the compare insn and
2284 return the rtx for the cc reg in the proper mode. */
2287 gen_compare_reg (code, x, y)
2288 enum rtx_code code;
2289 rtx x, y;
2291 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
2292 rtx cc_reg;
2294 /* ??? We don't have movcc patterns, so we cannot generate pseudo regs for the
2295 fcc regs (cse can't tell they're really call-clobbered regs and will
2296 remove a duplicate comparison even if there is an intervening function
2297 call; it will then try to reload the cc reg via an int reg, which is why
2298 we need the movcc patterns). It is possible to provide the movcc
2299 patterns by using the ldxfsr/stxfsr v9 insns. I tried it: you need two
2300 registers (say %g1,%g5) and it takes about 6 insns. A better fix would be
2301 to tell cse that CCFPE mode registers (even pseudos) are call
2302 clobbered. */
2304 /* ??? This is an experiment. Rather than making changes to cse which may
2305 or may not be easy/clean, we do our own cse. This is possible because
2306 we will generate hard registers. Cse knows they're call clobbered (it
2307 doesn't know the same thing about pseudos). If we guess wrong, no big
2308 deal, but if we win, great! */
2310 if (TARGET_V9 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2311 #if 1 /* experiment */
2313 int reg;
2314 /* We cycle through the registers to ensure they're all exercised. */
2315 static int next_fcc_reg = 0;
2316 /* Previous x,y for each fcc reg. */
2317 static rtx prev_args[4][2];
2319 /* Scan prev_args for x,y. */
2320 for (reg = 0; reg < 4; reg++)
2321 if (prev_args[reg][0] == x && prev_args[reg][1] == y)
2322 break;
2323 if (reg == 4)
2325 reg = next_fcc_reg;
2326 prev_args[reg][0] = x;
2327 prev_args[reg][1] = y;
2328 next_fcc_reg = (next_fcc_reg + 1) & 3;
2330 cc_reg = gen_rtx_REG (mode, reg + SPARC_FIRST_V9_FCC_REG);
2332 #else
2333 cc_reg = gen_reg_rtx (mode);
2334 #endif /* ! experiment */
2335 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2336 cc_reg = gen_rtx_REG (mode, SPARC_FCC_REG);
2337 else
2338 cc_reg = gen_rtx_REG (mode, SPARC_ICC_REG);
2340 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
2341 gen_rtx_COMPARE (mode, x, y)));
2343 return cc_reg;
2346 /* This function is used for v9 only.
2347 CODE is the code for an Scc's comparison.
2348 OPERANDS[0] is the target of the Scc insn.
2349 OPERANDS[1] is the value we compare against const0_rtx (which hasn't
2350 been generated yet).
2352 This function is needed to turn
2354 (set (reg:SI 110)
2355 (gt (reg:CCX 100 %icc)
2356 (const_int 0)))
2357 into
2358 (set (reg:SI 110)
2359 (gt:DI (reg:CCX 100 %icc)
2360 (const_int 0)))
2362 I.e., the instruction recognizer needs to see the mode of the comparison to
2363 find the right instruction. We could use "gt:DI" right in the
2364 define_expand, but leaving it out allows us to handle DI, SI, etc.
2366 We refer to the global sparc compare operands sparc_compare_op0 and
2367 sparc_compare_op1. */
2370 gen_v9_scc (compare_code, operands)
2371 enum rtx_code compare_code;
2372 register rtx *operands;
2374 rtx temp, op0, op1;
2376 if (! TARGET_ARCH64
2377 && (GET_MODE (sparc_compare_op0) == DImode
2378 || GET_MODE (operands[0]) == DImode))
2379 return 0;
2381 op0 = sparc_compare_op0;
2382 op1 = sparc_compare_op1;
2384 /* Try to use the movrCC insns. */
2385 if (TARGET_ARCH64
2386 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
2387 && op1 == const0_rtx
2388 && v9_regcmp_p (compare_code))
2390 /* Special case for op0 != 0. This can be done with one instruction if
2391 operands[0] == sparc_compare_op0. */
2393 if (compare_code == NE
2394 && GET_MODE (operands[0]) == DImode
2395 && rtx_equal_p (op0, operands[0]))
2397 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2398 gen_rtx_IF_THEN_ELSE (DImode,
2399 gen_rtx_fmt_ee (compare_code, DImode,
2400 op0, const0_rtx),
2401 const1_rtx,
2402 operands[0])));
2403 return 1;
2406 if (reg_overlap_mentioned_p (operands[0], op0))
2408 /* Handle the case where operands[0] == sparc_compare_op0.
2409 We "early clobber" the result. */
2410 op0 = gen_reg_rtx (GET_MODE (sparc_compare_op0));
2411 emit_move_insn (op0, sparc_compare_op0);
2414 emit_insn (gen_rtx_SET (VOIDmode, operands[0], const0_rtx));
2415 if (GET_MODE (op0) != DImode)
2417 temp = gen_reg_rtx (DImode);
2418 convert_move (temp, op0, 0);
2420 else
2421 temp = op0;
2422 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2423 gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]),
2424 gen_rtx_fmt_ee (compare_code, DImode,
2425 temp, const0_rtx),
2426 const1_rtx,
2427 operands[0])));
2428 return 1;
2430 else
2432 operands[1] = gen_compare_reg (compare_code, op0, op1);
2434 switch (GET_MODE (operands[1]))
2436 case CCmode :
2437 case CCXmode :
2438 case CCFPEmode :
2439 case CCFPmode :
2440 break;
2441 default :
2442 abort ();
2444 emit_insn (gen_rtx_SET (VOIDmode, operands[0], const0_rtx));
2445 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2446 gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]),
2447 gen_rtx_fmt_ee (compare_code,
2448 GET_MODE (operands[1]),
2449 operands[1], const0_rtx),
2450 const1_rtx, operands[0])));
2451 return 1;
2455 /* Emit a conditional jump insn for the v9 architecture using comparison code
2456 CODE and jump target LABEL.
2457 This function exists to take advantage of the v9 brxx insns. */
2459 void
2460 emit_v9_brxx_insn (code, op0, label)
2461 enum rtx_code code;
2462 rtx op0, label;
2464 emit_jump_insn (gen_rtx_SET (VOIDmode,
2465 pc_rtx,
2466 gen_rtx_IF_THEN_ELSE (VOIDmode,
2467 gen_rtx_fmt_ee (code, GET_MODE (op0),
2468 op0, const0_rtx),
2469 gen_rtx_LABEL_REF (VOIDmode, label),
2470 pc_rtx)));
2473 /* Generate a DFmode part of a hard TFmode register.
2474 REG is the TFmode hard register; LOW is 1 for the
2475 low 64 bits of the register and 0 otherwise.
2478 gen_df_reg (reg, low)
2479 rtx reg;
2480 int low;
2482 int regno = REGNO (reg);
2484 if ((WORDS_BIG_ENDIAN == 0) ^ (low != 0))
2485 regno += (TARGET_ARCH64 && regno < 32) ? 1 : 2;
2486 return gen_rtx_REG (DFmode, regno);
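/* Example (illustrative, assuming the usual big-endian word order on
   SPARC): for a TFmode value in %f0, the high DFmode half is %f0 itself
   and the low half is %f2; for a TFmode value held in an even/odd integer
   register pair on TARGET_ARCH64, the low half is simply the next
   register.  */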
2489 /* Generate a call to FUNC with OPERANDS. Operand 0 is the return value.
2490 Unlike normal calls, TFmode operands are passed by reference. It is
2491 assumed that no more than 3 operands are required. */
2493 static void
2494 emit_soft_tfmode_libcall (func_name, nargs, operands)
2495 const char *func_name;
2496 int nargs;
2497 rtx *operands;
2499 rtx ret_slot = NULL, arg[3], func_sym;
2500 int i;
2502 /* We only expect to be called for conversions, unary, and binary ops. */
2503 if (nargs < 2 || nargs > 3)
2504 abort ();
2506 for (i = 0; i < nargs; ++i)
2508 rtx this_arg = operands[i];
2509 rtx this_slot;
2511 /* TFmode arguments and return values are passed by reference. */
2512 if (GET_MODE (this_arg) == TFmode)
2514 int force_stack_temp;
2516 force_stack_temp = 0;
2517 if (TARGET_BUGGY_QP_LIB && i == 0)
2518 force_stack_temp = 1;
2520 if (GET_CODE (this_arg) == MEM
2521 && ! force_stack_temp)
2522 this_arg = XEXP (this_arg, 0);
2523 else if (CONSTANT_P (this_arg)
2524 && ! force_stack_temp)
2526 this_slot = force_const_mem (TFmode, this_arg);
2527 this_arg = XEXP (this_slot, 0);
2529 else
2531 this_slot = assign_stack_temp (TFmode, GET_MODE_SIZE (TFmode), 0);
2533 /* Operand 0 is the return value. We'll copy it out later. */
2534 if (i > 0)
2535 emit_move_insn (this_slot, this_arg);
2536 else
2537 ret_slot = this_slot;
2539 this_arg = XEXP (this_slot, 0);
2543 arg[i] = this_arg;
2546 func_sym = gen_rtx_SYMBOL_REF (Pmode, func_name);
2548 if (GET_MODE (operands[0]) == TFmode)
2550 if (nargs == 2)
2551 emit_library_call (func_sym, LCT_NORMAL, VOIDmode, 2,
2552 arg[0], GET_MODE (arg[0]),
2553 arg[1], GET_MODE (arg[1]));
2554 else
2555 emit_library_call (func_sym, LCT_NORMAL, VOIDmode, 3,
2556 arg[0], GET_MODE (arg[0]),
2557 arg[1], GET_MODE (arg[1]),
2558 arg[2], GET_MODE (arg[2]));
2560 if (ret_slot)
2561 emit_move_insn (operands[0], ret_slot);
2563 else
2565 rtx ret;
2567 if (nargs != 2)
2568 abort ();
2570 ret = emit_library_call_value (func_sym, operands[0], LCT_NORMAL,
2571 GET_MODE (operands[0]), 1,
2572 arg[1], GET_MODE (arg[1]));
2574 if (ret != operands[0])
2575 emit_move_insn (operands[0], ret);
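/* For reference, a sketch of the SPARC V9 ABI convention these helpers
   follow (illustrative prototype, not a declaration the compiler uses):

     void _Qp_add (long double *result, const long double *a,
                   const long double *b);

   i.e. quads are passed and returned by address, which is why operand 0
   is turned into a pointer above and copied back out of ret_slot when the
   result is TFmode.  */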
2579 /* Expand soft-float TFmode calls to SPARC ABI routines. */
2581 static void
2582 emit_soft_tfmode_binop (code, operands)
2583 enum rtx_code code;
2584 rtx *operands;
2586 const char *func;
2588 switch (code)
2590 case PLUS:
2591 func = "_Qp_add";
2592 break;
2593 case MINUS:
2594 func = "_Qp_sub";
2595 break;
2596 case MULT:
2597 func = "_Qp_mul";
2598 break;
2599 case DIV:
2600 func = "_Qp_div";
2601 break;
2602 default:
2603 abort ();
2606 emit_soft_tfmode_libcall (func, 3, operands);
2609 static void
2610 emit_soft_tfmode_unop (code, operands)
2611 enum rtx_code code;
2612 rtx *operands;
2614 const char *func;
2616 switch (code)
2618 case SQRT:
2619 func = "_Qp_sqrt";
2620 break;
2621 default:
2622 abort ();
2625 emit_soft_tfmode_libcall (func, 2, operands);
2628 static void
2629 emit_soft_tfmode_cvt (code, operands)
2630 enum rtx_code code;
2631 rtx *operands;
2633 const char *func;
2635 switch (code)
2637 case FLOAT_EXTEND:
2638 switch (GET_MODE (operands[1]))
2640 case SFmode:
2641 func = "_Qp_stoq";
2642 break;
2643 case DFmode:
2644 func = "_Qp_dtoq";
2645 break;
2646 default:
2647 abort ();
2649 break;
2651 case FLOAT_TRUNCATE:
2652 switch (GET_MODE (operands[0]))
2654 case SFmode:
2655 func = "_Qp_qtos";
2656 break;
2657 case DFmode:
2658 func = "_Qp_qtod";
2659 break;
2660 default:
2661 abort ();
2663 break;
2665 case FLOAT:
2666 switch (GET_MODE (operands[1]))
2668 case SImode:
2669 func = "_Qp_itoq";
2670 break;
2671 case DImode:
2672 func = "_Qp_xtoq";
2673 break;
2674 default:
2675 abort ();
2677 break;
2679 case UNSIGNED_FLOAT:
2680 switch (GET_MODE (operands[1]))
2682 case SImode:
2683 func = "_Qp_uitoq";
2684 break;
2685 case DImode:
2686 func = "_Qp_uxtoq";
2687 break;
2688 default:
2689 abort ();
2691 break;
2693 case FIX:
2694 switch (GET_MODE (operands[0]))
2696 case SImode:
2697 func = "_Qp_qtoi";
2698 break;
2699 case DImode:
2700 func = "_Qp_qtox";
2701 break;
2702 default:
2703 abort ();
2705 break;
2707 case UNSIGNED_FIX:
2708 switch (GET_MODE (operands[0]))
2710 case SImode:
2711 func = "_Qp_qtoui";
2712 break;
2713 case DImode:
2714 func = "_Qp_qtoux";
2715 break;
2716 default:
2717 abort ();
2719 break;
2721 default:
2722 abort ();
2725 emit_soft_tfmode_libcall (func, 2, operands);
2728 /* Expand a hard-float TFmode operation. All arguments must be in
2729 registers. */
2731 static void
2732 emit_hard_tfmode_operation (code, operands)
2733 enum rtx_code code;
2734 rtx *operands;
2736 rtx op, dest;
2738 if (GET_RTX_CLASS (code) == '1')
2740 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
2741 op = gen_rtx_fmt_e (code, GET_MODE (operands[0]), operands[1]);
2743 else
2745 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
2746 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
2747 op = gen_rtx_fmt_ee (code, GET_MODE (operands[0]),
2748 operands[1], operands[2]);
2751 if (register_operand (operands[0], VOIDmode))
2752 dest = operands[0];
2753 else
2754 dest = gen_reg_rtx (GET_MODE (operands[0]));
2756 emit_insn (gen_rtx_SET (VOIDmode, dest, op));
2758 if (dest != operands[0])
2759 emit_move_insn (operands[0], dest);
2762 void
2763 emit_tfmode_binop (code, operands)
2764 enum rtx_code code;
2765 rtx *operands;
2767 if (TARGET_HARD_QUAD)
2768 emit_hard_tfmode_operation (code, operands);
2769 else
2770 emit_soft_tfmode_binop (code, operands);
2773 void
2774 emit_tfmode_unop (code, operands)
2775 enum rtx_code code;
2776 rtx *operands;
2778 if (TARGET_HARD_QUAD)
2779 emit_hard_tfmode_operation (code, operands);
2780 else
2781 emit_soft_tfmode_unop (code, operands);
2784 void
2785 emit_tfmode_cvt (code, operands)
2786 enum rtx_code code;
2787 rtx *operands;
2789 if (TARGET_HARD_QUAD)
2790 emit_hard_tfmode_operation (code, operands);
2791 else
2792 emit_soft_tfmode_cvt (code, operands);
2795 /* Return nonzero if a return peephole that merges the return with
2796 the setting of an output register is OK. */
2798 leaf_return_peephole_ok ()
2800 return (actual_fsize == 0);
2803 /* Return nonzero if a branch/jump/call instruction will end up with
2804 a nop in its delay slot. */
2807 empty_delay_slot (insn)
2808 rtx insn;
2810 rtx seq;
2812 /* If there is no previous instruction (should not happen), return true. */
2813 if (PREV_INSN (insn) == NULL)
2814 return 1;
2816 seq = NEXT_INSN (PREV_INSN (insn));
2817 if (GET_CODE (PATTERN (seq)) == SEQUENCE)
2818 return 0;
2820 return 1;
2823 /* Return nonzero if TRIAL can go into the function epilogue's
2824 delay slot. SLOT is the slot we are trying to fill. */
2827 eligible_for_epilogue_delay (trial, slot)
2828 rtx trial;
2829 int slot;
2831 rtx pat, src;
2833 if (slot >= 1)
2834 return 0;
2836 if (GET_CODE (trial) != INSN || GET_CODE (PATTERN (trial)) != SET)
2837 return 0;
2839 if (get_attr_length (trial) != 1)
2840 return 0;
2842 /* If there are any call-saved registers, we should scan TRIAL to check that
2843 it does not reference them. For now just play it safe and refuse. */
2844 if (num_gfregs)
2845 return 0;
2847 /* If the function uses __builtin_eh_return, the eh_return machinery
2848 occupies the delay slot. */
2849 if (current_function_calls_eh_return)
2850 return 0;
2852 /* In the case of a true leaf function, anything can go into the delay slot.
2853 A delay slot only exists, however, if the frame size is zero; otherwise
2854 we will put an insn to adjust the stack after the return. */
2855 if (current_function_uses_only_leaf_regs)
2857 if (leaf_return_peephole_ok ())
2858 return ((get_attr_in_uncond_branch_delay (trial)
2859 == IN_BRANCH_DELAY_TRUE));
2860 return 0;
2863 pat = PATTERN (trial);
2865 /* Otherwise, only operations which can be done in tandem with
2866 a `restore' or `return' insn can go into the delay slot. */
2867 if (GET_CODE (SET_DEST (pat)) != REG
2868 || REGNO (SET_DEST (pat)) < 24)
2869 return 0;
2871 /* If this instruction sets up a floating point register and we have a return
2872 instruction, it can probably go in. But restore will not work
2873 with FP_REGS. */
2874 if (REGNO (SET_DEST (pat)) >= 32)
2876 if (TARGET_V9 && ! epilogue_renumber (&pat, 1)
2877 && (get_attr_in_uncond_branch_delay (trial) == IN_BRANCH_DELAY_TRUE))
2878 return 1;
2879 return 0;
2882 /* The set of insns matched here must agree precisely with the set of
2883 patterns paired with a RETURN in sparc.md. */
2885 src = SET_SRC (pat);
2887 /* This matches "*return_[qhs]i" or even "*return_di" on TARGET_ARCH64. */
2888 if (GET_MODE_CLASS (GET_MODE (src)) != MODE_FLOAT
2889 && arith_operand (src, GET_MODE (src)))
2891 if (TARGET_ARCH64)
2892 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (DImode);
2893 else
2894 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (SImode);
2897 /* This matches "*return_di". */
2898 else if (GET_MODE_CLASS (GET_MODE (src)) != MODE_FLOAT
2899 && arith_double_operand (src, GET_MODE (src)))
2900 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (DImode);
2902 /* This matches "*return_sf_no_fpu". */
2903 else if (! TARGET_FPU && restore_operand (SET_DEST (pat), SFmode)
2904 && register_operand (src, SFmode))
2905 return 1;
2907 /* If we have a return instruction, anything that does not use
2908 local or output registers and can go into a delay slot wins. */
2909 else if (TARGET_V9 && ! epilogue_renumber (&pat, 1)
2910 && (get_attr_in_uncond_branch_delay (trial) == IN_BRANCH_DELAY_TRUE))
2911 return 1;
2913 /* This matches "*return_addsi". */
2914 else if (GET_CODE (src) == PLUS
2915 && arith_operand (XEXP (src, 0), SImode)
2916 && arith_operand (XEXP (src, 1), SImode)
2917 && (register_operand (XEXP (src, 0), SImode)
2918 || register_operand (XEXP (src, 1), SImode)))
2919 return 1;
2921 /* This matches "*return_adddi". */
2922 else if (GET_CODE (src) == PLUS
2923 && arith_double_operand (XEXP (src, 0), DImode)
2924 && arith_double_operand (XEXP (src, 1), DImode)
2925 && (register_operand (XEXP (src, 0), DImode)
2926 || register_operand (XEXP (src, 1), DImode)))
2927 return 1;
2929 /* This can match "*return_losum_[sd]i".
2930 Catch only some cases, so that return_losum* don't have
2931 to be too big. */
2932 else if (GET_CODE (src) == LO_SUM
2933 && ! TARGET_CM_MEDMID
2934 && ((register_operand (XEXP (src, 0), SImode)
2935 && immediate_operand (XEXP (src, 1), SImode))
2936 || (TARGET_ARCH64
2937 && register_operand (XEXP (src, 0), DImode)
2938 && immediate_operand (XEXP (src, 1), DImode))))
2939 return 1;
2941 /* sll{,x} reg,1,reg2 is equivalent to add reg,reg,reg2. */
2942 else if (GET_CODE (src) == ASHIFT
2943 && (register_operand (XEXP (src, 0), SImode)
2944 || register_operand (XEXP (src, 0), DImode))
2945 && XEXP (src, 1) == const1_rtx)
2946 return 1;
2948 return 0;
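/* Illustrative example (hand-written, not generated here): in a non-leaf
   function whose last insn is (set (reg:SI %i0) (plus:SI (reg:SI %i1)
   (reg:SI %i2))), the PLUS case above accepts it and the epilogue can be
   emitted as

     ret
     restore %i1, %i2, %o0

   since restore can perform the addition while the register window is
   being popped.  */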
2951 /* Return nonzero if TRIAL can go into the sibling call
2952 delay slot. */
2955 eligible_for_sibcall_delay (trial)
2956 rtx trial;
2958 rtx pat, src;
2960 if (GET_CODE (trial) != INSN || GET_CODE (PATTERN (trial)) != SET)
2961 return 0;
2963 if (get_attr_length (trial) != 1)
2964 return 0;
2966 pat = PATTERN (trial);
2968 if (current_function_uses_only_leaf_regs)
2970 /* If the tail call is done using the call instruction,
2971 we have to restore %o7 in the delay slot. */
2972 if ((TARGET_ARCH64 && ! TARGET_CM_MEDLOW) || flag_pic)
2973 return 0;
2975 /* %g1 is used to build the function address */
2976 if (reg_mentioned_p (gen_rtx_REG (Pmode, 1), pat))
2977 return 0;
2979 return 1;
2982 /* Otherwise, only operations which can be done in tandem with
2983 a `restore' insn can go into the delay slot. */
2984 if (GET_CODE (SET_DEST (pat)) != REG
2985 || REGNO (SET_DEST (pat)) < 24
2986 || REGNO (SET_DEST (pat)) >= 32)
2987 return 0;
2989 /* If it mentions %o7, it can't go in, because sibcall will clobber it
2990 in most cases. */
2991 if (reg_mentioned_p (gen_rtx_REG (Pmode, 15), pat))
2992 return 0;
2994 src = SET_SRC (pat);
2996 if (GET_MODE_CLASS (GET_MODE (src)) != MODE_FLOAT
2997 && arith_operand (src, GET_MODE (src)))
2999 if (TARGET_ARCH64)
3000 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (DImode);
3001 else
3002 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (SImode);
3005 else if (GET_MODE_CLASS (GET_MODE (src)) != MODE_FLOAT
3006 && arith_double_operand (src, GET_MODE (src)))
3007 return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (DImode);
3009 else if (! TARGET_FPU && restore_operand (SET_DEST (pat), SFmode)
3010 && register_operand (src, SFmode))
3011 return 1;
3013 else if (GET_CODE (src) == PLUS
3014 && arith_operand (XEXP (src, 0), SImode)
3015 && arith_operand (XEXP (src, 1), SImode)
3016 && (register_operand (XEXP (src, 0), SImode)
3017 || register_operand (XEXP (src, 1), SImode)))
3018 return 1;
3020 else if (GET_CODE (src) == PLUS
3021 && arith_double_operand (XEXP (src, 0), DImode)
3022 && arith_double_operand (XEXP (src, 1), DImode)
3023 && (register_operand (XEXP (src, 0), DImode)
3024 || register_operand (XEXP (src, 1), DImode)))
3025 return 1;
3027 else if (GET_CODE (src) == LO_SUM
3028 && ! TARGET_CM_MEDMID
3029 && ((register_operand (XEXP (src, 0), SImode)
3030 && immediate_operand (XEXP (src, 1), SImode))
3031 || (TARGET_ARCH64
3032 && register_operand (XEXP (src, 0), DImode)
3033 && immediate_operand (XEXP (src, 1), DImode))))
3034 return 1;
3036 else if (GET_CODE (src) == ASHIFT
3037 && (register_operand (XEXP (src, 0), SImode)
3038 || register_operand (XEXP (src, 0), DImode))
3039 && XEXP (src, 1) == const1_rtx)
3040 return 1;
3042 return 0;
3045 static int
3046 check_return_regs (x)
3047 rtx x;
3049 switch (GET_CODE (x))
3051 case REG:
3052 return IN_OR_GLOBAL_P (x);
3054 case CONST_INT:
3055 case CONST_DOUBLE:
3056 case CONST:
3057 case SYMBOL_REF:
3058 case LABEL_REF:
3059 return 1;
3061 case SET:
3062 case IOR:
3063 case AND:
3064 case XOR:
3065 case PLUS:
3066 case MINUS:
3067 if (check_return_regs (XEXP (x, 1)) == 0)
3068 return 0;
3069 case NOT:
3070 case NEG:
3071 case MEM:
3072 return check_return_regs (XEXP (x, 0));
3074 default:
3075 return 0;
3081 short_branch (uid1, uid2)
3082 int uid1, uid2;
3084 int delta = INSN_ADDRESSES (uid1) - INSN_ADDRESSES (uid2);
3086 /* Leave a few words of "slop". */
3087 if (delta >= -1023 && delta <= 1022)
3088 return 1;
3090 return 0;
3093 /* Return nonzero if REG is not used after INSN.
3094 We assume REG is a reload reg, and therefore does
3095 not live past labels or calls or jumps. */
3097 reg_unused_after (reg, insn)
3098 rtx reg;
3099 rtx insn;
3101 enum rtx_code code, prev_code = UNKNOWN;
3103 while ((insn = NEXT_INSN (insn)))
3105 if (prev_code == CALL_INSN && call_used_regs[REGNO (reg)])
3106 return 1;
3108 code = GET_CODE (insn);
3109 if (GET_CODE (insn) == CODE_LABEL)
3110 return 1;
3112 if (GET_RTX_CLASS (code) == 'i')
3114 rtx set = single_set (insn);
3115 int in_src = set && reg_overlap_mentioned_p (reg, SET_SRC (set));
3116 if (set && in_src)
3117 return 0;
3118 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
3119 return 1;
3120 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
3121 return 0;
3123 prev_code = code;
3125 return 1;
3128 /* The table we use to reference PIC data. */
3129 static GTY(()) rtx global_offset_table;
3131 /* The function we use to get at it. */
3132 static GTY(()) rtx get_pc_symbol;
3133 static char get_pc_symbol_name[256];
3135 /* Ensure that we are not using patterns that are not OK with PIC. */
3138 check_pic (i)
3139 int i;
3141 switch (flag_pic)
3143 case 1:
3144 if (GET_CODE (recog_data.operand[i]) == SYMBOL_REF
3145 || (GET_CODE (recog_data.operand[i]) == CONST
3146 && ! (GET_CODE (XEXP (recog_data.operand[i], 0)) == MINUS
3147 && (XEXP (XEXP (recog_data.operand[i], 0), 0)
3148 == global_offset_table)
3149 && (GET_CODE (XEXP (XEXP (recog_data.operand[i], 0), 1))
3150 == CONST))))
3151 abort ();
3152 case 2:
3153 default:
3154 return 1;
3158 /* Return true if X is an address which needs a temporary register when
3159 reloaded while generating PIC code. */
3162 pic_address_needs_scratch (x)
3163 rtx x;
3165 /* An address which is a symbolic operand plus a non-SMALL_INT constant needs a temp reg. */
3166 if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
3167 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3168 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3169 && ! SMALL_INT (XEXP (XEXP (x, 0), 1)))
3170 return 1;
3172 return 0;
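/* Example (illustrative): (const (plus (symbol_ref "x") (const_int 8192)))
   needs a scratch register, because 8192 does not fit in a signed 13-bit
   immediate, whereas a small offset such as 64 can be folded directly
   into the final memory reference.  */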
3175 /* Legitimize PIC addresses. If the address is already position-independent,
3176 we return ORIG. Newly generated position-independent addresses go into a
3177 reg. This is REG if nonzero, otherwise we allocate register(s) as
3178 necessary. */
3181 legitimize_pic_address (orig, mode, reg)
3182 rtx orig;
3183 enum machine_mode mode ATTRIBUTE_UNUSED;
3184 rtx reg;
3186 if (GET_CODE (orig) == SYMBOL_REF)
3188 rtx pic_ref, address;
3189 rtx insn;
3191 if (reg == 0)
3193 if (reload_in_progress || reload_completed)
3194 abort ();
3195 else
3196 reg = gen_reg_rtx (Pmode);
3199 if (flag_pic == 2)
3201 /* If not during reload, allocate another temp reg here for loading
3202 in the address, so that these instructions can be optimized
3203 properly. */
3204 rtx temp_reg = ((reload_in_progress || reload_completed)
3205 ? reg : gen_reg_rtx (Pmode));
3207 /* Must put the SYMBOL_REF inside an UNSPEC here so that cse
3208 won't get confused into thinking that these two instructions
3209 are loading in the true address of the symbol. If in the
3210 future a PIC rtx exists, that should be used instead. */
3211 if (Pmode == SImode)
3213 emit_insn (gen_movsi_high_pic (temp_reg, orig));
3214 emit_insn (gen_movsi_lo_sum_pic (temp_reg, temp_reg, orig));
3216 else
3218 emit_insn (gen_movdi_high_pic (temp_reg, orig));
3219 emit_insn (gen_movdi_lo_sum_pic (temp_reg, temp_reg, orig));
3221 address = temp_reg;
3223 else
3224 address = orig;
3226 pic_ref = gen_rtx_MEM (Pmode,
3227 gen_rtx_PLUS (Pmode,
3228 pic_offset_table_rtx, address));
3229 current_function_uses_pic_offset_table = 1;
3230 RTX_UNCHANGING_P (pic_ref) = 1;
3231 insn = emit_move_insn (reg, pic_ref);
3232 /* Put a REG_EQUAL note on this insn, so that it can be optimized
3233 by the loop optimizer. */
3234 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
3235 REG_NOTES (insn));
3236 return reg;
3238 else if (GET_CODE (orig) == CONST)
3240 rtx base, offset;
3242 if (GET_CODE (XEXP (orig, 0)) == PLUS
3243 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
3244 return orig;
3246 if (reg == 0)
3248 if (reload_in_progress || reload_completed)
3249 abort ();
3250 else
3251 reg = gen_reg_rtx (Pmode);
3254 if (GET_CODE (XEXP (orig, 0)) == PLUS)
3256 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
3257 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
3258 base == reg ? 0 : reg);
3260 else
3261 abort ();
3263 if (GET_CODE (offset) == CONST_INT)
3265 if (SMALL_INT (offset))
3266 return plus_constant (base, INTVAL (offset));
3267 else if (! reload_in_progress && ! reload_completed)
3268 offset = force_reg (Pmode, offset);
3269 else
3270 /* If we reach here, then something is seriously wrong. */
3271 abort ();
3273 return gen_rtx_PLUS (Pmode, base, offset);
3275 else if (GET_CODE (orig) == LABEL_REF)
3276 /* ??? Why do we do this? */
3277 /* Now movsi_pic_label_ref uses it, but we ought to be checking that
3278 the register is live instead, in case it is eliminated. */
3279 current_function_uses_pic_offset_table = 1;
3281 return orig;
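/* Roughly, for -fPIC (flag_pic == 2) with Pmode == SImode the code
   emitted above for a global symbol amounts to (illustrative; the GOT
   relocations are produced by the assembler in PIC mode):

     sethi  %hi(sym), %tmp
     or     %tmp, %lo(sym), %tmp
     ld     [%l7 + %tmp], %reg     ! %l7 is the PIC register

   i.e. the symbol's GOT entry is loaded through the PIC register.  */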
3284 /* Emit special PIC prologues. */
3286 void
3287 load_pic_register ()
3289 /* Labels to get the PC in the prologue of this function. */
3290 int orig_flag_pic = flag_pic;
3292 if (! flag_pic)
3293 abort ();
3295 /* If we haven't emitted the special get_pc helper function, do so now. */
3296 if (get_pc_symbol_name[0] == 0)
3298 int align;
3300 ASM_GENERATE_INTERNAL_LABEL (get_pc_symbol_name, "LGETPC", 0);
3301 text_section ();
3303 align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
3304 if (align > 0)
3305 ASM_OUTPUT_ALIGN (asm_out_file, align);
3306 (*targetm.asm_out.internal_label) (asm_out_file, "LGETPC", 0);
3307 fputs ("\tretl\n\tadd\t%o7, %l7, %l7\n", asm_out_file);
3310 /* Initialize every time through, since we can't easily
3311 know this to be permanent. */
3312 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3313 get_pc_symbol = gen_rtx_SYMBOL_REF (Pmode, get_pc_symbol_name);
3314 flag_pic = 0;
3316 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
3317 get_pc_symbol));
3319 flag_pic = orig_flag_pic;
3321 /* Need to emit this whether or not we obey regdecls,
3322 since setjmp/longjmp can cause life info to screw up.
3323 ??? In the case where we don't obey regdecls, this is not sufficient
3324 since we may not fall out the bottom. */
3325 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
3328 /* Return 1 if RTX is a MEM which is known to be aligned to at
3329 least a DESIRED byte boundary. */
3332 mem_min_alignment (mem, desired)
3333 rtx mem;
3334 int desired;
3336 rtx addr, base, offset;
3338 /* If it's not a MEM we can't accept it. */
3339 if (GET_CODE (mem) != MEM)
3340 return 0;
3342 addr = XEXP (mem, 0);
3343 base = offset = NULL_RTX;
3344 if (GET_CODE (addr) == PLUS)
3346 if (GET_CODE (XEXP (addr, 0)) == REG)
3348 base = XEXP (addr, 0);
3350 /* What we are saying here is that if the base
3351 REG is aligned properly, the compiler will make
3352 sure any REG-based index upon it will be aligned
3353 as well. */
3354 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3355 offset = XEXP (addr, 1);
3356 else
3357 offset = const0_rtx;
3360 else if (GET_CODE (addr) == REG)
3362 base = addr;
3363 offset = const0_rtx;
3366 if (base != NULL_RTX)
3368 int regno = REGNO (base);
3370 if (regno != HARD_FRAME_POINTER_REGNUM && regno != STACK_POINTER_REGNUM)
3372 /* Check if the compiler has recorded some information
3373 about the alignment of the base REG. If reload has
3374 completed, we already matched with proper alignments.
3375 If not running global_alloc, reload might give us an
3376 unaligned pointer to the local stack, though. */
3377 if (((cfun != 0
3378 && REGNO_POINTER_ALIGN (regno) >= desired * BITS_PER_UNIT)
3379 || (optimize && reload_completed))
3380 && (INTVAL (offset) & (desired - 1)) == 0)
3381 return 1;
3383 else
3385 if (((INTVAL (offset) - SPARC_STACK_BIAS) & (desired - 1)) == 0)
3386 return 1;
3389 else if (! TARGET_UNALIGNED_DOUBLES
3390 || CONSTANT_P (addr)
3391 || GET_CODE (addr) == LO_SUM)
3393 /* Anything else we know is properly aligned unless TARGET_UNALIGNED_DOUBLES
3394 is true, in which case we can only assume that an access is aligned if
3395 it is to a constant address, or the address involves a LO_SUM. */
3396 return 1;
3399 /* An obviously unaligned address. */
3400 return 0;
3404 /* Vectors to keep interesting information about registers where it can easily
3405 be found. We used to use the actual mode value as the bit number, but there
3406 are more than 32 modes now. Instead we use two tables: one indexed by
3407 hard register number, and one indexed by mode. */
3409 /* The purpose of sparc_mode_class is to shrink the range of modes so that
3410 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
3411 mapped into one sparc_mode_class mode. */
3413 enum sparc_mode_class {
3414 S_MODE, D_MODE, T_MODE, O_MODE,
3415 SF_MODE, DF_MODE, TF_MODE, OF_MODE,
3416 CC_MODE, CCFP_MODE
3419 /* Modes for single-word and smaller quantities. */
3420 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
3422 /* Modes for double-word and smaller quantities. */
3423 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
3425 /* Modes for quad-word and smaller quantities. */
3426 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
3428 /* Modes for 8-word and smaller quantities. */
3429 #define O_MODES (T_MODES | (1 << (int) O_MODE) | (1 << (int) OF_MODE))
3431 /* Modes for single-float quantities. We must allow any single word or
3432 smaller quantity. This is because the fix/float conversion instructions
3433 take integer inputs/outputs from the float registers. */
3434 #define SF_MODES (S_MODES)
3436 /* Modes for double-float and smaller quantities. */
3437 #define DF_MODES (S_MODES | D_MODES)
3439 /* Modes for double-float only quantities. */
3440 #define DF_MODES_NO_S ((1 << (int) D_MODE) | (1 << (int) DF_MODE))
3442 /* Modes for quad-float only quantities. */
3443 #define TF_ONLY_MODES (1 << (int) TF_MODE)
3445 /* Modes for quad-float and smaller quantities. */
3446 #define TF_MODES (DF_MODES | TF_ONLY_MODES)
3448 /* Modes for quad-float and double-float quantities. */
3449 #define TF_MODES_NO_S (DF_MODES_NO_S | TF_ONLY_MODES)
3451 /* Modes for quad-float pair only quantities. */
3452 #define OF_ONLY_MODES (1 << (int) OF_MODE)
3454 /* Modes for quad-float pairs and smaller quantities. */
3455 #define OF_MODES (TF_MODES | OF_ONLY_MODES)
3457 #define OF_MODES_NO_S (TF_MODES_NO_S | OF_ONLY_MODES)
3459 /* Modes for condition codes. */
3460 #define CC_MODES (1 << (int) CC_MODE)
3461 #define CCFP_MODES (1 << (int) CCFP_MODE)
3463 /* Value is 1 if register/mode pair is acceptable on sparc.
3464 The funny mixture of D and T modes is because integer operations
3465 do not specially operate on tetra quantities, so non-quad-aligned
3466 registers can hold quadword quantities (except %o4 and %i4 because
3467 they cross fixed registers). */
3469 /* This points to either the 32 bit or the 64 bit version. */
3470 const int *hard_regno_mode_classes;
3472 static const int hard_32bit_mode_classes[] = {
3473 S_MODES, S_MODES, T_MODES, S_MODES, T_MODES, S_MODES, D_MODES, S_MODES,
3474 T_MODES, S_MODES, T_MODES, S_MODES, D_MODES, S_MODES, D_MODES, S_MODES,
3475 T_MODES, S_MODES, T_MODES, S_MODES, T_MODES, S_MODES, D_MODES, S_MODES,
3476 T_MODES, S_MODES, T_MODES, S_MODES, D_MODES, S_MODES, D_MODES, S_MODES,
3478 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3479 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3480 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3481 OF_MODES, SF_MODES, DF_MODES, SF_MODES, TF_MODES, SF_MODES, DF_MODES, SF_MODES,
3483 /* FP regs f32 to f63. Only the even numbered registers actually exist,
3484 and none can hold SFmode/SImode values. */
3485 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3486 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3487 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3488 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, TF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3490 /* %fcc[0123] */
3491 CCFP_MODES, CCFP_MODES, CCFP_MODES, CCFP_MODES,
3493 /* %icc */
3494 CC_MODES
3497 static const int hard_64bit_mode_classes[] = {
3498 D_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES,
3499 O_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES,
3500 T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES,
3501 O_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES, T_MODES, D_MODES,
3503 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3504 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3505 OF_MODES, SF_MODES, DF_MODES, SF_MODES, OF_MODES, SF_MODES, DF_MODES, SF_MODES,
3506 OF_MODES, SF_MODES, DF_MODES, SF_MODES, TF_MODES, SF_MODES, DF_MODES, SF_MODES,
3508 /* FP regs f32 to f63. Only the even numbered registers actually exist,
3509 and none can hold SFmode/SImode values. */
3510 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3511 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3512 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, OF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3513 OF_MODES_NO_S, 0, DF_MODES_NO_S, 0, TF_MODES_NO_S, 0, DF_MODES_NO_S, 0,
3515 /* %fcc[0123] */
3516 CCFP_MODES, CCFP_MODES, CCFP_MODES, CCFP_MODES,
3518 /* %icc */
3519 CC_MODES
3522 int sparc_mode_class [NUM_MACHINE_MODES];
3524 enum reg_class sparc_regno_reg_class[FIRST_PSEUDO_REGISTER];
3526 static void
3527 sparc_init_modes ()
3529 int i;
3531 for (i = 0; i < NUM_MACHINE_MODES; i++)
3533 switch (GET_MODE_CLASS (i))
3535 case MODE_INT:
3536 case MODE_PARTIAL_INT:
3537 case MODE_COMPLEX_INT:
3538 if (GET_MODE_SIZE (i) <= 4)
3539 sparc_mode_class[i] = 1 << (int) S_MODE;
3540 else if (GET_MODE_SIZE (i) == 8)
3541 sparc_mode_class[i] = 1 << (int) D_MODE;
3542 else if (GET_MODE_SIZE (i) == 16)
3543 sparc_mode_class[i] = 1 << (int) T_MODE;
3544 else if (GET_MODE_SIZE (i) == 32)
3545 sparc_mode_class[i] = 1 << (int) O_MODE;
3546 else
3547 sparc_mode_class[i] = 0;
3548 break;
3549 case MODE_FLOAT:
3550 case MODE_COMPLEX_FLOAT:
3551 if (GET_MODE_SIZE (i) <= 4)
3552 sparc_mode_class[i] = 1 << (int) SF_MODE;
3553 else if (GET_MODE_SIZE (i) == 8)
3554 sparc_mode_class[i] = 1 << (int) DF_MODE;
3555 else if (GET_MODE_SIZE (i) == 16)
3556 sparc_mode_class[i] = 1 << (int) TF_MODE;
3557 else if (GET_MODE_SIZE (i) == 32)
3558 sparc_mode_class[i] = 1 << (int) OF_MODE;
3559 else
3560 sparc_mode_class[i] = 0;
3561 break;
3562 case MODE_CC:
3563 default:
3564 /* mode_class hasn't been initialized yet for EXTRA_CC_MODES, so
3565 we must explicitly check for them here. */
3566 if (i == (int) CCFPmode || i == (int) CCFPEmode)
3567 sparc_mode_class[i] = 1 << (int) CCFP_MODE;
3568 else if (i == (int) CCmode || i == (int) CC_NOOVmode
3569 || i == (int) CCXmode || i == (int) CCX_NOOVmode)
3570 sparc_mode_class[i] = 1 << (int) CC_MODE;
3571 else
3572 sparc_mode_class[i] = 0;
3573 break;
3577 if (TARGET_ARCH64)
3578 hard_regno_mode_classes = hard_64bit_mode_classes;
3579 else
3580 hard_regno_mode_classes = hard_32bit_mode_classes;
3582 /* Initialize the array used by REGNO_REG_CLASS. */
3583 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3585 if (i < 16 && TARGET_V8PLUS)
3586 sparc_regno_reg_class[i] = I64_REGS;
3587 else if (i < 32 || i == FRAME_POINTER_REGNUM)
3588 sparc_regno_reg_class[i] = GENERAL_REGS;
3589 else if (i < 64)
3590 sparc_regno_reg_class[i] = FP_REGS;
3591 else if (i < 96)
3592 sparc_regno_reg_class[i] = EXTRA_FP_REGS;
3593 else if (i < 100)
3594 sparc_regno_reg_class[i] = FPCC_REGS;
3595 else
3596 sparc_regno_reg_class[i] = NO_REGS;
3600 /* Save non call used registers from LOW to HIGH at BASE+OFFSET.
3601 N_REGS is the number of 4-byte regs saved thus far. This applies even to
3602 v9 int regs as it simplifies the code. */
3604 static int
3605 save_regs (file, low, high, base, offset, n_regs, real_offset)
3606 FILE *file;
3607 int low, high;
3608 const char *base;
3609 int offset;
3610 int n_regs;
3611 int real_offset;
3613 int i;
3615 if (TARGET_ARCH64 && high <= 32)
3617 for (i = low; i < high; i++)
3619 if (regs_ever_live[i] && ! call_used_regs[i])
3621 fprintf (file, "\tstx\t%s, [%s+%d]\n",
3622 reg_names[i], base, offset + 4 * n_regs);
3623 if (dwarf2out_do_frame ())
3624 dwarf2out_reg_save ("", i, real_offset + 4 * n_regs);
3625 n_regs += 2;
3629 else
3631 for (i = low; i < high; i += 2)
3633 if (regs_ever_live[i] && ! call_used_regs[i])
3635 if (regs_ever_live[i+1] && ! call_used_regs[i+1])
3637 fprintf (file, "\tstd\t%s, [%s+%d]\n",
3638 reg_names[i], base, offset + 4 * n_regs);
3639 if (dwarf2out_do_frame ())
3641 char *l = dwarf2out_cfi_label ();
3642 dwarf2out_reg_save (l, i, real_offset + 4 * n_regs);
3643 dwarf2out_reg_save (l, i+1, real_offset + 4 * n_regs + 4);
3645 n_regs += 2;
3647 else
3649 fprintf (file, "\tst\t%s, [%s+%d]\n",
3650 reg_names[i], base, offset + 4 * n_regs);
3651 if (dwarf2out_do_frame ())
3652 dwarf2out_reg_save ("", i, real_offset + 4 * n_regs);
3653 n_regs += 2;
3656 else
3658 if (regs_ever_live[i+1] && ! call_used_regs[i+1])
3660 fprintf (file, "\tst\t%s, [%s+%d]\n",
3661 reg_names[i+1], base, offset + 4 * n_regs + 4);
3662 if (dwarf2out_do_frame ())
3663 dwarf2out_reg_save ("", i + 1, real_offset + 4 * n_regs + 4);
3664 n_regs += 2;
3669 return n_regs;
3672 /* Restore non call used registers from LOW to HIGH at BASE+OFFSET.
3674 N_REGS is the number of 4-byte regs saved thus far. This applies even to
3675 v9 int regs as it simplifies the code. */
3677 static int
3678 restore_regs (file, low, high, base, offset, n_regs)
3679 FILE *file;
3680 int low, high;
3681 const char *base;
3682 int offset;
3683 int n_regs;
3685 int i;
3687 if (TARGET_ARCH64 && high <= 32)
3689 for (i = low; i < high; i++)
3691 if (regs_ever_live[i] && ! call_used_regs[i])
3692 fprintf (file, "\tldx\t[%s+%d], %s\n",
3693 base, offset + 4 * n_regs, reg_names[i]),
3694 n_regs += 2;
3697 else
3699 for (i = low; i < high; i += 2)
3701 if (regs_ever_live[i] && ! call_used_regs[i])
3702 if (regs_ever_live[i+1] && ! call_used_regs[i+1])
3703 fprintf (file, "\tldd\t[%s+%d], %s\n",
3704 base, offset + 4 * n_regs, reg_names[i]),
3705 n_regs += 2;
3706 else
3707 fprintf (file, "\tld\t[%s+%d], %s\n",
3708 base, offset + 4 * n_regs, reg_names[i]),
3709 n_regs += 2;
3710 else if (regs_ever_live[i+1] && ! call_used_regs[i+1])
3711 fprintf (file, "\tld\t[%s+%d], %s\n",
3712 base, offset + 4 * n_regs + 4, reg_names[i+1]),
3713 n_regs += 2;
3716 return n_regs;
3719 /* Compute the frame size required by the function. This function is called
3720 during the reload pass and also by output_function_prologue(). */
3723 compute_frame_size (size, leaf_function)
3724 int size;
3725 int leaf_function;
3727 int n_regs = 0, i;
3728 int outgoing_args_size = (current_function_outgoing_args_size
3729 + REG_PARM_STACK_SPACE (current_function_decl));
3731 /* N_REGS is the number of 4-byte regs saved thus far. This applies
3732 even to v9 int regs to be consistent with save_regs/restore_regs. */
3734 if (TARGET_ARCH64)
3736 for (i = 0; i < 8; i++)
3737 if (regs_ever_live[i] && ! call_used_regs[i])
3738 n_regs += 2;
3740 else
3742 for (i = 0; i < 8; i += 2)
3743 if ((regs_ever_live[i] && ! call_used_regs[i])
3744 || (regs_ever_live[i+1] && ! call_used_regs[i+1]))
3745 n_regs += 2;
3748 for (i = 32; i < (TARGET_V9 ? 96 : 64); i += 2)
3749 if ((regs_ever_live[i] && ! call_used_regs[i])
3750 || (regs_ever_live[i+1] && ! call_used_regs[i+1]))
3751 n_regs += 2;
3753 /* Set up values for use in `function_epilogue'. */
3754 num_gfregs = n_regs;
3756 if (leaf_function && n_regs == 0
3757 && size == 0 && current_function_outgoing_args_size == 0)
3759 actual_fsize = apparent_fsize = 0;
3761 else
3763 /* We subtract STARTING_FRAME_OFFSET, remember it's negative. */
3764 apparent_fsize = (size - STARTING_FRAME_OFFSET + 7) & -8;
3765 apparent_fsize += n_regs * 4;
3766 actual_fsize = apparent_fsize + ((outgoing_args_size + 7) & -8);
3769 /* Make sure nothing can clobber our register windows.
3770 If a SAVE must be done, or there is a stack-local variable,
3771 the register window area must be allocated.
3772 ??? For v8 we apparently need an additional 8 bytes of reserved space. */
3773 if (leaf_function == 0 || size > 0)
3774 actual_fsize += (16 * UNITS_PER_WORD) + (TARGET_ARCH64 ? 0 : 8);
3776 return SPARC_STACK_ALIGN (actual_fsize);
3779 /* Build a (32 bit) big number in a register. */
3780 /* ??? We may be able to use the set macro here too. */
3782 static void
3783 build_big_number (file, num, reg)
3784 FILE *file;
3785 int num;
3786 const char *reg;
3788 if (num >= 0 || ! TARGET_ARCH64)
3790 fprintf (file, "\tsethi\t%%hi(%d), %s\n", num, reg);
3791 if ((num & 0x3ff) != 0)
3792 fprintf (file, "\tor\t%s, %%lo(%d), %s\n", reg, num, reg);
3794 else /* num < 0 && TARGET_ARCH64 */
3796 /* Sethi does not sign extend, so we must use a little trickery
3797 to use it for negative numbers. Invert the constant before
3798 loading it in, then use an xor immediate to flip the loaded bits
3799 (along with the upper 32 bits) back to the desired constant. This
3800 works because the sethi and xor immediate fields overlap. */
3801 int asize = num;
3802 int inv = ~asize;
3803 int low = -0x400 + (asize & 0x3FF);
3805 fprintf (file, "\tsethi\t%%hi(%d), %s\n\txor\t%s, %d, %s\n",
3806 inv, reg, reg, low, reg);
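/* For illustration only (not used by the compiler; the helper name is made
   up and a 32-bit int / 64-bit long long is assumed): the negative path
   above amounts to the following sketch.  For num == -4097, inv is 4096,
   the sethi loads 0x1000 with the upper bits clear, and the xor with -1
   flips every bit, yielding 0xffffffffffffefff, i.e. -4097.  */
#if 0
static long long
sketch_sethi_xor_negative (int num)
{
  int inv = ~num;                                /* constant actually loaded */
  int low = -0x400 + (num & 0x3FF);              /* simm13 xor immediate     */
  unsigned long long reg;

  reg = (unsigned int) (inv & ~0x3FF);           /* sethi %hi(inv), reg      */
  reg ^= (unsigned long long) (long long) low;   /* xor reg, low, reg        */
  return (long long) reg;                        /* equals (long long) num   */
}
#endif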
3810 /* Output any necessary .register pseudo-ops. */
3811 void
3812 sparc_output_scratch_registers (file)
3813 FILE *file ATTRIBUTE_UNUSED;
3815 #ifdef HAVE_AS_REGISTER_PSEUDO_OP
3816 int i;
3818 if (TARGET_ARCH32)
3819 return;
3821 /* Check if %g[2367] were used without
3822 .register being printed for them already. */
3823 for (i = 2; i < 8; i++)
3825 if (regs_ever_live [i]
3826 && ! sparc_hard_reg_printed [i])
3828 sparc_hard_reg_printed [i] = 1;
3829 fprintf (file, "\t.register\t%%g%d, #scratch\n", i);
3831 if (i == 3) i = 5;
3833 #endif
3836 /* This function generates the assembly code for function entry.
3837 FILE is a stdio stream to output the code to.
3838 SIZE is an int: how many units of temporary storage to allocate.
3839 Refer to the array `regs_ever_live' to determine which registers
3840 to save; `regs_ever_live[I]' is nonzero if register number I
3841 is ever used in the function. This macro is responsible for
3842 knowing which registers should not be saved even if used. */
3844 /* On SPARC, move-double insns between fpu and cpu need an 8-byte block
3845 of memory. If any fpu reg is used in the function, we allocate
3846 such a block here, at the bottom of the frame, just in case it's needed.
3848 If this function is a leaf procedure, then we may choose not
3849 to do a "save" insn. The decision about whether or not
3850 to do this is made in regclass.c. */
3852 static void
3853 sparc_output_function_prologue (file, size)
3854 FILE *file;
3855 HOST_WIDE_INT size;
3857 if (TARGET_FLAT)
3858 sparc_flat_function_prologue (file, size);
3859 else
3860 sparc_nonflat_function_prologue (file, size,
3861 current_function_uses_only_leaf_regs);
3864 /* Output code for the function prologue. */
3866 static void
3867 sparc_nonflat_function_prologue (file, size, leaf_function)
3868 FILE *file;
3869 HOST_WIDE_INT size;
3870 int leaf_function;
3872 sparc_output_scratch_registers (file);
3874 /* Need to use actual_fsize, since we are also allocating
3875 space for our callee (and our own register save area). */
3876 actual_fsize = compute_frame_size (size, leaf_function);
3878 if (leaf_function)
3880 frame_base_name = "%sp";
3881 frame_base_offset = actual_fsize + SPARC_STACK_BIAS;
3883 else
3885 frame_base_name = "%fp";
3886 frame_base_offset = SPARC_STACK_BIAS;
3889 /* This is only for the human reader. */
3890 fprintf (file, "\t%s#PROLOGUE# 0\n", ASM_COMMENT_START);
3892 if (actual_fsize == 0)
3893 /* do nothing. */ ;
3894 else if (! leaf_function)
3896 if (actual_fsize <= 4096)
3897 fprintf (file, "\tsave\t%%sp, -%d, %%sp\n", actual_fsize);
3898 else if (actual_fsize <= 8192)
3900 fprintf (file, "\tsave\t%%sp, -4096, %%sp\n");
3901 fprintf (file, "\tadd\t%%sp, -%d, %%sp\n", actual_fsize - 4096);
3903 else
3905 build_big_number (file, -actual_fsize, "%g1");
3906 fprintf (file, "\tsave\t%%sp, %%g1, %%sp\n");
3909 else /* leaf function */
3911 if (actual_fsize <= 4096)
3912 fprintf (file, "\tadd\t%%sp, -%d, %%sp\n", actual_fsize);
3913 else if (actual_fsize <= 8192)
3915 fprintf (file, "\tadd\t%%sp, -4096, %%sp\n");
3916 fprintf (file, "\tadd\t%%sp, -%d, %%sp\n", actual_fsize - 4096);
3918 else
3920 build_big_number (file, -actual_fsize, "%g1");
3921 fprintf (file, "\tadd\t%%sp, %%g1, %%sp\n");
3925 if (dwarf2out_do_frame () && actual_fsize)
3927 char *label = dwarf2out_cfi_label ();
3929 /* The canonical frame address refers to the top of the frame. */
3930 dwarf2out_def_cfa (label, (leaf_function ? STACK_POINTER_REGNUM
3931 : HARD_FRAME_POINTER_REGNUM),
3932 frame_base_offset);
3934 if (! leaf_function)
3936 /* Note the register window save. This tells the unwinder that
3937 it needs to restore the window registers from the previous
3938 frame's window save area at 0(cfa). */
3939 dwarf2out_window_save (label);
3941 /* The return address (-8) is now in %i7. */
3942 dwarf2out_return_reg (label, 31);
3946 /* If doing anything with PIC, do it now. */
3947 if (! flag_pic)
3948 fprintf (file, "\t%s#PROLOGUE# 1\n", ASM_COMMENT_START);
3950 /* Call saved registers are saved just above the outgoing argument area. */
3951 if (num_gfregs)
3953 int offset, real_offset, n_regs;
3954 const char *base;
3956 real_offset = -apparent_fsize;
3957 offset = -apparent_fsize + frame_base_offset;
3958 if (offset < -4096 || offset + num_gfregs * 4 > 4096)
3960 /* ??? This might be optimized a little as %g1 might already have a
3961 value close enough that a single add insn will do. */
3962 /* ??? Although, all of this is probably only a temporary fix
3963 because if %g1 can hold a function result, then
3964 output_function_epilogue will lose (the result will get
3965 clobbered). */
3966 build_big_number (file, offset, "%g1");
3967 fprintf (file, "\tadd\t%s, %%g1, %%g1\n", frame_base_name);
3968 base = "%g1";
3969 offset = 0;
3971 else
3973 base = frame_base_name;
3976 n_regs = save_regs (file, 0, 8, base, offset, 0, real_offset);
3977 save_regs (file, 32, TARGET_V9 ? 96 : 64, base, offset, n_regs,
3978 real_offset);
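/* Hand-written examples mirroring the branches above (illustrative): a
   non-leaf function with actual_fsize == 120 gets

     save   %sp, -120, %sp

   while a 32-bit non-leaf function with actual_fsize == 20000 gets

     sethi  %hi(-20000), %g1
     or     %g1, %lo(-20000), %g1
     save   %sp, %g1, %sp

   after which any call-saved registers are stored just above the outgoing
   argument area.  */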
3982 /* Output code to restore any call saved registers. */
3984 static void
3985 output_restore_regs (file, leaf_function)
3986 FILE *file;
3987 int leaf_function ATTRIBUTE_UNUSED;
3989 int offset, n_regs;
3990 const char *base;
3992 offset = -apparent_fsize + frame_base_offset;
3993 if (offset < -4096 || offset + num_gfregs * 4 > 4096 - 8 /*double*/)
3995 build_big_number (file, offset, "%g1");
3996 fprintf (file, "\tadd\t%s, %%g1, %%g1\n", frame_base_name);
3997 base = "%g1";
3998 offset = 0;
4000 else
4002 base = frame_base_name;
4005 n_regs = restore_regs (file, 0, 8, base, offset, 0);
4006 restore_regs (file, 32, TARGET_V9 ? 96 : 64, base, offset, n_regs);
4009 /* This function generates the assembly code for function exit,
4010 on machines that need it.
4012 The function epilogue should not depend on the current stack pointer!
4013 It should use the frame pointer only. This is mandatory because
4014 of alloca; we also take advantage of it to omit stack adjustments
4015 before returning. */
4017 static void
4018 sparc_output_function_epilogue (file, size)
4019 FILE *file;
4020 HOST_WIDE_INT size;
4022 if (TARGET_FLAT)
4023 sparc_flat_function_epilogue (file, size);
4024 else
4025 sparc_nonflat_function_epilogue (file, size,
4026 current_function_uses_only_leaf_regs);
4029 /* Output code for the function epilogue. */
4031 static void
4032 sparc_nonflat_function_epilogue (file, size, leaf_function)
4033 FILE *file;
4034 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
4035 int leaf_function;
4037 const char *ret;
4039 if (current_function_epilogue_delay_list == 0)
4041 /* If code does not drop into the epilogue, we need
4042 do nothing except output pending case vectors.
4044 We still have to output a dummy nop for the sake of
4045 sane backtraces. Otherwise, if the last two instructions
4046 of a function were "call foo; dslot", this can make the return
4047 PC of foo (i.e. the address of the call instruction plus 8) point to
4048 the first instruction in the next function. */
4049 rtx insn, last_real_insn;
4051 insn = get_last_insn ();
4053 last_real_insn = prev_real_insn (insn);
4054 if (last_real_insn
4055 && GET_CODE (last_real_insn) == INSN
4056 && GET_CODE (PATTERN (last_real_insn)) == SEQUENCE)
4057 last_real_insn = XVECEXP (PATTERN (last_real_insn), 0, 0);
4059 if (last_real_insn && GET_CODE (last_real_insn) == CALL_INSN)
4060 fputs("\tnop\n", file);
4062 if (GET_CODE (insn) == NOTE)
4063 insn = prev_nonnote_insn (insn);
4064 if (insn && GET_CODE (insn) == BARRIER)
4065 goto output_vectors;
4068 if (num_gfregs)
4069 output_restore_regs (file, leaf_function);
4071 /* Work out how to skip the caller's unimp instruction if required. */
4072 if (leaf_function)
4073 ret = (SKIP_CALLERS_UNIMP_P ? "jmp\t%o7+12" : "retl");
4074 else
4075 ret = (SKIP_CALLERS_UNIMP_P ? "jmp\t%i7+12" : "ret");
4077 if (! leaf_function)
4079 if (current_function_calls_eh_return)
4081 if (current_function_epilogue_delay_list)
4082 abort ();
4083 if (SKIP_CALLERS_UNIMP_P)
4084 abort ();
4086 fputs ("\trestore\n\tretl\n\tadd\t%sp, %g1, %sp\n", file);
4088 /* If we wound up with things in our delay slot, flush them here. */
4089 else if (current_function_epilogue_delay_list)
4091 rtx delay = PATTERN (XEXP (current_function_epilogue_delay_list, 0));
4093 if (TARGET_V9 && ! epilogue_renumber (&delay, 1))
4095 epilogue_renumber (&delay, 0);
4096 fputs (SKIP_CALLERS_UNIMP_P
4097 ? "\treturn\t%i7+12\n"
4098 : "\treturn\t%i7+8\n", file);
4099 final_scan_insn (XEXP (current_function_epilogue_delay_list, 0),
4100 file, 1, 0, 0);
4102 else
4104 rtx insn, src;
4106 if (GET_CODE (delay) != SET)
4107 abort();
4109 src = SET_SRC (delay);
4110 if (GET_CODE (src) == ASHIFT)
4112 if (XEXP (src, 1) != const1_rtx)
4113 abort();
4114 SET_SRC (delay)
4115 = gen_rtx_PLUS (GET_MODE (src), XEXP (src, 0),
4116 XEXP (src, 0));
4119 insn = gen_rtx_PARALLEL (VOIDmode,
4120 gen_rtvec (2, delay,
4121 gen_rtx_RETURN (VOIDmode)));
4122 insn = emit_jump_insn (insn);
4124 sparc_emitting_epilogue = true;
4125 final_scan_insn (insn, file, 1, 0, 1);
4126 sparc_emitting_epilogue = false;
4129 else if (TARGET_V9 && ! SKIP_CALLERS_UNIMP_P)
4130 fputs ("\treturn\t%i7+8\n\tnop\n", file);
4131 else
4132 fprintf (file, "\t%s\n\trestore\n", ret);
4134 /* All of the following cases are for leaf functions. */
4135 else if (current_function_calls_eh_return)
4136 abort ();
4137 else if (current_function_epilogue_delay_list)
4139 /* eligible_for_epilogue_delay_slot ensures that if this is a
4140 leaf function, then we will only have insn in the delay slot
4141 if the frame size is zero, thus no adjust for the stack is
4142 needed here. */
4143 if (actual_fsize != 0)
4144 abort ();
4145 fprintf (file, "\t%s\n", ret);
4146 final_scan_insn (XEXP (current_function_epilogue_delay_list, 0),
4147 file, 1, 0, 1);
4149 /* Output 'nop' instead of 'sub %sp,-0,%sp' when no frame, so as to
4150 avoid generating confusing assembly language output. */
4151 else if (actual_fsize == 0)
4152 fprintf (file, "\t%s\n\tnop\n", ret);
4153 else if (actual_fsize <= 4096)
4154 fprintf (file, "\t%s\n\tsub\t%%sp, -%d, %%sp\n", ret, actual_fsize);
4155 else if (actual_fsize <= 8192)
4156 fprintf (file, "\tsub\t%%sp, -4096, %%sp\n\t%s\n\tsub\t%%sp, -%d, %%sp\n",
4157 ret, actual_fsize - 4096);
4158 else if ((actual_fsize & 0x3ff) == 0)
4159 fprintf (file, "\tsethi\t%%hi(%d), %%g1\n\t%s\n\tadd\t%%sp, %%g1, %%sp\n",
4160 actual_fsize, ret);
4161 else
4162 fprintf (file, "\tsethi\t%%hi(%d), %%g1\n\tor\t%%g1, %%lo(%d), %%g1\n\t%s\n\tadd\t%%sp, %%g1, %%sp\n",
4163 actual_fsize, actual_fsize, ret);
4165 output_vectors:
4166 sparc_output_deferred_case_vectors ();
4169 /* Output a sibling call. */
4171 const char *
4172 output_sibcall (insn, call_operand)
4173 rtx insn, call_operand;
4175 int leaf_regs = current_function_uses_only_leaf_regs;
4176 rtx operands[3];
4177 int delay_slot = dbr_sequence_length () > 0;
4179 if (num_gfregs)
4181 /* Call to restore global regs might clobber
4182 the delay slot. Instead of checking for this
4183 output the delay slot now. */
4184 if (delay_slot)
4186 rtx delay = NEXT_INSN (insn);
4188 if (! delay)
4189 abort ();
4191 final_scan_insn (delay, asm_out_file, 1, 0, 1);
4192 PATTERN (delay) = gen_blockage ();
4193 INSN_CODE (delay) = -1;
4194 delay_slot = 0;
4196 output_restore_regs (asm_out_file, leaf_regs);
4199 operands[0] = call_operand;
4201 if (leaf_regs)
4203 #ifdef HAVE_AS_RELAX_OPTION
4204 /* If as and ld are relaxing tail call insns into branch always,
4205 use or %o7,%g0,X; call Y; or X,%g0,%o7 always, so that it can
4206 be optimized. With sethi/jmpl, neither as nor ld has an easy way
4207 to find out whether anything branches between the sethi and the jmpl. */
4208 int spare_slot = 0;
4209 #else
4210 int spare_slot = ((TARGET_ARCH32 || TARGET_CM_MEDLOW) && ! flag_pic);
4211 #endif
4212 int size = 0;
4214 if ((actual_fsize || ! spare_slot) && delay_slot)
4216 rtx delay = NEXT_INSN (insn);
4218 if (! delay)
4219 abort ();
4221 final_scan_insn (delay, asm_out_file, 1, 0, 1);
4222 PATTERN (delay) = gen_blockage ();
4223 INSN_CODE (delay) = -1;
4224 delay_slot = 0;
4226 if (actual_fsize)
4228 if (actual_fsize <= 4096)
4229 size = actual_fsize;
4230 else if (actual_fsize <= 8192)
4232 fputs ("\tsub\t%sp, -4096, %sp\n", asm_out_file);
4233 size = actual_fsize - 4096;
4235 else if ((actual_fsize & 0x3ff) == 0)
4236 fprintf (asm_out_file,
4237 "\tsethi\t%%hi(%d), %%g1\n\tadd\t%%sp, %%g1, %%sp\n",
4238 actual_fsize);
4239 else
4241 fprintf (asm_out_file,
4242 "\tsethi\t%%hi(%d), %%g1\n\tor\t%%g1, %%lo(%d), %%g1\n",
4243 actual_fsize, actual_fsize);
4244 fputs ("\tadd\t%sp, %g1, %sp\n", asm_out_file);
4247 if (spare_slot)
4249 output_asm_insn ("sethi\t%%hi(%a0), %%g1", operands);
4250 output_asm_insn ("jmpl\t%%g1 + %%lo(%a0), %%g0", operands);
4251 if (size)
4252 fprintf (asm_out_file, "\t sub\t%%sp, -%d, %%sp\n", size);
4253 else if (! delay_slot)
4254 fputs ("\t nop\n", asm_out_file);
4256 else
4258 if (size)
4259 fprintf (asm_out_file, "\tsub\t%%sp, -%d, %%sp\n", size);
4260 /* Use or with rs2 %%g0 instead of mov, so that as/ld can optimize
4261 it into a branch if possible. */
4262 output_asm_insn ("or\t%%o7, %%g0, %%g1", operands);
4263 output_asm_insn ("call\t%a0, 0", operands);
4264 output_asm_insn (" or\t%%g1, %%g0, %%o7", operands);
4266 return "";
4269 output_asm_insn ("call\t%a0, 0", operands);
4270 if (delay_slot)
4272 rtx delay = NEXT_INSN (insn), pat;
4274 if (! delay)
4275 abort ();
4277 pat = PATTERN (delay);
4278 if (GET_CODE (pat) != SET)
4279 abort ();
4281 operands[0] = SET_DEST (pat);
4282 pat = SET_SRC (pat);
4283 switch (GET_CODE (pat))
4285 case PLUS:
4286 operands[1] = XEXP (pat, 0);
4287 operands[2] = XEXP (pat, 1);
4288 output_asm_insn (" restore %r1, %2, %Y0", operands);
4289 break;
4290 case LO_SUM:
4291 operands[1] = XEXP (pat, 0);
4292 operands[2] = XEXP (pat, 1);
4293 output_asm_insn (" restore %r1, %%lo(%a2), %Y0", operands);
4294 break;
4295 case ASHIFT:
4296 operands[1] = XEXP (pat, 0);
4297 output_asm_insn (" restore %r1, %r1, %Y0", operands);
4298 break;
4299 default:
4300 operands[1] = pat;
4301 output_asm_insn (" restore %%g0, %1, %Y0", operands);
4302 break;
4304 PATTERN (delay) = gen_blockage ();
4305 INSN_CODE (delay) = -1;
4307 else
4308 fputs ("\t restore\n", asm_out_file);
4309 return "";
4312 /* Functions for handling argument passing.
4314 For v8 the first six args are normally in registers and the rest are
4315 pushed. Any arg that starts within the first 6 words is at least
4316 partially passed in a register unless its data type forbids.
4318 For v9, the argument registers are laid out as an array of 16 elements
4319 and arguments are added sequentially. The first 6 int args and up to the
4320 first 16 fp args (depending on size) are passed in regs.
4322 Slot Stack Integral Float Float in structure Double Long Double
4323 ---- ----- -------- ----- ------------------ ------ -----------
4324 15 [SP+248] %f31 %f30,%f31 %d30
4325 14 [SP+240] %f29 %f28,%f29 %d28 %q28
4326 13 [SP+232] %f27 %f26,%f27 %d26
4327 12 [SP+224] %f25 %f24,%f25 %d24 %q24
4328 11 [SP+216] %f23 %f22,%f23 %d22
4329 10 [SP+208] %f21 %f20,%f21 %d20 %q20
4330 9 [SP+200] %f19 %f18,%f19 %d18
4331 8 [SP+192] %f17 %f16,%f17 %d16 %q16
4332 7 [SP+184] %f15 %f14,%f15 %d14
4333 6 [SP+176] %f13 %f12,%f13 %d12 %q12
4334 5 [SP+168] %o5 %f11 %f10,%f11 %d10
4335 4 [SP+160] %o4 %f9 %f8,%f9 %d8 %q8
4336 3 [SP+152] %o3 %f7 %f6,%f7 %d6
4337 2 [SP+144] %o2 %f5 %f4,%f5 %d4 %q4
4338 1 [SP+136] %o1 %f3 %f2,%f3 %d2
4339 0 [SP+128] %o0 %f1 %f0,%f1 %d0 %q0
4341 Here SP = %sp if -mno-stack-bias or %sp+stack_bias otherwise.
4343 Integral arguments are always passed as 64 bit quantities appropriately
4344 extended.
4346 Passing of floating point values is handled as follows.
4347 If a prototype is in scope:
4348 If the value is in a named argument (i.e. not a stdarg function or a
4349 value not part of the `...') then the value is passed in the appropriate
4350 fp reg.
4351 If the value is part of the `...' and is passed in one of the first 6
4352 slots then the value is passed in the appropriate int reg.
4353 If the value is part of the `...' and is not passed in one of the first 6
4354 slots then the value is passed in memory.
4355 If a prototype is not in scope:
4356 If the value is one of the first 6 arguments the value is passed in the
4357 appropriate integer reg and the appropriate fp reg.
4358 If the value is not one of the first 6 arguments the value is passed in
4359 the appropriate fp reg and in memory.  */
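/* As an example of the rules above, with a prototype in scope a call
f (int, double, float) passes the int in %o0 (slot 0), the double in
%d2 (slot 1) and the float in %f5 (slot 2), as in the table; without
a prototype the float in slot 2 is passed in both %f5 and %o2.  */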
4362 /* Maximum number of int regs for args. */
4363 #define SPARC_INT_ARG_MAX 6
4364 /* Maximum number of fp regs for args. */
4365 #define SPARC_FP_ARG_MAX 16
4367 #define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
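/* ROUND_ADVANCE yields the number of argument slots a value of SIZE
bytes occupies; e.g. with 8 byte words ROUND_ADVANCE (12) is 2, and
with 4 byte words it is 3.  */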
4369 /* Handle the INIT_CUMULATIVE_ARGS macro.
4370 Initialize a variable CUM of type CUMULATIVE_ARGS
4371 for a call to a function whose data type is FNTYPE.
4372 For a library call, FNTYPE is 0. */
4374 void
4375 init_cumulative_args (cum, fntype, libname, fndecl)
4376 CUMULATIVE_ARGS *cum;
4377 tree fntype;
4378 rtx libname ATTRIBUTE_UNUSED;
4379 tree fndecl ATTRIBUTE_UNUSED;
4381 cum->words = 0;
4382 cum->prototype_p = fntype && TYPE_ARG_TYPES (fntype);
4383 cum->libcall_p = fntype == 0;
4386 /* Compute the slot number to pass an argument in.
4387 Returns the slot number or -1 if passing on the stack.
4389 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4390 the preceding args and about the function being called.
4391 MODE is the argument's machine mode.
4392 TYPE is the data type of the argument (as a tree).
4393 This is null for libcalls where that information may
4394 not be available.
4395 NAMED is nonzero if this argument is a named parameter
4396 (otherwise it is an extra parameter matching an ellipsis).
4397 INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
4398 *PREGNO records the register number to use if scalar type.
4399 *PPADDING records the amount of padding needed in words. */
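/* A v9 example, assuming the FPU is in use: a named TFmode (long
double) argument arriving when CUM->words is 3 is moved up to slot 4
with *PPADDING set to 1, and is assigned register %f8, i.e. the %q8
entry of the layout table above.  */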
4401 static int
4402 function_arg_slotno (cum, mode, type, named, incoming_p, pregno, ppadding)
4403 const CUMULATIVE_ARGS *cum;
4404 enum machine_mode mode;
4405 tree type;
4406 int named;
4407 int incoming_p;
4408 int *pregno;
4409 int *ppadding;
4411 int regbase = (incoming_p
4412 ? SPARC_INCOMING_INT_ARG_FIRST
4413 : SPARC_OUTGOING_INT_ARG_FIRST);
4414 int slotno = cum->words;
4415 int regno;
4417 *ppadding = 0;
4419 if (type != 0 && TREE_ADDRESSABLE (type))
4420 return -1;
4421 if (TARGET_ARCH32
4422 && type != 0 && mode == BLKmode
4423 && TYPE_ALIGN (type) % PARM_BOUNDARY != 0)
4424 return -1;
4426 switch (mode)
4428 case VOIDmode :
4429 /* MODE is VOIDmode when generating the actual call.
4430 See emit_call_1. */
4431 return -1;
4433 case QImode : case CQImode :
4434 case HImode : case CHImode :
4435 case SImode : case CSImode :
4436 case DImode : case CDImode :
4437 case TImode : case CTImode :
4438 if (slotno >= SPARC_INT_ARG_MAX)
4439 return -1;
4440 regno = regbase + slotno;
4441 break;
4443 case SFmode : case SCmode :
4444 case DFmode : case DCmode :
4445 case TFmode : case TCmode :
4446 if (TARGET_ARCH32)
4448 if (slotno >= SPARC_INT_ARG_MAX)
4449 return -1;
4450 regno = regbase + slotno;
4452 else
4454 if ((mode == TFmode || mode == TCmode)
4455 && (slotno & 1) != 0)
4456 slotno++, *ppadding = 1;
4457 if (TARGET_FPU && named)
4459 if (slotno >= SPARC_FP_ARG_MAX)
4460 return -1;
4461 regno = SPARC_FP_ARG_FIRST + slotno * 2;
4462 if (mode == SFmode)
4463 regno++;
4465 else
4467 if (slotno >= SPARC_INT_ARG_MAX)
4468 return -1;
4469 regno = regbase + slotno;
4472 break;
4474 case BLKmode :
4475 /* For sparc64, objects requiring 16 byte alignment get it. */
4476 if (TARGET_ARCH64)
4478 if (type && TYPE_ALIGN (type) == 128 && (slotno & 1) != 0)
4479 slotno++, *ppadding = 1;
4482 if (TARGET_ARCH32
4483 || (type && TREE_CODE (type) == UNION_TYPE))
4485 if (slotno >= SPARC_INT_ARG_MAX)
4486 return -1;
4487 regno = regbase + slotno;
4489 else
4491 tree field;
4492 int intregs_p = 0, fpregs_p = 0;
4493 /* The ABI obviously doesn't specify how packed
4494 structures are passed. These are defined to be passed
4495 in int regs if possible, otherwise memory. */
4496 int packed_p = 0;
4498 /* First see what kinds of registers we need. */
4499 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4501 if (TREE_CODE (field) == FIELD_DECL)
4503 if (TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4504 && TARGET_FPU)
4505 fpregs_p = 1;
4506 else
4507 intregs_p = 1;
4508 if (DECL_PACKED (field))
4509 packed_p = 1;
4512 if (packed_p || !named)
4513 fpregs_p = 0, intregs_p = 1;
4515 /* If all arg slots are filled, then must pass on stack. */
4516 if (fpregs_p && slotno >= SPARC_FP_ARG_MAX)
4517 return -1;
4518 /* If there are only int args and all int arg slots are filled,
4519 then must pass on stack. */
4520 if (!fpregs_p && intregs_p && slotno >= SPARC_INT_ARG_MAX)
4521 return -1;
4522 /* Note that even if all int arg slots are filled, fp members may
4523 still be passed in regs if such regs are available.
4524 *PREGNO isn't set because there may be more than one, it's up
4525 to the caller to compute them. */
4526 return slotno;
4528 break;
4530 default :
4531 abort ();
4534 *pregno = regno;
4535 return slotno;
4538 /* Handle recursive register counting for structure field layout. */
4540 struct function_arg_record_value_parms
4542 rtx ret; /* return expression being built. */
4543 int slotno; /* slot number of the argument. */
4544 int named; /* whether the argument is named. */
4545 int regbase; /* regno of the base register. */
4546 int stack; /* 1 if part of the argument is on the stack. */
4547 int intoffset; /* offset of the pending integer field. */
4548 unsigned int nregs; /* number of words passed in registers. */
4551 static void function_arg_record_value_3
4552 PARAMS ((HOST_WIDE_INT, struct function_arg_record_value_parms *));
4553 static void function_arg_record_value_2
4554 PARAMS ((tree, HOST_WIDE_INT,
4555 struct function_arg_record_value_parms *));
4556 static void function_arg_record_value_1
4557 PARAMS ((tree, HOST_WIDE_INT,
4558 struct function_arg_record_value_parms *));
4559 static rtx function_arg_record_value
4560 PARAMS ((tree, enum machine_mode, int, int, int));
4562 /* A subroutine of function_arg_record_value. Traverse the structure
4563 recursively and determine how many registers will be required. */
4565 static void
4566 function_arg_record_value_1 (type, startbitpos, parms)
4567 tree type;
4568 HOST_WIDE_INT startbitpos;
4569 struct function_arg_record_value_parms *parms;
4571 tree field;
4573 /* The ABI obviously doesn't specify how packed structures are
4574 passed. These are defined to be passed in int regs if possible,
4575 otherwise memory. */
4576 int packed_p = 0;
4578 /* We need to compute how many registers are needed so we can
4579 allocate the PARALLEL but before we can do that we need to know
4580 whether there are any packed fields. If there are, int regs are
4581 used regardless of whether there are fp values present. */
4582 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4584 if (TREE_CODE (field) == FIELD_DECL && DECL_PACKED (field))
4586 packed_p = 1;
4587 break;
4591 /* Compute how many registers we need. */
4592 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4594 if (TREE_CODE (field) == FIELD_DECL)
4596 HOST_WIDE_INT bitpos = startbitpos;
4598 if (DECL_SIZE (field) != 0
4599 && host_integerp (bit_position (field), 1))
4600 bitpos += int_bit_position (field);
4602 /* ??? FIXME: else assume zero offset. */
4604 if (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE)
4605 function_arg_record_value_1 (TREE_TYPE (field), bitpos, parms);
4606 else if ((TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4607 || (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE
4608 && (TREE_CODE (TREE_TYPE (TREE_TYPE (field)))
4609 == REAL_TYPE)))
4610 && TARGET_FPU
4611 && ! packed_p
4612 && parms->named)
4614 if (parms->intoffset != -1)
4616 int intslots, this_slotno;
4618 intslots = (bitpos - parms->intoffset + BITS_PER_WORD - 1)
4619 / BITS_PER_WORD;
4620 this_slotno = parms->slotno + parms->intoffset
4621 / BITS_PER_WORD;
4623 if (intslots > 0 && intslots > SPARC_INT_ARG_MAX - this_slotno)
4625 intslots = MAX (0, SPARC_INT_ARG_MAX - this_slotno);
4626 /* We need to pass this field on the stack. */
4627 parms->stack = 1;
4630 parms->nregs += intslots;
4631 parms->intoffset = -1;
4634 /* There's no need to check this_slotno < SPARC_FP_ARG_MAX.
4635 If it wasn't true we wouldn't be here. */
4636 parms->nregs += 1;
4637 if (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE)
4638 parms->nregs += 1;
4640 else
4642 if (parms->intoffset == -1)
4643 parms->intoffset = bitpos;
4649 /* A subroutine of function_arg_record_value. Assign the bits of the
4650 structure between parms->intoffset and bitpos to integer registers. */
4652 static void
4653 function_arg_record_value_3 (bitpos, parms)
4654 HOST_WIDE_INT bitpos;
4655 struct function_arg_record_value_parms *parms;
4657 enum machine_mode mode;
4658 unsigned int regno;
4659 unsigned int startbit, endbit;
4660 int this_slotno, intslots, intoffset;
4661 rtx reg;
4663 if (parms->intoffset == -1)
4664 return;
4666 intoffset = parms->intoffset;
4667 parms->intoffset = -1;
4669 startbit = intoffset & -BITS_PER_WORD;
4670 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4671 intslots = (endbit - startbit) / BITS_PER_WORD;
4672 this_slotno = parms->slotno + intoffset / BITS_PER_WORD;
4674 intslots = MIN (intslots, SPARC_INT_ARG_MAX - this_slotno);
4675 if (intslots <= 0)
4676 return;
4678 /* If this is the trailing part of a word, only load that much into
4679 the register. Otherwise load the whole register. Note that in
4680 the latter case we may pick up unwanted bits. It's not a problem
4681 at the moment, but we may wish to revisit this. */
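/* E.g. on a 64 bit target, if the pending integer area starts 4 bytes
into its slot, an SImode piece is used for that first slot instead of
a full DImode word.  */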
4683 if (intoffset % BITS_PER_WORD != 0)
4684 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4685 MODE_INT, 0);
4686 else
4687 mode = word_mode;
4689 intoffset /= BITS_PER_UNIT;
4692 regno = parms->regbase + this_slotno;
4693 reg = gen_rtx_REG (mode, regno);
4694 XVECEXP (parms->ret, 0, parms->stack + parms->nregs)
4695 = gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4697 this_slotno += 1;
4698 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4699 parms->nregs += 1;
4700 intslots -= 1;
4702 while (intslots > 0);
4705 /* A subroutine of function_arg_record_value. Traverse the structure
4706 recursively and assign bits to floating point registers. Track which
4707 bits in between need integer registers; invoke function_arg_record_value_3
4708 to make that happen. */
4710 static void
4711 function_arg_record_value_2 (type, startbitpos, parms)
4712 tree type;
4713 HOST_WIDE_INT startbitpos;
4714 struct function_arg_record_value_parms *parms;
4716 tree field;
4717 int packed_p = 0;
4719 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4721 if (TREE_CODE (field) == FIELD_DECL && DECL_PACKED (field))
4723 packed_p = 1;
4724 break;
4728 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4730 if (TREE_CODE (field) == FIELD_DECL)
4732 HOST_WIDE_INT bitpos = startbitpos;
4734 if (DECL_SIZE (field) != 0
4735 && host_integerp (bit_position (field), 1))
4736 bitpos += int_bit_position (field);
4738 /* ??? FIXME: else assume zero offset. */
4740 if (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE)
4741 function_arg_record_value_2 (TREE_TYPE (field), bitpos, parms);
4742 else if ((TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
4743 || (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE
4744 && (TREE_CODE (TREE_TYPE (TREE_TYPE (field)))
4745 == REAL_TYPE)))
4746 && TARGET_FPU
4747 && ! packed_p
4748 && parms->named)
4750 int this_slotno = parms->slotno + bitpos / BITS_PER_WORD;
4751 int regno;
4752 enum machine_mode mode = DECL_MODE (field);
4753 rtx reg;
4755 function_arg_record_value_3 (bitpos, parms);
4756 regno = SPARC_FP_ARG_FIRST + this_slotno * 2
4757 + ((mode == SFmode || mode == SCmode)
4758 && (bitpos & 32) != 0);
4759 switch (mode)
4761 case SCmode: mode = SFmode; break;
4762 case DCmode: mode = DFmode; break;
4763 case TCmode: mode = TFmode; break;
4764 default: break;
4766 reg = gen_rtx_REG (mode, regno);
4767 XVECEXP (parms->ret, 0, parms->stack + parms->nregs)
4768 = gen_rtx_EXPR_LIST (VOIDmode, reg,
4769 GEN_INT (bitpos / BITS_PER_UNIT));
4770 parms->nregs += 1;
4771 if (TREE_CODE (TREE_TYPE (field)) == COMPLEX_TYPE)
4773 regno += GET_MODE_SIZE (mode) / 4;
4774 reg = gen_rtx_REG (mode, regno);
4775 XVECEXP (parms->ret, 0, parms->stack + parms->nregs)
4776 = gen_rtx_EXPR_LIST (VOIDmode, reg,
4777 GEN_INT ((bitpos + GET_MODE_BITSIZE (mode))
4778 / BITS_PER_UNIT));
4779 parms->nregs += 1;
4782 else
4784 if (parms->intoffset == -1)
4785 parms->intoffset = bitpos;
4791 /* Used by function_arg and function_value to implement the complex
4792 conventions of the 64-bit ABI for passing and returning structures.
4793 Return an expression valid as a return value for the two macros
4794 FUNCTION_ARG and FUNCTION_VALUE.
4796 TYPE is the data type of the argument (as a tree).
4797 This is null for libcalls where that information may
4798 not be available.
4799 MODE is the argument's machine mode.
4800 SLOTNO is the index number of the argument's slot in the parameter array.
4801 NAMED is nonzero if this argument is a named parameter
4802 (otherwise it is an extra parameter matching an ellipsis).
4803 REGBASE is the regno of the base register for the parameter array. */
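/* As a small worked example: a named v9 argument of type
struct { double d; int i; } starting at slot 0 yields a PARALLEL
holding %d0 at byte offset 0 for the double and the slot 1 integer
register (%o1 for an outgoing argument) at byte offset 8 for the
int.  */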
4805 static rtx
4806 function_arg_record_value (type, mode, slotno, named, regbase)
4807 tree type;
4808 enum machine_mode mode;
4809 int slotno, named, regbase;
4811 HOST_WIDE_INT typesize = int_size_in_bytes (type);
4812 struct function_arg_record_value_parms parms;
4813 unsigned int nregs;
4815 parms.ret = NULL_RTX;
4816 parms.slotno = slotno;
4817 parms.named = named;
4818 parms.regbase = regbase;
4819 parms.stack = 0;
4821 /* Compute how many registers we need. */
4822 parms.nregs = 0;
4823 parms.intoffset = 0;
4824 function_arg_record_value_1 (type, 0, &parms);
4826 if (parms.intoffset != -1)
4828 unsigned int startbit, endbit;
4829 int intslots, this_slotno;
4831 startbit = parms.intoffset & -BITS_PER_WORD;
4832 endbit = (typesize*BITS_PER_UNIT + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4833 intslots = (endbit - startbit) / BITS_PER_WORD;
4834 this_slotno = slotno + parms.intoffset / BITS_PER_WORD;
4836 if (intslots > 0 && intslots > SPARC_INT_ARG_MAX - this_slotno)
4838 intslots = MAX (0, SPARC_INT_ARG_MAX - this_slotno);
4839 /* We need to pass this field on the stack. */
4840 parms.stack = 1;
4843 parms.nregs += intslots;
4845 nregs = parms.nregs;
4847 /* Allocate the vector and handle some annoying special cases. */
4848 if (nregs == 0)
4850 /* ??? Empty structure has no value? Duh? */
4851 if (typesize <= 0)
4853 /* Though there's nothing really to store, return a word register
4854 anyway so the rest of gcc doesn't go nuts. Returning a PARALLEL
4855 leads to breakage due to the fact that there are zero bytes to
4856 load. */
4857 return gen_rtx_REG (mode, regbase);
4859 else
4861 /* ??? C++ has structures with no fields, and yet a size. Give up
4862 for now and pass everything back in integer registers. */
4863 nregs = (typesize + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4865 if (nregs + slotno > SPARC_INT_ARG_MAX)
4866 nregs = SPARC_INT_ARG_MAX - slotno;
4868 if (nregs == 0)
4869 abort ();
4871 parms.ret = gen_rtx_PARALLEL (mode, rtvec_alloc (parms.stack + nregs));
4873 /* If at least one field must be passed on the stack, generate
4874 (parallel [(expr_list (nil) ...) ...]) so that all fields will
4875 also be passed on the stack. We can't do much better because the
4876 semantics of FUNCTION_ARG_PARTIAL_NREGS doesn't handle the case
4877 of structures for which the fields passed exclusively in registers
4878 are not at the beginning of the structure. */
4879 if (parms.stack)
4880 XVECEXP (parms.ret, 0, 0)
4881 = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4883 /* Fill in the entries. */
4884 parms.nregs = 0;
4885 parms.intoffset = 0;
4886 function_arg_record_value_2 (type, 0, &parms);
4887 function_arg_record_value_3 (typesize * BITS_PER_UNIT, &parms);
4889 if (parms.nregs != nregs)
4890 abort ();
4892 return parms.ret;
4895 /* Handle the FUNCTION_ARG macro.
4896 Determine where to put an argument to a function.
4897 Value is zero to push the argument on the stack,
4898 or a hard register in which to store the argument.
4900 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4901 the preceding args and about the function being called.
4902 MODE is the argument's machine mode.
4903 TYPE is the data type of the argument (as a tree).
4904 This is null for libcalls where that information may
4905 not be available.
4906 NAMED is nonzero if this argument is a named parameter
4907 (otherwise it is an extra parameter matching an ellipsis).
4908 INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG. */
4910 rtx
4911 function_arg (cum, mode, type, named, incoming_p)
4912 const CUMULATIVE_ARGS *cum;
4913 enum machine_mode mode;
4914 tree type;
4915 int named;
4916 int incoming_p;
4918 int regbase = (incoming_p
4919 ? SPARC_INCOMING_INT_ARG_FIRST
4920 : SPARC_OUTGOING_INT_ARG_FIRST);
4921 int slotno, regno, padding;
4922 rtx reg;
4924 slotno = function_arg_slotno (cum, mode, type, named, incoming_p,
4925 &regno, &padding);
4927 if (slotno == -1)
4928 return 0;
4930 if (TARGET_ARCH32)
4932 reg = gen_rtx_REG (mode, regno);
4933 return reg;
4936 /* v9 fp args in reg slots beyond the int reg slots get passed in regs
4937 but also have the slot allocated for them.
4938 If no prototype is in scope fp values in register slots get passed
4939 in two places, either fp regs and int regs or fp regs and memory. */
4940 if ((GET_MODE_CLASS (mode) == MODE_FLOAT
4941 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
4942 && SPARC_FP_REG_P (regno))
4944 reg = gen_rtx_REG (mode, regno);
4945 if (cum->prototype_p || cum->libcall_p)
4947 /* "* 2" because fp reg numbers are recorded in 4 byte
4948 quantities. */
4949 #if 0
4950 /* ??? This will cause the value to be passed in the fp reg and
4951 in the stack. When a prototype exists we want to pass the
4952 value in the reg but reserve space on the stack. That's an
4953 optimization, and is deferred [for a bit]. */
4954 if ((regno - SPARC_FP_ARG_FIRST) >= SPARC_INT_ARG_MAX * 2)
4955 return gen_rtx_PARALLEL (mode,
4956 gen_rtvec (2,
4957 gen_rtx_EXPR_LIST (VOIDmode,
4958 NULL_RTX, const0_rtx),
4959 gen_rtx_EXPR_LIST (VOIDmode,
4960 reg, const0_rtx)));
4961 else
4962 #else
4963 /* ??? It seems that passing back a register even when past
4964 the area declared by REG_PARM_STACK_SPACE will allocate
4965 space appropriately, and will not copy the data onto the
4966 stack, exactly as we desire.
4968 This is due to locate_and_pad_parm being called in
4969 expand_call whenever reg_parm_stack_space > 0, which
4970 while beneficial to our example here, would seem to be
4971 in error from what had been intended. Ho hum... -- r~ */
4972 #endif
4973 return reg;
4975 else
4977 rtx v0, v1;
4979 if ((regno - SPARC_FP_ARG_FIRST) < SPARC_INT_ARG_MAX * 2)
4981 int intreg;
4983 /* On incoming, we don't need to know that the value
4984 is passed in %f0 and %i0, and it confuses other parts
4985 causing needless spillage even on the simplest cases. */
4986 if (incoming_p)
4987 return reg;
4989 intreg = (SPARC_OUTGOING_INT_ARG_FIRST
4990 + (regno - SPARC_FP_ARG_FIRST) / 2);
4992 v0 = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
4993 v1 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (mode, intreg),
4994 const0_rtx);
4995 return gen_rtx_PARALLEL (mode, gen_rtvec (2, v0, v1));
4997 else
4999 v0 = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5000 v1 = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
5001 return gen_rtx_PARALLEL (mode, gen_rtvec (2, v0, v1));
5005 else if (type && TREE_CODE (type) == RECORD_TYPE)
5007 /* Structures up to 16 bytes in size are passed in arg slots on the
5008 stack and are promoted to registers where possible. */
5010 if (int_size_in_bytes (type) > 16)
5011 abort (); /* shouldn't get here */
5013 return function_arg_record_value (type, mode, slotno, named, regbase);
5015 else if (type && TREE_CODE (type) == UNION_TYPE)
5017 enum machine_mode mode;
5018 int bytes = int_size_in_bytes (type);
5020 if (bytes > 16)
5021 abort ();
5023 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 0);
5024 reg = gen_rtx_REG (mode, regno);
5026 else
5028 /* Scalar or complex int. */
5029 reg = gen_rtx_REG (mode, regno);
5032 return reg;
5035 /* Handle the FUNCTION_ARG_PARTIAL_NREGS macro.
5036 For an arg passed partly in registers and partly in memory,
5037 this is the number of registers used.
5038 For args passed entirely in registers or entirely in memory, zero.
5040 Any arg that starts in the first 6 regs but won't entirely fit in them
5041 needs partial registers on v8. On v9, structures with integer
5042 values in arg slots 5,6 will be passed in %o5 and SP+176, and complex fp
5043 values that begin in the last fp reg [where "last fp reg" varies with the
5044 mode] will be split between that reg and memory. */
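/* For instance, on v9 a 16 byte structure whose first slot is slot 5
gets %o5 for its first half and the stack for its second half, so 1
is returned for it.  */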
5046 int
5047 function_arg_partial_nregs (cum, mode, type, named)
5048 const CUMULATIVE_ARGS *cum;
5049 enum machine_mode mode;
5050 tree type;
5051 int named;
5053 int slotno, regno, padding;
5055 /* We pass 0 for incoming_p here, it doesn't matter. */
5056 slotno = function_arg_slotno (cum, mode, type, named, 0, &regno, &padding);
5058 if (slotno == -1)
5059 return 0;
5061 if (TARGET_ARCH32)
5063 if ((slotno + (mode == BLKmode
5064 ? ROUND_ADVANCE (int_size_in_bytes (type))
5065 : ROUND_ADVANCE (GET_MODE_SIZE (mode))))
5066 > NPARM_REGS (SImode))
5067 return NPARM_REGS (SImode) - slotno;
5068 return 0;
5070 else
5072 if (type && AGGREGATE_TYPE_P (type))
5074 int size = int_size_in_bytes (type);
5075 int align = TYPE_ALIGN (type);
5077 if (align == 16)
5078 slotno += slotno & 1;
5079 if (size > 8 && size <= 16
5080 && slotno == SPARC_INT_ARG_MAX - 1)
5081 return 1;
5083 else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5084 || (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
5085 && ! (TARGET_FPU && named)))
5087 if (GET_MODE_ALIGNMENT (mode) == 128)
5089 slotno += slotno & 1;
5090 if (slotno == SPARC_INT_ARG_MAX - 2)
5091 return 1;
5093 else
5095 if (slotno == SPARC_INT_ARG_MAX - 1)
5096 return 1;
5099 else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5101 if (GET_MODE_ALIGNMENT (mode) == 128)
5102 slotno += slotno & 1;
5103 if ((slotno + GET_MODE_SIZE (mode) / UNITS_PER_WORD)
5104 > SPARC_FP_ARG_MAX)
5105 return 1;
5107 return 0;
5111 /* Handle the FUNCTION_ARG_PASS_BY_REFERENCE macro.
5112 !v9: The SPARC ABI stipulates passing struct arguments (of any size) and
5113 quad-precision floats by invisible reference.
5114 v9: Aggregates greater than 16 bytes are passed by reference.
5115 For Pascal, also pass arrays by reference. */
5117 int
5118 function_arg_pass_by_reference (cum, mode, type, named)
5119 const CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
5120 enum machine_mode mode;
5121 tree type;
5122 int named ATTRIBUTE_UNUSED;
5124 if (TARGET_ARCH32)
5126 return ((type && AGGREGATE_TYPE_P (type))
5127 || mode == TFmode || mode == TCmode);
5129 else
5131 return ((type && TREE_CODE (type) == ARRAY_TYPE)
5132 /* Consider complex values as aggregates, so care for TCmode. */
5133 || GET_MODE_SIZE (mode) > 16
5134 || (type
5135 && AGGREGATE_TYPE_P (type)
5136 && (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 16));
5140 /* Handle the FUNCTION_ARG_ADVANCE macro.
5141 Update the data in CUM to advance over an argument
5142 of mode MODE and data type TYPE.
5143 TYPE is null for libcalls where that information may not be available. */
5145 void
5146 function_arg_advance (cum, mode, type, named)
5147 CUMULATIVE_ARGS *cum;
5148 enum machine_mode mode;
5149 tree type;
5150 int named;
5152 int slotno, regno, padding;
5154 /* We pass 0 for incoming_p here, it doesn't matter. */
5155 slotno = function_arg_slotno (cum, mode, type, named, 0, &regno, &padding);
5157 /* If register required leading padding, add it. */
5158 if (slotno != -1)
5159 cum->words += padding;
5161 if (TARGET_ARCH32)
5163 cum->words += (mode != BLKmode
5164 ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
5165 : ROUND_ADVANCE (int_size_in_bytes (type)));
5167 else
5169 if (type && AGGREGATE_TYPE_P (type))
5171 int size = int_size_in_bytes (type);
5173 if (size <= 8)
5174 ++cum->words;
5175 else if (size <= 16)
5176 cum->words += 2;
5177 else /* passed by reference */
5178 ++cum->words;
5180 else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
5182 cum->words += 2;
5184 else if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5186 cum->words += GET_MODE_SIZE (mode) / UNITS_PER_WORD;
5188 else
5190 cum->words += (mode != BLKmode
5191 ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
5192 : ROUND_ADVANCE (int_size_in_bytes (type)));
5197 /* Handle the FUNCTION_ARG_PADDING macro.
5198 For the 64 bit ABI structs are always stored left shifted in their
5199 argument slot. */
5201 enum direction
5202 function_arg_padding (mode, type)
5203 enum machine_mode mode;
5204 tree type;
5206 if (TARGET_ARCH64 && type != 0 && AGGREGATE_TYPE_P (type))
5207 return upward;
5209 /* This is the default definition. */
5210 return (! BYTES_BIG_ENDIAN
5211 ? upward
5212 : ((mode == BLKmode
5213 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5214 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
5215 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
5216 ? downward : upward));
5219 /* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE macros.
5220 For v9, function return values are subject to the same rules as arguments,
5221 except that up to 32-bytes may be returned in registers. */
5223 rtx
5224 function_value (type, mode, incoming_p)
5225 tree type;
5226 enum machine_mode mode;
5227 int incoming_p;
5229 int regno;
5230 int regbase = (incoming_p
5231 ? SPARC_OUTGOING_INT_ARG_FIRST
5232 : SPARC_INCOMING_INT_ARG_FIRST);
5234 if (TARGET_ARCH64 && type)
5236 if (TREE_CODE (type) == RECORD_TYPE)
5238 /* Structures up to 32 bytes in size are passed in registers,
5239 promoted to fp registers where possible. */
5241 if (int_size_in_bytes (type) > 32)
5242 abort (); /* shouldn't get here */
5244 return function_arg_record_value (type, mode, 0, 1, regbase);
5246 else if (AGGREGATE_TYPE_P (type))
5248 /* All other aggregate types are passed in an integer register
5249 in a mode corresponding to the size of the type. */
5250 HOST_WIDE_INT bytes = int_size_in_bytes (type);
5252 if (bytes > 32)
5253 abort ();
5255 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 0);
5259 if (TARGET_ARCH64
5260 && GET_MODE_CLASS (mode) == MODE_INT
5261 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
5262 && type && ! AGGREGATE_TYPE_P (type))
5263 mode = DImode;
5265 if (incoming_p)
5266 regno = BASE_RETURN_VALUE_REG (mode);
5267 else
5268 regno = BASE_OUTGOING_VALUE_REG (mode);
5270 return gen_rtx_REG (mode, regno);
5273 /* Do what is necessary for `va_start'. We look at the current function
5274 to determine if stdarg or varargs is used and return the address of
5275 the first unnamed parameter. */
5277 rtx
5278 sparc_builtin_saveregs ()
5280 int first_reg = current_function_args_info.words;
5281 rtx address;
5282 int regno;
5284 for (regno = first_reg; regno < NPARM_REGS (word_mode); regno++)
5285 emit_move_insn (gen_rtx_MEM (word_mode,
5286 gen_rtx_PLUS (Pmode,
5287 frame_pointer_rtx,
5288 GEN_INT (FIRST_PARM_OFFSET (0)
5289 + (UNITS_PER_WORD
5290 * regno)))),
5291 gen_rtx_REG (word_mode,
5292 BASE_INCOMING_ARG_REG (word_mode) + regno));
5294 address = gen_rtx_PLUS (Pmode,
5295 frame_pointer_rtx,
5296 GEN_INT (FIRST_PARM_OFFSET (0)
5297 + UNITS_PER_WORD * first_reg));
5299 return address;
5302 /* Implement `va_start' for varargs and stdarg. */
5304 void
5305 sparc_va_start (valist, nextarg)
5306 tree valist;
5307 rtx nextarg;
5309 nextarg = expand_builtin_saveregs ();
5310 std_expand_builtin_va_start (valist, nextarg);
5313 /* Implement `va_arg'. */
5315 rtx
5316 sparc_va_arg (valist, type)
5317 tree valist, type;
5319 HOST_WIDE_INT size, rsize, align;
5320 tree addr, incr;
5321 rtx addr_rtx;
5322 int indirect = 0;
5324 /* Round up sizeof(type) to a word. */
5325 size = int_size_in_bytes (type);
5326 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
5327 align = 0;
5329 if (TARGET_ARCH64)
5331 if (TYPE_ALIGN (type) >= 2 * (unsigned) BITS_PER_WORD)
5332 align = 2 * UNITS_PER_WORD;
5334 if (AGGREGATE_TYPE_P (type))
5336 if ((unsigned HOST_WIDE_INT) size > 16)
5338 indirect = 1;
5339 size = rsize = UNITS_PER_WORD;
5340 align = 0;
5342 /* SPARC v9 ABI states that structures up to 8 bytes in size are
5343 given one 8 byte slot. */
5344 else if (size == 0)
5345 size = rsize = UNITS_PER_WORD;
5346 else
5347 size = rsize;
5350 else
5352 if (AGGREGATE_TYPE_P (type)
5353 || TYPE_MODE (type) == TFmode
5354 || TYPE_MODE (type) == TCmode)
5356 indirect = 1;
5357 size = rsize = UNITS_PER_WORD;
5361 incr = valist;
5362 if (align)
5364 incr = fold (build (PLUS_EXPR, ptr_type_node, incr,
5365 build_int_2 (align - 1, 0)));
5366 incr = fold (build (BIT_AND_EXPR, ptr_type_node, incr,
5367 build_int_2 (-align, -1)));
5370 addr = incr = save_expr (incr);
5371 if (BYTES_BIG_ENDIAN && size < rsize)
5373 addr = fold (build (PLUS_EXPR, ptr_type_node, incr,
5374 build_int_2 (rsize - size, 0)));
5376 incr = fold (build (PLUS_EXPR, ptr_type_node, incr,
5377 build_int_2 (rsize, 0)));
5379 incr = build (MODIFY_EXPR, ptr_type_node, valist, incr);
5380 TREE_SIDE_EFFECTS (incr) = 1;
5381 expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);
5383 addr_rtx = expand_expr (addr, NULL, Pmode, EXPAND_NORMAL);
5385 /* If the address isn't aligned properly for the type,
5386 we may need to copy to a temporary.
5387 FIXME: This is inefficient. Usually we can do this
5388 in registers. */
5389 if (align == 0
5390 && TYPE_ALIGN (type) > BITS_PER_WORD
5391 && !indirect)
5393 /* FIXME: We really need to specify that the temporary is live
5394 for the whole function because expand_builtin_va_arg wants
5395 the alias set to be get_varargs_alias_set (), but in this
5396 case the alias set is that for TYPE and if the memory gets
5397 reused it will be reused with alias set TYPE. */
5398 rtx tmp = assign_temp (type, 0, 1, 0);
5399 rtx dest_addr;
5401 addr_rtx = force_reg (Pmode, addr_rtx);
5402 addr_rtx = gen_rtx_MEM (BLKmode, addr_rtx);
5403 set_mem_alias_set (addr_rtx, get_varargs_alias_set ());
5404 set_mem_align (addr_rtx, BITS_PER_WORD);
5405 tmp = shallow_copy_rtx (tmp);
5406 PUT_MODE (tmp, BLKmode);
5407 set_mem_alias_set (tmp, 0);
5409 dest_addr = emit_block_move (tmp, addr_rtx, GEN_INT (rsize),
5410 BLOCK_OP_NORMAL);
5411 if (dest_addr != NULL_RTX)
5412 addr_rtx = dest_addr;
5413 else
5414 addr_rtx = XCEXP (tmp, 0, MEM);
5417 if (indirect)
5419 addr_rtx = force_reg (Pmode, addr_rtx);
5420 addr_rtx = gen_rtx_MEM (Pmode, addr_rtx);
5421 set_mem_alias_set (addr_rtx, get_varargs_alias_set ());
5424 return addr_rtx;
5427 /* Return the string to output a conditional branch to LABEL, which is
5428 the operand number of the label. OP is the conditional expression.
5429 XEXP (OP, 0) is assumed to be a condition code register (integer or
5430 floating point) and its mode specifies what kind of comparison we made.
5432 REVERSED is nonzero if we should reverse the sense of the comparison.
5434 ANNUL is nonzero if we should generate an annulling branch.
5436 NOOP is nonzero if we have to follow this branch by a noop.
5438 INSN, if set, is the insn. */
5440 char *
5441 output_cbranch (op, dest, label, reversed, annul, noop, insn)
5442 rtx op, dest;
5443 int label;
5444 int reversed, annul, noop;
5445 rtx insn;
5447 static char string[50];
5448 enum rtx_code code = GET_CODE (op);
5449 rtx cc_reg = XEXP (op, 0);
5450 enum machine_mode mode = GET_MODE (cc_reg);
5451 const char *labelno, *branch;
5452 int spaces = 8, far;
5453 char *p;
5455 /* v9 branches are limited to +-1MB. If it is too far away,
5456 change
5458 bne,pt %xcc, .LC30
5460 to
5462 be,pn %xcc, .+12
5463 nop
5464 ba .LC30
5466 and
5468 fbne,a,pn %fcc2, .LC29
5470 to
5472 fbe,pt %fcc2, .+16
5473 nop
5474 ba .LC29 */
5476 far = get_attr_length (insn) >= 3;
5477 if (reversed ^ far)
5479 /* Reversal of FP compares takes care -- an ordered compare
5480 becomes an unordered compare and vice versa. */
5481 if (mode == CCFPmode || mode == CCFPEmode)
5482 code = reverse_condition_maybe_unordered (code);
5483 else
5484 code = reverse_condition (code);
5487 /* Start by writing the branch condition. */
5488 if (mode == CCFPmode || mode == CCFPEmode)
5490 switch (code)
5492 case NE:
5493 branch = "fbne";
5494 break;
5495 case EQ:
5496 branch = "fbe";
5497 break;
5498 case GE:
5499 branch = "fbge";
5500 break;
5501 case GT:
5502 branch = "fbg";
5503 break;
5504 case LE:
5505 branch = "fble";
5506 break;
5507 case LT:
5508 branch = "fbl";
5509 break;
5510 case UNORDERED:
5511 branch = "fbu";
5512 break;
5513 case ORDERED:
5514 branch = "fbo";
5515 break;
5516 case UNGT:
5517 branch = "fbug";
5518 break;
5519 case UNLT:
5520 branch = "fbul";
5521 break;
5522 case UNEQ:
5523 branch = "fbue";
5524 break;
5525 case UNGE:
5526 branch = "fbuge";
5527 break;
5528 case UNLE:
5529 branch = "fbule";
5530 break;
5531 case LTGT:
5532 branch = "fblg";
5533 break;
5535 default:
5536 abort ();
5539 /* ??? !v9: FP branches cannot be preceded by another floating point
5540 insn. Because there is currently no concept of pre-delay slots,
5541 we can fix this only by always emitting a nop before a floating
5542 point branch. */
5544 string[0] = '\0';
5545 if (! TARGET_V9)
5546 strcpy (string, "nop\n\t");
5547 strcat (string, branch);
5549 else
5551 switch (code)
5553 case NE:
5554 branch = "bne";
5555 break;
5556 case EQ:
5557 branch = "be";
5558 break;
5559 case GE:
5560 if (mode == CC_NOOVmode || mode == CCX_NOOVmode)
5561 branch = "bpos";
5562 else
5563 branch = "bge";
5564 break;
5565 case GT:
5566 branch = "bg";
5567 break;
5568 case LE:
5569 branch = "ble";
5570 break;
5571 case LT:
5572 if (mode == CC_NOOVmode || mode == CCX_NOOVmode)
5573 branch = "bneg";
5574 else
5575 branch = "bl";
5576 break;
5577 case GEU:
5578 branch = "bgeu";
5579 break;
5580 case GTU:
5581 branch = "bgu";
5582 break;
5583 case LEU:
5584 branch = "bleu";
5585 break;
5586 case LTU:
5587 branch = "blu";
5588 break;
5590 default:
5591 abort ();
5593 strcpy (string, branch);
5595 spaces -= strlen (branch);
5596 p = strchr (string, '\0');
5598 /* Now add the annulling, the label, and a possible noop. */
5599 if (annul && ! far)
5601 strcpy (p, ",a");
5602 p += 2;
5603 spaces -= 2;
5606 if (! TARGET_V9)
5607 labelno = "";
5608 else
5610 rtx note;
5611 int v8 = 0;
5613 if (! far && insn && INSN_ADDRESSES_SET_P ())
5615 int delta = (INSN_ADDRESSES (INSN_UID (dest))
5616 - INSN_ADDRESSES (INSN_UID (insn)));
5617 /* Leave some instructions for "slop". */
5618 if (delta < -260000 || delta >= 260000)
5619 v8 = 1;
5622 if (mode == CCFPmode || mode == CCFPEmode)
5624 static char v9_fcc_labelno[] = "%%fccX, ";
5625 /* Set the char indicating the number of the fcc reg to use. */
5626 v9_fcc_labelno[5] = REGNO (cc_reg) - SPARC_FIRST_V9_FCC_REG + '0';
5627 labelno = v9_fcc_labelno;
5628 if (v8)
5630 if (REGNO (cc_reg) == SPARC_FCC_REG)
5631 labelno = "";
5632 else
5633 abort ();
5636 else if (mode == CCXmode || mode == CCX_NOOVmode)
5638 labelno = "%%xcc, ";
5639 if (v8)
5640 abort ();
5642 else
5644 labelno = "%%icc, ";
5645 if (v8)
5646 labelno = "";
5649 if (*labelno && insn && (note = find_reg_note (insn, REG_BR_PROB, NULL_RTX)))
5651 strcpy (p,
5652 ((INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2) ^ far)
5653 ? ",pt" : ",pn");
5654 p += 3;
5655 spaces -= 3;
5658 if (spaces > 0)
5659 *p++ = '\t';
5660 else
5661 *p++ = ' ';
5662 strcpy (p, labelno);
5663 p = strchr (p, '\0');
5664 if (far)
5666 strcpy (p, ".+12\n\tnop\n\tb\t");
5667 if (annul || noop)
5668 p[3] = '6';
5669 p += 13;
5671 *p++ = '%';
5672 *p++ = 'l';
5673 /* Set the char indicating the number of the operand containing the
5674 label_ref. */
5675 *p++ = label + '0';
5676 *p = '\0';
5677 if (noop)
5678 strcpy (p, "\n\tnop");
5680 return string;
5683 /* Emit a library call comparison between floating point X and Y.
5684 COMPARISON is the rtl operator to compare with (EQ, NE, GT, etc.).
5685 TARGET_ARCH64 uses _Qp_* functions, which use pointers to TFmode
5686 values as arguments instead of the TFmode registers themselves;
5687 that is why we cannot call emit_float_lib_cmp. */
5688 void
5689 sparc_emit_float_lib_cmp (x, y, comparison)
5690 rtx x, y;
5691 enum rtx_code comparison;
5693 const char *qpfunc;
5694 rtx slot0, slot1, result, tem, tem2;
5695 enum machine_mode mode;
5697 switch (comparison)
5699 case EQ:
5700 qpfunc = (TARGET_ARCH64) ? "_Qp_feq" : "_Q_feq";
5701 break;
5703 case NE:
5704 qpfunc = (TARGET_ARCH64) ? "_Qp_fne" : "_Q_fne";
5705 break;
5707 case GT:
5708 qpfunc = (TARGET_ARCH64) ? "_Qp_fgt" : "_Q_fgt";
5709 break;
5711 case GE:
5712 qpfunc = (TARGET_ARCH64) ? "_Qp_fge" : "_Q_fge";
5713 break;
5715 case LT:
5716 qpfunc = (TARGET_ARCH64) ? "_Qp_flt" : "_Q_flt";
5717 break;
5719 case LE:
5720 qpfunc = (TARGET_ARCH64) ? "_Qp_fle" : "_Q_fle";
5721 break;
5723 case ORDERED:
5724 case UNORDERED:
5725 case UNGT:
5726 case UNLT:
5727 case UNEQ:
5728 case UNGE:
5729 case UNLE:
5730 case LTGT:
5731 qpfunc = (TARGET_ARCH64) ? "_Qp_cmp" : "_Q_cmp";
5732 break;
5734 default:
5735 abort();
5736 break;
5739 if (TARGET_ARCH64)
5741 if (GET_CODE (x) != MEM)
5743 slot0 = assign_stack_temp (TFmode, GET_MODE_SIZE(TFmode), 0);
5744 emit_insn (gen_rtx_SET (VOIDmode, slot0, x));
5746 else
5747 slot0 = x;
5749 if (GET_CODE (y) != MEM)
5751 slot1 = assign_stack_temp (TFmode, GET_MODE_SIZE(TFmode), 0);
5752 emit_insn (gen_rtx_SET (VOIDmode, slot1, y));
5754 else
5755 slot1 = y;
5757 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, qpfunc), LCT_NORMAL,
5758 DImode, 2,
5759 XEXP (slot0, 0), Pmode,
5760 XEXP (slot1, 0), Pmode);
5762 mode = DImode;
5764 else
5766 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, qpfunc), LCT_NORMAL,
5767 SImode, 2,
5768 x, TFmode, y, TFmode);
5770 mode = SImode;
5774 /* Immediately move the result of the libcall into a pseudo
5775 register so reload doesn't clobber the value if it needs
5776 the return register for a spill reg. */
5777 result = gen_reg_rtx (mode);
5778 emit_move_insn (result, hard_libcall_value (mode));
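/* The _Q_cmp/_Qp_cmp routines encode the relation in their result:
0 for equal, 1 for less, 2 for greater and 3 for unordered; the
unordered cases below depend on that encoding, while the remaining
routines simply return nonzero when their relation holds.  */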
5780 switch (comparison)
5782 default:
5783 emit_cmp_insn (result, const0_rtx, NE, NULL_RTX, mode, 0);
5784 break;
5785 case ORDERED:
5786 case UNORDERED:
5787 emit_cmp_insn (result, GEN_INT(3), comparison == UNORDERED ? EQ : NE,
5788 NULL_RTX, mode, 0);
5789 break;
5790 case UNGT:
5791 case UNGE:
5792 emit_cmp_insn (result, const1_rtx,
5793 comparison == UNGT ? GT : NE, NULL_RTX, mode, 0);
5794 break;
5795 case UNLE:
5796 emit_cmp_insn (result, const2_rtx, NE, NULL_RTX, mode, 0);
5797 break;
5798 case UNLT:
5799 tem = gen_reg_rtx (mode);
5800 if (TARGET_ARCH32)
5801 emit_insn (gen_andsi3 (tem, result, const1_rtx));
5802 else
5803 emit_insn (gen_anddi3 (tem, result, const1_rtx));
5804 emit_cmp_insn (tem, const0_rtx, NE, NULL_RTX, mode, 0);
5805 break;
5806 case UNEQ:
5807 case LTGT:
5808 tem = gen_reg_rtx (mode);
5809 if (TARGET_ARCH32)
5810 emit_insn (gen_addsi3 (tem, result, const1_rtx));
5811 else
5812 emit_insn (gen_adddi3 (tem, result, const1_rtx));
5813 tem2 = gen_reg_rtx (mode);
5814 if (TARGET_ARCH32)
5815 emit_insn (gen_andsi3 (tem2, tem, const2_rtx));
5816 else
5817 emit_insn (gen_anddi3 (tem2, tem, const2_rtx));
5818 emit_cmp_insn (tem2, const0_rtx, comparison == UNEQ ? EQ : NE,
5819 NULL_RTX, mode, 0);
5820 break;
5824 /* Generate an unsigned DImode to FP conversion. This is the same code
5825 optabs would emit if we didn't have TFmode patterns. */
5827 void
5828 sparc_emit_floatunsdi (operands)
5829 rtx operands[2];
5831 rtx neglab, donelab, i0, i1, f0, in, out;
5832 enum machine_mode mode;
5834 out = operands[0];
5835 in = force_reg (DImode, operands[1]);
5836 mode = GET_MODE (out);
5837 neglab = gen_label_rtx ();
5838 donelab = gen_label_rtx ();
5839 i0 = gen_reg_rtx (DImode);
5840 i1 = gen_reg_rtx (DImode);
5841 f0 = gen_reg_rtx (mode);
5843 emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);
5845 emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in)));
5846 emit_jump_insn (gen_jump (donelab));
5847 emit_barrier ();
5849 emit_label (neglab);
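/* IN has its high bit set, so it cannot be converted directly as a
signed value. Halve it, or'ing in the low bit so rounding is
preserved, convert that, and double the result.  */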
5851 emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
5852 emit_insn (gen_anddi3 (i1, in, const1_rtx));
5853 emit_insn (gen_iordi3 (i0, i0, i1));
5854 emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0)));
5855 emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0)));
5857 emit_label (donelab);
5860 /* Return the string to output a conditional branch to LABEL, testing
5861 register REG. LABEL is the operand number of the label; REG is the
5862 operand number of the reg. OP is the conditional expression. The mode
5863 of REG says what kind of comparison we made.
5865 REVERSED is nonzero if we should reverse the sense of the comparison.
5867 ANNUL is nonzero if we should generate an annulling branch.
5869 NOOP is nonzero if we have to follow this branch by a noop. */
5871 char *
5872 output_v9branch (op, dest, reg, label, reversed, annul, noop, insn)
5873 rtx op, dest;
5874 int reg, label;
5875 int reversed, annul, noop;
5876 rtx insn;
5878 static char string[50];
5879 enum rtx_code code = GET_CODE (op);
5880 enum machine_mode mode = GET_MODE (XEXP (op, 0));
5881 rtx note;
5882 int far;
5883 char *p;
5885 /* Branches on a register are limited to +-128KB. If it is too far away,
5886 change
5888 brnz,pt %g1, .LC30
5890 to
5892 brz,pn %g1, .+12
5893 nop
5894 ba,pt %xcc, .LC30
5896 and
5898 brgez,a,pn %o1, .LC29
5900 to
5902 brlz,pt %o1, .+16
5903 nop
5904 ba,pt %xcc, .LC29 */
5906 far = get_attr_length (insn) >= 3;
5908 /* If not floating-point or if EQ or NE, we can just reverse the code. */
5909 if (reversed ^ far)
5910 code = reverse_condition (code);
5912 /* Only 64 bit versions of these instructions exist. */
5913 if (mode != DImode)
5914 abort ();
5916 /* Start by writing the branch condition. */
5918 switch (code)
5920 case NE:
5921 strcpy (string, "brnz");
5922 break;
5924 case EQ:
5925 strcpy (string, "brz");
5926 break;
5928 case GE:
5929 strcpy (string, "brgez");
5930 break;
5932 case LT:
5933 strcpy (string, "brlz");
5934 break;
5936 case LE:
5937 strcpy (string, "brlez");
5938 break;
5940 case GT:
5941 strcpy (string, "brgz");
5942 break;
5944 default:
5945 abort ();
5948 p = strchr (string, '\0');
5950 /* Now add the annulling, reg, label, and nop. */
5951 if (annul && ! far)
5953 strcpy (p, ",a");
5954 p += 2;
5957 if (insn && (note = find_reg_note (insn, REG_BR_PROB, NULL_RTX)))
5959 strcpy (p,
5960 ((INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2) ^ far)
5961 ? ",pt" : ",pn");
5962 p += 3;
5965 *p = p < string + 8 ? '\t' : ' ';
5966 p++;
5967 *p++ = '%';
5968 *p++ = '0' + reg;
5969 *p++ = ',';
5970 *p++ = ' ';
5971 if (far)
5973 int veryfar = 1, delta;
5975 if (INSN_ADDRESSES_SET_P ())
5977 delta = (INSN_ADDRESSES (INSN_UID (dest))
5978 - INSN_ADDRESSES (INSN_UID (insn)));
5979 /* Leave some instructions for "slop". */
5980 if (delta >= -260000 && delta < 260000)
5981 veryfar = 0;
5984 strcpy (p, ".+12\n\tnop\n\t");
5985 if (annul || noop)
5986 p[3] = '6';
5987 p += 11;
5988 if (veryfar)
5990 strcpy (p, "b\t");
5991 p += 2;
5993 else
5995 strcpy (p, "ba,pt\t%%xcc, ");
5996 p += 13;
5999 *p++ = '%';
6000 *p++ = 'l';
6001 *p++ = '0' + label;
6002 *p = '\0';
6004 if (noop)
6005 strcpy (p, "\n\tnop");
6007 return string;
6010 /* Return 1 if any of the registers of the instruction are %l[0-7] or %o[0-7].
6011 Such instructions cannot be used in the delay slot of a return insn on v9.
6012 If TEST is 0, also rename all %i[0-7] registers to their %o[0-7] counterparts. */
6015 static int
6016 epilogue_renumber (where, test)
6017 register rtx *where;
6018 int test;
6020 register const char *fmt;
6021 register int i;
6022 register enum rtx_code code;
6024 if (*where == 0)
6025 return 0;
6027 code = GET_CODE (*where);
6029 switch (code)
6031 case REG:
6032 if (REGNO (*where) >= 8 && REGNO (*where) < 24) /* oX or lX */
6033 return 1;
6034 if (! test && REGNO (*where) >= 24 && REGNO (*where) < 32)
6035 *where = gen_rtx (REG, GET_MODE (*where), OUTGOING_REGNO (REGNO(*where)));
6036 case SCRATCH:
6037 case CC0:
6038 case PC:
6039 case CONST_INT:
6040 case CONST_DOUBLE:
6041 return 0;
6043 /* Do not replace the frame pointer with the stack pointer because
6044 it can cause the delayed instruction to load below the stack.
6045 This occurs when instructions like:
6047 (set (reg/i:SI 24 %i0)
6048 (mem/f:SI (plus:SI (reg/f:SI 30 %fp)
6049 (const_int -20 [0xffffffec])) 0))
6051 are in the return delayed slot. */
6052 case PLUS:
6053 if (GET_CODE (XEXP (*where, 0)) == REG
6054 && REGNO (XEXP (*where, 0)) == HARD_FRAME_POINTER_REGNUM
6055 && (GET_CODE (XEXP (*where, 1)) != CONST_INT
6056 || INTVAL (XEXP (*where, 1)) < SPARC_STACK_BIAS))
6057 return 1;
6058 break;
6060 case MEM:
6061 if (SPARC_STACK_BIAS
6062 && GET_CODE (XEXP (*where, 0)) == REG
6063 && REGNO (XEXP (*where, 0)) == HARD_FRAME_POINTER_REGNUM)
6064 return 1;
6065 break;
6067 default:
6068 break;
6071 fmt = GET_RTX_FORMAT (code);
6073 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6075 if (fmt[i] == 'E')
6077 register int j;
6078 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
6079 if (epilogue_renumber (&(XVECEXP (*where, i, j)), test))
6080 return 1;
6082 else if (fmt[i] == 'e'
6083 && epilogue_renumber (&(XEXP (*where, i)), test))
6084 return 1;
6086 return 0;
6089 /* Leaf functions and non-leaf functions have different needs. */
6091 static const int
6092 reg_leaf_alloc_order[] = REG_LEAF_ALLOC_ORDER;
6094 static const int
6095 reg_nonleaf_alloc_order[] = REG_ALLOC_ORDER;
6097 static const int *const reg_alloc_orders[] = {
6098 reg_leaf_alloc_order,
6099 reg_nonleaf_alloc_order};
6101 void
6102 order_regs_for_local_alloc ()
6104 static int last_order_nonleaf = 1;
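/* Hard register 15 is %o7, which a call clobbers with its return
address, so regs_ever_live[15] effectively tells leaf functions apart
from those that make calls.  */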
6106 if (regs_ever_live[15] != last_order_nonleaf)
6108 last_order_nonleaf = !last_order_nonleaf;
6109 memcpy ((char *) reg_alloc_order,
6110 (const char *) reg_alloc_orders[last_order_nonleaf],
6111 FIRST_PSEUDO_REGISTER * sizeof (int));
6115 /* Return 1 if REG and MEM are legitimate enough to allow the various
6116 mem<-->reg splits to be run. */
6118 int
6119 sparc_splitdi_legitimate (reg, mem)
6120 rtx reg;
6121 rtx mem;
6123 /* Punt if we are here by mistake. */
6124 if (! reload_completed)
6125 abort ();
6127 /* We must have an offsettable memory reference. */
6128 if (! offsettable_memref_p (mem))
6129 return 0;
6131 /* If we have legitimate args for ldd/std, we do not want
6132 the split to happen. */
6133 if ((REGNO (reg) % 2) == 0
6134 && mem_min_alignment (mem, 8))
6135 return 0;
6137 /* Success. */
6138 return 1;
6141 /* Return 1 if x and y are some kind of REG and they refer to
6142 different hard registers. This test is guaranteed to be
6143 run after reload. */
6145 int
6146 sparc_absnegfloat_split_legitimate (x, y)
6147 rtx x, y;
6149 if (GET_CODE (x) != REG)
6150 return 0;
6151 if (GET_CODE (y) != REG)
6152 return 0;
6153 if (REGNO (x) == REGNO (y))
6154 return 0;
6155 return 1;
6158 /* Return 1 if REGNO (reg1) is even and REGNO (reg1) == REGNO (reg2) - 1.
6159 This makes them candidates for using ldd and std insns.
6161 Note reg1 and reg2 *must* be hard registers. */
6163 int
6164 registers_ok_for_ldd_peep (reg1, reg2)
6165 rtx reg1, reg2;
6167 /* We might have been passed a SUBREG. */
6168 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
6169 return 0;
6171 if (REGNO (reg1) % 2 != 0)
6172 return 0;
6174 /* Integer ldd is deprecated in SPARC V9 */
6175 if (TARGET_V9 && REGNO (reg1) < 32)
6176 return 0;
6178 return (REGNO (reg1) == REGNO (reg2) - 1);
6181 /* Return 1 if the addresses in mem1 and mem2 are suitable for use in
6182 an ldd or std insn.
6184 This can only happen when addr1 and addr2, the addresses in mem1
6185 and mem2, are consecutive memory locations (addr1 + 4 == addr2).
6186 addr1 must also be aligned on a 64-bit boundary.
6188 Also iff dependent_reg_rtx is not null it should not be used to
6189 compute the address for mem1, i.e. we cannot optimize a sequence
6190 like:
6191 ld [%o0], %o0
6192 ld [%o0 + 4], %o1
6193 to
6194 ldd [%o0], %o0
6195 nor:
6196 ld [%g3 + 4], %g3
6197 ld [%g3], %g2
6198 to
6199 ldd [%g3], %g2
6201 But, note that the transformation from:
6202 ld [%g2 + 4], %g3
6203 ld [%g2], %g2
6204 to
6205 ldd [%g2], %g2
6206 is perfectly fine. Thus, the peephole2 patterns always pass us
6207 the destination register of the first load, never the second one.
6209 For stores we don't have a similar problem, so dependent_reg_rtx is
6210 NULL_RTX. */
6213 mems_ok_for_ldd_peep (mem1, mem2, dependent_reg_rtx)
6214 rtx mem1, mem2, dependent_reg_rtx;
6216 rtx addr1, addr2;
6217 unsigned int reg1;
6218 int offset1;
6220 /* The mems cannot be volatile. */
6221 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
6222 return 0;
6224 /* MEM1 should be aligned on a 64-bit boundary. */
6225 if (MEM_ALIGN (mem1) < 64)
6226 return 0;
6228 addr1 = XEXP (mem1, 0);
6229 addr2 = XEXP (mem2, 0);
6231 /* Extract a register number and offset (if used) from the first addr. */
6232 if (GET_CODE (addr1) == PLUS)
6234 /* If not a REG, return zero. */
6235 if (GET_CODE (XEXP (addr1, 0)) != REG)
6236 return 0;
6237 else
6239 reg1 = REGNO (XEXP (addr1, 0));
6240 /* The offset must be constant! */
6241 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
6242 return 0;
6243 offset1 = INTVAL (XEXP (addr1, 1));
6246 else if (GET_CODE (addr1) != REG)
6247 return 0;
6248 else
6250 reg1 = REGNO (addr1);
6251 /* This was a simple (mem (reg)) expression. Offset is 0. */
6252 offset1 = 0;
6255 /* Make sure the second address is a (mem (plus (reg) (const_int))).  */
6256 if (GET_CODE (addr2) != PLUS)
6257 return 0;
6259 if (GET_CODE (XEXP (addr2, 0)) != REG
6260 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
6261 return 0;
6263 if (reg1 != REGNO (XEXP (addr2, 0)))
6264 return 0;
6266 if (dependent_reg_rtx != NULL_RTX && reg1 == REGNO (dependent_reg_rtx))
6267 return 0;
6269 /* The first offset must be evenly divisible by 8 to ensure the
6270 address is 64 bit aligned. */
6271 if (offset1 % 8 != 0)
6272 return 0;
6274 /* The offset for the second addr must be 4 more than the first addr. */
6275 if (INTVAL (XEXP (addr2, 1)) != offset1 + 4)
6276 return 0;
6278 /* All the tests passed. addr1 and addr2 are valid for ldd and std
6279 instructions. */
6280 return 1;
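/* Illustrative example (not from the original sources): with an 8-byte
   aligned mem1, the address pair [%o0] / [%o0+4] is accepted (reg1 = %o0,
   offset1 = 0, offset2 = 4), as is [%o0+8] / [%o0+12].  The pair
   [%o0+4] / [%o0+8] is rejected because offset1 % 8 != 0, and
   [%o0] / [%o1+4] is rejected because the base registers differ.  */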
6283 /* Return 1 if reg is a pseudo, or is the first register in
6284 a hard register pair. This makes it a candidate for use in
6285 ldd and std insns. */
6288 register_ok_for_ldd (reg)
6289 rtx reg;
6291 /* We might have been passed a SUBREG. */
6292 if (GET_CODE (reg) != REG)
6293 return 0;
6295 if (REGNO (reg) < FIRST_PSEUDO_REGISTER)
6296 return (REGNO (reg) % 2 == 0);
6297 else
6298 return 1;
6301 /* Print operand X (an rtx) in assembler syntax to file FILE.
6302 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
6303 For `%' followed by punctuation, CODE is the punctuation and X is null. */
6305 void
6306 print_operand (file, x, code)
6307 FILE *file;
6308 rtx x;
6309 int code;
6311 switch (code)
6313 case '#':
6314 /* Output a 'nop' if there's nothing for the delay slot. */
6315 if (dbr_sequence_length () == 0)
6316 fputs ("\n\t nop", file);
6317 return;
6318 case '*':
6319 /* Output an annul flag if there's nothing for the delay slot and we
6320 are optimizing. This is always used with '(' below. */
6321 /* Sun OS 4.1.1 dbx can't handle an annulled unconditional branch;
6322 this is a dbx bug. So, we only do this when optimizing. */
6323 /* On UltraSPARC, a branch in a delay slot causes a pipeline flush.
6324 Always emit a nop in case the next instruction is a branch. */
6325 if (dbr_sequence_length () == 0
6326 && (optimize && (int)sparc_cpu < PROCESSOR_V9))
6327 fputs (",a", file);
6328 return;
6329 case '(':
6330 /* Output a 'nop' if there's nothing for the delay slot and we are
6331 not optimizing. This is always used with '*' above. */
6332 if (dbr_sequence_length () == 0
6333 && ! (optimize && (int)sparc_cpu < PROCESSOR_V9))
6334 fputs ("\n\t nop", file);
6335 return;
6336 case '_':
6337 /* Output the Embedded Medium/Anywhere code model base register. */
6338 fputs (EMBMEDANY_BASE_REG, file);
6339 return;
6340 case '@':
6341 /* Print out what we are using as the frame pointer. This might
6342 be %fp, or might be %sp+offset. */
6343 /* ??? What if offset is too big? Perhaps the caller knows it isn't? */
6344 fprintf (file, "%s+%d", frame_base_name, frame_base_offset);
6345 return;
6346 case 'Y':
6347 /* Adjust the operand to take into account a RESTORE operation. */
6348 if (GET_CODE (x) == CONST_INT)
6349 break;
6350 else if (GET_CODE (x) != REG)
6351 output_operand_lossage ("invalid %%Y operand");
6352 else if (REGNO (x) < 8)
6353 fputs (reg_names[REGNO (x)], file);
6354 else if (REGNO (x) >= 24 && REGNO (x) < 32)
6355 fputs (reg_names[REGNO (x)-16], file);
6356 else
6357 output_operand_lossage ("invalid %%Y operand");
6358 return;
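/* Example of the %Y remapping (illustrative comment): an incoming
   register such as %i0 (regno 24) prints as reg_names[24-16], i.e. %o0,
   mirroring what a RESTORE does to the register window; the globals
   (regno < 8) print unchanged and anything else is rejected.  */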
6359 case 'L':
6360 /* Print out the low order register name of a register pair. */
6361 if (WORDS_BIG_ENDIAN)
6362 fputs (reg_names[REGNO (x)+1], file);
6363 else
6364 fputs (reg_names[REGNO (x)], file);
6365 return;
6366 case 'H':
6367 /* Print out the high order register name of a register pair. */
6368 if (WORDS_BIG_ENDIAN)
6369 fputs (reg_names[REGNO (x)], file);
6370 else
6371 fputs (reg_names[REGNO (x)+1], file);
6372 return;
6373 case 'R':
6374 /* Print out the second register name of a register pair or quad.
6375 I.e., R (%o0) => %o1. */
6376 fputs (reg_names[REGNO (x)+1], file);
6377 return;
6378 case 'S':
6379 /* Print out the third register name of a register quad.
6380 I.e., S (%o0) => %o2. */
6381 fputs (reg_names[REGNO (x)+2], file);
6382 return;
6383 case 'T':
6384 /* Print out the fourth register name of a register quad.
6385 I.e., T (%o0) => %o3. */
6386 fputs (reg_names[REGNO (x)+3], file);
6387 return;
6388 case 'x':
6389 /* Print a condition code register. */
6390 if (REGNO (x) == SPARC_ICC_REG)
6392 /* We don't handle CC[X]_NOOVmode because they're not supposed
6393 to occur here. */
6394 if (GET_MODE (x) == CCmode)
6395 fputs ("%icc", file);
6396 else if (GET_MODE (x) == CCXmode)
6397 fputs ("%xcc", file);
6398 else
6399 abort ();
6401 else
6402 /* %fccN register */
6403 fputs (reg_names[REGNO (x)], file);
6404 return;
6405 case 'm':
6406 /* Print the operand's address only. */
6407 output_address (XEXP (x, 0));
6408 return;
6409 case 'r':
6410 /* In this case we need a register. Use %g0 if the
6411 operand is const0_rtx. */
6412 if (x == const0_rtx
6413 || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
6415 fputs ("%g0", file);
6416 return;
6418 else
6419 break;
6421 case 'A':
6422 switch (GET_CODE (x))
6424 case IOR: fputs ("or", file); break;
6425 case AND: fputs ("and", file); break;
6426 case XOR: fputs ("xor", file); break;
6427 default: output_operand_lossage ("invalid %%A operand");
6429 return;
6431 case 'B':
6432 switch (GET_CODE (x))
6434 case IOR: fputs ("orn", file); break;
6435 case AND: fputs ("andn", file); break;
6436 case XOR: fputs ("xnor", file); break;
6437 default: output_operand_lossage ("invalid %%B operand");
6439 return;
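/* Example (illustrative comment): for an (and ...) rtx, %A prints "and"
   and %B prints "andn", so a single insn pattern can emit either the
   plain or the inverted-second-operand form of the logical operation.  */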
6441 /* These are used by the conditional move instructions. */
6442 case 'c' :
6443 case 'C':
6445 enum rtx_code rc = GET_CODE (x);
6447 if (code == 'c')
6449 enum machine_mode mode = GET_MODE (XEXP (x, 0));
6450 if (mode == CCFPmode || mode == CCFPEmode)
6451 rc = reverse_condition_maybe_unordered (GET_CODE (x));
6452 else
6453 rc = reverse_condition (GET_CODE (x));
6455 switch (rc)
6457 case NE: fputs ("ne", file); break;
6458 case EQ: fputs ("e", file); break;
6459 case GE: fputs ("ge", file); break;
6460 case GT: fputs ("g", file); break;
6461 case LE: fputs ("le", file); break;
6462 case LT: fputs ("l", file); break;
6463 case GEU: fputs ("geu", file); break;
6464 case GTU: fputs ("gu", file); break;
6465 case LEU: fputs ("leu", file); break;
6466 case LTU: fputs ("lu", file); break;
6467 case LTGT: fputs ("lg", file); break;
6468 case UNORDERED: fputs ("u", file); break;
6469 case ORDERED: fputs ("o", file); break;
6470 case UNLT: fputs ("ul", file); break;
6471 case UNLE: fputs ("ule", file); break;
6472 case UNGT: fputs ("ug", file); break;
6473 case UNGE: fputs ("uge", file); break;
6474 case UNEQ: fputs ("ue", file); break;
6475 default: output_operand_lossage (code == 'c'
6476 ? "invalid %%c operand"
6477 : "invalid %%C operand");
6479 return;
6482 /* These are used by the movr instruction pattern. */
6483 case 'd':
6484 case 'D':
6486 enum rtx_code rc = (code == 'd'
6487 ? reverse_condition (GET_CODE (x))
6488 : GET_CODE (x));
6489 switch (rc)
6491 case NE: fputs ("ne", file); break;
6492 case EQ: fputs ("e", file); break;
6493 case GE: fputs ("gez", file); break;
6494 case LT: fputs ("lz", file); break;
6495 case LE: fputs ("lez", file); break;
6496 case GT: fputs ("gz", file); break;
6497 default: output_operand_lossage (code == 'd'
6498 ? "invalid %%d operand"
6499 : "invalid %%D operand");
6501 return;
6504 case 'b':
6506 /* Print a sign-extended character. */
6507 int i = trunc_int_for_mode (INTVAL (x), QImode);
6508 fprintf (file, "%d", i);
6509 return;
6512 case 'f':
6513 /* Operand must be a MEM; write its address. */
6514 if (GET_CODE (x) != MEM)
6515 output_operand_lossage ("invalid %%f operand");
6516 output_address (XEXP (x, 0));
6517 return;
6519 case 's':
6521 /* Print a sign-extended 32-bit value. */
6522 HOST_WIDE_INT i;
6523 if (GET_CODE(x) == CONST_INT)
6524 i = INTVAL (x);
6525 else if (GET_CODE(x) == CONST_DOUBLE)
6526 i = CONST_DOUBLE_LOW (x);
6527 else
6529 output_operand_lossage ("invalid %%s operand");
6530 return;
6532 i = trunc_int_for_mode (i, SImode);
6533 fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
6534 return;
6537 case 0:
6538 /* Do nothing special. */
6539 break;
6541 default:
6542 /* Undocumented flag. */
6543 output_operand_lossage ("invalid operand output code");
6546 if (GET_CODE (x) == REG)
6547 fputs (reg_names[REGNO (x)], file);
6548 else if (GET_CODE (x) == MEM)
6550 fputc ('[', file);
6551 /* Poor Sun assembler doesn't understand absolute addressing. */
6552 if (CONSTANT_P (XEXP (x, 0)))
6553 fputs ("%g0+", file);
6554 output_address (XEXP (x, 0));
6555 fputc (']', file);
6557 else if (GET_CODE (x) == HIGH)
6559 fputs ("%hi(", file);
6560 output_addr_const (file, XEXP (x, 0));
6561 fputc (')', file);
6563 else if (GET_CODE (x) == LO_SUM)
6565 print_operand (file, XEXP (x, 0), 0);
6566 if (TARGET_CM_MEDMID)
6567 fputs ("+%l44(", file);
6568 else
6569 fputs ("+%lo(", file);
6570 output_addr_const (file, XEXP (x, 1));
6571 fputc (')', file);
6573 else if (GET_CODE (x) == CONST_DOUBLE
6574 && (GET_MODE (x) == VOIDmode
6575 || GET_MODE_CLASS (GET_MODE (x)) == MODE_INT))
6577 if (CONST_DOUBLE_HIGH (x) == 0)
6578 fprintf (file, "%u", (unsigned int) CONST_DOUBLE_LOW (x));
6579 else if (CONST_DOUBLE_HIGH (x) == -1
6580 && CONST_DOUBLE_LOW (x) < 0)
6581 fprintf (file, "%d", (int) CONST_DOUBLE_LOW (x));
6582 else
6583 output_operand_lossage ("long long constant not a valid immediate operand");
6585 else if (GET_CODE (x) == CONST_DOUBLE)
6586 output_operand_lossage ("floating point constant not a valid immediate operand");
6587 else { output_addr_const (file, x); }
6590 /* Target hook for assembling integer objects. The sparc version has
6591 special handling for aligned DI-mode objects. */
6593 static bool
6594 sparc_assemble_integer (x, size, aligned_p)
6595 rtx x;
6596 unsigned int size;
6597 int aligned_p;
6599 /* ??? We only output .xword's for symbols and only then in environments
6600 where the assembler can handle them. */
6601 if (aligned_p && size == 8
6602 && (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_DOUBLE))
6604 if (TARGET_V9)
6606 assemble_integer_with_op ("\t.xword\t", x);
6607 return true;
6609 else
6611 assemble_aligned_integer (4, const0_rtx);
6612 assemble_aligned_integer (4, x);
6613 return true;
6616 return default_assemble_integer (x, size, aligned_p);
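/* Example of the effect (illustrative comment): for an aligned 8-byte
   symbolic value, a V9 target emits "\t.xword\tsym", while a pre-V9
   target falls back to a zero word followed by a 4-byte word holding
   the value.  */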
6619 /* Return the value of a code used in the .proc pseudo-op that says
6620 what kind of result this function returns. For non-C types, we pick
6621 the closest C type. */
6623 #ifndef SHORT_TYPE_SIZE
6624 #define SHORT_TYPE_SIZE (BITS_PER_UNIT * 2)
6625 #endif
6627 #ifndef INT_TYPE_SIZE
6628 #define INT_TYPE_SIZE BITS_PER_WORD
6629 #endif
6631 #ifndef LONG_TYPE_SIZE
6632 #define LONG_TYPE_SIZE BITS_PER_WORD
6633 #endif
6635 #ifndef LONG_LONG_TYPE_SIZE
6636 #define LONG_LONG_TYPE_SIZE (BITS_PER_WORD * 2)
6637 #endif
6639 #ifndef FLOAT_TYPE_SIZE
6640 #define FLOAT_TYPE_SIZE BITS_PER_WORD
6641 #endif
6643 #ifndef DOUBLE_TYPE_SIZE
6644 #define DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
6645 #endif
6647 #ifndef LONG_DOUBLE_TYPE_SIZE
6648 #define LONG_DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
6649 #endif
6651 unsigned long
6652 sparc_type_code (type)
6653 register tree type;
6655 register unsigned long qualifiers = 0;
6656 register unsigned shift;
6658 /* Only the first 30 bits of the qualifier are valid. We must refrain from
6659 setting more, since some assemblers will give an error for this. Also,
6660 we must be careful to avoid shifts of 32 bits or more to avoid getting
6661 unpredictable results. */
6663 for (shift = 6; shift < 30; shift += 2, type = TREE_TYPE (type))
6665 switch (TREE_CODE (type))
6667 case ERROR_MARK:
6668 return qualifiers;
6670 case ARRAY_TYPE:
6671 qualifiers |= (3 << shift);
6672 break;
6674 case FUNCTION_TYPE:
6675 case METHOD_TYPE:
6676 qualifiers |= (2 << shift);
6677 break;
6679 case POINTER_TYPE:
6680 case REFERENCE_TYPE:
6681 case OFFSET_TYPE:
6682 qualifiers |= (1 << shift);
6683 break;
6685 case RECORD_TYPE:
6686 return (qualifiers | 8);
6688 case UNION_TYPE:
6689 case QUAL_UNION_TYPE:
6690 return (qualifiers | 9);
6692 case ENUMERAL_TYPE:
6693 return (qualifiers | 10);
6695 case VOID_TYPE:
6696 return (qualifiers | 16);
6698 case INTEGER_TYPE:
6699 /* If this is a range type, consider it to be the underlying
6700 type. */
6701 if (TREE_TYPE (type) != 0)
6702 break;
6704 /* Carefully distinguish all the standard types of C,
6705 without messing up if the language is not C. We do this by
6706 testing TYPE_PRECISION and TREE_UNSIGNED. The old code used to
6707 look at both the names and the above fields, but that's redundant.
6708 Any type whose size is between two C types will be considered
6709 to be the wider of the two types. Also, we do not have a
6710 special code to use for "long long", so anything wider than
6711 long is treated the same. Note that we can't distinguish
6712 between "int" and "long" in this code if they are the same
6713 size, but that's fine, since neither can the assembler. */
6715 if (TYPE_PRECISION (type) <= CHAR_TYPE_SIZE)
6716 return (qualifiers | (TREE_UNSIGNED (type) ? 12 : 2));
6718 else if (TYPE_PRECISION (type) <= SHORT_TYPE_SIZE)
6719 return (qualifiers | (TREE_UNSIGNED (type) ? 13 : 3));
6721 else if (TYPE_PRECISION (type) <= INT_TYPE_SIZE)
6722 return (qualifiers | (TREE_UNSIGNED (type) ? 14 : 4));
6724 else
6725 return (qualifiers | (TREE_UNSIGNED (type) ? 15 : 5));
6727 case REAL_TYPE:
6728 /* If this is a range type, consider it to be the underlying
6729 type. */
6730 if (TREE_TYPE (type) != 0)
6731 break;
6733 /* Carefully distinguish all the standard types of C,
6734 without messing up if the language is not C. */
6736 if (TYPE_PRECISION (type) == FLOAT_TYPE_SIZE)
6737 return (qualifiers | 6);
6739 else
6740 return (qualifiers | 7);
6742 case COMPLEX_TYPE: /* GNU Fortran COMPLEX type. */
6743 /* ??? We need to distinguish between double and float complex types,
6744 but I don't know how yet because I can't reach this code from
6745 existing front-ends. */
6746 return (qualifiers | 7); /* Who knows? */
6748 case CHAR_TYPE: /* GNU Pascal CHAR type. Not used in C. */
6749 case BOOLEAN_TYPE: /* GNU Fortran BOOLEAN type. */
6750 case FILE_TYPE: /* GNU Pascal FILE type. */
6751 case SET_TYPE: /* GNU Pascal SET type. */
6752 case LANG_TYPE: /* ? */
6753 return qualifiers;
6755 default:
6756 abort (); /* Not a type! */
6760 return qualifiers;
6763 /* Nested function support. */
6765 /* Emit RTL insns to initialize the variable parts of a trampoline.
6766 FNADDR is an RTX for the address of the function's pure code.
6767 CXT is an RTX for the static chain value for the function.
6769 This takes 16 insns: 2 shifts & 2 ands (to split up addresses), 4 sethi
6770 (to load in opcodes), 4 iors (to merge address and opcodes), and 4 writes
6771 (to store insns). This is a bit excessive. Perhaps a different
6772 mechanism would be better here.
6774 Emit enough FLUSH insns to synchronize the data and instruction caches. */
6776 void
6777 sparc_initialize_trampoline (tramp, fnaddr, cxt)
6778 rtx tramp, fnaddr, cxt;
6780 /* SPARC 32 bit trampoline:
6782 sethi %hi(fn), %g1
6783 sethi %hi(static), %g2
6784 jmp %g1+%lo(fn)
6785 or %g2, %lo(static), %g2
6787 SETHI i,r = 00rr rrr1 00ii iiii iiii iiii iiii iiii
6788 JMPL r+i,d = 10dd ddd1 1100 0rrr rr1i iiii iiii iiii
6790 #ifdef TRANSFER_FROM_TRAMPOLINE
6791 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__enable_execute_stack"),
6792 LCT_NORMAL, VOIDmode, 1, tramp, Pmode);
6793 #endif
6795 emit_move_insn
6796 (gen_rtx_MEM (SImode, plus_constant (tramp, 0)),
6797 expand_binop (SImode, ior_optab,
6798 expand_shift (RSHIFT_EXPR, SImode, fnaddr,
6799 size_int (10), 0, 1),
6800 GEN_INT (trunc_int_for_mode (0x03000000, SImode)),
6801 NULL_RTX, 1, OPTAB_DIRECT));
6803 emit_move_insn
6804 (gen_rtx_MEM (SImode, plus_constant (tramp, 4)),
6805 expand_binop (SImode, ior_optab,
6806 expand_shift (RSHIFT_EXPR, SImode, cxt,
6807 size_int (10), 0, 1),
6808 GEN_INT (trunc_int_for_mode (0x05000000, SImode)),
6809 NULL_RTX, 1, OPTAB_DIRECT));
6811 emit_move_insn
6812 (gen_rtx_MEM (SImode, plus_constant (tramp, 8)),
6813 expand_binop (SImode, ior_optab,
6814 expand_and (SImode, fnaddr, GEN_INT (0x3ff), NULL_RTX),
6815 GEN_INT (trunc_int_for_mode (0x81c06000, SImode)),
6816 NULL_RTX, 1, OPTAB_DIRECT));
6818 emit_move_insn
6819 (gen_rtx_MEM (SImode, plus_constant (tramp, 12)),
6820 expand_binop (SImode, ior_optab,
6821 expand_and (SImode, cxt, GEN_INT (0x3ff), NULL_RTX),
6822 GEN_INT (trunc_int_for_mode (0x8410a000, SImode)),
6823 NULL_RTX, 1, OPTAB_DIRECT));
6825 /* On UltraSPARC a flush flushes an entire cache line. The trampoline is
6826 aligned on a 16 byte boundary so one flush clears it all. */
6827 emit_insn (gen_flush (validize_mem (gen_rtx_MEM (SImode, tramp))));
6828 if (sparc_cpu != PROCESSOR_ULTRASPARC
6829 && sparc_cpu != PROCESSOR_ULTRASPARC3)
6830 emit_insn (gen_flush (validize_mem (gen_rtx_MEM (SImode,
6831 plus_constant (tramp, 8)))));
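/* Worked example for the first trampoline word (illustrative, not from
   the original sources): "fnaddr >> 10" leaves the high 22 bits of the
   function address in the imm22 field, and OR-ing in 0x03000000 supplies
   the opcode bits of "sethi %hi(fn), %g1" (00 | rd=00001 | op2=100, as in
   the SETHI layout shown above).  The constants 0x05000000, 0x81c06000
   and 0x8410a000 play the same role for the other three instructions.  */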
6834 /* The 64 bit version is simpler because it makes more sense to load the
6835 values as "immediate" data out of the trampoline. It's also easier since
6836 we can read the PC without clobbering a register. */
6838 void
6839 sparc64_initialize_trampoline (tramp, fnaddr, cxt)
6840 rtx tramp, fnaddr, cxt;
6842 #ifdef TRANSFER_FROM_TRAMPOLINE
6843 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__enable_execute_stack"),
6844 LCT_NORMAL, VOIDmode, 1, tramp, Pmode);
6845 #endif
6848 rd %pc, %g1
6849 ldx [%g1+24], %g5
6850 jmp %g5
6851 ldx [%g1+16], %g5
6852 +16 bytes data
6855 emit_move_insn (gen_rtx_MEM (SImode, tramp),
6856 GEN_INT (trunc_int_for_mode (0x83414000, SImode)));
6857 emit_move_insn (gen_rtx_MEM (SImode, plus_constant (tramp, 4)),
6858 GEN_INT (trunc_int_for_mode (0xca586018, SImode)));
6859 emit_move_insn (gen_rtx_MEM (SImode, plus_constant (tramp, 8)),
6860 GEN_INT (trunc_int_for_mode (0x81c14000, SImode)));
6861 emit_move_insn (gen_rtx_MEM (SImode, plus_constant (tramp, 12)),
6862 GEN_INT (trunc_int_for_mode (0xca586010, SImode)));
6863 emit_move_insn (gen_rtx_MEM (DImode, plus_constant (tramp, 16)), cxt);
6864 emit_move_insn (gen_rtx_MEM (DImode, plus_constant (tramp, 24)), fnaddr);
6865 emit_insn (gen_flushdi (validize_mem (gen_rtx_MEM (DImode, tramp))));
6867 if (sparc_cpu != PROCESSOR_ULTRASPARC
6868 && sparc_cpu != PROCESSOR_ULTRASPARC3)
6869 emit_insn (gen_flushdi (validize_mem (gen_rtx_MEM (DImode, plus_constant (tramp, 8)))));
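/* Added note (paraphrasing the code above): the four constants are just
   the literal encodings of the insns in the comment (rd %pc / ldx / jmp /
   ldx), while the static chain and function address are stored as plain
   data at offsets 16 and 24 and fetched PC-relative at run time, so no
   %hi/%lo splitting is needed in the 64-bit case.  */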
6872 /* Subroutines to support a flat (single) register window calling
6873 convention. */
6875 /* Single-register window sparc stack frames look like:
6877 Before call After call
6878 +-----------------------+ +-----------------------+
6879 high | | | |
6880 mem | caller's temps. | | caller's temps. |
6881 | | | |
6882 +-----------------------+ +-----------------------+
6883 | | | |
6884 | arguments on stack. | | arguments on stack. |
6885 | | | |
6886 +-----------------------+FP+92->+-----------------------+
6887 | 6 words to save | | 6 words to save |
6888 | arguments passed | | arguments passed |
6889 | in registers, even | | in registers, even |
6890 | if not passed. | | if not passed. |
6891 SP+68->+-----------------------+FP+68->+-----------------------+
6892 | 1 word struct addr | | 1 word struct addr |
6893 +-----------------------+FP+64->+-----------------------+
6894 | | | |
6895 | 16 word reg save area | | 16 word reg save area |
6896 | | | |
6897 SP->+-----------------------+ FP->+-----------------------+
6898 | 4 word area for |
6899 | fp/alu reg moves |
6900 FP-16->+-----------------------+
6902 | local variables |
6904 +-----------------------+
6906 | fp register save |
6908 +-----------------------+
6910 | gp register save |
6912 +-----------------------+
6914 | alloca allocations |
6916 +-----------------------+
6918 | arguments on stack |
6920 SP+92->+-----------------------+
6921 | 6 words to save |
6922 | arguments passed |
6923 | in registers, even |
6924 low | if not passed. |
6925 memory SP+68->+-----------------------+
6926 | 1 word struct addr |
6927 SP+64->+-----------------------+
6929 | 16 word reg save area |
6931 SP->+-----------------------+ */
6933 /* Structure to be filled in by sparc_flat_compute_frame_size with register
6934 save masks, and offsets for the current function. */
6936 struct sparc_frame_info
6938 unsigned long total_size; /* # bytes that the entire frame takes up. */
6939 unsigned long var_size; /* # bytes that variables take up. */
6940 unsigned long args_size; /* # bytes that outgoing arguments take up. */
6941 unsigned long extra_size; /* # bytes of extra gunk. */
6942 unsigned int gp_reg_size; /* # bytes needed to store gp regs. */
6943 unsigned int fp_reg_size; /* # bytes needed to store fp regs. */
6944 unsigned long gmask; /* Mask of saved gp registers. */
6945 unsigned long fmask; /* Mask of saved fp registers. */
6946 unsigned long reg_offset; /* Offset from new sp to store regs. */
6947 int initialized; /* Nonzero if frame size already calculated. */
6950 /* Current frame information calculated by sparc_flat_compute_frame_size. */
6951 struct sparc_frame_info current_frame_info;
6953 /* Zero structure to initialize current_frame_info. */
6954 struct sparc_frame_info zero_frame_info;
6956 /* Tell prologue and epilogue if register REGNO should be saved / restored. */
6958 #define RETURN_ADDR_REGNUM 15
6959 #define HARD_FRAME_POINTER_MASK (1 << (HARD_FRAME_POINTER_REGNUM))
6960 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
6962 #define MUST_SAVE_REGISTER(regno) \
6963 ((regs_ever_live[regno] && !call_used_regs[regno]) \
6964 || (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed) \
6965 || (regno == RETURN_ADDR_REGNUM && regs_ever_live[RETURN_ADDR_REGNUM]))
6967 /* Return the bytes needed to compute the frame pointer from the current
6968 stack pointer. */
6970 unsigned long
6971 sparc_flat_compute_frame_size (size)
6972 int size; /* # of var. bytes allocated. */
6974 int regno;
6975 unsigned long total_size; /* # bytes that the entire frame takes up. */
6976 unsigned long var_size; /* # bytes that variables take up. */
6977 unsigned long args_size; /* # bytes that outgoing arguments take up. */
6978 unsigned long extra_size; /* # extra bytes. */
6979 unsigned int gp_reg_size; /* # bytes needed to store gp regs. */
6980 unsigned int fp_reg_size; /* # bytes needed to store fp regs. */
6981 unsigned long gmask; /* Mask of saved gp registers. */
6982 unsigned long fmask; /* Mask of saved fp registers. */
6983 unsigned long reg_offset; /* Offset to register save area. */
6984 int need_aligned_p; /* 1 if need the save area 8 byte aligned. */
6986 /* This is the size of the 16 word reg save area, 1 word struct addr
6987 area, and 4 word fp/alu register copy area. */
6988 extra_size = -STARTING_FRAME_OFFSET + FIRST_PARM_OFFSET(0);
6989 var_size = size;
6990 gp_reg_size = 0;
6991 fp_reg_size = 0;
6992 gmask = 0;
6993 fmask = 0;
6994 reg_offset = 0;
6995 need_aligned_p = 0;
6997 args_size = 0;
6998 if (!leaf_function_p ())
7000 /* Also include the size needed for the 6 parameter registers. */
7001 args_size = current_function_outgoing_args_size + 24;
7003 total_size = var_size + args_size;
7005 /* Calculate space needed for gp registers. */
7006 for (regno = 1; regno <= 31; regno++)
7008 if (MUST_SAVE_REGISTER (regno))
7010 /* If we need to save two regs in a row, ensure there's room to bump
7011 up the address to align it to a doubleword boundary. */
7012 if ((regno & 0x1) == 0 && MUST_SAVE_REGISTER (regno+1))
7014 if (gp_reg_size % 8 != 0)
7015 gp_reg_size += 4;
7016 gp_reg_size += 2 * UNITS_PER_WORD;
7017 gmask |= 3 << regno;
7018 regno++;
7019 need_aligned_p = 1;
7021 else
7023 gp_reg_size += UNITS_PER_WORD;
7024 gmask |= 1 << regno;
7029 /* Calculate space needed for fp registers. */
7030 for (regno = 32; regno <= 63; regno++)
7032 if (regs_ever_live[regno] && !call_used_regs[regno])
7034 fp_reg_size += UNITS_PER_WORD;
7035 fmask |= 1 << (regno - 32);
7039 if (gmask || fmask)
7041 int n;
7042 reg_offset = FIRST_PARM_OFFSET(0) + args_size;
7043 /* Ensure save area is 8 byte aligned if we need it. */
7044 n = reg_offset % 8;
7045 if (need_aligned_p && n != 0)
7047 total_size += 8 - n;
7048 reg_offset += 8 - n;
7050 total_size += gp_reg_size + fp_reg_size;
7053 /* If we must allocate a stack frame at all, we must also allocate
7054 room for register window spillage, so as to be binary compatible
7055 with libraries and operating systems that do not use -mflat. */
7056 if (total_size > 0)
7057 total_size += extra_size;
7058 else
7059 extra_size = 0;
7061 total_size = SPARC_STACK_ALIGN (total_size);
7063 /* Save other computed information. */
7064 current_frame_info.total_size = total_size;
7065 current_frame_info.var_size = var_size;
7066 current_frame_info.args_size = args_size;
7067 current_frame_info.extra_size = extra_size;
7068 current_frame_info.gp_reg_size = gp_reg_size;
7069 current_frame_info.fp_reg_size = fp_reg_size;
7070 current_frame_info.gmask = gmask;
7071 current_frame_info.fmask = fmask;
7072 current_frame_info.reg_offset = reg_offset;
7073 current_frame_info.initialized = reload_completed;
7075 /* Ok, we're done. */
7076 return total_size;
7079 /* Save/restore registers in GMASK and FMASK at register BASE_REG plus offset
7080 OFFSET.
7082 BASE_REG must be 8 byte aligned. This allows us to test OFFSET for
7083 appropriate alignment and use DOUBLEWORD_OP when we can. We assume
7084 [BASE_REG+OFFSET] will always be a valid address.
7086 WORD_OP is either "st" for save, "ld" for restore.
7087 DOUBLEWORD_OP is either "std" for save, "ldd" for restore. */
7089 void
7090 sparc_flat_save_restore (file, base_reg, offset, gmask, fmask, word_op,
7091 doubleword_op, base_offset)
7092 FILE *file;
7093 const char *base_reg;
7094 unsigned int offset;
7095 unsigned long gmask;
7096 unsigned long fmask;
7097 const char *word_op;
7098 const char *doubleword_op;
7099 unsigned long base_offset;
7101 int regno;
7103 if (gmask == 0 && fmask == 0)
7104 return;
7106 /* Save registers starting from high to low. We've already saved the
7107 previous frame pointer and previous return address for the debugger's
7108 sake. The debugger allows us to not need a nop in the epilog if at least
7109 one register is reloaded in addition to the return address.  */
7111 if (gmask)
7113 for (regno = 1; regno <= 31; regno++)
7115 if ((gmask & (1L << regno)) != 0)
7117 if ((regno & 0x1) == 0 && ((gmask & (1L << (regno+1))) != 0))
7119 /* We can save two registers in a row. If we're not at a
7120 double word boundary, move to one.
7121 sparc_flat_compute_frame_size ensures there's room to do
7122 this. */
7123 if (offset % 8 != 0)
7124 offset += UNITS_PER_WORD;
7126 if (word_op[0] == 's')
7128 fprintf (file, "\t%s\t%s, [%s+%d]\n",
7129 doubleword_op, reg_names[regno],
7130 base_reg, offset);
7131 if (dwarf2out_do_frame ())
7133 char *l = dwarf2out_cfi_label ();
7134 dwarf2out_reg_save (l, regno, offset + base_offset);
7135 dwarf2out_reg_save
7136 (l, regno+1, offset+base_offset + UNITS_PER_WORD);
7139 else
7140 fprintf (file, "\t%s\t[%s+%d], %s\n",
7141 doubleword_op, base_reg, offset,
7142 reg_names[regno]);
7144 offset += 2 * UNITS_PER_WORD;
7145 regno++;
7147 else
7149 if (word_op[0] == 's')
7151 fprintf (file, "\t%s\t%s, [%s+%d]\n",
7152 word_op, reg_names[regno],
7153 base_reg, offset);
7154 if (dwarf2out_do_frame ())
7155 dwarf2out_reg_save ("", regno, offset + base_offset);
7157 else
7158 fprintf (file, "\t%s\t[%s+%d], %s\n",
7159 word_op, base_reg, offset, reg_names[regno]);
7161 offset += UNITS_PER_WORD;
7167 if (fmask)
7169 for (regno = 32; regno <= 63; regno++)
7171 if ((fmask & (1L << (regno - 32))) != 0)
7173 if (word_op[0] == 's')
7175 fprintf (file, "\t%s\t%s, [%s+%d]\n",
7176 word_op, reg_names[regno],
7177 base_reg, offset);
7178 if (dwarf2out_do_frame ())
7179 dwarf2out_reg_save ("", regno, offset + base_offset);
7181 else
7182 fprintf (file, "\t%s\t[%s+%d], %s\n",
7183 word_op, base_reg, offset, reg_names[regno]);
7185 offset += UNITS_PER_WORD;
7191 /* Set up the stack and frame (if desired) for the function. */
7193 static void
7194 sparc_flat_function_prologue (file, size)
7195 FILE *file;
7196 HOST_WIDE_INT size;
7198 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
7199 unsigned long gmask = current_frame_info.gmask;
7201 sparc_output_scratch_registers (file);
7203 /* This is only for the human reader. */
7204 fprintf (file, "\t%s#PROLOGUE# 0\n", ASM_COMMENT_START);
7205 fprintf (file, "\t%s# vars= %ld, regs= %d/%d, args= %d, extra= %ld\n",
7206 ASM_COMMENT_START,
7207 current_frame_info.var_size,
7208 current_frame_info.gp_reg_size / 4,
7209 current_frame_info.fp_reg_size / 4,
7210 current_function_outgoing_args_size,
7211 current_frame_info.extra_size);
7213 size = SPARC_STACK_ALIGN (size);
7214 size = (! current_frame_info.initialized
7215 ? sparc_flat_compute_frame_size (size)
7216 : current_frame_info.total_size);
7218 /* These cases shouldn't happen. Catch them now. */
7219 if (size == 0 && (gmask || current_frame_info.fmask))
7220 abort ();
7222 /* Allocate our stack frame by decrementing %sp.
7223 At present, the only algorithm gdb can use to determine if this is a
7224 flat frame is if we always set %i7 if we set %sp. This can be optimized
7225 in the future by putting in some sort of debugging information that says
7226 this is a `flat' function. However, there is still the case of debugging
7227 code without such debugging information (including cases where most fns
7228 have such info, but there is one that doesn't). So, always do this now
7229 so we don't get a lot of code out there that gdb can't handle.
7230 If the frame pointer isn't needed then that's ok - gdb won't be able to
7231 distinguish us from a non-flat function but there won't (and shouldn't)
7232 be any differences anyway. The return pc is saved (if necessary) right
7233 after %i7 so gdb won't have to look too far to find it. */
7234 if (size > 0)
7236 unsigned int reg_offset = current_frame_info.reg_offset;
7237 const char *const fp_str = reg_names[HARD_FRAME_POINTER_REGNUM];
7238 static const char *const t1_str = "%g1";
7240 /* Things get a little tricky if local variables take up more than ~4096
7241 bytes and outgoing arguments take up more than ~4096 bytes. When that
7242 happens, the register save area can't be accessed from either end of
7243 the frame. Handle this by decrementing %sp to the start of the gp
7244 register save area, save the regs, update %i7, and then set %sp to its
7245 final value. Given that we only have one scratch register to play
7246 with it is the cheapest solution, and it helps gdb out as it won't
7247 slow down recognition of flat functions.
7248 Don't change the order of insns emitted here without checking with
7249 the gdb folk first. */
7251 /* Is the entire register save area offsettable from %sp? */
7252 if (reg_offset < 4096 - 64 * (unsigned) UNITS_PER_WORD)
7254 if (size <= 4096)
7256 fprintf (file, "\tadd\t%s, %d, %s\n",
7257 sp_str, (int) -size, sp_str);
7258 if (gmask & HARD_FRAME_POINTER_MASK)
7260 fprintf (file, "\tst\t%s, [%s+%d]\n",
7261 fp_str, sp_str, reg_offset);
7262 fprintf (file, "\tsub\t%s, %d, %s\t%s# set up frame pointer\n",
7263 sp_str, (int) -size, fp_str, ASM_COMMENT_START);
7264 reg_offset += 4;
7267 else
7269 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC
7270 ", %s\n\tsub\t%s, %s, %s\n",
7271 size, t1_str, sp_str, t1_str, sp_str);
7272 if (gmask & HARD_FRAME_POINTER_MASK)
7274 fprintf (file, "\tst\t%s, [%s+%d]\n",
7275 fp_str, sp_str, reg_offset);
7276 fprintf (file, "\tadd\t%s, %s, %s\t%s# set up frame pointer\n",
7277 sp_str, t1_str, fp_str, ASM_COMMENT_START);
7278 reg_offset += 4;
7281 if (dwarf2out_do_frame ())
7283 char *l = dwarf2out_cfi_label ();
7284 if (gmask & HARD_FRAME_POINTER_MASK)
7286 dwarf2out_reg_save (l, HARD_FRAME_POINTER_REGNUM,
7287 reg_offset - 4 - size);
7288 dwarf2out_def_cfa (l, HARD_FRAME_POINTER_REGNUM, 0);
7290 else
7291 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, size);
7293 if (gmask & RETURN_ADDR_MASK)
7295 fprintf (file, "\tst\t%s, [%s+%d]\n",
7296 reg_names[RETURN_ADDR_REGNUM], sp_str, reg_offset);
7297 if (dwarf2out_do_frame ())
7298 dwarf2out_return_save ("", reg_offset - size);
7299 reg_offset += 4;
7301 sparc_flat_save_restore (file, sp_str, reg_offset,
7302 gmask & ~(HARD_FRAME_POINTER_MASK | RETURN_ADDR_MASK),
7303 current_frame_info.fmask,
7304 "st", "std", -size);
7306 else
7308 /* Subtract %sp in two steps, but make sure there is always a
7309 64 byte register save area, and %sp is properly aligned. */
7310 /* Amount to decrement %sp by, the first time. */
7311 unsigned HOST_WIDE_INT size1 = ((size - reg_offset + 64) + 15) & -16;
7312 /* Offset to register save area from %sp. */
7313 unsigned HOST_WIDE_INT offset = size1 - (size - reg_offset);
7315 if (size1 <= 4096)
7317 fprintf (file, "\tadd\t%s, %d, %s\n",
7318 sp_str, (int) -size1, sp_str);
7319 if (gmask & HARD_FRAME_POINTER_MASK)
7321 fprintf (file, "\tst\t%s, [%s+%d]\n\tsub\t%s, %d, %s\t%s# set up frame pointer\n",
7322 fp_str, sp_str, (int) offset, sp_str, (int) -size1,
7323 fp_str, ASM_COMMENT_START);
7324 offset += 4;
7327 else
7329 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC
7330 ", %s\n\tsub\t%s, %s, %s\n",
7331 size1, t1_str, sp_str, t1_str, sp_str);
7332 if (gmask & HARD_FRAME_POINTER_MASK)
7334 fprintf (file, "\tst\t%s, [%s+%d]\n\tadd\t%s, %s, %s\t%s# set up frame pointer\n",
7335 fp_str, sp_str, (int) offset, sp_str, t1_str,
7336 fp_str, ASM_COMMENT_START);
7337 offset += 4;
7340 if (dwarf2out_do_frame ())
7342 char *l = dwarf2out_cfi_label ();
7343 if (gmask & HARD_FRAME_POINTER_MASK)
7345 dwarf2out_reg_save (l, HARD_FRAME_POINTER_REGNUM,
7346 offset - 4 - size1);
7347 dwarf2out_def_cfa (l, HARD_FRAME_POINTER_REGNUM, 0);
7349 else
7350 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, size1);
7352 if (gmask & RETURN_ADDR_MASK)
7354 fprintf (file, "\tst\t%s, [%s+%d]\n",
7355 reg_names[RETURN_ADDR_REGNUM], sp_str, (int) offset);
7356 if (dwarf2out_do_frame ())
7357 /* offset - size1 == reg_offset - size
7358 if reg_offset were updated above like offset. */
7359 dwarf2out_return_save ("", offset - size1);
7360 offset += 4;
7362 sparc_flat_save_restore (file, sp_str, offset,
7363 gmask & ~(HARD_FRAME_POINTER_MASK | RETURN_ADDR_MASK),
7364 current_frame_info.fmask,
7365 "st", "std", -size1);
7366 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC
7367 ", %s\n\tsub\t%s, %s, %s\n",
7368 size - size1, t1_str, sp_str, t1_str, sp_str);
7369 if (dwarf2out_do_frame ())
7370 if (! (gmask & HARD_FRAME_POINTER_MASK))
7371 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, size);
7375 fprintf (file, "\t%s#PROLOGUE# 1\n", ASM_COMMENT_START);
7378 /* Do any necessary cleanup after a function to restore stack, frame,
7379 and regs. */
7381 static void
7382 sparc_flat_function_epilogue (file, size)
7383 FILE *file;
7384 HOST_WIDE_INT size;
7386 rtx epilogue_delay = current_function_epilogue_delay_list;
7387 int noepilogue = FALSE;
7389 /* This is only for the human reader. */
7390 fprintf (file, "\t%s#EPILOGUE#\n", ASM_COMMENT_START);
7392 /* The epilogue does not depend on any registers other than the stack
7393 registers, so we assume that if we have 1 pending nop, it can be
7394 ignored, and if we have 2 they must be filled (2 nops occur for integer
7395 multiply and divide).
7397 size = SPARC_STACK_ALIGN (size);
7398 size = (!current_frame_info.initialized
7399 ? sparc_flat_compute_frame_size (size)
7400 : current_frame_info.total_size);
7402 if (size == 0 && epilogue_delay == 0)
7404 rtx insn = get_last_insn ();
7406 /* If the last insn was a BARRIER, we don't have to write any code
7407 because a jump (aka return) was put there. */
7408 if (GET_CODE (insn) == NOTE)
7409 insn = prev_nonnote_insn (insn);
7410 if (insn && GET_CODE (insn) == BARRIER)
7411 noepilogue = TRUE;
7414 if (!noepilogue)
7416 unsigned HOST_WIDE_INT reg_offset = current_frame_info.reg_offset;
7417 unsigned HOST_WIDE_INT size1;
7418 const char *const sp_str = reg_names[STACK_POINTER_REGNUM];
7419 const char *const fp_str = reg_names[HARD_FRAME_POINTER_REGNUM];
7420 static const char *const t1_str = "%g1";
7422 /* In the reload sequence, we don't need to fill the load delay
7423 slots for most of the loads; also see if we can fill the final
7424 delay slot if not otherwise filled by the reload sequence. */
7426 if (size > 4095)
7427 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC ", %s\n",
7428 size, t1_str);
7430 if (frame_pointer_needed)
7432 if (size > 4095)
7433 fprintf (file,"\tsub\t%s, %s, %s\t\t%s# sp not trusted here\n",
7434 fp_str, t1_str, sp_str, ASM_COMMENT_START);
7435 else
7436 fprintf (file,"\tsub\t%s, %d, %s\t\t%s# sp not trusted here\n",
7437 fp_str, (int) size, sp_str, ASM_COMMENT_START);
7440 /* Is the entire register save area offsettable from %sp? */
7441 if (reg_offset < 4096 - 64 * (unsigned) UNITS_PER_WORD)
7443 size1 = 0;
7445 else
7447 /* Restore %sp in two steps, but make sure there is always a
7448 64 byte register save area, and %sp is properly aligned. */
7449 /* Amount to increment %sp by, the first time. */
7450 size1 = ((reg_offset - 64 - 16) + 15) & -16;
7451 /* Offset to register save area from %sp. */
7452 reg_offset = size1 - reg_offset;
7454 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC
7455 ", %s\n\tadd\t%s, %s, %s\n",
7456 size1, t1_str, sp_str, t1_str, sp_str);
7459 /* We must restore the frame pointer and return address reg first
7460 because they are treated specially by the prologue output code. */
7461 if (current_frame_info.gmask & HARD_FRAME_POINTER_MASK)
7463 fprintf (file, "\tld\t[%s+%d], %s\n",
7464 sp_str, (int) reg_offset, fp_str);
7465 reg_offset += 4;
7467 if (current_frame_info.gmask & RETURN_ADDR_MASK)
7469 fprintf (file, "\tld\t[%s+%d], %s\n",
7470 sp_str, (int) reg_offset, reg_names[RETURN_ADDR_REGNUM]);
7471 reg_offset += 4;
7474 /* Restore any remaining saved registers. */
7475 sparc_flat_save_restore (file, sp_str, reg_offset,
7476 current_frame_info.gmask & ~(HARD_FRAME_POINTER_MASK | RETURN_ADDR_MASK),
7477 current_frame_info.fmask,
7478 "ld", "ldd", 0);
7480 /* If we had to increment %sp in two steps, record it so the second
7481 restoration in the epilogue finishes up. */
7482 if (size1 > 0)
7484 size -= size1;
7485 if (size > 4095)
7486 fprintf (file, "\tset\t" HOST_WIDE_INT_PRINT_DEC ", %s\n",
7487 size, t1_str);
7490 if (current_function_returns_struct)
7491 fprintf (file, "\tjmp\t%%o7+12\n");
7492 else
7493 fprintf (file, "\tretl\n");
7495 /* If the only register saved is the return address, we need a
7496 nop, unless we have an instruction to put into it. Otherwise
7497 we don't since reloading multiple registers doesn't reference
7498 the register being loaded. */
7500 if (epilogue_delay)
7502 if (size)
7503 abort ();
7504 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
7507 else if (size > 4095)
7508 fprintf (file, "\tadd\t%s, %s, %s\n", sp_str, t1_str, sp_str);
7510 else if (size > 0)
7511 fprintf (file, "\tadd\t%s, %d, %s\n", sp_str, (int) size, sp_str);
7513 else
7514 fprintf (file, "\tnop\n");
7517 /* Reset state info for each function. */
7518 current_frame_info = zero_frame_info;
7520 sparc_output_deferred_case_vectors ();
7523 /* Define the number of delay slots needed for the function epilogue.
7525 On the sparc, we need a slot if either no stack has been allocated,
7526 or the only register saved is the return register. */
7529 sparc_flat_epilogue_delay_slots ()
7531 if (!current_frame_info.initialized)
7532 (void) sparc_flat_compute_frame_size (get_frame_size ());
7534 if (current_frame_info.total_size == 0)
7535 return 1;
7537 return 0;
7540 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
7541 Any single length instruction which doesn't reference the stack or frame
7542 pointer is OK. */
7545 sparc_flat_eligible_for_epilogue_delay (trial, slot)
7546 rtx trial;
7547 int slot ATTRIBUTE_UNUSED;
7549 rtx pat = PATTERN (trial);
7551 if (get_attr_length (trial) != 1)
7552 return 0;
7554 if (! reg_mentioned_p (stack_pointer_rtx, pat)
7555 && ! reg_mentioned_p (frame_pointer_rtx, pat))
7556 return 1;
7558 return 0;
7561 /* Adjust the cost of a scheduling dependency. Return the new cost of
7562 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
7564 static int
7565 supersparc_adjust_cost (insn, link, dep_insn, cost)
7566 rtx insn;
7567 rtx link;
7568 rtx dep_insn;
7569 int cost;
7571 enum attr_type insn_type;
7573 if (! recog_memoized (insn))
7574 return 0;
7576 insn_type = get_attr_type (insn);
7578 if (REG_NOTE_KIND (link) == 0)
7580 /* Data dependency; DEP_INSN writes a register that INSN reads some
7581 cycles later. */
7583 /* if a load, then the dependence must be on the memory address;
7584 add an extra "cycle". Note that the cost could be two cycles
7585 if the reg was written late in an instruction group; we cannot tell
7586 here. */
7587 if (insn_type == TYPE_LOAD || insn_type == TYPE_FPLOAD)
7588 return cost + 3;
7590 /* Get the delay only if the address of the store is the dependence. */
7591 if (insn_type == TYPE_STORE || insn_type == TYPE_FPSTORE)
7593 rtx pat = PATTERN(insn);
7594 rtx dep_pat = PATTERN (dep_insn);
7596 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
7597 return cost; /* This should not happen! */
7599 /* The dependency between the two instructions was on the data that
7600 is being stored. Assume that this implies that the address of the
7601 store is not dependent. */
7602 if (rtx_equal_p (SET_DEST (dep_pat), SET_SRC (pat)))
7603 return cost;
7605 return cost + 3; /* An approximation. */
7608 /* A shift instruction cannot receive its data from an instruction
7609 in the same cycle; add a one cycle penalty. */
7610 if (insn_type == TYPE_SHIFT)
7611 return cost + 3; /* Split before cascade into shift. */
7613 else
7615 /* Anti- or output- dependency; DEP_INSN reads/writes a register that
7616 INSN writes some cycles later. */
7618 /* These are only significant for the fpu unit; writing a fp reg before
7619 the fpu has finished with it stalls the processor. */
7621 /* Reusing an integer register causes no problems. */
7622 if (insn_type == TYPE_IALU || insn_type == TYPE_SHIFT)
7623 return 0;
7626 return cost;
7629 static int
7630 hypersparc_adjust_cost (insn, link, dep_insn, cost)
7631 rtx insn;
7632 rtx link;
7633 rtx dep_insn;
7634 int cost;
7636 enum attr_type insn_type, dep_type;
7637 rtx pat = PATTERN(insn);
7638 rtx dep_pat = PATTERN (dep_insn);
7640 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
7641 return cost;
7643 insn_type = get_attr_type (insn);
7644 dep_type = get_attr_type (dep_insn);
7646 switch (REG_NOTE_KIND (link))
7648 case 0:
7649 /* Data dependency; DEP_INSN writes a register that INSN reads some
7650 cycles later. */
7652 switch (insn_type)
7654 case TYPE_STORE:
7655 case TYPE_FPSTORE:
7656 /* Get the delay iff the address of the store is the dependence. */
7657 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
7658 return cost;
7660 if (rtx_equal_p (SET_DEST (dep_pat), SET_SRC (pat)))
7661 return cost;
7662 return cost + 3;
7664 case TYPE_LOAD:
7665 case TYPE_SLOAD:
7666 case TYPE_FPLOAD:
7667 /* If a load, then the dependence must be on the memory address. If
7668 the addresses aren't equal, then it might be a false dependency.  */
7669 if (dep_type == TYPE_STORE || dep_type == TYPE_FPSTORE)
7671 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET
7672 || GET_CODE (SET_DEST (dep_pat)) != MEM
7673 || GET_CODE (SET_SRC (pat)) != MEM
7674 || ! rtx_equal_p (XEXP (SET_DEST (dep_pat), 0),
7675 XEXP (SET_SRC (pat), 0)))
7676 return cost + 2;
7678 return cost + 8;
7680 break;
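/* Added note (paraphrasing the tests above): on hypersparc a load whose
   address is not known to equal that of the dependent store gets only a
   small bump (cost + 2), while a load from exactly the address just
   stored pays the full store-to-load penalty (cost + 8).  */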
7682 case TYPE_BRANCH:
7683 /* Compare to branch latency is 0. There is no benefit from
7684 separating compare and branch. */
7685 if (dep_type == TYPE_COMPARE)
7686 return 0;
7687 /* Floating point compare to branch latency is less than
7688 compare to conditional move. */
7689 if (dep_type == TYPE_FPCMP)
7690 return cost - 1;
7691 break;
7692 default:
7693 break;
7695 break;
7697 case REG_DEP_ANTI:
7698 /* Anti-dependencies only penalize the fpu unit. */
7699 if (insn_type == TYPE_IALU || insn_type == TYPE_SHIFT)
7700 return 0;
7701 break;
7703 default:
7704 break;
7707 return cost;
7710 static int
7711 sparc_adjust_cost(insn, link, dep, cost)
7712 rtx insn;
7713 rtx link;
7714 rtx dep;
7715 int cost;
7717 switch (sparc_cpu)
7719 case PROCESSOR_SUPERSPARC:
7720 cost = supersparc_adjust_cost (insn, link, dep, cost);
7721 break;
7722 case PROCESSOR_HYPERSPARC:
7723 case PROCESSOR_SPARCLITE86X:
7724 cost = hypersparc_adjust_cost (insn, link, dep, cost);
7725 break;
7726 default:
7727 break;
7729 return cost;
7732 static void
7733 sparc_sched_init (dump, sched_verbose, max_ready)
7734 FILE *dump ATTRIBUTE_UNUSED;
7735 int sched_verbose ATTRIBUTE_UNUSED;
7736 int max_ready ATTRIBUTE_UNUSED;
7740 static int
7741 sparc_use_dfa_pipeline_interface ()
7743 if ((1 << sparc_cpu) &
7744 ((1 << PROCESSOR_ULTRASPARC) | (1 << PROCESSOR_CYPRESS) |
7745 (1 << PROCESSOR_SUPERSPARC) | (1 << PROCESSOR_HYPERSPARC) |
7746 (1 << PROCESSOR_SPARCLITE86X) | (1 << PROCESSOR_TSC701) |
7747 (1 << PROCESSOR_ULTRASPARC3)))
7748 return 1;
7749 return 0;
7752 static int
7753 sparc_use_sched_lookahead ()
7755 if (sparc_cpu == PROCESSOR_ULTRASPARC
7756 || sparc_cpu == PROCESSOR_ULTRASPARC3)
7757 return 4;
7758 if ((1 << sparc_cpu) &
7759 ((1 << PROCESSOR_SUPERSPARC) | (1 << PROCESSOR_HYPERSPARC) |
7760 (1 << PROCESSOR_SPARCLITE86X)))
7761 return 3;
7762 return 0;
7765 static int
7766 sparc_issue_rate ()
7768 switch (sparc_cpu)
7770 default:
7771 return 1;
7772 case PROCESSOR_V9:
7773 /* Assume V9 processors are capable of at least dual-issue. */
7774 return 2;
7775 case PROCESSOR_SUPERSPARC:
7776 return 3;
7777 case PROCESSOR_HYPERSPARC:
7778 case PROCESSOR_SPARCLITE86X:
7779 return 2;
7780 case PROCESSOR_ULTRASPARC:
7781 case PROCESSOR_ULTRASPARC3:
7782 return 4;
7786 static int
7787 set_extends (insn)
7788 rtx insn;
7790 register rtx pat = PATTERN (insn);
7792 switch (GET_CODE (SET_SRC (pat)))
7794 /* Load and some shift instructions zero extend. */
7795 case MEM:
7796 case ZERO_EXTEND:
7797 /* sethi clears the high bits */
7798 case HIGH:
7799 /* LO_SUM is used with sethi. sethi cleared the high
7800 bits and the values used with lo_sum are positive */
7801 case LO_SUM:
7802 /* Store flag stores 0 or 1 */
7803 case LT: case LTU:
7804 case GT: case GTU:
7805 case LE: case LEU:
7806 case GE: case GEU:
7807 case EQ:
7808 case NE:
7809 return 1;
7810 case AND:
7812 rtx op0 = XEXP (SET_SRC (pat), 0);
7813 rtx op1 = XEXP (SET_SRC (pat), 1);
7814 if (GET_CODE (op1) == CONST_INT)
7815 return INTVAL (op1) >= 0;
7816 if (GET_CODE (op0) != REG)
7817 return 0;
7818 if (sparc_check_64 (op0, insn) == 1)
7819 return 1;
7820 return (GET_CODE (op1) == REG && sparc_check_64 (op1, insn) == 1);
7822 case IOR:
7823 case XOR:
7825 rtx op0 = XEXP (SET_SRC (pat), 0);
7826 rtx op1 = XEXP (SET_SRC (pat), 1);
7827 if (GET_CODE (op0) != REG || sparc_check_64 (op0, insn) <= 0)
7828 return 0;
7829 if (GET_CODE (op1) == CONST_INT)
7830 return INTVAL (op1) >= 0;
7831 return (GET_CODE (op1) == REG && sparc_check_64 (op1, insn) == 1);
7833 case LSHIFTRT:
7834 return GET_MODE (SET_SRC (pat)) == SImode;
7835 /* Positive integers leave the high bits zero. */
7836 case CONST_DOUBLE:
7837 return ! (CONST_DOUBLE_LOW (SET_SRC (pat)) & 0x80000000);
7838 case CONST_INT:
7839 return ! (INTVAL (SET_SRC (pat)) & 0x80000000);
7840 case ASHIFTRT:
7841 case SIGN_EXTEND:
7842 return - (GET_MODE (SET_SRC (pat)) == SImode);
7843 case REG:
7844 return sparc_check_64 (SET_SRC (pat), insn);
7845 default:
7846 return 0;
7850 /* We _ought_ to have only one kind per function, but... */
7851 static GTY(()) rtx sparc_addr_diff_list;
7852 static GTY(()) rtx sparc_addr_list;
7854 void
7855 sparc_defer_case_vector (lab, vec, diff)
7856 rtx lab, vec;
7857 int diff;
7859 vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
7860 if (diff)
7861 sparc_addr_diff_list
7862 = gen_rtx_EXPR_LIST (VOIDmode, vec, sparc_addr_diff_list);
7863 else
7864 sparc_addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec, sparc_addr_list);
7867 static void
7868 sparc_output_addr_vec (vec)
7869 rtx vec;
7871 rtx lab = XEXP (vec, 0), body = XEXP (vec, 1);
7872 int idx, vlen = XVECLEN (body, 0);
7874 #ifdef ASM_OUTPUT_ADDR_VEC_START
7875 ASM_OUTPUT_ADDR_VEC_START (asm_out_file);
7876 #endif
7878 #ifdef ASM_OUTPUT_CASE_LABEL
7879 ASM_OUTPUT_CASE_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (lab),
7880 NEXT_INSN (lab));
7881 #else
7882 (*targetm.asm_out.internal_label) (asm_out_file, "L", CODE_LABEL_NUMBER (lab));
7883 #endif
7885 for (idx = 0; idx < vlen; idx++)
7887 ASM_OUTPUT_ADDR_VEC_ELT
7888 (asm_out_file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
7891 #ifdef ASM_OUTPUT_ADDR_VEC_END
7892 ASM_OUTPUT_ADDR_VEC_END (asm_out_file);
7893 #endif
7896 static void
7897 sparc_output_addr_diff_vec (vec)
7898 rtx vec;
7900 rtx lab = XEXP (vec, 0), body = XEXP (vec, 1);
7901 rtx base = XEXP (XEXP (body, 0), 0);
7902 int idx, vlen = XVECLEN (body, 1);
7904 #ifdef ASM_OUTPUT_ADDR_VEC_START
7905 ASM_OUTPUT_ADDR_VEC_START (asm_out_file);
7906 #endif
7908 #ifdef ASM_OUTPUT_CASE_LABEL
7909 ASM_OUTPUT_CASE_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (lab),
7910 NEXT_INSN (lab));
7911 #else
7912 (*targetm.asm_out.internal_label) (asm_out_file, "L", CODE_LABEL_NUMBER (lab));
7913 #endif
7915 for (idx = 0; idx < vlen; idx++)
7917 ASM_OUTPUT_ADDR_DIFF_ELT
7918 (asm_out_file,
7919 body,
7920 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
7921 CODE_LABEL_NUMBER (base));
7924 #ifdef ASM_OUTPUT_ADDR_VEC_END
7925 ASM_OUTPUT_ADDR_VEC_END (asm_out_file);
7926 #endif
7929 static void
7930 sparc_output_deferred_case_vectors ()
7932 rtx t;
7933 int align;
7935 if (sparc_addr_list == NULL_RTX
7936 && sparc_addr_diff_list == NULL_RTX)
7937 return;
7939 /* Align to cache line in the function's code section. */
7940 function_section (current_function_decl);
7942 align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
7943 if (align > 0)
7944 ASM_OUTPUT_ALIGN (asm_out_file, align);
7946 for (t = sparc_addr_list; t ; t = XEXP (t, 1))
7947 sparc_output_addr_vec (XEXP (t, 0));
7948 for (t = sparc_addr_diff_list; t ; t = XEXP (t, 1))
7949 sparc_output_addr_diff_vec (XEXP (t, 0));
7951 sparc_addr_list = sparc_addr_diff_list = NULL_RTX;
7954 /* Return 0 if the high 32 bits of X (the low word of X, if DImode) are
7955 unknown. Return 1 if the high bits are zero, -1 if the register is
7956 sign extended. */
7958 sparc_check_64 (x, insn)
7959 rtx x, insn;
7961 /* If a register is set only once it is safe to ignore insns this
7962 code does not know how to handle. The loop will either recognize
7963 the single set and return the correct value or fail to recognize
7964 it and return 0. */
7965 int set_once = 0;
7966 rtx y = x;
7968 if (GET_CODE (x) != REG)
7969 abort ();
7971 if (GET_MODE (x) == DImode)
7972 y = gen_rtx_REG (SImode, REGNO (x) + WORDS_BIG_ENDIAN);
7974 if (flag_expensive_optimizations
7975 && REG_N_SETS (REGNO (y)) == 1)
7976 set_once = 1;
7978 if (insn == 0)
7980 if (set_once)
7981 insn = get_last_insn_anywhere ();
7982 else
7983 return 0;
7986 while ((insn = PREV_INSN (insn)))
7988 switch (GET_CODE (insn))
7990 case JUMP_INSN:
7991 case NOTE:
7992 break;
7993 case CODE_LABEL:
7994 case CALL_INSN:
7995 default:
7996 if (! set_once)
7997 return 0;
7998 break;
7999 case INSN:
8001 rtx pat = PATTERN (insn);
8002 if (GET_CODE (pat) != SET)
8003 return 0;
8004 if (rtx_equal_p (x, SET_DEST (pat)))
8005 return set_extends (insn);
8006 if (y && rtx_equal_p (y, SET_DEST (pat)))
8007 return set_extends (insn);
8008 if (reg_overlap_mentioned_p (SET_DEST (pat), y))
8009 return 0;
8013 return 0;
8016 /* Returns assembly code to perform a DImode shift using
8017 a 64-bit global or out register on SPARC-V8+. */
8018 char *
8019 sparc_v8plus_shift (operands, insn, opcode)
8020 rtx *operands;
8021 rtx insn;
8022 const char *opcode;
8024 static char asm_code[60];
8026 /* The scratch register is only required when the destination
8027 register is not a 64-bit global or out register. */
8028 if (which_alternative != 2)
8029 operands[3] = operands[0];
8031 if (GET_CODE (operands[1]) == CONST_INT)
8033 output_asm_insn ("mov\t%1, %3", operands);
8035 else
8037 output_asm_insn ("sllx\t%H1, 32, %3", operands);
8038 if (sparc_check_64 (operands[1], insn) <= 0)
8039 output_asm_insn ("srl\t%L1, 0, %L1", operands);
8040 output_asm_insn ("or\t%L1, %3, %3", operands);
8043 strcpy(asm_code, opcode);
8045 if (which_alternative != 2)
8046 return strcat (asm_code, "\t%0, %2, %L0\n\tsrlx\t%L0, 32, %H0");
8047 else
8048 return strcat (asm_code, "\t%3, %2, %3\n\tsrlx\t%3, 32, %H0\n\tmov\t%3, %L0");
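/* Added descriptive note: for a register source, the emitted sequence
   first packs the %H1/%L1 halves into a single 64-bit register (the
   scratch, or the destination itself when it is a 64-bit global/out
   register), performs the requested 64-bit shift there, and then splits
   the result back into the %H0/%L0 halves with srlx (and mov for the
   scratch alternative).  This is what lets a DImode shift run under the
   v8plus 32-bit ABI.  */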
8051 /* Output rtl to increment the profiler label LABELNO
8052 for profiling a function entry. */
8054 void
8055 sparc_profile_hook (labelno)
8056 int labelno;
8058 char buf[32];
8059 rtx lab, fun;
8061 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
8062 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8063 fun = gen_rtx_SYMBOL_REF (Pmode, MCOUNT_FUNCTION);
8065 emit_library_call (fun, LCT_NORMAL, VOIDmode, 1, lab, Pmode);
8068 #ifdef OBJECT_FORMAT_ELF
8069 static void
8070 sparc_elf_asm_named_section (name, flags)
8071 const char *name;
8072 unsigned int flags;
8074 if (flags & SECTION_MERGE)
8076 /* entsize cannot be expressed in this section attribute
8077 encoding style. */
8078 default_elf_asm_named_section (name, flags);
8079 return;
8082 fprintf (asm_out_file, "\t.section\t\"%s\"", name);
8084 if (!(flags & SECTION_DEBUG))
8085 fputs (",#alloc", asm_out_file);
8086 if (flags & SECTION_WRITE)
8087 fputs (",#write", asm_out_file);
8088 if (flags & SECTION_CODE)
8089 fputs (",#execinstr", asm_out_file);
8091 /* ??? Handle SECTION_BSS. */
8093 fputc ('\n', asm_out_file);
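/* Example of the output (illustrative, assuming the SVR4/Solaris
   assembler syntax used above): a writable, non-debug section named
   ".data.foo" would come out as
	.section	".data.foo",#alloc,#write
   and a code section would additionally get ",#execinstr".  */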
8095 #endif /* OBJECT_FORMAT_ELF */
8097 /* We do not allow sibling calls if -mflat, nor
8098 do we allow indirect calls to be optimized into sibling calls.
8100 Also, on sparc 32-bit we cannot emit a sibling call when the
8101 current function returns a structure. This is because the "unimp
8102 after call" convention would cause the callee to return to the
8103 wrong place. The generic code already disallows cases where the
8104 function being called returns a structure.
8106 It may seem strange how this last case could occur. Usually there
8107 is code after the call which jumps to epilogue code which dumps the
8108 return value into the struct return area. That ought to invalidate
8109 the sibling call, right? Well, in the C++ case we can end up passing
8110 the pointer to the struct return area to a constructor (which returns
8111 void) and then nothing else happens. Such a sibling call would look
8112 valid without the added check here. */
8113 static bool
8114 sparc_function_ok_for_sibcall (decl, exp)
8115 tree decl;
8116 tree exp ATTRIBUTE_UNUSED;
8118 return (decl
8119 && ! TARGET_FLAT
8120 && (TARGET_ARCH64 || ! current_function_returns_struct));
8123 /* ??? Similar to the standard section selection, but force reloc-y-ness
8124 if SUNOS4_SHARED_LIBRARIES. Unclear why this helps (as opposed to
8125 pretending PIC always on), but that's what the old code did. */
8127 static void
8128 sparc_aout_select_section (t, reloc, align)
8129 tree t;
8130 int reloc;
8131 unsigned HOST_WIDE_INT align;
8133 default_select_section (t, reloc | SUNOS4_SHARED_LIBRARIES, align);
8136 /* Use text section for a constant unless we need more alignment than
8137 that offers. */
8139 static void
8140 sparc_aout_select_rtx_section (mode, x, align)
8141 enum machine_mode mode;
8142 rtx x;
8143 unsigned HOST_WIDE_INT align;
8145 if (align <= MAX_TEXT_ALIGN
8146 && ! (flag_pic && (symbolic_operand (x, mode)
8147 || SUNOS4_SHARED_LIBRARIES)))
8148 readonly_data_section ();
8149 else
8150 data_section ();
8153 int
8154 sparc_extra_constraint_check (op, c, strict)
8155 rtx op;
8156 int c;
8157 int strict;
8159 int reload_ok_mem;
8161 if (TARGET_ARCH64
8162 && (c == 'T' || c == 'U'))
8163 return 0;
8165 switch (c)
8167 case 'Q':
8168 return fp_sethi_p (op);
8170 case 'R':
8171 return fp_mov_p (op);
8173 case 'S':
8174 return fp_high_losum_p (op);
8176 case 'U':
8177 if (! strict
8178 || (GET_CODE (op) == REG
8179 && (REGNO (op) < FIRST_PSEUDO_REGISTER
8180 || reg_renumber[REGNO (op)] >= 0)))
8181 return register_ok_for_ldd (op);
8183 return 0;
8185 case 'W':
8186 case 'T':
8187 break;
8189 default:
8190 return 0;
8193 /* Our memory extra constraints have to emulate the
8194 behavior of 'm' and 'o' in order for reload to work
8195 correctly. */
8196 if (GET_CODE (op) == MEM)
8198 reload_ok_mem = 0;
8199 if ((TARGET_ARCH64 || mem_min_alignment (op, 8))
8200 && (! strict
8201 || strict_memory_address_p (Pmode, XEXP (op, 0))))
8202 reload_ok_mem = 1;
8204 else
8206 reload_ok_mem = (reload_in_progress
8207 && GET_CODE (op) == REG
8208 && REGNO (op) >= FIRST_PSEUDO_REGISTER
8209 && reg_renumber [REGNO (op)] < 0);
8212 return reload_ok_mem;
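/* Editor's sketch of how these constraints are meant to be used in
   sparc.md; the pattern below is hypothetical and does not exist there:

	(define_insn "*hypothetical_stddi"
	  [(set (match_operand:DI 0 "memory_operand"   "=T")
	        (match_operand:DI 1 "register_operand"  "U"))]
	  ""
	  "std\t%1, %0")

   "U" accepts a hard register (or a pseudo already assigned one) that
   register_ok_for_ldd approves, i.e. an even/odd pair usable by
   ldd/std, while "T" and "W" accept memory that the checks above find
   to be (or to be reloadable into) suitably aligned.  */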
8215 /* ??? This duplicates information provided to the compiler by the
8216 ??? scheduler description. Some day, teach genautomata to output
8217 ??? the latencies and then CSE will just use that. */
8219 static bool
8220 sparc_rtx_costs (x, code, outer_code, total)
8221 rtx x;
8222 int code, outer_code, *total;
8224 switch (code)
8226 case PLUS: case MINUS: case ABS: case NEG:
8227 case FLOAT: case UNSIGNED_FLOAT:
8228 case FIX: case UNSIGNED_FIX:
8229 case FLOAT_EXTEND: case FLOAT_TRUNCATE:
8230 if (FLOAT_MODE_P (GET_MODE (x)))
8232 switch (sparc_cpu)
8234 case PROCESSOR_ULTRASPARC:
8235 case PROCESSOR_ULTRASPARC3:
8236 *total = COSTS_N_INSNS (4);
8237 return true;
8239 case PROCESSOR_SUPERSPARC:
8240 *total = COSTS_N_INSNS (3);
8241 return true;
8243 case PROCESSOR_CYPRESS:
8244 *total = COSTS_N_INSNS (5);
8245 return true;
8247 case PROCESSOR_HYPERSPARC:
8248 case PROCESSOR_SPARCLITE86X:
8249 default:
8250 *total = COSTS_N_INSNS (1);
8251 return true;
8255 *total = COSTS_N_INSNS (1);
8256 return true;
8258 case SQRT:
8259 switch (sparc_cpu)
8261 case PROCESSOR_ULTRASPARC:
8262 if (GET_MODE (x) == SFmode)
8263 *total = COSTS_N_INSNS (13);
8264 else
8265 *total = COSTS_N_INSNS (23);
8266 return true;
8268 case PROCESSOR_ULTRASPARC3:
8269 if (GET_MODE (x) == SFmode)
8270 *total = COSTS_N_INSNS (20);
8271 else
8272 *total = COSTS_N_INSNS (29);
8273 return true;
8275 case PROCESSOR_SUPERSPARC:
8276 *total = COSTS_N_INSNS (12);
8277 return true;
8279 case PROCESSOR_CYPRESS:
8280 *total = COSTS_N_INSNS (63);
8281 return true;
8283 case PROCESSOR_HYPERSPARC:
8284 case PROCESSOR_SPARCLITE86X:
8285 *total = COSTS_N_INSNS (17);
8286 return true;
8288 default:
8289 *total = COSTS_N_INSNS (30);
8290 return true;
8293 case COMPARE:
8294 if (FLOAT_MODE_P (GET_MODE (x)))
8296 switch (sparc_cpu)
8298 case PROCESSOR_ULTRASPARC:
8299 case PROCESSOR_ULTRASPARC3:
8300 *total = COSTS_N_INSNS (1);
8301 return true;
8303 case PROCESSOR_SUPERSPARC:
8304 *total = COSTS_N_INSNS (3);
8305 return true;
8307 case PROCESSOR_CYPRESS:
8308 *total = COSTS_N_INSNS (5);
8309 return true;
8311 case PROCESSOR_HYPERSPARC:
8312 case PROCESSOR_SPARCLITE86X:
8313 default:
8314 *total = COSTS_N_INSNS (1);
8315 return true;
8319 /* ??? Maybe mark integer compares as zero cost on
8320 ??? all UltraSPARC processors because the result
8321 ??? can be bypassed to a branch in the same group. */
8323 *total = COSTS_N_INSNS (1);
8324 return true;
8326 case MULT:
8327 if (FLOAT_MODE_P (GET_MODE (x)))
8329 switch (sparc_cpu)
8331 case PROCESSOR_ULTRASPARC:
8332 case PROCESSOR_ULTRASPARC3:
8333 *total = COSTS_N_INSNS (4);
8334 return true;
8336 case PROCESSOR_SUPERSPARC:
8337 *total = COSTS_N_INSNS (3);
8338 return true;
8340 case PROCESSOR_CYPRESS:
8341 *total = COSTS_N_INSNS (7);
8342 return true;
8344 case PROCESSOR_HYPERSPARC:
8345 case PROCESSOR_SPARCLITE86X:
8346 *total = COSTS_N_INSNS (1);
8347 return true;
8349 default:
8350 *total = COSTS_N_INSNS (5);
8351 return true;
8355 /* The latency is actually variable for Ultra-I/II,
8356 and if one of the inputs has a known constant
8357 value, we could calculate this precisely.
8359 However, for that to be useful we would need to
8360 add some machine description changes which would
8361 make sure small constants ended up in rs1 of the
8362 multiply instruction. This is because the multiply
8363 latency is determined by the number of clear (or
8364 set if the value is negative) bits starting from
8365 the most significant bit of the first input.
8367 The algorithm for computing num_cycles of a multiply
8368 on Ultra-I/II is:
8370 if (rs1 < 0)
8371 highest_bit = highest_clear_bit(rs1);
8372 else
8373 highest_bit = highest_set_bit(rs1);
8374 if (highest_bit < 3)
8375 highest_bit = 3;
8376 num_cycles = 4 + ((highest_bit - 3) / 2);
8378 If we did that we would have to also consider register
8379 allocation issues that would result from forcing such
8380 a value into a register.
8382 There are other similar tricks we could play if we
8383 knew, for example, that one input was an array index.
8385 Since we do not play any such tricks currently the
8386 safest thing to do is report the worst case latency. */
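/* Editor's sketch (an assumption, not original code) of the formula
   above as self-contained C, assuming a 64-bit HOST_WIDE_INT; the
   function name is hypothetical:

	static int
	ultra12_mul_cycles (HOST_WIDE_INT rs1)
	{
	  int highest_bit = 0, i;

	  for (i = 63; i >= 0; i--)
	    if (((rs1 >> i) & 1) == (rs1 < 0 ? 0 : 1))
	      {
		highest_bit = i;
		break;
	      }
	  if (highest_bit < 3)
	    highest_bit = 3;
	  return 4 + ((highest_bit - 3) / 2);
	}

   The loop finds the highest clear bit for negative rs1 and the highest
   set bit otherwise, as described above.  */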
8387 if (sparc_cpu == PROCESSOR_ULTRASPARC)
8389 *total = (GET_MODE (x) == DImode
8390 ? COSTS_N_INSNS (34) : COSTS_N_INSNS (19));
8391 return true;
8394 /* Multiply latency on Ultra-III, fortunately, is constant. */
8395 if (sparc_cpu == PROCESSOR_ULTRASPARC3)
8397 *total = COSTS_N_INSNS (6);
8398 return true;
8401 if (sparc_cpu == PROCESSOR_HYPERSPARC
8402 || sparc_cpu == PROCESSOR_SPARCLITE86X)
8404 *total = COSTS_N_INSNS (17);
8405 return true;
8408 *total = (TARGET_HARD_MUL ? COSTS_N_INSNS (5) : COSTS_N_INSNS (25));
8409 return true;
8411 case DIV:
8412 case UDIV:
8413 case MOD:
8414 case UMOD:
8415 if (FLOAT_MODE_P (GET_MODE (x)))
8417 switch (sparc_cpu)
8419 case PROCESSOR_ULTRASPARC:
8420 if (GET_MODE (x) == SFmode)
8421 *total = COSTS_N_INSNS (13);
8422 else
8423 *total = COSTS_N_INSNS (23);
8424 return true;
8426 case PROCESSOR_ULTRASPARC3:
8427 if (GET_MODE (x) == SFmode)
8428 *total = COSTS_N_INSNS (17);
8429 else
8430 *total = COSTS_N_INSNS (20);
8431 return true;
8433 case PROCESSOR_SUPERSPARC:
8434 if (GET_MODE (x) == SFmode)
8435 *total = COSTS_N_INSNS (6);
8436 else
8437 *total = COSTS_N_INSNS (9);
8438 return true;
8440 case PROCESSOR_HYPERSPARC:
8441 case PROCESSOR_SPARCLITE86X:
8442 if (GET_MODE (x) == SFmode)
8443 *total = COSTS_N_INSNS (8);
8444 else
8445 *total = COSTS_N_INSNS (12);
8446 return true;
8448 default:
8449 *total = COSTS_N_INSNS (7);
8450 return true;
8454 if (sparc_cpu == PROCESSOR_ULTRASPARC)
8455 *total = (GET_MODE (x) == DImode
8456 ? COSTS_N_INSNS (68) : COSTS_N_INSNS (37));
8457 else if (sparc_cpu == PROCESSOR_ULTRASPARC3)
8458 *total = (GET_MODE (x) == DImode
8459 ? COSTS_N_INSNS (71) : COSTS_N_INSNS (40));
8460 else
8461 *total = COSTS_N_INSNS (25);
8462 return true;
8464 case IF_THEN_ELSE:
8465 /* Conditional moves. */
8466 switch (sparc_cpu)
8468 case PROCESSOR_ULTRASPARC:
8469 *total = COSTS_N_INSNS (2);
8470 return true;
8472 case PROCESSOR_ULTRASPARC3:
8473 if (FLOAT_MODE_P (GET_MODE (x)))
8474 *total = COSTS_N_INSNS (3);
8475 else
8476 *total = COSTS_N_INSNS (2);
8477 return true;
8479 default:
8480 *total = COSTS_N_INSNS (1);
8481 return true;
8484 case MEM:
8485 /* If the outer code is a SIGN/ZERO extension, we have to subtract
8486 out COSTS_N_INSNS (1) from whatever we return when determining
8487 the cost. */
8488 switch (sparc_cpu)
8490 case PROCESSOR_ULTRASPARC:
8491 if (outer_code == ZERO_EXTEND)
8492 *total = COSTS_N_INSNS (1);
8493 else
8494 *total = COSTS_N_INSNS (2);
8495 return true;
8497 case PROCESSOR_ULTRASPARC3:
8498 if (outer_code == ZERO_EXTEND)
8500 if (GET_MODE (x) == QImode
8501 || GET_MODE (x) == HImode
8502 || outer_code == SIGN_EXTEND)
8503 *total = COSTS_N_INSNS (2);
8504 else
8505 *total = COSTS_N_INSNS (1);
8507 else
8509 /* This handles sign extension (3 cycles)
8510 and everything else (2 cycles). */
8511 *total = COSTS_N_INSNS (2);
8513 return true;
8515 case PROCESSOR_SUPERSPARC:
8516 if (FLOAT_MODE_P (GET_MODE (x))
8517 || outer_code == ZERO_EXTEND
8518 || outer_code == SIGN_EXTEND)
8519 *total = COSTS_N_INSNS (0);
8520 else
8521 *total = COSTS_N_INSNS (1);
8522 return true;
8524 case PROCESSOR_TSC701:
8525 if (outer_code == ZERO_EXTEND
8526 || outer_code == SIGN_EXTEND)
8527 *total = COSTS_N_INSNS (2);
8528 else
8529 *total = COSTS_N_INSNS (3);
8530 return true;
8532 case PROCESSOR_CYPRESS:
8533 if (outer_code == ZERO_EXTEND
8534 || outer_code == SIGN_EXTEND)
8535 *total = COSTS_N_INSNS (1);
8536 else
8537 *total = COSTS_N_INSNS (2);
8538 return true;
8540 case PROCESSOR_HYPERSPARC:
8541 case PROCESSOR_SPARCLITE86X:
8542 default:
8543 if (outer_code == ZERO_EXTEND
8544 || outer_code == SIGN_EXTEND)
8545 *total = COSTS_N_INSNS (0);
8546 else
8547 *total = COSTS_N_INSNS (1);
8548 return true;
8551 case CONST_INT:
8552 if (INTVAL (x) < 0x1000 && INTVAL (x) >= -0x1000)
8554 *total = 0;
8555 return true;
8557 /* FALLTHRU */
8559 case HIGH:
8560 *total = 2;
8561 return true;
8563 case CONST:
8564 case LABEL_REF:
8565 case SYMBOL_REF:
8566 *total = 4;
8567 return true;
8569 case CONST_DOUBLE:
8570 if (GET_MODE (x) == DImode
8571 && ((XINT (x, 3) == 0
8572 && (unsigned HOST_WIDE_INT) XINT (x, 2) < 0x1000)
8573 || (XINT (x, 3) == -1
8574 && XINT (x, 2) < 0
8575 && XINT (x, 2) >= -0x1000)))
8576 *total = 0;
8577 else
8578 *total = 8;
8579 return true;
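/* Editor's worked example (not from the original source): the test
   above gives DImode CONST_DOUBLEs a cost of 0 exactly when the 64-bit
   value would fit a signed 13-bit immediate, mirroring the CONST_INT
   case above; e.g. 0xfff and -0x800 cost 0, while 0x1000 costs 8.  */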
8581 default:
8582 return false;
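/* Editor's note (illustrative): the values above are expressed with
   COSTS_N_INSNS, which expands to N * 4 in this era of GCC, so e.g.
   the UltraSPARC floating-point add case reports COSTS_N_INSNS (4)
   == 16 while a simm13 CONST_INT reports 0; the RTL optimizers only
   compare these relative magnitudes.  */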
8586 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
8587 Used for C++ multiple inheritance. */
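/* Editor's illustration (an assumption about typical output, not taken
   from the original sources): for a small DELTA such as 8 on 32-bit
   SPARC the thunk generated below amounts to

	add	%o0, 8, %o0
	<tail branch to FUNCTION>	! via the sibcall pattern

   while a DELTA outside the signed 13-bit (SIMM13) range is first
   built in the scratch register %g1 and added from there.  The exact
   branch sequence is whatever output_sibcall produces.  */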
8589 static void
8590 sparc_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
8591 FILE *file;
8592 tree thunk_fndecl ATTRIBUTE_UNUSED;
8593 HOST_WIDE_INT delta;
8594 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
8595 tree function;
8597 rtx this, insn, funexp, delta_rtx, tmp;
8599 reload_completed = 1;
8600 epilogue_completed = 1;
8601 no_new_pseudos = 1;
8602 current_function_uses_only_leaf_regs = 1;
8604 emit_note (NOTE_INSN_PROLOGUE_END);
8606 /* Find the "this" pointer. Normally in %o0, but in ARCH64 if the function
8607 returns a structure, the structure return pointer is there instead. */
8608 if (TARGET_ARCH64 && aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
8609 this = gen_rtx_REG (Pmode, SPARC_INCOMING_INT_ARG_FIRST + 1);
8610 else
8611 this = gen_rtx_REG (Pmode, SPARC_INCOMING_INT_ARG_FIRST);
8613 /* Add DELTA. When possible use a plain add, otherwise load it into
8614 a register first. */
8615 delta_rtx = GEN_INT (delta);
8616 if (!SPARC_SIMM13_P (delta))
8618 rtx scratch = gen_rtx_REG (Pmode, 1);
8619 if (TARGET_ARCH64)
8620 sparc_emit_set_const64 (scratch, delta_rtx);
8621 else
8622 sparc_emit_set_const32 (scratch, delta_rtx);
8623 delta_rtx = scratch;
8626 tmp = gen_rtx_PLUS (Pmode, this, delta_rtx);
8627 emit_insn (gen_rtx_SET (VOIDmode, this, tmp));
8629 /* Generate a tail call to the target function. */
8630 if (! TREE_USED (function))
8632 assemble_external (function);
8633 TREE_USED (function) = 1;
8635 funexp = XEXP (DECL_RTL (function), 0);
8636 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
8637 insn = emit_call_insn (gen_sibcall (funexp));
8638 SIBLING_CALL_P (insn) = 1;
8639 emit_barrier ();
8641 /* Run just enough of rest_of_compilation to get the insns emitted.
8642 There's not really enough bulk here to make other passes such as
8643 instruction scheduling worthwhile. Note that use_thunk calls
8644 assemble_start_function and assemble_end_function. */
8645 insn = get_insns ();
8646 insn_locators_initialize ();
8647 shorten_branches (insn);
8648 final_start_function (insn, file, 1);
8649 final (insn, file, 1, 0);
8650 final_end_function ();
8652 reload_completed = 0;
8653 epilogue_completed = 0;
8654 no_new_pseudos = 0;
8657 #include "gt-sparc.h"