* config/c4x/c4x-c.c, config/c4x/c4x-modes.def,
[official-gcc.git] / gcc / config / c4x / c4x.c
blob0547850a8cd08a236673fa829b14e394db95eb67
1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
15 GCC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "real.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 #include "conditions.h"
39 #include "output.h"
40 #include "function.h"
41 #include "expr.h"
42 #include "optabs.h"
43 #include "libfuncs.h"
44 #include "flags.h"
45 #include "loop.h"
46 #include "recog.h"
47 #include "c-tree.h"
48 #include "ggc.h"
49 #include "cpplib.h"
50 #include "toplev.h"
51 #include "tm_p.h"
52 #include "target.h"
53 #include "target-def.h"
/* SYMBOL_REF handles on the libgcc support routines used for the
   HImode multiplies and the float<->integer conversions the hardware
   does not perform directly.  NOTE(review): their initialization is
   not visible in this chunk -- presumably done at target-init time;
   confirm against the rest of the file.  */
55 rtx smulhi3_libfunc;
56 rtx umulhi3_libfunc;
57 rtx fix_truncqfhi2_libfunc;
58 rtx fixuns_truncqfhi2_libfunc;
59 rtx fix_trunchfhi2_libfunc;
60 rtx fixuns_trunchfhi2_libfunc;
61 rtx floathiqf2_libfunc;
62 rtx floatunshiqf2_libfunc;
63 rtx floathihf2_libfunc;
64 rtx floatunshihf2_libfunc;
/* Nonzero when the current interrupt function is treated as a leaf;
   set from c4x_leaf_function_p () in c4x_expand_prologue and consulted
   by c4x_isr_reg_used_p.  */
66 static int c4x_leaf_function;
/* Printable names for the floating-point view of the registers.  */
68 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
70 /* Array of the smallest class containing reg number REGNO, indexed by
71 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
72 registers are available and set the class to NO_REGS for registers
73 that the target switches say are unavailable. */
/* NOTE(review): the brace lines delimiting this initializer were lost
   when the file was extracted; the entries below are preserved
   byte-for-byte.  The table ends at R11 -- confirm FIRST_PSEUDO_REGISTER
   matches the 31 visible entries.  */
75 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
77 /* Reg Modes Saved. */
78 R0R1_REGS, /* R0 QI, QF, HF No. */
79 R0R1_REGS, /* R1 QI, QF, HF No. */
80 R2R3_REGS, /* R2 QI, QF, HF No. */
81 R2R3_REGS, /* R3 QI, QF, HF No. */
82 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
83 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
84 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
85 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
86 ADDR_REGS, /* AR0 QI No. */
87 ADDR_REGS, /* AR1 QI No. */
88 ADDR_REGS, /* AR2 QI No. */
89 ADDR_REGS, /* AR3 QI QI. */
90 ADDR_REGS, /* AR4 QI QI. */
91 ADDR_REGS, /* AR5 QI QI. */
92 ADDR_REGS, /* AR6 QI QI. */
93 ADDR_REGS, /* AR7 QI QI. */
94 DP_REG, /* DP QI No. */
95 INDEX_REGS, /* IR0 QI No. */
96 INDEX_REGS, /* IR1 QI No. */
97 BK_REG, /* BK QI QI. */
98 SP_REG, /* SP QI No. */
99 ST_REG, /* ST CC No. */
100 NO_REGS, /* DIE/IE No. */
101 NO_REGS, /* IIE/IF No. */
102 NO_REGS, /* IIF/IOF No. */
103 INT_REGS, /* RS QI No. */
104 INT_REGS, /* RE QI No. */
105 RC_REG, /* RC QI No. */
106 EXT_REGS, /* R8 QI, QF, HF QI. */
107 EXT_REGS, /* R9 QI, QF, HF No. */
108 EXT_REGS, /* R10 QI, QF, HF No. */
109 EXT_REGS, /* R11 QI, QF, HF No. */
/* Mode in which each hard register must be saved across calls
   (VOIDmode = not saved).  Parallels c4x_regclass_map; indexed by
   REGNO.  NOTE(review): the initializer's brace lines were lost in
   extraction; entries preserved byte-for-byte.  */
112 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
114 /* Reg Modes Saved. */
115 HFmode, /* R0 QI, QF, HF No. */
116 HFmode, /* R1 QI, QF, HF No. */
117 HFmode, /* R2 QI, QF, HF No. */
118 HFmode, /* R3 QI, QF, HF No. */
119 QFmode, /* R4 QI, QF, HF QI. */
120 QFmode, /* R5 QI, QF, HF QI. */
121 QImode, /* R6 QI, QF, HF QF. */
122 QImode, /* R7 QI, QF, HF QF. */
123 QImode, /* AR0 QI No. */
124 QImode, /* AR1 QI No. */
125 QImode, /* AR2 QI No. */
126 QImode, /* AR3 QI QI. */
127 QImode, /* AR4 QI QI. */
128 QImode, /* AR5 QI QI. */
129 QImode, /* AR6 QI QI. */
130 QImode, /* AR7 QI QI. */
131 VOIDmode, /* DP QI No. */
132 QImode, /* IR0 QI No. */
133 QImode, /* IR1 QI No. */
134 QImode, /* BK QI QI. */
135 VOIDmode, /* SP QI No. */
136 VOIDmode, /* ST CC No. */
137 VOIDmode, /* DIE/IE No. */
138 VOIDmode, /* IIE/IF No. */
139 VOIDmode, /* IIF/IOF No. */
140 QImode, /* RS QI No. */
141 QImode, /* RE QI No. */
142 VOIDmode, /* RC QI No. */
143 QFmode, /* R8 QI, QF, HF QI. */
144 HFmode, /* R9 QI, QF, HF No. */
145 HFmode, /* R10 QI, QF, HF No. */
146 HFmode, /* R11 QI, QF, HF No. */
150 /* Test and compare insns in c4x.md store the information needed to
151 generate branch and scc insns here. */
153 rtx c4x_compare_op0;
154 rtx c4x_compare_op1;
/* Option strings and their parsed values; the strings come from the
   command line, the ints are derived in c4x_override_options.  */
156 const char *c4x_rpts_cycles_string;
157 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
158 const char *c4x_cpu_version_string;
159 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
161 /* Pragma definitions. */
/* NOTE(review): presumably these trees record names collected by the
   CODE_SECTION/DATA_SECTION-style pragmas; the pragma handlers are not
   visible in this chunk -- confirm before relying on this.  */
163 tree code_tree = NULL_TREE;
164 tree data_tree = NULL_TREE;
165 tree pure_tree = NULL_TREE;
166 tree noreturn_tree = NULL_TREE;
167 tree interrupt_tree = NULL_TREE;
168 tree naked_tree = NULL_TREE;
170 /* Forward declarations */
171 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
172 static int c4x_leaf_function_p PARAMS ((void));
173 static int c4x_naked_function_p PARAMS ((void));
174 static int c4x_immed_float_p PARAMS ((rtx));
175 static int c4x_a_register PARAMS ((rtx));
176 static int c4x_x_register PARAMS ((rtx));
177 static int c4x_immed_int_constant PARAMS ((rtx));
178 static int c4x_immed_float_constant PARAMS ((rtx));
179 static int c4x_K_constant PARAMS ((rtx));
180 static int c4x_N_constant PARAMS ((rtx));
181 static int c4x_O_constant PARAMS ((rtx));
182 static int c4x_R_indirect PARAMS ((rtx));
183 static int c4x_S_indirect PARAMS ((rtx));
184 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
185 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
186 enum machine_mode, int));
187 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
188 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
189 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
190 static int c4x_r11_set_p PARAMS ((rtx));
191 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
192 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
193 static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
194 const struct attribute_spec c4x_attribute_table[];
195 static void c4x_insert_attributes PARAMS ((tree, tree *));
196 static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
197 static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
198 static void c4x_encode_section_info PARAMS ((tree, int));
199 static void c4x_globalize_label PARAMS ((FILE *, const char *));
200 static bool c4x_rtx_costs PARAMS ((rtx, int, int, int *));
201 static int c4x_address_cost PARAMS ((rtx));
203 /* Initialize the GCC target structure. */
/* The TI assembler uses ".word" for bytes (machine word == 32 bits on
   the C[34]x), and has no 16/32-bit aligned directives, hence NULL.  */
204 #undef TARGET_ASM_BYTE_OP
205 #define TARGET_ASM_BYTE_OP "\t.word\t"
206 #undef TARGET_ASM_ALIGNED_HI_OP
207 #define TARGET_ASM_ALIGNED_HI_OP NULL
208 #undef TARGET_ASM_ALIGNED_SI_OP
209 #define TARGET_ASM_ALIGNED_SI_OP NULL
211 #undef TARGET_ATTRIBUTE_TABLE
212 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
214 #undef TARGET_INSERT_ATTRIBUTES
215 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
217 #undef TARGET_INIT_BUILTINS
218 #define TARGET_INIT_BUILTINS c4x_init_builtins
220 #undef TARGET_EXPAND_BUILTIN
221 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
223 #undef TARGET_SCHED_ADJUST_COST
224 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
226 #undef TARGET_ENCODE_SECTION_INFO
227 #define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info
229 #undef TARGET_ASM_GLOBALIZE_LABEL
230 #define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
232 #undef TARGET_RTX_COSTS
233 #define TARGET_RTX_COSTS c4x_rtx_costs
234 #undef TARGET_ADDRESS_COST
235 #define TARGET_ADDRESS_COST c4x_address_cost
/* Instantiate the target hook vector with the overrides above.  */
237 struct gcc_target targetm = TARGET_INITIALIZER;
239 /* Override command line options.
240 Called once after all options have been parsed.
241 Mostly we process the processor
242 type and sometimes adjust other TARGET_ options. */
/* NOTE(review): brace lines were dropped in extraction, so the exact
   nesting below must be read from the original file.  Also note the
   warning text ends in ".\n" -- GCC diagnostic convention is no
   trailing period/newline; worth fixing when the file is restored.  */
244 void
245 c4x_override_options ()
/* Parse -mrpts=N; default to 0 (RPTS disabled).  */
247 if (c4x_rpts_cycles_string)
248 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
249 else
250 c4x_rpts_cycles = 0;
/* Derive the CPU version number from the -m30/-m31/... flags,
   defaulting to 40.  */
252 if (TARGET_C30)
253 c4x_cpu_version = 30;
254 else if (TARGET_C31)
255 c4x_cpu_version = 31;
256 else if (TARGET_C32)
257 c4x_cpu_version = 32;
258 else if (TARGET_C33)
259 c4x_cpu_version = 33;
260 else if (TARGET_C40)
261 c4x_cpu_version = 40;
262 else if (TARGET_C44)
263 c4x_cpu_version = 44;
264 else
265 c4x_cpu_version = 40;
267 /* -mcpu=xx overrides -m40 etc. */
268 if (c4x_cpu_version_string)
270 const char *p = c4x_cpu_version_string;
272 /* Also allow -mcpu=c30 etc. */
273 if (*p == 'c' || *p == 'C')
274 p++;
275 c4x_cpu_version = atoi (p);
/* Recompute the per-CPU target flag from the (possibly overridden)
   version number, clearing all the old CPU flags first.  */
278 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
279 C40_FLAG | C44_FLAG);
281 switch (c4x_cpu_version)
283 case 30: target_flags |= C30_FLAG; break;
284 case 31: target_flags |= C31_FLAG; break;
285 case 32: target_flags |= C32_FLAG; break;
286 case 33: target_flags |= C33_FLAG; break;
287 case 40: target_flags |= C40_FLAG; break;
288 case 44: target_flags |= C44_FLAG; break;
289 default:
290 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
291 c4x_cpu_version = 40;
292 target_flags |= C40_FLAG;
/* C30/31/32/33 are collectively the C3x family.  */
295 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
296 target_flags |= C3X_FLAG;
297 else
298 target_flags &= ~C3X_FLAG;
300 /* Convert foo / 8.0 into foo * 0.125, etc. */
301 set_fast_math_flags (1);
303 /* We should phase out the following at some stage.
304 This provides compatibility with the old -mno-aliases option. */
305 if (! TARGET_ALIASES && ! flag_argument_noalias)
306 flag_argument_noalias = 1;
308 /* We're C4X floating point, not IEEE floating point. */
309 memset (real_format_for_mode, 0, sizeof real_format_for_mode);
310 real_format_for_mode[QFmode - QFmode] = &c4x_single_format;
311 real_format_for_mode[HFmode - QFmode] = &c4x_extended_format;
315 /* This is called before c4x_override_options. */
317 void
318 c4x_optimization_options (level, size)
319 int level ATTRIBUTE_UNUSED;
320 int size ATTRIBUTE_UNUSED;
322 /* Scheduling before register allocation can screw up global
323 register allocation, especially for functions that use MPY||ADD
324 instructions. The benefit we gain we get by scheduling before
325 register allocation is probably marginal anyhow. */
326 flag_schedule_insns = 0;
330 /* Write an ASCII string. */
/* Emit LEN bytes at PTR as assembler ".byte" data on STREAM.
   Printable characters are buffered in SBUF and emitted as quoted
   strings (escaping " and \); non-printables are emitted as decimal
   values.  In TI-assembler mode (TARGET_TI) output lines are wrapped
   at roughly 80 columns.  NOTE(review): brace-only lines were dropped
   in extraction; the nesting must be read from the original file.  */
332 #define C4X_ASCII_LIMIT 40
334 void
335 c4x_output_ascii (stream, ptr, len)
336 FILE *stream;
337 const char *ptr;
338 int len;
340 char sbuf[C4X_ASCII_LIMIT + 1];
341 int s, l, special, first = 1, onlys;
343 if (len)
344 fprintf (stream, "\t.byte\t");
346 for (s = l = 0; len > 0; --len, ++ptr)
348 onlys = 0;
350 /* Escape " and \ with a \". */
351 special = *ptr == '\"' || *ptr == '\\';
353 /* If printable - add to buff. */
354 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
356 if (special)
357 sbuf[s++] = '\\';
358 sbuf[s++] = *ptr;
359 if (s < C4X_ASCII_LIMIT - 1)
360 continue;
/* Buffer nearly full: flush it as a quoted string, but stay on this
   character (ONLYS suppresses the numeric path below).  */
361 onlys = 1;
363 if (s)
365 if (first)
366 first = 0;
367 else
369 fputc (',', stream);
370 l++;
373 sbuf[s] = 0;
374 fprintf (stream, "\"%s\"", sbuf);
375 l += s + 2;
/* TI assembler can't handle long lines: start a fresh ".byte".  */
376 if (TARGET_TI && l >= 80 && len > 1)
378 fprintf (stream, "\n\t.byte\t");
379 first = 1;
380 l = 0;
383 s = 0;
385 if (onlys)
386 continue;
/* Non-printable character: emit it as a decimal value.  */
388 if (first)
389 first = 0;
390 else
392 fputc (',', stream);
393 l++;
396 fprintf (stream, "%d", *ptr);
397 l += 3;
398 if (TARGET_TI && l >= 80 && len > 1)
400 fprintf (stream, "\n\t.byte\t");
401 first = 1;
402 l = 0;
/* Flush any printable characters still buffered, then finish the
   directive.  */
405 if (s)
407 if (! first)
408 fputc (',', stream);
410 sbuf[s] = 0;
411 fprintf (stream, "\"%s\"", sbuf);
412 s = 0;
414 fputc ('\n', stream);
/* Return nonzero if hard register REGNO can hold a value of mode MODE.
   NOTE(review): the return-type line (int) preceding this definition
   was lost in extraction, as were the brace-only lines.  */
419 c4x_hard_regno_mode_ok (regno, mode)
420 unsigned int regno;
421 enum machine_mode mode;
423 switch (mode)
425 #if Pmode != QImode
426 case Pmode: /* Pointer (24/32 bits). */
427 #endif
428 case QImode: /* Integer (32 bits). */
429 return IS_INT_REGNO (regno);
431 case QFmode: /* Float, Double (32 bits). */
432 case HFmode: /* Long Double (40 bits). */
433 return IS_EXT_REGNO (regno);
435 case CCmode: /* Condition Codes. */
436 case CC_NOOVmode: /* Condition Codes. */
437 return IS_ST_REGNO (regno);
439 case HImode: /* Long Long (64 bits). */
440 /* We need two registers to store long longs. Note that
441 it is much easier to constrain the first register
442 to start on an even boundary. */
443 return IS_INT_REGNO (regno)
444 && IS_INT_REGNO (regno + 1)
445 && (regno & 1) == 0;
447 default:
448 return 0; /* We don't support these modes. */
451 return 0;
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (regno1, regno2)
     unsigned int regno1;
     unsigned int regno2;
{
  /* Call-saved registers are preserved in either QI or QF mode, so a
     rename must not cross the integer-saved / float-saved boundary.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;

  /* Condition codes are only set for the extended (40 bit) registers,
     so never rename between an extended and a standard (32 bit)
     register in either direction.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;

  return 1;
}
476 /* The TI C3x C compiler register argument runtime model uses 6 registers,
477 AR2, R2, R3, RC, RS, RE.
479 The first two floating point arguments (float, double, long double)
480 that are found scanning from left to right are assigned to R2 and R3.
482 The remaining integer (char, short, int, long) or pointer arguments
483 are assigned to the remaining registers in the order AR2, R2, R3,
484 RC, RS, RE when scanning left to right, except for the last named
485 argument prior to an ellipsis denoting variable number of
486 arguments. We don't have to worry about the latter condition since
487 function.c treats the last named argument as anonymous (unnamed).
489 All arguments that cannot be passed in registers are pushed onto
490 the stack in reverse order (right to left). GCC handles that for us.
492 c4x_init_cumulative_args() is called at the start, so we can parse
493 the args to see how many floating point arguments and how many
494 integer (or pointer) arguments there are. c4x_function_arg() is
495 then called (sometimes repeatedly) for each argument (parsed left
496 to right) to obtain the register to pass the argument in, or zero
497 if the argument is to be passed on the stack. Once the compiler is
498 happy, c4x_function_arg_advance() is called.
500 Don't use R0 to pass arguments in, we use 0 to indicate a stack
501 argument. */
/* Row index = number of float args already assigned to registers
   (0, 1 or 2); each float consumed removes one register from the
   integer list.  Zero entries mean "no register, use the stack".
   NOTE(review): outer initializer braces lost in extraction.  */
503 static const int c4x_int_reglist[3][6] =
505 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
506 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
507 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
/* The first two float args go in R2 then R3.  */
510 static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
513 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
514 function whose data type is FNTYPE.
515 For a library call, FNTYPE is 0. */
/* Counts the register-eligible float and integer arguments up front
   (CUM->floats / CUM->ints) so c4x_function_arg can later choose the
   reglist row; also records whether the callee is prototyped and
   whether it is varargs.  NOTE(review): brace-only lines were dropped
   in extraction.  */
517 void
518 c4x_init_cumulative_args (cum, fntype, libname)
519 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
520 tree fntype; /* Tree ptr for function decl. */
521 rtx libname; /* SYMBOL_REF of library name or 0. */
523 tree param, next_param;
525 cum->floats = cum->ints = 0;
526 cum->init = 0;
527 cum->var = 0;
528 cum->args = 0;
530 if (TARGET_DEBUG)
532 fprintf (stderr, "\nc4x_init_cumulative_args (");
533 if (fntype)
535 tree ret_type = TREE_TYPE (fntype);
537 fprintf (stderr, "fntype code = %s, ret code = %s",
538 tree_code_name[(int) TREE_CODE (fntype)],
539 tree_code_name[(int) TREE_CODE (ret_type)]);
541 else
542 fprintf (stderr, "no fntype");
544 if (libname)
545 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
548 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
/* Walk the prototype's argument type list, classifying each arg.  */
550 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
551 param; param = next_param)
553 tree type;
555 next_param = TREE_CHAIN (param);
557 type = TREE_VALUE (param);
558 if (type && type != void_type_node)
560 enum machine_mode mode;
562 /* If the last arg doesn't have void type then we have
563 variable arguments. */
564 if (! next_param)
565 cum->var = 1;
567 if ((mode = TYPE_MODE (type)))
569 if (! MUST_PASS_IN_STACK (mode, type))
571 /* Look for float, double, or long double argument. */
572 if (mode == QFmode || mode == HFmode)
573 cum->floats++;
574 /* Look for integer, enumeral, boolean, char, or pointer
575 argument. */
576 else if (mode == QImode || mode == Pmode)
577 cum->ints++;
580 cum->args++;
584 if (TARGET_DEBUG)
585 fprintf (stderr, "%s%s, args = %d)\n",
586 cum->prototype ? ", prototype" : "",
587 cum->var ? ", variable args" : "",
588 cum->args);
592 /* Update the data in CUM to advance over an argument
593 of mode MODE and data type TYPE.
594 (TYPE is null for libcalls where that information may not be available.) */
/* Mirrors the classification in c4x_function_arg: bump CUM->floats or
   CUM->ints for register-eligible args.  NOTE(review): brace-only
   lines were dropped in extraction.  */
596 void
597 c4x_function_arg_advance (cum, mode, type, named)
598 CUMULATIVE_ARGS *cum; /* Current arg information. */
599 enum machine_mode mode; /* Current arg mode. */
600 tree type; /* Type of the arg or 0 if lib support. */
601 int named; /* Whether or not the argument was named. */
603 if (TARGET_DEBUG)
604 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
605 GET_MODE_NAME (mode), named);
606 if (! TARGET_MEMPARM
607 && named
608 && type
609 && ! MUST_PASS_IN_STACK (mode, type))
611 /* Look for float, double, or long double argument. */
612 if (mode == QFmode || mode == HFmode)
613 cum->floats++;
614 /* Look for integer, enumeral, boolean, char, or pointer argument. */
615 else if (mode == QImode || mode == Pmode)
616 cum->ints++;
618 else if (! TARGET_MEMPARM && ! type)
620 /* Handle libcall arguments. */
621 if (mode == QFmode || mode == HFmode)
622 cum->floats++;
623 else if (mode == QImode || mode == Pmode)
624 cum->ints++;
626 return;
630 /* Define where to put the arguments to a function. Value is zero to
631 push the argument on the stack, or a hard register in which to
632 store the argument.
634 MODE is the argument's machine mode.
635 TYPE is the data type of the argument (as a tree).
636 This is null for libcalls where that information may
637 not be available.
638 CUM is a variable of type CUMULATIVE_ARGS which gives info about
639 the preceding args and about the function being called.
640 NAMED is nonzero if this argument is a named parameter
641 (otherwise it is an extra parameter matching an ellipsis). */
/* NOTE(review): brace-only lines were dropped in extraction; the
   nesting below must be read from the original file.  */
643 struct rtx_def *
644 c4x_function_arg (cum, mode, type, named)
645 CUMULATIVE_ARGS *cum; /* Current arg information. */
646 enum machine_mode mode; /* Current arg mode. */
647 tree type; /* Type of the arg or 0 if lib support. */
648 int named; /* != 0 for normal args, == 0 for ... args. */
650 int reg = 0; /* Default to passing argument on stack. */
/* First call for this function: derive register limits from the
   counts collected by c4x_init_cumulative_args, then reset the
   counters for the per-argument passes.  */
652 if (! cum->init)
654 /* We can handle at most 2 floats in R2, R3. */
655 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
657 /* We can handle at most 6 integers minus number of floats passed
658 in registers. */
659 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
660 6 - cum->maxfloats : cum->ints;
662 /* If there is no prototype, assume all the arguments are integers. */
663 if (! cum->prototype)
664 cum->maxints = 6;
666 cum->ints = cum->floats = 0;
667 cum->init = 1;
670 /* This marks the last argument. We don't need to pass this through
671 to the call insn. */
672 if (type == void_type_node)
673 return 0;
675 if (! TARGET_MEMPARM
676 && named
677 && type
678 && ! MUST_PASS_IN_STACK (mode, type))
680 /* Look for float, double, or long double argument. */
681 if (mode == QFmode || mode == HFmode)
683 if (cum->floats < cum->maxfloats)
684 reg = c4x_fp_reglist[cum->floats];
686 /* Look for integer, enumeral, boolean, char, or pointer argument. */
687 else if (mode == QImode || mode == Pmode)
689 if (cum->ints < cum->maxints)
690 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
693 else if (! TARGET_MEMPARM && ! type)
695 /* We could use a different argument calling model for libcalls,
696 since we're only calling functions in libgcc. Thus we could
697 pass arguments for long longs in registers rather than on the
698 stack. In the meantime, use the odd TI format. We make the
699 assumption that we won't have more than two floating point
700 args, six integer args, and that all the arguments are of the
701 same mode. */
702 if (mode == QFmode || mode == HFmode)
703 reg = c4x_fp_reglist[cum->floats];
704 else if (mode == QImode || mode == Pmode)
705 reg = c4x_int_reglist[0][cum->ints];
708 if (TARGET_DEBUG)
710 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
711 GET_MODE_NAME (mode), named);
712 if (reg)
713 fprintf (stderr, ", reg=%s", reg_names[reg]);
714 else
715 fprintf (stderr, ", stack");
716 fprintf (stderr, ")\n");
/* REG == 0 (R0 is never an argument register) means "on the stack".  */
718 if (reg)
719 return gen_rtx_REG (mode, reg);
720 else
721 return NULL_RTX;
724 /* C[34]x arguments grow in weird ways (downwards) that the standard
725 varargs stuff can't handle.. */
727 c4x_va_arg (valist, type)
728 tree valist, type;
730 tree t;
732 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
733 build_int_2 (int_size_in_bytes (type), 0));
734 TREE_SIDE_EFFECTS (t) = 1;
736 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
/* Return nonzero if REGNO must be saved/restored by an interrupt
   service routine's prologue/epilogue.  NOTE(review): brace-only
   lines were dropped in extraction.  */
740 static int
741 c4x_isr_reg_used_p (regno)
742 unsigned int regno;
744 /* Don't save/restore FP or ST, we handle them separately. */
745 if (regno == FRAME_POINTER_REGNUM
746 || IS_ST_REGNO (regno))
747 return 0;
749 /* We could be a little smarter about saving/restoring DP.
750 We'll only save if for the big memory model or if
751 we're paranoid. ;-) */
752 if (IS_DP_REGNO (regno))
753 return ! TARGET_SMALL || TARGET_PARANOID;
755 /* Only save/restore regs in leaf function that are used. */
756 if (c4x_leaf_function)
757 return regs_ever_live[regno] && fixed_regs[regno] == 0;
759 /* Only save/restore regs that are used by the ISR and regs
760 that are likely to be used by functions the ISR calls
761 if they are not fixed. */
762 return IS_EXT_REGNO (regno)
763 || ((regs_ever_live[regno] || call_used_regs[regno])
764 && fixed_regs[regno] == 0);
768 static int
769 c4x_leaf_function_p ()
771 /* A leaf function makes no calls, so we only need
772 to save/restore the registers we actually use.
773 For the global variable leaf_function to be set, we need
774 to define LEAF_REGISTERS and all that it entails.
775 Let's check ourselves... */
777 if (lookup_attribute ("leaf_pretend",
778 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
779 return 1;
781 /* Use the leaf_pretend attribute at your own risk. This is a hack
782 to speed up ISRs that call a function infrequently where the
783 overhead of saving and restoring the additional registers is not
784 warranted. You must save and restore the additional registers
785 required by the called function. Caveat emptor. Here's enough
786 rope... */
788 if (leaf_function_p ())
789 return 1;
791 return 0;
795 static int
796 c4x_naked_function_p ()
798 tree type;
800 type = TREE_TYPE (current_function_decl);
801 return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
806 c4x_interrupt_function_p ()
808 if (lookup_attribute ("interrupt",
809 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
810 return 1;
812 /* Look for TI style c_intnn. */
813 return current_function_name[0] == 'c'
814 && current_function_name[1] == '_'
815 && current_function_name[2] == 'i'
816 && current_function_name[3] == 'n'
817 && current_function_name[4] == 't'
818 && ISDIGIT (current_function_name[5])
819 && ISDIGIT (current_function_name[6]);
/* Emit the RTL for the function prologue.  Three cases:
   naked functions get nothing; interrupt functions save ST, the
   frame pointer, and every register c4x_isr_reg_used_p reports;
   ordinary functions set up AR3 as frame pointer (when needed),
   allocate SIZE words of locals, and push call-saved registers.
   NOTE(review): brace-only lines were dropped in extraction, so the
   exact nesting below must be read from the original file.  */
822 void
823 c4x_expand_prologue ()
825 unsigned int regno;
826 int size = get_frame_size ();
827 rtx insn;
829 /* In functions where ar3 is not used but frame pointers are still
830 specified, frame pointers are not adjusted (if >= -O2) and this
831 is used so it won't needlessly push the frame pointer. */
832 int dont_push_ar3;
834 /* For __naked__ function don't build a prologue. */
835 if (c4x_naked_function_p ())
837 return;
840 /* For __interrupt__ function build specific prologue. */
841 if (c4x_interrupt_function_p ())
843 c4x_leaf_function = c4x_leaf_function_p ();
845 insn = emit_insn (gen_push_st ());
846 RTX_FRAME_RELATED_P (insn) = 1;
847 if (size)
849 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
850 RTX_FRAME_RELATED_P (insn) = 1;
851 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
852 gen_rtx_REG (QImode, SP_REGNO)));
853 RTX_FRAME_RELATED_P (insn) = 1;
854 /* We require that an ISR uses fewer than 32768 words of
855 local variables, otherwise we have to go to lots of
856 effort to save a register, load it with the desired size,
857 adjust the stack pointer, and then restore the modified
858 register. Frankly, I think it is a poor ISR that
859 requires more than 32767 words of local temporary
860 storage! */
861 if (size > 32767)
862 error ("ISR %s requires %d words of local vars, max is 32767",
863 current_function_name, size);
865 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
866 gen_rtx_REG (QImode, SP_REGNO),
867 GEN_INT (size)));
868 RTX_FRAME_RELATED_P (insn) = 1;
/* Save every register the ISR (or its callees) might clobber; DP has
   its own push insn, and extended registers are saved in both QI and
   QF mode to preserve all 40 bits.  */
870 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
872 if (c4x_isr_reg_used_p (regno))
874 if (regno == DP_REGNO)
876 insn = emit_insn (gen_push_dp ());
877 RTX_FRAME_RELATED_P (insn) = 1;
879 else
881 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
882 RTX_FRAME_RELATED_P (insn) = 1;
883 if (IS_EXT_REGNO (regno))
885 insn = emit_insn (gen_pushqf
886 (gen_rtx_REG (QFmode, regno)));
887 RTX_FRAME_RELATED_P (insn) = 1;
892 /* We need to clear the repeat mode flag if the ISR is
893 going to use a RPTB instruction or uses the RC, RS, or RE
894 registers. */
895 if (regs_ever_live[RC_REGNO]
896 || regs_ever_live[RS_REGNO]
897 || regs_ever_live[RE_REGNO])
899 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
900 RTX_FRAME_RELATED_P (insn) = 1;
903 /* Reload DP reg if we are paranoid about some turkey
904 violating small memory model rules. */
905 if (TARGET_SMALL && TARGET_PARANOID)
907 insn = emit_insn (gen_set_ldp_prologue
908 (gen_rtx_REG (QImode, DP_REGNO),
909 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
910 RTX_FRAME_RELATED_P (insn) = 1;
/* Ordinary (non-interrupt, non-naked) function prologue.  */
913 else
915 if (frame_pointer_needed)
917 if ((size != 0)
918 || (current_function_args_size != 0)
919 || (optimize < 2))
921 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
922 RTX_FRAME_RELATED_P (insn) = 1;
923 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
924 gen_rtx_REG (QImode, SP_REGNO)));
925 RTX_FRAME_RELATED_P (insn) = 1;
926 dont_push_ar3 = 1;
928 else
930 /* Since ar3 is not used, we don't need to push it. */
931 dont_push_ar3 = 1;
934 else
936 /* If we use ar3, we need to push it. */
937 dont_push_ar3 = 0;
938 if ((size != 0) || (current_function_args_size != 0))
940 /* If we are omitting the frame pointer, we still have
941 to make space for it so the offsets are correct
942 unless we don't use anything on the stack at all. */
943 size += 1;
/* Allocate the locals: a frame over 32767 words needs the size built
   up in R1 first (C3x has no 32-bit immediate add).  */
947 if (size > 32767)
949 /* Local vars are too big, it will take multiple operations
950 to increment SP. */
951 if (TARGET_C3X)
953 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
954 GEN_INT(size >> 16)));
955 RTX_FRAME_RELATED_P (insn) = 1;
956 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
957 gen_rtx_REG (QImode, R1_REGNO),
958 GEN_INT(-16)));
959 RTX_FRAME_RELATED_P (insn) = 1;
961 else
963 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
964 GEN_INT(size & ~0xffff)));
965 RTX_FRAME_RELATED_P (insn) = 1;
967 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
968 gen_rtx_REG (QImode, R1_REGNO),
969 GEN_INT(size & 0xffff)));
970 RTX_FRAME_RELATED_P (insn) = 1;
971 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
972 gen_rtx_REG (QImode, SP_REGNO),
973 gen_rtx_REG (QImode, R1_REGNO)));
974 RTX_FRAME_RELATED_P (insn) = 1;
976 else if (size != 0)
978 /* Local vars take up less than 32767 words, so we can directly
979 add the number. */
980 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
981 gen_rtx_REG (QImode, SP_REGNO),
982 GEN_INT (size)));
983 RTX_FRAME_RELATED_P (insn) = 1;
/* Push call-saved registers that are live; float call-saved registers
   are pushed in QF mode (and additionally in QI mode under
   TARGET_PRESERVE_FLOAT to keep all 40 bits).  */
986 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
988 if (regs_ever_live[regno] && ! call_used_regs[regno])
990 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
992 if (TARGET_PRESERVE_FLOAT)
994 insn = emit_insn (gen_pushqi
995 (gen_rtx_REG (QImode, regno)));
996 RTX_FRAME_RELATED_P (insn) = 1;
998 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
999 RTX_FRAME_RELATED_P (insn) = 1;
1001 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
1003 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1004 RTX_FRAME_RELATED_P (insn) = 1;
1012 void
1013 c4x_expand_epilogue()
1015 int regno;
1016 int jump = 0;
1017 int dont_pop_ar3;
1018 rtx insn;
1019 int size = get_frame_size ();
1021 /* For __naked__ function build no epilogue. */
1022 if (c4x_naked_function_p ())
1024 insn = emit_jump_insn (gen_return_from_epilogue ());
1025 RTX_FRAME_RELATED_P (insn) = 1;
1026 return;
1029 /* For __interrupt__ function build specific epilogue. */
1030 if (c4x_interrupt_function_p ())
1032 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1034 if (! c4x_isr_reg_used_p (regno))
1035 continue;
1036 if (regno == DP_REGNO)
1038 insn = emit_insn (gen_pop_dp ());
1039 RTX_FRAME_RELATED_P (insn) = 1;
1041 else
1043 /* We have to use unspec because the compiler will delete insns
1044 that are not call-saved. */
1045 if (IS_EXT_REGNO (regno))
1047 insn = emit_insn (gen_popqf_unspec
1048 (gen_rtx_REG (QFmode, regno)));
1049 RTX_FRAME_RELATED_P (insn) = 1;
1051 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1052 RTX_FRAME_RELATED_P (insn) = 1;
1055 if (size)
1057 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1058 gen_rtx_REG (QImode, SP_REGNO),
1059 GEN_INT(size)));
1060 RTX_FRAME_RELATED_P (insn) = 1;
1061 insn = emit_insn (gen_popqi
1062 (gen_rtx_REG (QImode, AR3_REGNO)));
1063 RTX_FRAME_RELATED_P (insn) = 1;
1065 insn = emit_insn (gen_pop_st ());
1066 RTX_FRAME_RELATED_P (insn) = 1;
1067 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1068 RTX_FRAME_RELATED_P (insn) = 1;
1070 else
1072 if (frame_pointer_needed)
1074 if ((size != 0)
1075 || (current_function_args_size != 0)
1076 || (optimize < 2))
1078 insn = emit_insn
1079 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1080 gen_rtx_MEM (QImode,
1081 gen_rtx_PLUS
1082 (QImode, gen_rtx_REG (QImode,
1083 AR3_REGNO),
1084 GEN_INT(-1)))));
1085 RTX_FRAME_RELATED_P (insn) = 1;
1087 /* We already have the return value and the fp,
1088 so we need to add those to the stack. */
1089 size += 2;
1090 jump = 1;
1091 dont_pop_ar3 = 1;
1093 else
1095 /* Since ar3 is not used for anything, we don't need to
1096 pop it. */
1097 dont_pop_ar3 = 1;
1100 else
1102 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1103 if (size || current_function_args_size)
1105 /* If we are ommitting the frame pointer, we still have
1106 to make space for it so the offsets are correct
1107 unless we don't use anything on the stack at all. */
1108 size += 1;
1112 /* Now restore the saved registers, putting in the delayed branch
1113 where required. */
1114 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1116 if (regs_ever_live[regno] && ! call_used_regs[regno])
1118 if (regno == AR3_REGNO && dont_pop_ar3)
1119 continue;
1121 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1123 insn = emit_insn (gen_popqf_unspec
1124 (gen_rtx_REG (QFmode, regno)));
1125 RTX_FRAME_RELATED_P (insn) = 1;
1126 if (TARGET_PRESERVE_FLOAT)
1128 insn = emit_insn (gen_popqi_unspec
1129 (gen_rtx_REG (QImode, regno)));
1130 RTX_FRAME_RELATED_P (insn) = 1;
1133 else
1135 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1136 RTX_FRAME_RELATED_P (insn) = 1;
1141 if (frame_pointer_needed)
1143 if ((size != 0)
1144 || (current_function_args_size != 0)
1145 || (optimize < 2))
1147 /* Restore the old FP. */
1148 insn = emit_insn
1149 (gen_movqi
1150 (gen_rtx_REG (QImode, AR3_REGNO),
1151 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1153 RTX_FRAME_RELATED_P (insn) = 1;
1157 if (size > 32767)
1159 /* Local vars are too big, it will take multiple operations
1160 to decrement SP. */
1161 if (TARGET_C3X)
1163 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1164 GEN_INT(size >> 16)));
1165 RTX_FRAME_RELATED_P (insn) = 1;
1166 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1167 gen_rtx_REG (QImode, R3_REGNO),
1168 GEN_INT(-16)));
1169 RTX_FRAME_RELATED_P (insn) = 1;
1171 else
1173 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1174 GEN_INT(size & ~0xffff)));
1175 RTX_FRAME_RELATED_P (insn) = 1;
1177 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1178 gen_rtx_REG (QImode, R3_REGNO),
1179 GEN_INT(size & 0xffff)));
1180 RTX_FRAME_RELATED_P (insn) = 1;
1181 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1182 gen_rtx_REG (QImode, SP_REGNO),
1183 gen_rtx_REG (QImode, R3_REGNO)));
1184 RTX_FRAME_RELATED_P (insn) = 1;
1186 else if (size != 0)
1188 /* Local vars take up less than 32768 words, so we can directly
1189 subtract the number. */
1190 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1191 gen_rtx_REG (QImode, SP_REGNO),
1192 GEN_INT(size)));
1193 RTX_FRAME_RELATED_P (insn) = 1;
1196 if (jump)
1198 insn = emit_jump_insn (gen_return_indirect_internal
1199 (gen_rtx_REG (QImode, R2_REGNO)));
1200 RTX_FRAME_RELATED_P (insn) = 1;
1202 else
1204 insn = emit_jump_insn (gen_return_from_epilogue ());
1205 RTX_FRAME_RELATED_P (insn) = 1;
1212 c4x_null_epilogue_p ()
1214 int regno;
1216 if (reload_completed
1217 && ! c4x_naked_function_p ()
1218 && ! c4x_interrupt_function_p ()
1219 && ! current_function_calls_alloca
1220 && ! current_function_args_size
1221 && ! (optimize < 2)
1222 && ! get_frame_size ())
1224 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1225 if (regs_ever_live[regno] && ! call_used_regs[regno]
1226 && (regno != AR3_REGNO))
1227 return 1;
1228 return 0;
1230 return 1;
/* Expand a move of mode MODE between OPERANDS[0] (dest) and
   OPERANDS[1] (src), rewriting operands that the hardware cannot
   handle directly.  Returns 1 when the complete sequence has been
   emitted here; returns 0 when the caller should emit the normal
   move pattern using the (possibly updated) OPERANDS.  */
c4x_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  /* Force memory-to-memory moves through a register, except when a
     small (stik) constant is stored directly to memory.  */
  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
         address.  Strip the LO_SUM and use the bare address.  */
      op1 = XEXP (op1, 1);
    }
  else if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
        {
          /* Alias analysis seems to do a better job if we force
             constant addresses to memory after reload.  */
          emit_insn (gen_load_immed_address (op0, op1));
          return 1;
        }
      else
        {
          /* Stick symbol or label address into the constant pool.  */
          op1 = force_const_mem (Pmode, op1);
        }
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
         constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  /* Likewise expose the DP load for a symbolic destination address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  /* A mixed int/float SUBREG destination needs a pattern that also
     clobbers CC; only QFmode is handled (see storeqf_int_clobber).  */
  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
        op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
        {
          op0 = copy_rtx (op0);
          PUT_MODE (op0, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  /* Same treatment for a mixed-mode SUBREG source.  */
  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
        op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
        {
          op1 = copy_rtx (op1);
          PUT_MODE (op1, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  /* Integer constants that fit neither the 16-bit immediate field nor
     a "high" constant need a multi-insn load sequence.  */
  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}
/* Emit a call to LIBCALL computing OPERANDS[0] from the NOPERANDS - 1
   source operands (OPERANDS[1], and OPERANDS[2] for the three-operand
   form).  Sources have mode SMODE, the result mode DMODE.  The call
   is wrapped in a libcall block with an equivalent rtx of code CODE
   so later passes (e.g. CSE) can reason about the value.  */
void
c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode dmode;
     enum machine_mode smode;
     int noperands;
     rtx *operands;
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      /* Unary operation: one source argument.  */
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
                                     operands[1], smode);
      equiv = gen_rtx (code, dmode, operands[1]);
      break;

    case 3:
      /* Binary operation: two source arguments.  */
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
                                     operands[1], smode, operands[2], smode);
      equiv = gen_rtx (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
/* Convenience wrapper: emit a three-operand libcall where the source
   and destination modes are both MODE.  */
void
c4x_emit_libcall3 (libcall, code, mode, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode mode;
     rtx *operands;
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}
/* Emit a multiply-high libcall: OPERANDS[0] gets the high part of the
   widened product of OPERANDS[1] and OPERANDS[2].  CODE (presumably
   SIGN_EXTEND or ZERO_EXTEND -- the callers pass the extension used
   for smulhi3/umulhi3) describes how the sources are widened in the
   equivalent rtx recorded for the libcall block.  */
void
c4x_emit_libcall_mulhi (libcall, code, mode, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode mode;
     rtx *operands;
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
  /* Equivalent value: truncate ((op1 * op2) >> 32), i.e. the upper
     half of the double-width product.  */
  equiv = gen_rtx_TRUNCATE (mode,
                   gen_rtx_LSHIFTRT (HImode,
                            gen_rtx_MULT (HImode,
                                     gen_rtx (code, HImode, operands[1]),
                                     gen_rtx (code, HImode, operands[2])),
                                     GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
/* Set the SYMBOL_REF_FLAG for a function decl.  However, we do not
   yet use this info.  */
1461 static void
1462 c4x_encode_section_info (decl, first)
1463 tree decl;
1464 int first ATTRIBUTE_UNUSED;
1466 if (TREE_CODE (decl) == FUNCTION_DECL)
1467 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
/* Return nonzero if ADDR is a legitimate address for mode MODE.
   STRICT nonzero means hard register numbers must satisfy
   REGNO_OK_FOR_BASE_P/REGNO_OK_FOR_INDEX_P; otherwise pseudos are
   also accepted.  The switch classifies ADDR into a base register,
   optional index register, and optional displacement, which are then
   validated below.  */
c4x_check_legit_addr (mode, addr, strict)
     enum machine_mode mode;
     rtx addr;
     int strict;
{
  rtx base = NULL_RTX;          /* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;          /* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;          /* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
         allow SP here---push_operand should recognize an operand
         being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
        return 0;
      /* Fall through -- the base register check below applies to
         POST_INC as well.  */

    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
        return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (mode != QImode && mode != QFmode)
          return 0;

        /* The modification must be reg = reg +/- (reg or const),
           updating the same register that is dereferenced.  */
        if (! REG_P (op0)
            || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
          return 0;
        base = XEXP (op1, 0);
        if (base != op0)
          return 0;
        if (REG_P (XEXP (op1, 1)))
          indx = XEXP (op1, 1);
        else
          disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);
        enum rtx_code code0 = GET_CODE (op0);

        switch (code0)
          {
          case REG:
            if (REG_P (op1))
              {
                base = op0;     /* Base + index.  */
                indx = op1;
                /* Swap so the address register is the base and the
                   index register the index when they arrived in the
                   opposite order.  */
                if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
                  {
                    base = op1;
                    indx = op0;
                  }
              }
            else
              {
                base = op0;     /* Base + displacement.  */
                disp = op1;
              }
            break;

          default:
            return 0;
          }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        /* HImode and HFmode direct memory references aren't truly
           offsettable (consider case at end of data page).  We
           probably get better code by loading a pointer and using an
           indirect memory reference.  */
        if (mode == HImode || mode == HFmode)
          return 0;

        if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
          return 0;

        if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
          return 1;

        if (GET_CODE (op1) == CONST)
          return 1;
        return 0;
      }
      break;

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
        return 1;
      /* These need to be converted to a LO_SUM (...).
         LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
        return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
        return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
        return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
        return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
        return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
        return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
        return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
        return 0;
      if (mode == HImode || mode == HFmode)
        {
          /* The offset displacement must be legitimate.  */
          if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
            return 0;
        }
      else
        {
          if (! IS_DISP8_CONST (INTVAL (disp)))
            return 0;
        }
      /* Can't add an index with a disp.  */
      if (indx)
        return 0;
    }
  return 1;
}
/* Try to convert ORIG into a legitimate address for mode MODE.
   Symbol and label references are either forced into a register (for
   the non-offsettable HImode/HFmode cases) or turned into a
   DP-relative LO_SUM.  Returns the new address, or NULL_RTX if no
   transformation applies.  */
c4x_legitimize_address (orig, mode)
     rtx orig ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
      if (mode == HImode || mode == HFmode)
        {
          /* We need to force the address into
             a register so that it is offsettable.  */
          rtx addr_reg = gen_reg_rtx (Pmode);
          emit_move_insn (addr_reg, orig);
          return addr_reg;
        }
      else
        {
          rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

          /* The large memory model needs an explicit DP load; the
             small model assumes DP already covers the data page.  */
          if (! TARGET_SMALL)
            emit_insn (gen_set_ldp (dp_reg, orig));

          return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
        }
    }

  return NULL_RTX;
}
1699 /* Provide the costs of an addressing mode that contains ADDR.
1700 If ADDR is not a valid address, its cost is irrelevant.
1701 This is used in cse and loop optimisation to determine
1702 if it is worthwhile storing a common address into a register.
1703 Unfortunately, the C4x address cost depends on other operands. */
static int
c4x_address_cost (addr)
     rtx addr;
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

      /* Auto increment/decrement costs the same as plain register
         indirect.  */
    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
        rtx op1 = XEXP (addr, 1);

        /* DP-relative direct addressing; cheaper in the small memory
           model where no DP load is required.  */
        if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
          return TARGET_SMALL ? 3 : 4;

        if (GET_CODE (op1) == CONST)
          {
            rtx offset = const0_rtx;

            op1 = eliminate_constant_term (op1, &offset);

            /* ??? These costs need rethinking...  */
            if (GET_CODE (op1) == LABEL_REF)
              return 3;

            if (GET_CODE (op1) != SYMBOL_REF)
              return 4;

            if (INTVAL (offset) == 0)
              return 3;

            return 4;
          }
        fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
        register rtx op0 = XEXP (addr, 0);
        register rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op0) != REG)
          break;

        switch (GET_CODE (op1))
          {
          default:
            break;

          case REG:
            /* This cost for REG+REG must be greater than the cost
               for REG if we want autoincrement addressing modes.  */
            return 2;

          case CONST_INT:
            /* The following tries to improve GIV combination
               in strength reduce but appears not to help.  */
            if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
              return 1;

            if (IS_DISP1_CONST (INTVAL (op1)))
              return 1;

            if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
              return 2;

            return 3;
          }
      }
      break;

    default:
      break;
    }

  /* Fallback cost for anything not handled above.  */
  return 4;
}
1797 c4x_gen_compare_reg (code, x, y)
1798 enum rtx_code code;
1799 rtx x, y;
1801 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1802 rtx cc_reg;
1804 if (mode == CC_NOOVmode
1805 && (code == LE || code == GE || code == LT || code == GT))
1806 return NULL_RTX;
1808 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1809 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1810 gen_rtx_COMPARE (mode, x, y)));
1811 return cc_reg;
1814 char *
1815 c4x_output_cbranch (form, seq)
1816 const char *form;
1817 rtx seq;
1819 int delayed = 0;
1820 int annultrue = 0;
1821 int annulfalse = 0;
1822 rtx delay;
1823 char *cp;
1824 static char str[100];
1826 if (final_sequence)
1828 delay = XVECEXP (final_sequence, 0, 1);
1829 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1830 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1831 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1833 strcpy (str, form);
1834 cp = &str [strlen (str)];
1835 if (delayed)
1837 *cp++ = '%';
1838 *cp++ = '#';
1840 if (annultrue)
1842 *cp++ = 'a';
1843 *cp++ = 't';
1845 if (annulfalse)
1847 *cp++ = 'a';
1848 *cp++ = 'f';
1850 *cp++ = '\t';
1851 *cp++ = '%';
1852 *cp++ = 'l';
1853 *cp++ = '1';
1854 *cp = 0;
1855 return str;
/* Print operand OP to FILE, modified by the operand code LETTER
   (0 when no modifier was given).  Unknown modifiers fall through to
   the generic printing of OP by its rtx code below.  */
void
c4x_print_operand (file, op, letter)
     FILE *file;                /* File to write to.  */
     rtx op;                    /* Operand to print.  */
     int letter;                /* %<letter> or 0.  */
{
  rtx op1;
  enum rtx_code code;

  /* '%#' does not look at OP at all, so handle it before touching
     the operand.  */
  switch (letter)
    {
    case '#':                   /* Delayed.  */
      if (final_sequence)
        fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':                   /* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
        fprintf (file, "@");
      break;

    case 'H':                   /* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':                   /* Reversed condition.  */
      code = reverse_condition (code);
      break;

    case 'L':                   /* Log 2 of constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':                   /* Ones complement of small constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, "%d", ~INTVAL (op));
      return;

    case 'K':                   /* Generate ldp(k) if direct address.  */
      /* Emit a DP load for a (MEM (LO_SUM (DP) sym)) operand in the
         large memory model; nothing is printed otherwise.  */
      if (! TARGET_SMALL
          && code == MEM
          && GET_CODE (XEXP (op, 0)) == LO_SUM
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
          && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
        {
          op1 = XEXP (XEXP (op, 0), 1);
          if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
            {
              fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
              output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
              fprintf (file, "\n");
            }
        }
      return;

    case 'M':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL        /* Only used in asm statements.  */
          && code == MEM
          && (GET_CODE (XEXP (op, 0)) == CONST
              || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
        {
          fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
          output_address (XEXP (op, 0));
          fprintf (file, "\n\t");
        }
      return;

    case 'O':                   /* Offset address.  */
      /* Print the address one word beyond OP, or the register that
         follows a register operand.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
        break;
      else if (code == MEM)
        output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
        fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
        fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':                   /* Call.  */
      break;

    case 'U':                   /* Call/callu.  */
      if (code != SYMBOL_REF)
        fprintf (file, "u");
      return;

    default:
      break;
    }

  /* Generic printing by rtx code (possibly reversed above for %I).  */
  switch (code)
    {
    case REG:
      /* Use the float register aliases for FP values unless emitting
         TI-compatible assembly.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && ! TARGET_TI)
        fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
        fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
        char str[64];

        real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
                         sizeof (str), 0, 1);
        fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      fprintf (file, "%d", INTVAL (op));
      break;

      /* Condition codes print as their assembler mnemonics.  */
    case NE:
      fprintf (file, "ne");
      break;

    case EQ:
      fprintf (file, "eq");
      break;

    case GE:
      fprintf (file, "ge");
      break;

    case GT:
      fprintf (file, "gt");
      break;

    case LE:
      fprintf (file, "le");
      break;

    case LT:
      fprintf (file, "lt");
      break;

    case GEU:
      fprintf (file, "hs");
      break;

    case GTU:
      fprintf (file, "hi");
      break;

    case LEU:
      fprintf (file, "ls");
      break;

    case LTU:
      fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    case CODE_LABEL:
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}
/* Print memory address ADDR to FILE using the C3x/C4x assembler
   indirect addressing syntax (*ARn, *+ARn(d), *ARn++(d), @sym, ...).  */
void
c4x_print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        /* The modification is reg +/- (reg or positive constant);
           MINUS with a constant is printed as ++ with the negated
           value and vice versa.  */
        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
                   INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
                   -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
                   INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
                   -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:                  /* Indirect with displacement.  */
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0))
          {
            if (REG_P (op1))
              {
                /* Always print with the address register as the base
                   and the index register in parentheses.  */
                if (IS_INDEX_REG (op0))
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op1)],
                             reg_names[REGNO (op0)]);   /* Index + base.  */
                  }
                else
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op0)],
                             reg_names[REGNO (op1)]);   /* Base + index.  */
                  }
              }
            else if (INTVAL (op1) < 0)
              {
                fprintf (file, "*-%s(%d)",
                         reg_names[REGNO (op0)],
                         -INTVAL (op1));        /* Base - displacement.  */
              }
            else
              {
                fprintf (file, "*+%s(%d)",
                         reg_names[REGNO (op0)],
                         INTVAL (op1)); /* Base + displacement.  */
              }
          }
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        /* DP-relative direct address: print just the symbolic part.  */
        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
          c4x_print_operand_address (file, op1);
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}
2181 /* Return nonzero if the floating point operand will fit
2182 in the immediate field. */
static int
c4x_immed_float_p (op)
     rtx op;
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  /* Convert the CONST_DOUBLE OP to its target bit pattern: two words
     for HFmode, one word (plus a zero word) otherwise.  */
  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
      convval[1] = 0;
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;                   /* 0.0  */
  /* The immediate field only holds the top mantissa bits, so any bit
     in the low 12 bits (or the second word) means it won't fit.  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;                   /* Precision doesn't fit.  */
  /* The immediate field holds a 4-bit exponent: -7 .. 7.  */
  return (exponent <= 7)        /* Positive exp.  */
    && (exponent >= -7);        /* Negative exp.  */
}
/* The last instruction in a repeat block cannot be a Bcond, DBcond,
2213 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2215 None of the last four instructions from the bottom of the block can
2216 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2217 BcondAT or RETIcondD.
2219 This routine scans the four previous insns for a jump insn, and if
2220 one is found, returns 1 so that we bung in a nop instruction.
2221 This simple minded strategy will add a nop, when it may not
2222 be required. Say when there is a JUMP_INSN near the end of the
2223 block that doesn't get converted into a delayed branch.
2225 Note that we cannot have a call insn, since we don't generate
2226 repeat loops with calls in them (although I suppose we could, but
2227 there's no benefit.)
2229 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
/* Return nonzero if a NOP must be emitted before the rptb_end jump
   INSN -- see the restriction comment above.  */
c4x_rptb_nop_p (insn)
     rtx insn;
{
  rtx start_label;
  int i;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  /* If there is a label at the end of the loop we must insert
     a NOP.  */
  do {
    insn = previous_insn (insn);
  } while (GET_CODE (insn) == NOTE
           || GET_CODE (insn) == USE
           || GET_CODE (insn) == CLOBBER);
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  /* Look at the last four real insns of the block.  */
  for (i = 0; i < 4; i++)
    {
      /* Search back for prev non-note and non-label insn.  */
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
             || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
        {
          /* Hitting the start label means the loop is empty (i == 0)
             or contains fewer than four insns.  */
          if (insn == start_label)
            return i == 0;

          insn = previous_insn (insn);
        }

      /* If we have a jump instruction we should insert a NOP.  If we
         hit repeat block top we should only insert a NOP if the loop
         is empty.  */
      if (GET_CODE (insn) == JUMP_INSN)
        return 1;
      insn = previous_insn (insn);
    }
  return 0;
}
2274 /* The C4x looping instruction needs to be emitted at the top of the
2275 loop. Emitting the true RTL for a looping instruction at the top of
2276 the loop can cause problems with flow analysis. So instead, a dummy
2277 doloop insn is emitted at the end of the loop. This routine checks
2278 for the presence of this doloop insn and then searches back to the
2279 top of the loop, where it inserts the true looping insn (provided
2280 there are no instructions in the loop which would cause problems).
2281 Any additional labels can be emitted at this point. In addition, if
2282 the desired loop count register was not allocated, this routine does
2283 nothing.
2285 Before we can create a repeat block looping instruction we have to
2286 verify that there are no jumps outside the loop and no jumps outside
2287 the loop go into this loop. This can happen in the basic blocks reorder
2288 pass. The C4x cpu can not handle this. */
2290 static int
2291 c4x_label_ref_used_p (x, code_label)
2292 rtx x, code_label;
2294 enum rtx_code code;
2295 int i, j;
2296 const char *fmt;
2298 if (x == 0)
2299 return 0;
2301 code = GET_CODE (x);
2302 if (code == LABEL_REF)
2303 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2305 fmt = GET_RTX_FORMAT (code);
2306 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2308 if (fmt[i] == 'e')
2310 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2311 return 1;
2313 else if (fmt[i] == 'E')
2314 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2315 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2316 return 1;
2318 return 0;
/* Return nonzero if the block between START_LABEL and the rptb_end
   jump INSN may be emitted as a repeat block: no jump from outside
   targets a label inside, and no jump inside targets a label
   outside.  */
static int
c4x_rptb_valid_p (insn, start_label)
     rtx insn, start_label;
{
  rtx end = insn;
  rtx start;
  rtx tmp;

  /* Find the start label.  */
  for (; insn; insn = PREV_INSN (insn))
    if (insn == start_label)
      break;

  /* Not found then we can not use a rptb or rpts.  The label was
     probably moved by the basic block reorder pass.  */
  if (! insn)
    return 0;

  start = insn;
  /* If any jump jumps inside this block then we must fail.  */
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
        {
          for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
              return 0;
        }
    }
  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
        {
          for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
              return 0;
        }
    }

  /* If any jump jumps outside this block then we must fail.  */
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
        {
          for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
              return 0;
          for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
            if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
              return 0;
        }
    }

  /* All checks OK.  */
  return 1;
}
/* Given the dummy rptb_end jump INSN emitted at the bottom of a loop,
   insert the real looping instruction (rptb_top or rpts_top) at the
   loop top, or fall back to an explicit decrement-and-branch when a
   repeat block cannot be used.  */
void
c4x_rptb_insert (insn)
     rtx insn;
{
  rtx end_label;
  rtx start_label;
  rtx new_start_label;
  rtx count_reg;

  /* If the count register has not been allocated to RC, say if
     there is a movstr pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop.  */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)
    return;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))
    {
      /* We can not use the rptb insn.  Replace it so reorg can use
         the delay slots of the jump insn.  */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
      emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;
      delete_insn (insn);
      return;
    }

  /* Mark the end of the repeat block with a fresh label.  */
  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  /* Walk back to the original start label, redirecting any jump to
     it onto the new start label on the way.  */
  for (; insn; insn = PREV_INSN (insn))
    {
      if (insn == start_label)
        break;
      if (GET_CODE (insn) == JUMP_INSN &&
          JUMP_LABEL (insn) == start_label)
        redirect_jump (insn, new_start_label, 0);
    }
  if (! insn)
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  /* Use the single-insn RPTS form when the target allows it and the
     loop qualifies; otherwise emit a RPTB.  */
  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
  else
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
}
2444 /* This function is a C4x special called immediately before delayed
2445 branch scheduling. We fix up RTPB style loops that didn't get RC
2446 allocated as the loop counter. */
2448 void
2449 c4x_process_after_reload (first)
2450 rtx first;
2452 rtx insn;
2454 for (insn = first; insn; insn = NEXT_INSN (insn))
2456 /* Look for insn. */
2457 if (INSN_P (insn))
2459 int insn_code_number;
2460 rtx old;
2462 insn_code_number = recog_memoized (insn);
2464 if (insn_code_number < 0)
2465 continue;
2467 /* Insert the RTX for RPTB at the top of the loop
2468 and a label at the end of the loop. */
2469 if (insn_code_number == CODE_FOR_rptb_end)
2470 c4x_rptb_insert(insn);
2472 /* We need to split the insn here. Otherwise the calls to
2473 force_const_mem will not work for load_immed_address. */
2474 old = insn;
2476 /* Don't split the insn if it has been deleted. */
2477 if (! INSN_DELETED_P (old))
2478 insn = try_split (PATTERN(old), old, 1);
2480 /* When not optimizing, the old insn will be still left around
2481 with only the 'deleted' bit set. Transform it into a note
2482 to avoid confusion of subsequent processing. */
2483 if (INSN_DELETED_P (old))
2485 PUT_CODE (old, NOTE);
2486 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2487 NOTE_SOURCE_FILE (old) = 0;
2494 static int
2495 c4x_a_register (op)
2496 rtx op;
2498 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2502 static int
2503 c4x_x_register (op)
2504 rtx op;
2506 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2510 static int
2511 c4x_immed_int_constant (op)
2512 rtx op;
2514 if (GET_CODE (op) != CONST_INT)
2515 return 0;
2517 return GET_MODE (op) == VOIDmode
2518 || GET_MODE_CLASS (op) == MODE_INT
2519 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
2523 static int
2524 c4x_immed_float_constant (op)
2525 rtx op;
2527 if (GET_CODE (op) != CONST_DOUBLE)
2528 return 0;
2530 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2531 present this only means that a MEM rtx has been generated. It does
2532 not mean the rtx is really in memory. */
2534 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2539 c4x_shiftable_constant (op)
2540 rtx op;
2542 int i;
2543 int mask;
2544 int val = INTVAL (op);
2546 for (i = 0; i < 16; i++)
2548 if (val & (1 << i))
2549 break;
2551 mask = ((0xffff >> i) << 16) | 0xffff;
2552 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2553 : (val >> i) & mask))
2554 return i;
2555 return -1;
2560 c4x_H_constant (op)
2561 rtx op;
2563 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2568 c4x_I_constant (op)
2569 rtx op;
2571 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2576 c4x_J_constant (op)
2577 rtx op;
2579 if (TARGET_C3X)
2580 return 0;
2581 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2585 static int
2586 c4x_K_constant (op)
2587 rtx op;
2589 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2590 return 0;
2591 return IS_INT5_CONST (INTVAL (op));
2596 c4x_L_constant (op)
2597 rtx op;
2599 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2603 static int
2604 c4x_N_constant (op)
2605 rtx op;
2607 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2611 static int
2612 c4x_O_constant (op)
2613 rtx op;
2615 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2619 /* The constraints do not have to check the register class,
2620 except when needed to discriminate between the constraints.
2621 The operand has been checked by the predicates to be valid. */
2623 /* ARx + 9-bit signed const or IRn
2624 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2625 We don't include the pre/post inc/dec forms here since
2626 they are handled by the <> constraints. */
2629 c4x_Q_constraint (op)
2630 rtx op;
2632 enum machine_mode mode = GET_MODE (op);
2634 if (GET_CODE (op) != MEM)
2635 return 0;
2636 op = XEXP (op, 0);
2637 switch (GET_CODE (op))
2639 case REG:
2640 return 1;
2642 case PLUS:
2644 rtx op0 = XEXP (op, 0);
2645 rtx op1 = XEXP (op, 1);
2647 if (! REG_P (op0))
2648 return 0;
2650 if (REG_P (op1))
2651 return 1;
2653 if (GET_CODE (op1) != CONST_INT)
2654 return 0;
2656 /* HImode and HFmode must be offsettable. */
2657 if (mode == HImode || mode == HFmode)
2658 return IS_DISP8_OFF_CONST (INTVAL (op1));
2660 return IS_DISP8_CONST (INTVAL (op1));
2662 break;
2664 default:
2665 break;
2667 return 0;
2671 /* ARx + 5-bit unsigned const
2672 *ARx, *+ARx(n) for n < 32. */
2675 c4x_R_constraint (op)
2676 rtx op;
2678 enum machine_mode mode = GET_MODE (op);
2680 if (TARGET_C3X)
2681 return 0;
2682 if (GET_CODE (op) != MEM)
2683 return 0;
2684 op = XEXP (op, 0);
2685 switch (GET_CODE (op))
2687 case REG:
2688 return 1;
2690 case PLUS:
2692 rtx op0 = XEXP (op, 0);
2693 rtx op1 = XEXP (op, 1);
2695 if (! REG_P (op0))
2696 return 0;
2698 if (GET_CODE (op1) != CONST_INT)
2699 return 0;
2701 /* HImode and HFmode must be offsettable. */
2702 if (mode == HImode || mode == HFmode)
2703 return IS_UINT5_CONST (INTVAL (op1) + 1);
2705 return IS_UINT5_CONST (INTVAL (op1));
2707 break;
2709 default:
2710 break;
2712 return 0;
2716 static int
2717 c4x_R_indirect (op)
2718 rtx op;
2720 enum machine_mode mode = GET_MODE (op);
2722 if (TARGET_C3X || GET_CODE (op) != MEM)
2723 return 0;
2725 op = XEXP (op, 0);
2726 switch (GET_CODE (op))
2728 case REG:
2729 return IS_ADDR_OR_PSEUDO_REG (op);
2731 case PLUS:
2733 rtx op0 = XEXP (op, 0);
2734 rtx op1 = XEXP (op, 1);
2736 /* HImode and HFmode must be offsettable. */
2737 if (mode == HImode || mode == HFmode)
2738 return IS_ADDR_OR_PSEUDO_REG (op0)
2739 && GET_CODE (op1) == CONST_INT
2740 && IS_UINT5_CONST (INTVAL (op1) + 1);
2742 return REG_P (op0)
2743 && IS_ADDR_OR_PSEUDO_REG (op0)
2744 && GET_CODE (op1) == CONST_INT
2745 && IS_UINT5_CONST (INTVAL (op1));
2747 break;
2749 default:
2750 break;
2752 return 0;
2756 /* ARx + 1-bit unsigned const or IRn
2757 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2758 We don't include the pre/post inc/dec forms here since
2759 they are handled by the <> constraints. */
2762 c4x_S_constraint (op)
2763 rtx op;
2765 enum machine_mode mode = GET_MODE (op);
2766 if (GET_CODE (op) != MEM)
2767 return 0;
2768 op = XEXP (op, 0);
2769 switch (GET_CODE (op))
2771 case REG:
2772 return 1;
2774 case PRE_MODIFY:
2775 case POST_MODIFY:
2777 rtx op0 = XEXP (op, 0);
2778 rtx op1 = XEXP (op, 1);
2780 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2781 || (op0 != XEXP (op1, 0)))
2782 return 0;
2784 op0 = XEXP (op1, 0);
2785 op1 = XEXP (op1, 1);
2786 return REG_P (op0) && REG_P (op1);
2787 /* Pre or post_modify with a displacement of 0 or 1
2788 should not be generated. */
2790 break;
2792 case PLUS:
2794 rtx op0 = XEXP (op, 0);
2795 rtx op1 = XEXP (op, 1);
2797 if (!REG_P (op0))
2798 return 0;
2800 if (REG_P (op1))
2801 return 1;
2803 if (GET_CODE (op1) != CONST_INT)
2804 return 0;
2806 /* HImode and HFmode must be offsettable. */
2807 if (mode == HImode || mode == HFmode)
2808 return IS_DISP1_OFF_CONST (INTVAL (op1));
2810 return IS_DISP1_CONST (INTVAL (op1));
2812 break;
2814 default:
2815 break;
2817 return 0;
2821 static int
2822 c4x_S_indirect (op)
2823 rtx op;
2825 enum machine_mode mode = GET_MODE (op);
2826 if (GET_CODE (op) != MEM)
2827 return 0;
2829 op = XEXP (op, 0);
2830 switch (GET_CODE (op))
2832 case PRE_DEC:
2833 case POST_DEC:
2834 if (mode != QImode && mode != QFmode)
2835 return 0;
2836 case PRE_INC:
2837 case POST_INC:
2838 op = XEXP (op, 0);
2840 case REG:
2841 return IS_ADDR_OR_PSEUDO_REG (op);
2843 case PRE_MODIFY:
2844 case POST_MODIFY:
2846 rtx op0 = XEXP (op, 0);
2847 rtx op1 = XEXP (op, 1);
2849 if (mode != QImode && mode != QFmode)
2850 return 0;
2852 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2853 || (op0 != XEXP (op1, 0)))
2854 return 0;
2856 op0 = XEXP (op1, 0);
2857 op1 = XEXP (op1, 1);
2858 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2859 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2860 /* Pre or post_modify with a displacement of 0 or 1
2861 should not be generated. */
2864 case PLUS:
2866 rtx op0 = XEXP (op, 0);
2867 rtx op1 = XEXP (op, 1);
2869 if (REG_P (op0))
2871 /* HImode and HFmode must be offsettable. */
2872 if (mode == HImode || mode == HFmode)
2873 return IS_ADDR_OR_PSEUDO_REG (op0)
2874 && GET_CODE (op1) == CONST_INT
2875 && IS_DISP1_OFF_CONST (INTVAL (op1));
2877 if (REG_P (op1))
2878 return (IS_INDEX_OR_PSEUDO_REG (op1)
2879 && IS_ADDR_OR_PSEUDO_REG (op0))
2880 || (IS_ADDR_OR_PSEUDO_REG (op1)
2881 && IS_INDEX_OR_PSEUDO_REG (op0));
2883 return IS_ADDR_OR_PSEUDO_REG (op0)
2884 && GET_CODE (op1) == CONST_INT
2885 && IS_DISP1_CONST (INTVAL (op1));
2888 break;
2890 default:
2891 break;
2893 return 0;
2897 /* Direct memory operand. */
2900 c4x_T_constraint (op)
2901 rtx op;
2903 if (GET_CODE (op) != MEM)
2904 return 0;
2905 op = XEXP (op, 0);
2907 if (GET_CODE (op) != LO_SUM)
2909 /* Allow call operands. */
2910 return GET_CODE (op) == SYMBOL_REF
2911 && GET_MODE (op) == Pmode
2912 && SYMBOL_REF_FLAG (op);
2915 /* HImode and HFmode are not offsettable. */
2916 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2917 return 0;
2919 if ((GET_CODE (XEXP (op, 0)) == REG)
2920 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2921 return c4x_U_constraint (XEXP (op, 1));
2923 return 0;
2927 /* Symbolic operand. */
2930 c4x_U_constraint (op)
2931 rtx op;
2933 /* Don't allow direct addressing to an arbitrary constant. */
2934 return GET_CODE (op) == CONST
2935 || GET_CODE (op) == SYMBOL_REF
2936 || GET_CODE (op) == LABEL_REF;
2941 c4x_autoinc_operand (op, mode)
2942 rtx op;
2943 enum machine_mode mode ATTRIBUTE_UNUSED;
2945 if (GET_CODE (op) == MEM)
2947 enum rtx_code code = GET_CODE (XEXP (op, 0));
2949 if (code == PRE_INC
2950 || code == PRE_DEC
2951 || code == POST_INC
2952 || code == POST_DEC
2953 || code == PRE_MODIFY
2954 || code == POST_MODIFY
2956 return 1;
2958 return 0;
2962 /* Match any operand. */
2965 any_operand (op, mode)
2966 register rtx op ATTRIBUTE_UNUSED;
2967 enum machine_mode mode ATTRIBUTE_UNUSED;
2969 return 1;
2973 /* Nonzero if OP is a floating point value with value 0.0. */
2976 fp_zero_operand (op, mode)
2977 rtx op;
2978 enum machine_mode mode ATTRIBUTE_UNUSED;
2980 REAL_VALUE_TYPE r;
2982 if (GET_CODE (op) != CONST_DOUBLE)
2983 return 0;
2984 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2985 return REAL_VALUES_EQUAL (r, dconst0);
2990 const_operand (op, mode)
2991 register rtx op;
2992 register enum machine_mode mode;
2994 switch (mode)
2996 case QFmode:
2997 case HFmode:
2998 if (GET_CODE (op) != CONST_DOUBLE
2999 || GET_MODE (op) != mode
3000 || GET_MODE_CLASS (mode) != MODE_FLOAT)
3001 return 0;
3003 return c4x_immed_float_p (op);
3005 #if Pmode != QImode
3006 case Pmode:
3007 #endif
3008 case QImode:
3009 if (GET_CODE (op) == CONSTANT_P_RTX)
3010 return 1;
3012 if (GET_CODE (op) != CONST_INT
3013 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3014 || GET_MODE_CLASS (mode) != MODE_INT)
3015 return 0;
3017 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
3019 case HImode:
3020 return 0;
3022 default:
3023 return 0;
3029 stik_const_operand (op, mode)
3030 rtx op;
3031 enum machine_mode mode ATTRIBUTE_UNUSED;
3033 return c4x_K_constant (op);
3038 not_const_operand (op, mode)
3039 rtx op;
3040 enum machine_mode mode ATTRIBUTE_UNUSED;
3042 return c4x_N_constant (op);
3047 reg_operand (op, mode)
3048 rtx op;
3049 enum machine_mode mode;
3051 if (GET_CODE (op) == SUBREG
3052 && GET_MODE (op) == QFmode)
3053 return 0;
3054 return register_operand (op, mode);
3059 mixed_subreg_operand (op, mode)
3060 rtx op;
3061 enum machine_mode mode ATTRIBUTE_UNUSED;
3063 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3064 int and a long double. */
3065 if (GET_CODE (op) == SUBREG
3066 && (GET_MODE (op) == QFmode)
3067 && (GET_MODE (SUBREG_REG (op)) == QImode
3068 || GET_MODE (SUBREG_REG (op)) == HImode))
3069 return 1;
3070 return 0;
3075 reg_imm_operand (op, mode)
3076 rtx op;
3077 enum machine_mode mode ATTRIBUTE_UNUSED;
3079 if (REG_P (op) || CONSTANT_P (op))
3080 return 1;
3081 return 0;
3086 not_modify_reg (op, mode)
3087 rtx op;
3088 enum machine_mode mode ATTRIBUTE_UNUSED;
3090 if (REG_P (op) || CONSTANT_P (op))
3091 return 1;
3092 if (GET_CODE (op) != MEM)
3093 return 0;
3094 op = XEXP (op, 0);
3095 switch (GET_CODE (op))
3097 case REG:
3098 return 1;
3100 case PLUS:
3102 rtx op0 = XEXP (op, 0);
3103 rtx op1 = XEXP (op, 1);
3105 if (! REG_P (op0))
3106 return 0;
3108 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3109 return 1;
3112 case LO_SUM:
3114 rtx op0 = XEXP (op, 0);
3116 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3117 return 1;
3119 break;
3121 case CONST:
3122 case SYMBOL_REF:
3123 case LABEL_REF:
3124 return 1;
3126 default:
3127 break;
3129 return 0;
3134 not_rc_reg (op, mode)
3135 rtx op;
3136 enum machine_mode mode ATTRIBUTE_UNUSED;
3138 if (REG_P (op) && REGNO (op) == RC_REGNO)
3139 return 0;
3140 return 1;
3144 /* Extended precision register R0-R1. */
3147 r0r1_reg_operand (op, mode)
3148 rtx op;
3149 enum machine_mode mode;
3151 if (! reg_operand (op, mode))
3152 return 0;
3153 if (GET_CODE (op) == SUBREG)
3154 op = SUBREG_REG (op);
3155 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3159 /* Extended precision register R2-R3. */
3162 r2r3_reg_operand (op, mode)
3163 rtx op;
3164 enum machine_mode mode;
3166 if (! reg_operand (op, mode))
3167 return 0;
3168 if (GET_CODE (op) == SUBREG)
3169 op = SUBREG_REG (op);
3170 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3174 /* Low extended precision register R0-R7. */
3177 ext_low_reg_operand (op, mode)
3178 rtx op;
3179 enum machine_mode mode;
3181 if (! reg_operand (op, mode))
3182 return 0;
3183 if (GET_CODE (op) == SUBREG)
3184 op = SUBREG_REG (op);
3185 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3189 /* Extended precision register. */
3192 ext_reg_operand (op, mode)
3193 rtx op;
3194 enum machine_mode mode;
3196 if (! reg_operand (op, mode))
3197 return 0;
3198 if (GET_CODE (op) == SUBREG)
3199 op = SUBREG_REG (op);
3200 if (! REG_P (op))
3201 return 0;
3202 return IS_EXT_OR_PSEUDO_REG (op);
3206 /* Standard precision register. */
3209 std_reg_operand (op, mode)
3210 rtx op;
3211 enum machine_mode mode;
3213 if (! reg_operand (op, mode))
3214 return 0;
3215 if (GET_CODE (op) == SUBREG)
3216 op = SUBREG_REG (op);
3217 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3220 /* Standard precision or normal register. */
3223 std_or_reg_operand (op, mode)
3224 rtx op;
3225 enum machine_mode mode;
3227 if (reload_in_progress)
3228 return std_reg_operand (op, mode);
3229 return reg_operand (op, mode);
3232 /* Address register. */
3235 addr_reg_operand (op, mode)
3236 rtx op;
3237 enum machine_mode mode;
3239 if (! reg_operand (op, mode))
3240 return 0;
3241 return c4x_a_register (op);
3245 /* Index register. */
3248 index_reg_operand (op, mode)
3249 rtx op;
3250 enum machine_mode mode;
3252 if (! reg_operand (op, mode))
3253 return 0;
3254 if (GET_CODE (op) == SUBREG)
3255 op = SUBREG_REG (op);
3256 return c4x_x_register (op);
3260 /* DP register. */
3263 dp_reg_operand (op, mode)
3264 rtx op;
3265 enum machine_mode mode ATTRIBUTE_UNUSED;
3267 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3271 /* SP register. */
3274 sp_reg_operand (op, mode)
3275 rtx op;
3276 enum machine_mode mode ATTRIBUTE_UNUSED;
3278 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3282 /* ST register. */
3285 st_reg_operand (op, mode)
3286 register rtx op;
3287 enum machine_mode mode ATTRIBUTE_UNUSED;
3289 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3293 /* RC register. */
3296 rc_reg_operand (op, mode)
3297 register rtx op;
3298 enum machine_mode mode ATTRIBUTE_UNUSED;
3300 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3305 call_address_operand (op, mode)
3306 rtx op;
3307 enum machine_mode mode ATTRIBUTE_UNUSED;
3309 return (REG_P (op) || symbolic_address_operand (op, mode));
3313 /* Symbolic address operand. */
3316 symbolic_address_operand (op, mode)
3317 register rtx op;
3318 enum machine_mode mode ATTRIBUTE_UNUSED;
3320 switch (GET_CODE (op))
3322 case CONST:
3323 case SYMBOL_REF:
3324 case LABEL_REF:
3325 return 1;
3326 default:
3327 return 0;
3332 /* Check dst operand of a move instruction. */
3335 dst_operand (op, mode)
3336 rtx op;
3337 enum machine_mode mode;
3339 if (GET_CODE (op) == SUBREG
3340 && mixed_subreg_operand (op, mode))
3341 return 0;
3343 if (REG_P (op))
3344 return reg_operand (op, mode);
3346 return nonimmediate_operand (op, mode);
3350 /* Check src operand of two operand arithmetic instructions. */
3353 src_operand (op, mode)
3354 rtx op;
3355 enum machine_mode mode;
3357 if (GET_CODE (op) == SUBREG
3358 && mixed_subreg_operand (op, mode))
3359 return 0;
3361 if (REG_P (op))
3362 return reg_operand (op, mode);
3364 if (mode == VOIDmode)
3365 mode = GET_MODE (op);
3367 if (GET_CODE (op) == CONST_INT)
3368 return (mode == QImode || mode == Pmode || mode == HImode)
3369 && c4x_I_constant (op);
3371 /* We don't like CONST_DOUBLE integers. */
3372 if (GET_CODE (op) == CONST_DOUBLE)
3373 return c4x_H_constant (op);
3375 /* Disallow symbolic addresses. Only the predicate
3376 symbolic_address_operand will match these. */
3377 if (GET_CODE (op) == SYMBOL_REF
3378 || GET_CODE (op) == LABEL_REF
3379 || GET_CODE (op) == CONST)
3380 return 0;
3382 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3383 access to symbolic addresses. These operands will get forced
3384 into a register and the movqi expander will generate a
3385 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3386 if (GET_CODE (op) == MEM
3387 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3388 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3389 || GET_CODE (XEXP (op, 0)) == CONST)))
3390 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3392 return general_operand (op, mode);
3397 src_hi_operand (op, mode)
3398 rtx op;
3399 enum machine_mode mode;
3401 if (c4x_O_constant (op))
3402 return 1;
3403 return src_operand (op, mode);
3407 /* Check src operand of two operand logical instructions. */
3410 lsrc_operand (op, mode)
3411 rtx op;
3412 enum machine_mode mode;
3414 if (mode == VOIDmode)
3415 mode = GET_MODE (op);
3417 if (mode != QImode && mode != Pmode)
3418 fatal_insn ("mode not QImode", op);
3420 if (GET_CODE (op) == CONST_INT)
3421 return c4x_L_constant (op) || c4x_J_constant (op);
3423 return src_operand (op, mode);
3427 /* Check src operand of two operand tricky instructions. */
3430 tsrc_operand (op, mode)
3431 rtx op;
3432 enum machine_mode mode;
3434 if (mode == VOIDmode)
3435 mode = GET_MODE (op);
3437 if (mode != QImode && mode != Pmode)
3438 fatal_insn ("mode not QImode", op);
3440 if (GET_CODE (op) == CONST_INT)
3441 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3443 return src_operand (op, mode);
3447 /* Check src operand of two operand non immedidate instructions. */
3450 nonimmediate_src_operand (op, mode)
3451 rtx op;
3452 enum machine_mode mode;
3454 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3455 return 0;
3457 return src_operand (op, mode);
3461 /* Check logical src operand of two operand non immedidate instructions. */
3464 nonimmediate_lsrc_operand (op, mode)
3465 rtx op;
3466 enum machine_mode mode;
3468 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3469 return 0;
3471 return lsrc_operand (op, mode);
3476 reg_or_const_operand (op, mode)
3477 rtx op;
3478 enum machine_mode mode;
3480 return reg_operand (op, mode) || const_operand (op, mode);
3484 /* Check for indirect operands allowable in parallel instruction. */
3487 par_ind_operand (op, mode)
3488 rtx op;
3489 enum machine_mode mode;
3491 if (mode != VOIDmode && mode != GET_MODE (op))
3492 return 0;
3494 return c4x_S_indirect (op);
3498 /* Check for operands allowable in parallel instruction. */
3501 parallel_operand (op, mode)
3502 rtx op;
3503 enum machine_mode mode;
3505 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3509 static void
3510 c4x_S_address_parse (op, base, incdec, index, disp)
3511 rtx op;
3512 int *base;
3513 int *incdec;
3514 int *index;
3515 int *disp;
3517 *base = 0;
3518 *incdec = 0;
3519 *index = 0;
3520 *disp = 0;
3522 if (GET_CODE (op) != MEM)
3523 fatal_insn ("invalid indirect memory address", op);
3525 op = XEXP (op, 0);
3526 switch (GET_CODE (op))
3528 case PRE_DEC:
3529 *base = REGNO (XEXP (op, 0));
3530 *incdec = 1;
3531 *disp = -1;
3532 return;
3534 case POST_DEC:
3535 *base = REGNO (XEXP (op, 0));
3536 *incdec = 1;
3537 *disp = 0;
3538 return;
3540 case PRE_INC:
3541 *base = REGNO (XEXP (op, 0));
3542 *incdec = 1;
3543 *disp = 1;
3544 return;
3546 case POST_INC:
3547 *base = REGNO (XEXP (op, 0));
3548 *incdec = 1;
3549 *disp = 0;
3550 return;
3552 case POST_MODIFY:
3553 *base = REGNO (XEXP (op, 0));
3554 if (REG_P (XEXP (XEXP (op, 1), 1)))
3556 *index = REGNO (XEXP (XEXP (op, 1), 1));
3557 *disp = 0; /* ??? */
3559 else
3560 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3561 *incdec = 1;
3562 return;
3564 case PRE_MODIFY:
3565 *base = REGNO (XEXP (op, 0));
3566 if (REG_P (XEXP (XEXP (op, 1), 1)))
3568 *index = REGNO (XEXP (XEXP (op, 1), 1));
3569 *disp = 1; /* ??? */
3571 else
3572 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3573 *incdec = 1;
3575 return;
3577 case REG:
3578 *base = REGNO (op);
3579 return;
3581 case PLUS:
3583 rtx op0 = XEXP (op, 0);
3584 rtx op1 = XEXP (op, 1);
3586 if (c4x_a_register (op0))
3588 if (c4x_x_register (op1))
3590 *base = REGNO (op0);
3591 *index = REGNO (op1);
3592 return;
3594 else if ((GET_CODE (op1) == CONST_INT
3595 && IS_DISP1_CONST (INTVAL (op1))))
3597 *base = REGNO (op0);
3598 *disp = INTVAL (op1);
3599 return;
3602 else if (c4x_x_register (op0) && c4x_a_register (op1))
3604 *base = REGNO (op1);
3605 *index = REGNO (op0);
3606 return;
3609 /* Fallthrough. */
3611 default:
3612 fatal_insn ("invalid indirect (S) memory address", op);
3618 c4x_address_conflict (op0, op1, store0, store1)
3619 rtx op0;
3620 rtx op1;
3621 int store0;
3622 int store1;
3624 int base0;
3625 int base1;
3626 int incdec0;
3627 int incdec1;
3628 int index0;
3629 int index1;
3630 int disp0;
3631 int disp1;
3633 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3634 return 1;
3636 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3637 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3639 if (store0 && store1)
3641 /* If we have two stores in parallel to the same address, then
3642 the C4x only executes one of the stores. This is unlikely to
3643 cause problems except when writing to a hardware device such
3644 as a FIFO since the second write will be lost. The user
3645 should flag the hardware location as being volatile so that
3646 we don't do this optimisation. While it is unlikely that we
3647 have an aliased address if both locations are not marked
3648 volatile, it is probably safer to flag a potential conflict
3649 if either location is volatile. */
3650 if (! flag_argument_noalias)
3652 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3653 return 1;
3657 /* If have a parallel load and a store to the same address, the load
3658 is performed first, so there is no conflict. Similarly, there is
3659 no conflict if have parallel loads from the same address. */
3661 /* Cannot use auto increment or auto decrement twice for same
3662 base register. */
3663 if (base0 == base1 && incdec0 && incdec0)
3664 return 1;
3666 /* It might be too confusing for GCC if we have use a base register
3667 with a side effect and a memory reference using the same register
3668 in parallel. */
3669 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3670 return 1;
3672 /* We can not optimize the case where op1 and op2 refer to the same
3673 address. */
3674 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3675 return 1;
3677 /* No conflict. */
3678 return 0;
3682 /* Check for while loop inside a decrement and branch loop. */
3685 c4x_label_conflict (insn, jump, db)
3686 rtx insn;
3687 rtx jump;
3688 rtx db;
3690 while (insn)
3692 if (GET_CODE (insn) == CODE_LABEL)
3694 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3695 return 1;
3696 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3697 return 0;
3699 insn = PREV_INSN (insn);
3701 return 1;
3705 /* Validate combination of operands for parallel load/store instructions. */
3708 valid_parallel_load_store (operands, mode)
3709 rtx *operands;
3710 enum machine_mode mode ATTRIBUTE_UNUSED;
3712 rtx op0 = operands[0];
3713 rtx op1 = operands[1];
3714 rtx op2 = operands[2];
3715 rtx op3 = operands[3];
3717 if (GET_CODE (op0) == SUBREG)
3718 op0 = SUBREG_REG (op0);
3719 if (GET_CODE (op1) == SUBREG)
3720 op1 = SUBREG_REG (op1);
3721 if (GET_CODE (op2) == SUBREG)
3722 op2 = SUBREG_REG (op2);
3723 if (GET_CODE (op3) == SUBREG)
3724 op3 = SUBREG_REG (op3);
3726 /* The patterns should only allow ext_low_reg_operand() or
3727 par_ind_operand() operands. Thus of the 4 operands, only 2
3728 should be REGs and the other 2 should be MEMs. */
3730 /* This test prevents the multipack pass from using this pattern if
3731 op0 is used as an index or base register in op2 or op3, since
3732 this combination will require reloading. */
3733 if (GET_CODE (op0) == REG
3734 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3735 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3736 return 0;
3738 /* LDI||LDI. */
3739 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3740 return (REGNO (op0) != REGNO (op2))
3741 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3742 && ! c4x_address_conflict (op1, op3, 0, 0);
3744 /* STI||STI. */
3745 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3746 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3747 && ! c4x_address_conflict (op0, op2, 1, 1);
3749 /* LDI||STI. */
3750 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3751 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3752 && ! c4x_address_conflict (op1, op2, 0, 1);
3754 /* STI||LDI. */
3755 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3756 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3757 && ! c4x_address_conflict (op0, op3, 1, 0);
3759 return 0;
3764 valid_parallel_operands_4 (operands, mode)
3765 rtx *operands;
3766 enum machine_mode mode ATTRIBUTE_UNUSED;
3768 rtx op0 = operands[0];
3769 rtx op2 = operands[2];
3771 if (GET_CODE (op0) == SUBREG)
3772 op0 = SUBREG_REG (op0);
3773 if (GET_CODE (op2) == SUBREG)
3774 op2 = SUBREG_REG (op2);
3776 /* This test prevents the multipack pass from using this pattern if
3777 op0 is used as an index or base register in op2, since this combination
3778 will require reloading. */
3779 if (GET_CODE (op0) == REG
3780 && GET_CODE (op2) == MEM
3781 && reg_mentioned_p (op0, XEXP (op2, 0)))
3782 return 0;
3784 return 1;
3789 valid_parallel_operands_5 (operands, mode)
3790 rtx *operands;
3791 enum machine_mode mode ATTRIBUTE_UNUSED;
3793 int regs = 0;
3794 rtx op0 = operands[0];
3795 rtx op1 = operands[1];
3796 rtx op2 = operands[2];
3797 rtx op3 = operands[3];
3799 if (GET_CODE (op0) == SUBREG)
3800 op0 = SUBREG_REG (op0);
3801 if (GET_CODE (op1) == SUBREG)
3802 op1 = SUBREG_REG (op1);
3803 if (GET_CODE (op2) == SUBREG)
3804 op2 = SUBREG_REG (op2);
3806 /* The patterns should only allow ext_low_reg_operand() or
3807 par_ind_operand() operands. Operands 1 and 2 may be commutative
3808 but only one of them can be a register. */
3809 if (GET_CODE (op1) == REG)
3810 regs++;
3811 if (GET_CODE (op2) == REG)
3812 regs++;
3814 if (regs != 1)
3815 return 0;
3817 /* This test prevents the multipack pass from using this pattern if
3818 op0 is used as an index or base register in op3, since this combination
3819 will require reloading. */
3820 if (GET_CODE (op0) == REG
3821 && GET_CODE (op3) == MEM
3822 && reg_mentioned_p (op0, XEXP (op3, 0)))
3823 return 0;
3825 return 1;
3830 valid_parallel_operands_6 (operands, mode)
3831 rtx *operands;
3832 enum machine_mode mode ATTRIBUTE_UNUSED;
3834 int regs = 0;
3835 rtx op0 = operands[0];
3836 rtx op1 = operands[1];
3837 rtx op2 = operands[2];
3838 rtx op4 = operands[4];
3839 rtx op5 = operands[5];
3841 if (GET_CODE (op1) == SUBREG)
3842 op1 = SUBREG_REG (op1);
3843 if (GET_CODE (op2) == SUBREG)
3844 op2 = SUBREG_REG (op2);
3845 if (GET_CODE (op4) == SUBREG)
3846 op4 = SUBREG_REG (op4);
3847 if (GET_CODE (op5) == SUBREG)
3848 op5 = SUBREG_REG (op5);
3850 /* The patterns should only allow ext_low_reg_operand() or
3851 par_ind_operand() operands. Thus of the 4 input operands, only 2
3852 should be REGs and the other 2 should be MEMs. */
3854 if (GET_CODE (op1) == REG)
3855 regs++;
3856 if (GET_CODE (op2) == REG)
3857 regs++;
3858 if (GET_CODE (op4) == REG)
3859 regs++;
3860 if (GET_CODE (op5) == REG)
3861 regs++;
3863 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3864 Perhaps we should count the MEMs as well? */
3865 if (regs != 2)
3866 return 0;
3868 /* This test prevents the multipack pass from using this pattern if
3869 op0 is used as an index or base register in op4 or op5, since
3870 this combination will require reloading. */
3871 if (GET_CODE (op0) == REG
3872 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3873 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3874 return 0;
3876 return 1;
3880 /* Validate combination of src operands. Note that the operands have
3881 been screened by the src_operand predicate. We just have to check
3882 that the combination of operands is valid. If FORCE is set, ensure
3883 that the destination regno is valid if we have a 2 operand insn. */
3885 static int
3886 c4x_valid_operands (code, operands, mode, force)
3887 enum rtx_code code;
3888 rtx *operands;
3889 enum machine_mode mode ATTRIBUTE_UNUSED;
3890 int force;
3892 rtx op1;
3893 rtx op2;
3894 enum rtx_code code1;
3895 enum rtx_code code2;
3897 if (code == COMPARE)
3899 op1 = operands[0];
3900 op2 = operands[1];
3902 else
3904 op1 = operands[1];
3905 op2 = operands[2];
3908 if (GET_CODE (op1) == SUBREG)
3909 op1 = SUBREG_REG (op1);
3910 if (GET_CODE (op2) == SUBREG)
3911 op2 = SUBREG_REG (op2);
3913 code1 = GET_CODE (op1);
3914 code2 = GET_CODE (op2);
3916 if (code1 == REG && code2 == REG)
3917 return 1;
3919 if (code1 == MEM && code2 == MEM)
3921 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3922 return 1;
3923 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3926 if (code1 == code2)
3927 return 0;
3929 if (code1 == REG)
3931 switch (code2)
3933 case CONST_INT:
3934 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3935 return 1;
3936 break;
3938 case CONST_DOUBLE:
3939 if (! c4x_H_constant (op2))
3940 return 0;
3941 break;
3943 /* Any valid memory operand screened by src_operand is OK. */
3944 case MEM:
3946 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3947 into a stack slot memory address comprising a PLUS and a
3948 constant. */
3949 case ADDRESSOF:
3950 break;
3952 default:
3953 fatal_insn ("c4x_valid_operands: Internal error", op2);
3954 break;
3957 /* Check that we have a valid destination register for a two operand
3958 instruction. */
3959 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3962 /* We assume MINUS is commutative since the subtract patterns
3963 also support the reverse subtract instructions. Since op1
3964 is not a register, and op2 is a register, op1 can only
3965 be a restricted memory operand for a shift instruction. */
3966 if (code == ASHIFTRT || code == LSHIFTRT
3967 || code == ASHIFT || code == COMPARE)
3968 return code2 == REG
3969 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3971 switch (code1)
3973 case CONST_INT:
3974 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3975 return 1;
3976 break;
3978 case CONST_DOUBLE:
3979 if (! c4x_H_constant (op1))
3980 return 0;
3981 break;
3983 /* Any valid memory operand screened by src_operand is OK. */
3984 case MEM:
3985 #if 0
3986 if (code2 != REG)
3987 return 0;
3988 #endif
3989 break;
3991 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3992 into a stack slot memory address comprising a PLUS and a
3993 constant. */
3994 case ADDRESSOF:
3995 break;
3997 default:
3998 abort ();
3999 break;
4002 /* Check that we have a valid destination register for a two operand
4003 instruction. */
4004 return ! force || REGNO (op1) == REGNO (operands[0]);
4008 int valid_operands (code, operands, mode)
4009 enum rtx_code code;
4010 rtx *operands;
4011 enum machine_mode mode;
4014 /* If we are not optimizing then we have to let anything go and let
4015 reload fix things up. instantiate_decl in function.c can produce
4016 invalid insns by changing the offset of a memory operand from a
4017 valid one into an invalid one, when the second operand is also a
4018 memory operand. The alternative is not to allow two memory
4019 operands for an insn when not optimizing. The problem only rarely
4020 occurs, for example with the C-torture program DFcmp.c. */
4022 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
4027 legitimize_operands (code, operands, mode)
4028 enum rtx_code code;
4029 rtx *operands;
4030 enum machine_mode mode;
4032 /* Compare only has 2 operands. */
4033 if (code == COMPARE)
4035 /* During RTL generation, force constants into pseudos so that
4036 they can get hoisted out of loops. This will tie up an extra
4037 register but can save an extra cycle. Only do this if loop
4038 optimisation enabled. (We cannot pull this trick for add and
4039 sub instructions since the flow pass won't find
4040 autoincrements etc.) This allows us to generate compare
4041 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4042 of LDI *AR0++, R0; CMPI 42, R0.
4044 Note that expand_binops will try to load an expensive constant
4045 into a register if it is used within a loop. Unfortunately,
4046 the cost mechanism doesn't allow us to look at the other
4047 operand to decide whether the constant is expensive. */
4049 if (! reload_in_progress
4050 && TARGET_HOIST
4051 && optimize > 0
4052 && GET_CODE (operands[1]) == CONST_INT
4053 && preserve_subexpressions_p ()
4054 && rtx_cost (operands[1], code) > 1)
4055 operands[1] = force_reg (mode, operands[1]);
4057 if (! reload_in_progress
4058 && ! c4x_valid_operands (code, operands, mode, 0))
4059 operands[0] = force_reg (mode, operands[0]);
4060 return 1;
4063 /* We cannot do this for ADDI/SUBI insns since we will
4064 defeat the flow pass from finding autoincrement addressing
4065 opportunities. */
4066 if (! reload_in_progress
4067 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
4068 && TARGET_HOIST
4069 && optimize > 1
4070 && GET_CODE (operands[2]) == CONST_INT
4071 && preserve_subexpressions_p ()
4072 && rtx_cost (operands[2], code) > 1)
4073 operands[2] = force_reg (mode, operands[2]);
4075 /* We can get better code on a C30 if we force constant shift counts
4076 into a register. This way they can get hoisted out of loops,
4077 tying up a register, but saving an instruction. The downside is
4078 that they may get allocated to an address or index register, and
4079 thus we will get a pipeline conflict if there is a nearby
4080 indirect address using an address register.
4082 Note that expand_binops will not try to load an expensive constant
4083 into a register if it is used within a loop for a shift insn. */
4085 if (! reload_in_progress
4086 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
4088 /* If the operand combination is invalid, we force operand1 into a
4089 register, preventing reload from having doing to do this at a
4090 later stage. */
4091 operands[1] = force_reg (mode, operands[1]);
4092 if (TARGET_FORCE)
4094 emit_move_insn (operands[0], operands[1]);
4095 operands[1] = copy_rtx (operands[0]);
4097 else
4099 /* Just in case... */
4100 if (! c4x_valid_operands (code, operands, mode, 0))
4101 operands[2] = force_reg (mode, operands[2]);
4105 /* Right shifts require a negative shift count, but GCC expects
4106 a positive count, so we emit a NEG. */
4107 if ((code == ASHIFTRT || code == LSHIFTRT)
4108 && (GET_CODE (operands[2]) != CONST_INT))
4109 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
4111 return 1;
4115 /* The following predicates are used for instruction scheduling. */
4118 group1_reg_operand (op, mode)
4119 rtx op;
4120 enum machine_mode mode;
4122 if (mode != VOIDmode && mode != GET_MODE (op))
4123 return 0;
4124 if (GET_CODE (op) == SUBREG)
4125 op = SUBREG_REG (op);
4126 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4131 group1_mem_operand (op, mode)
4132 rtx op;
4133 enum machine_mode mode;
4135 if (mode != VOIDmode && mode != GET_MODE (op))
4136 return 0;
4138 if (GET_CODE (op) == MEM)
4140 op = XEXP (op, 0);
4141 if (GET_CODE (op) == PLUS)
4143 rtx op0 = XEXP (op, 0);
4144 rtx op1 = XEXP (op, 1);
4146 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4147 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4148 return 1;
4150 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4151 return 1;
4154 return 0;
/* Return true if OP is any one of the address registers.  */
4161 arx_reg_operand (op, mode)
4162 rtx op;
4163 enum machine_mode mode;
4165 if (mode != VOIDmode && mode != GET_MODE (op))
4166 return 0;
4167 if (GET_CODE (op) == SUBREG)
4168 op = SUBREG_REG (op);
4169 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4173 static int
4174 c4x_arn_reg_operand (op, mode, regno)
4175 rtx op;
4176 enum machine_mode mode;
4177 unsigned int regno;
4179 if (mode != VOIDmode && mode != GET_MODE (op))
4180 return 0;
4181 if (GET_CODE (op) == SUBREG)
4182 op = SUBREG_REG (op);
4183 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4187 static int
4188 c4x_arn_mem_operand (op, mode, regno)
4189 rtx op;
4190 enum machine_mode mode;
4191 unsigned int regno;
4193 if (mode != VOIDmode && mode != GET_MODE (op))
4194 return 0;
4196 if (GET_CODE (op) == MEM)
4198 op = XEXP (op, 0);
4199 switch (GET_CODE (op))
4201 case PRE_DEC:
4202 case POST_DEC:
4203 case PRE_INC:
4204 case POST_INC:
4205 op = XEXP (op, 0);
4207 case REG:
4208 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4210 case PRE_MODIFY:
4211 case POST_MODIFY:
4212 if (REG_P (XEXP (op, 0)) && (! reload_completed
4213 || (REGNO (XEXP (op, 0)) == regno)))
4214 return 1;
4215 if (REG_P (XEXP (XEXP (op, 1), 1))
4216 && (! reload_completed
4217 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4218 return 1;
4219 break;
4221 case PLUS:
4223 rtx op0 = XEXP (op, 0);
4224 rtx op1 = XEXP (op, 1);
4226 if ((REG_P (op0) && (! reload_completed
4227 || (REGNO (op0) == regno)))
4228 || (REG_P (op1) && (! reload_completed
4229 || (REGNO (op1) == regno))))
4230 return 1;
4232 break;
4234 default:
4235 break;
4238 return 0;
int
ar0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is register AR0 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}
int
ar0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses AR0.  */
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}
int
ar1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is register AR1 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}
int
ar1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses AR1.  */
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}
int
ar2_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is register AR2 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}
int
ar2_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses AR2.  */
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}
int
ar3_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is register AR3 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}
int
ar3_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses AR3.  */
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}
int
ar4_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is register AR4 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}
int
ar4_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses AR4.  */
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}
int
ar5_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is register AR5 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}
int
ar5_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses AR5.  */
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}
int
ar6_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is register AR6 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}
int
ar6_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses AR6.  */
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}
int
ar7_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is register AR7 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}
int
ar7_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses AR7.  */
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}
int
ir0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is index register IR0 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}
int
ir0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses IR0.  */
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}
int
ir1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is index register IR1 (any register before reload).  */
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}
int
ir1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Nonzero if OP is a memory reference whose address uses IR1.  */
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
4422 /* This is similar to operand_subword but allows autoincrement
4423 addressing. */
4426 c4x_operand_subword (op, i, validate_address, mode)
4427 rtx op;
4428 int i;
4429 int validate_address;
4430 enum machine_mode mode;
4432 if (mode != HImode && mode != HFmode)
4433 fatal_insn ("c4x_operand_subword: invalid mode", op);
4435 if (mode == HFmode && REG_P (op))
4436 fatal_insn ("c4x_operand_subword: invalid operand", op);
4438 if (GET_CODE (op) == MEM)
4440 enum rtx_code code = GET_CODE (XEXP (op, 0));
4441 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4442 enum machine_mode submode;
4444 submode = mode;
4445 if (mode == HImode)
4446 submode = QImode;
4447 else if (mode == HFmode)
4448 submode = QFmode;
4450 switch (code)
4452 case POST_INC:
4453 case PRE_INC:
4454 return gen_rtx_MEM (submode, XEXP (op, 0));
4456 case POST_DEC:
4457 case PRE_DEC:
4458 case PRE_MODIFY:
4459 case POST_MODIFY:
4460 /* We could handle these with some difficulty.
4461 e.g., *p-- => *(p-=2); *(p+1). */
4462 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4464 case SYMBOL_REF:
4465 case LABEL_REF:
4466 case CONST:
4467 case CONST_INT:
4468 fatal_insn ("c4x_operand_subword: invalid address", op);
4470 /* Even though offsettable_address_p considers (MEM
4471 (LO_SUM)) to be offsettable, it is not safe if the
4472 address is at the end of the data page since we also have
4473 to fix up the associated high PART. In this case where
4474 we are trying to split a HImode or HFmode memory
4475 reference, we would have to emit another insn to reload a
4476 new HIGH value. It's easier to disable LO_SUM memory references
4477 in HImode or HFmode and we probably get better code. */
4478 case LO_SUM:
4479 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4481 default:
4482 break;
4486 return operand_subword (op, i, validate_address, mode);
/* Singly-linked list node used to record assembler symbol names that
   have been declared global or referenced as external.  */
struct name_list
{
  struct name_list *next;	/* Next node in the list.  */
  const char *name;		/* Symbol name (pointer stored, not copied).  */
};

/* Names declared global in this translation unit.  */
static struct name_list *global_head;

/* Names referenced but not defined here; emitted as .ref directives
   at the end of the assembly file.  */
static struct name_list *extern_head;
4499 /* Add NAME to list of global symbols and remove from external list if
4500 present on external list. */
4502 void
4503 c4x_global_label (name)
4504 const char *name;
4506 struct name_list *p, *last;
4508 /* Do not insert duplicate names, so linearly search through list of
4509 existing names. */
4510 p = global_head;
4511 while (p)
4513 if (strcmp (p->name, name) == 0)
4514 return;
4515 p = p->next;
4517 p = (struct name_list *) xmalloc (sizeof *p);
4518 p->next = global_head;
4519 p->name = name;
4520 global_head = p;
4522 /* Remove this name from ref list if present. */
4523 last = NULL;
4524 p = extern_head;
4525 while (p)
4527 if (strcmp (p->name, name) == 0)
4529 if (last)
4530 last->next = p->next;
4531 else
4532 extern_head = p->next;
4533 break;
4535 last = p;
4536 p = p->next;
4541 /* Add NAME to list of external symbols. */
4543 void
4544 c4x_external_ref (name)
4545 const char *name;
4547 struct name_list *p;
4549 /* Do not insert duplicate names. */
4550 p = extern_head;
4551 while (p)
4553 if (strcmp (p->name, name) == 0)
4554 return;
4555 p = p->next;
4558 /* Do not insert ref if global found. */
4559 p = global_head;
4560 while (p)
4562 if (strcmp (p->name, name) == 0)
4563 return;
4564 p = p->next;
4566 p = (struct name_list *) xmalloc (sizeof *p);
4567 p->next = extern_head;
4568 p->name = name;
4569 extern_head = p;
4573 void
4574 c4x_file_end (fp)
4575 FILE *fp;
4577 struct name_list *p;
4579 /* Output all external names that are not global. */
4580 p = extern_head;
4581 while (p)
4583 fprintf (fp, "\t.ref\t");
4584 assemble_name (fp, p->name);
4585 fprintf (fp, "\n");
4586 p = p->next;
4588 fprintf (fp, "\t.end\n");
4592 static void
4593 c4x_check_attribute (attrib, list, decl, attributes)
4594 const char *attrib;
4595 tree list, decl, *attributes;
4597 while (list != NULL_TREE
4598 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4599 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4600 list = TREE_CHAIN (list);
4601 if (list)
4602 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4603 *attributes);
4607 static void
4608 c4x_insert_attributes (decl, attributes)
4609 tree decl, *attributes;
4611 switch (TREE_CODE (decl))
4613 case FUNCTION_DECL:
4614 c4x_check_attribute ("section", code_tree, decl, attributes);
4615 c4x_check_attribute ("const", pure_tree, decl, attributes);
4616 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4617 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4618 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4619 break;
4621 case VAR_DECL:
4622 c4x_check_attribute ("section", data_tree, decl, attributes);
4623 break;
4625 default:
4626 break;
/* Table of valid machine attributes.  All three attributes apply to
   function types and are validated by c4x_handle_fntype_attribute.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  /* Sentinel terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL }
};
4640 /* Handle an attribute requiring a FUNCTION_TYPE;
4641 arguments as in struct attribute_spec.handler. */
4642 static tree
4643 c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4644 tree *node;
4645 tree name;
4646 tree args ATTRIBUTE_UNUSED;
4647 int flags ATTRIBUTE_UNUSED;
4648 bool *no_add_attrs;
4650 if (TREE_CODE (*node) != FUNCTION_TYPE)
4652 warning ("`%s' attribute only applies to functions",
4653 IDENTIFIER_POINTER (name));
4654 *no_add_attrs = true;
4657 return NULL_TREE;
4661 /* !!! FIXME to emit RPTS correctly. */
4664 c4x_rptb_rpts_p (insn, op)
4665 rtx insn, op;
4667 /* The next insn should be our label marking where the
4668 repeat block starts. */
4669 insn = NEXT_INSN (insn);
4670 if (GET_CODE (insn) != CODE_LABEL)
4672 /* Some insns may have been shifted between the RPTB insn
4673 and the top label... They were probably destined to
4674 be moved out of the loop. For now, let's leave them
4675 where they are and print a warning. We should
4676 probably move these insns before the repeat block insn. */
4677 if (TARGET_DEBUG)
4678 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4679 insn);
4680 return 0;
4683 /* Skip any notes. */
4684 insn = next_nonnote_insn (insn);
4686 /* This should be our first insn in the loop. */
4687 if (! INSN_P (insn))
4688 return 0;
4690 /* Skip any notes. */
4691 insn = next_nonnote_insn (insn);
4693 if (! INSN_P (insn))
4694 return 0;
4696 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4697 return 0;
4699 if (TARGET_RPTS)
4700 return 1;
4702 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4706 /* Check if register r11 is used as the destination of an insn. */
4708 static int
4709 c4x_r11_set_p(x)
4710 rtx x;
4712 rtx set;
4713 int i, j;
4714 const char *fmt;
4716 if (x == 0)
4717 return 0;
4719 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4720 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4722 if (INSN_P (x) && (set = single_set (x)))
4723 x = SET_DEST (set);
4725 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4726 return 1;
4728 fmt = GET_RTX_FORMAT (GET_CODE (x));
4729 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4731 if (fmt[i] == 'e')
4733 if (c4x_r11_set_p (XEXP (x, i)))
4734 return 1;
4736 else if (fmt[i] == 'E')
4737 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4738 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4739 return 1;
4741 return 0;
4745 /* The c4x sometimes has a problem when the insn before the laj insn
4746 sets the r11 register. Check for this situation. */
4749 c4x_check_laj_p (insn)
4750 rtx insn;
4752 insn = prev_nonnote_insn (insn);
4754 /* If this is the start of the function no nop is needed. */
4755 if (insn == 0)
4756 return 0;
4758 /* If the previous insn is a code label we have to insert a nop. This
4759 could be a jump or table jump. We can find the normal jumps by
4760 scanning the function but this will not find table jumps. */
4761 if (GET_CODE (insn) == CODE_LABEL)
4762 return 1;
4764 /* If the previous insn sets register r11 we have to insert a nop. */
4765 if (c4x_r11_set_p (insn))
4766 return 1;
4768 /* No nop needed. */
4769 return 0;
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency of INSN on DEP_INSN described by LINK.  COST is the
   current cost.  A set of an address register followed by a use incurs
   a 2 cycle stall (reduced to a single cycle on the c40 using LDA),
   while a read of an address register followed by a use incurs a
   single cycle.  */
/* Stall costs (in cycles, plus one) used by c4x_adjust_cost below:
   set of an address register before a use, set via LDA on the c40,
   and read of an address register before a use, respectively.  */
#define SET_USE_COST 3
#define SETLDA_USE_COST 2
#define READ_USE_COST 2
static int
c4x_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  /* REG_NOTE_KIND of 0 is a true (data) dependence.  */
  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
	 cycles later.  Take the worst stall over all the set/read vs.
	 use attribute combinations below.  */
      if (TARGET_C3X)
	{
	  if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;
	}
      else
	{
	  /* This could be significantly optimized. We should look
	     to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
	     insn uses ar0-ar7.  We then test if the same register
	     is used.  The tricky bit is that some operands will
	     use several registers...  */
	  if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

	  if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	}

      if (max)
	cost = max;

      /* For other data dependencies, the default cost specified in the
	 md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
4920 void
4921 c4x_init_builtins ()
4923 tree endlink = void_list_node;
4925 builtin_function ("fast_ftoi",
4926 build_function_type
4927 (integer_type_node,
4928 tree_cons (NULL_TREE, double_type_node, endlink)),
4929 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
4930 builtin_function ("ansi_ftoi",
4931 build_function_type
4932 (integer_type_node,
4933 tree_cons (NULL_TREE, double_type_node, endlink)),
4934 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
4935 if (TARGET_C3X)
4936 builtin_function ("fast_imult",
4937 build_function_type
4938 (integer_type_node,
4939 tree_cons (NULL_TREE, integer_type_node,
4940 tree_cons (NULL_TREE,
4941 integer_type_node, endlink))),
4942 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
4943 else
4945 builtin_function ("toieee",
4946 build_function_type
4947 (double_type_node,
4948 tree_cons (NULL_TREE, double_type_node, endlink)),
4949 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4950 builtin_function ("frieee",
4951 build_function_type
4952 (double_type_node,
4953 tree_cons (NULL_TREE, double_type_node, endlink)),
4954 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4955 builtin_function ("fast_invf",
4956 build_function_type
4957 (double_type_node,
4958 tree_cons (NULL_TREE, double_type_node, endlink)),
4959 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
/* Expand a call to one of the machine-specific builtins registered in
   c4x_init_builtins, emitting the corresponding insn.  Returns the rtx
   holding the result, or NULL_RTX if the builtin is not available on
   the current (sub)target.  */
rtx
c4x_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      /* Only provided on the C3x; fall through to return NULL_RTX
	 otherwise.  */
      if (! TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      r0 = protect_from_queue (r0, 0);
      r1 = protect_from_queue (r1, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      /* C4x only.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      /* C4x only.  The frieee insn requires a memory operand, so force
	 a register argument into a stack slot first.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
	put_var_into_stack (arg0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (register_operand (r0, QFmode))
	{
	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
	  emit_move_insn (r1, r0);
	  r0 = r1;
	}
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      /* C4x only.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  return NULL_RTX;
}
static void
c4x_asm_named_section (name, flags)
     const char *name;
     unsigned int flags ATTRIBUTE_UNUSED;
{
  /* Switch the assembler output to section NAME using the TI .sect
     directive; section FLAGS are not representable and are ignored.  */
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}
static void
c4x_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  /* Emit the standard .global directive, then record NAME so the
     end-of-file .ref list omits it (see c4x_global_label).  */
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
/* Nonzero if rtx code C is one of the shift operations.  */
#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
/* Nonzero if rtx code C is one of the bitwise logical operations.  */
#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
5078 /* Compute a (partial) cost for rtx X. Return true if the complete
5079 cost has been computed, and false if subexpressions should be
5080 scanned. In either case, *TOTAL contains the cost result. */
5082 static bool
5083 c4x_rtx_costs (x, code, outer_code, total)
5084 rtx x;
5085 int code, outer_code;
5086 int *total;
5088 HOST_WIDE_INT val;
5090 switch (code)
5092 /* Some small integers are effectively free for the C40. We should
5093 also consider if we are using the small memory model. With
5094 the big memory model we require an extra insn for a constant
5095 loaded from memory. */
5097 case CONST_INT:
5098 val = INTVAL (x);
5099 if (c4x_J_constant (x))
5100 *total = 0;
5101 else if (! TARGET_C3X
5102 && outer_code == AND
5103 && (val == 255 || val == 65535))
5104 *total = 0;
5105 else if (! TARGET_C3X
5106 && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
5107 && (val == 16 || val == 24))
5108 *total = 0;
5109 else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
5110 *total = 3;
5111 else if (LOGICAL_CODE_P (outer_code)
5112 ? c4x_L_constant (x) : c4x_I_constant (x))
5113 *total = 2;
5114 else
5115 *total = 4;
5116 return true;
5118 case CONST:
5119 case LABEL_REF:
5120 case SYMBOL_REF:
5121 *total = 4;
5122 return true;
5124 case CONST_DOUBLE:
5125 if (c4x_H_constant (x))
5126 *total = 2;
5127 else if (GET_MODE (x) == QFmode)
5128 *total = 4;
5129 else
5130 *total = 8;
5131 return true;
5133 /* ??? Note that we return true, rather than false so that rtx_cost
5134 doesn't include the constant costs. Otherwise expand_mult will
5135 think that it is cheaper to synthesize a multiply rather than to
5136 use a multiply instruction. I think this is because the algorithm
5137 synth_mult doesn't take into account the loading of the operands,
5138 whereas the calculation of mult_cost does. */
5139 case PLUS:
5140 case MINUS:
5141 case AND:
5142 case IOR:
5143 case XOR:
5144 case ASHIFT:
5145 case ASHIFTRT:
5146 case LSHIFTRT:
5147 *total = COSTS_N_INSNS (1);
5148 return true;
5150 case MULT:
5151 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
5152 || TARGET_MPYI ? 1 : 14);
5153 return true;
5155 case DIV:
5156 case UDIV:
5157 case MOD:
5158 case UMOD:
5159 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
5160 ? 15 : 50);
5161 return true;
5163 default:
5164 return false;