* config/alpha/alpha.c: Follow spelling conventions.
[official-gcc.git] / gcc / config / c4x / c4x.c
blob83ab33124ff39742e1b29fa999df63b66a92aea6
1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
26 #include "config.h"
27 #include "system.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "flags.h"
43 #include "loop.h"
44 #include "recog.h"
45 #include "c-tree.h"
46 #include "ggc.h"
47 #include "cpplib.h"
48 #include "toplev.h"
49 #include "tm_p.h"
50 #include "target.h"
51 #include "target-def.h"
/* SYMBOL_REF rtxes for the libgcc support routines backing HImode
   multiplies and the QF/HF <-> HI conversions that have no machine
   instruction; initialized elsewhere (not visible in this chunk).  */
53 rtx smulhi3_libfunc;
54 rtx umulhi3_libfunc;
55 rtx fix_truncqfhi2_libfunc;
56 rtx fixuns_truncqfhi2_libfunc;
57 rtx fix_trunchfhi2_libfunc;
58 rtx fixuns_trunchfhi2_libfunc;
59 rtx floathiqf2_libfunc;
60 rtx floatunshiqf2_libfunc;
61 rtx floathihf2_libfunc;
62 rtx floatunshihf2_libfunc;
/* Nonzero while compiling an interrupt function known to be a leaf;
   set from c4x_leaf_function_p () in c4x_expand_prologue and consulted
   by c4x_isr_reg_used_p.  */
64 static int c4x_leaf_function;
66 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
73 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
75 /* Reg Modes Saved. */
76 R0R1_REGS, /* R0 QI, QF, HF No. */
77 R0R1_REGS, /* R1 QI, QF, HF No. */
78 R2R3_REGS, /* R2 QI, QF, HF No. */
79 R2R3_REGS, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
84 ADDR_REGS, /* AR0 QI No. */
85 ADDR_REGS, /* AR1 QI No. */
86 ADDR_REGS, /* AR2 QI No. */
87 ADDR_REGS, /* AR3 QI QI. */
88 ADDR_REGS, /* AR4 QI QI. */
89 ADDR_REGS, /* AR5 QI QI. */
90 ADDR_REGS, /* AR6 QI QI. */
91 ADDR_REGS, /* AR7 QI QI. */
92 DP_REG, /* DP QI No. */
93 INDEX_REGS, /* IR0 QI No. */
94 INDEX_REGS, /* IR1 QI No. */
95 BK_REG, /* BK QI QI. */
96 SP_REG, /* SP QI No. */
97 ST_REG, /* ST CC No. */
/* The interrupt-control registers are never allocated.  */
98 NO_REGS, /* DIE/IE No. */
99 NO_REGS, /* IIE/IF No. */
100 NO_REGS, /* IIF/IOF No. */
101 INT_REGS, /* RS QI No. */
102 INT_REGS, /* RE QI No. */
103 RC_REG, /* RC QI No. */
104 EXT_REGS, /* R8 QI, QF, HF QI. */
105 EXT_REGS, /* R9 QI, QF, HF No. */
106 EXT_REGS, /* R10 QI, QF, HF No. */
107 EXT_REGS, /* R11 QI, QF, HF No. */
/* Mode used when saving each hard register around a call; VOIDmode
   presumably marks registers that are never saved this way — verify
   against the CALLER_SAVE macros in c4x.h (not visible here).  */
110 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
112 /* Reg Modes Saved. */
113 HFmode, /* R0 QI, QF, HF No. */
114 HFmode, /* R1 QI, QF, HF No. */
115 HFmode, /* R2 QI, QF, HF No. */
116 HFmode, /* R3 QI, QF, HF No. */
117 QFmode, /* R4 QI, QF, HF QI. */
118 QFmode, /* R5 QI, QF, HF QI. */
119 QImode, /* R6 QI, QF, HF QF. */
120 QImode, /* R7 QI, QF, HF QF. */
121 QImode, /* AR0 QI No. */
122 QImode, /* AR1 QI No. */
123 QImode, /* AR2 QI No. */
124 QImode, /* AR3 QI QI. */
125 QImode, /* AR4 QI QI. */
126 QImode, /* AR5 QI QI. */
127 QImode, /* AR6 QI QI. */
128 QImode, /* AR7 QI QI. */
129 VOIDmode, /* DP QI No. */
130 QImode, /* IR0 QI No. */
131 QImode, /* IR1 QI No. */
132 QImode, /* BK QI QI. */
133 VOIDmode, /* SP QI No. */
134 VOIDmode, /* ST CC No. */
135 VOIDmode, /* DIE/IE No. */
136 VOIDmode, /* IIE/IF No. */
137 VOIDmode, /* IIF/IOF No. */
138 QImode, /* RS QI No. */
139 QImode, /* RE QI No. */
140 VOIDmode, /* RC QI No. */
141 QFmode, /* R8 QI, QF, HF QI. */
142 HFmode, /* R9 QI, QF, HF No. */
143 HFmode, /* R10 QI, QF, HF No. */
144 HFmode, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
151 rtx c4x_compare_op0;
152 rtx c4x_compare_op1;
/* Raw option strings (set by option parsing) and the integer values
   decoded from them in c4x_override_options.  */
154 const char *c4x_rpts_cycles_string;
155 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
156 const char *c4x_cpu_version_string;
157 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
/* Lists of identifiers collected by the CODE_SECTION/DATA_SECTION/etc.
   pragmas; consumed by c4x_check_attribute (declared below).  */
161 tree code_tree = NULL_TREE;
162 tree data_tree = NULL_TREE;
163 tree pure_tree = NULL_TREE;
164 tree noreturn_tree = NULL_TREE;
165 tree interrupt_tree = NULL_TREE;
167 /* Forward declarations */
/* PARAMS wraps prototype argument lists so they degrade gracefully on
   pre-ISO compilers; all of these are file-local helpers defined later.  */
168 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
169 static int c4x_leaf_function_p PARAMS ((void));
170 static int c4x_assembler_function_p PARAMS ((void));
171 static int c4x_immed_float_p PARAMS ((rtx));
172 static int c4x_a_register PARAMS ((rtx));
173 static int c4x_x_register PARAMS ((rtx));
174 static int c4x_immed_int_constant PARAMS ((rtx));
175 static int c4x_immed_float_constant PARAMS ((rtx));
176 static int c4x_K_constant PARAMS ((rtx));
177 static int c4x_N_constant PARAMS ((rtx));
178 static int c4x_O_constant PARAMS ((rtx));
179 static int c4x_R_indirect PARAMS ((rtx));
180 static int c4x_S_indirect PARAMS ((rtx));
181 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
182 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
183 enum machine_mode, int));
184 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
185 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
186 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
187 static int c4x_r11_set_p PARAMS ((rtx));
188 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
189 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
190 static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
191 const struct attribute_spec c4x_attribute_table[];
192 static void c4x_insert_attributes PARAMS ((tree, tree *));
193 static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
194 static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
195 static void c4x_encode_section_info PARAMS ((tree, int));
196 static void c4x_globalize_label PARAMS ((FILE *, const char *));
198 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below overrides a default target hook; the
   TARGET_INITIALIZER expansion at the bottom gathers them into the
   global targetm vector.  */
199 #undef TARGET_ASM_BYTE_OP
/* NOTE(review): ".word" for BYTE_OP looks deliberate — the C4x's
   smallest addressable unit appears to be a 32-bit word (cf. the
   "QI ... 32 bits" comments below) — confirm against c4x.h.  */
200 #define TARGET_ASM_BYTE_OP "\t.word\t"
201 #undef TARGET_ASM_ALIGNED_HI_OP
202 #define TARGET_ASM_ALIGNED_HI_OP NULL
203 #undef TARGET_ASM_ALIGNED_SI_OP
204 #define TARGET_ASM_ALIGNED_SI_OP NULL
206 #undef TARGET_ATTRIBUTE_TABLE
207 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
209 #undef TARGET_INSERT_ATTRIBUTES
210 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
212 #undef TARGET_INIT_BUILTINS
213 #define TARGET_INIT_BUILTINS c4x_init_builtins
215 #undef TARGET_EXPAND_BUILTIN
216 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
218 #undef TARGET_SCHED_ADJUST_COST
219 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
221 #undef TARGET_ENCODE_SECTION_INFO
222 #define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info
224 #undef TARGET_ASM_GLOBALIZE_LABEL
225 #define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
227 struct gcc_target targetm = TARGET_INITIALIZER;
229 /* Override command line options.
230 Called once after all options have been parsed.
231 Mostly we process the processor
232 type and sometimes adjust other TARGET_ options. */
234 void
235 c4x_override_options ()
/* NOTE(review): atoi silently yields 0 for malformed -mrpts/-mcpu
   values; there is no diagnostic until the switch default below.  */
237 if (c4x_rpts_cycles_string)
238 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
239 else
240 c4x_rpts_cycles = 0;
/* Derive the CPU version from whichever -m30/-m31/... flag is set,
   defaulting to the C40.  */
242 if (TARGET_C30)
243 c4x_cpu_version = 30;
244 else if (TARGET_C31)
245 c4x_cpu_version = 31;
246 else if (TARGET_C32)
247 c4x_cpu_version = 32;
248 else if (TARGET_C33)
249 c4x_cpu_version = 33;
250 else if (TARGET_C40)
251 c4x_cpu_version = 40;
252 else if (TARGET_C44)
253 c4x_cpu_version = 44;
254 else
255 c4x_cpu_version = 40;
257 /* -mcpu=xx overrides -m40 etc. */
258 if (c4x_cpu_version_string)
260 const char *p = c4x_cpu_version_string;
262 /* Also allow -mcpu=c30 etc. */
263 if (*p == 'c' || *p == 'C')
264 p++;
265 c4x_cpu_version = atoi (p);
/* Normalize target_flags so that exactly one CPU flag remains set,
   matching the final c4x_cpu_version.  */
268 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
269 C40_FLAG | C44_FLAG);
271 switch (c4x_cpu_version)
273 case 30: target_flags |= C30_FLAG; break;
274 case 31: target_flags |= C31_FLAG; break;
275 case 32: target_flags |= C32_FLAG; break;
276 case 33: target_flags |= C33_FLAG; break;
277 case 40: target_flags |= C40_FLAG; break;
278 case 44: target_flags |= C44_FLAG; break;
279 default:
280 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
281 c4x_cpu_version = 40;
282 target_flags |= C40_FLAG;
/* The C30/31/32/33 are collectively the C3x family.  */
285 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
286 target_flags |= C3X_FLAG;
287 else
288 target_flags &= ~C3X_FLAG;
290 /* Convert foo / 8.0 into foo * 0.125, etc. */
291 set_fast_math_flags (1);
293 /* We should phase out the following at some stage.
294 This provides compatibility with the old -mno-aliases option. */
295 if (! TARGET_ALIASES && ! flag_argument_noalias)
296 flag_argument_noalias = 1;
300 /* This is called before c4x_override_options. */
302 void
303 c4x_optimization_options (level, size)
304 int level ATTRIBUTE_UNUSED;
305 int size ATTRIBUTE_UNUSED;
307 /* Scheduling before register allocation can screw up global
308 register allocation, especially for functions that use MPY||ADD
309 instructions. The benefit we gain we get by scheduling before
310 register allocation is probably marginal anyhow. */
311 flag_schedule_insns = 0;
315 /* Write an ASCII string. */
317 #define C4X_ASCII_LIMIT 40
/* Emit LEN bytes at PTR to STREAM as .byte directives: printable runs
   are batched into quoted strings in SBUF (at most C4X_ASCII_LIMIT
   chars each), everything else is emitted as a decimal byte value.  */
319 void
320 c4x_output_ascii (stream, ptr, len)
321 FILE *stream;
322 const char *ptr;
323 int len;
/* s = chars buffered in sbuf; l = approximate output column, used to
   wrap lines for the TI assembler; onlys = flush-only pass for a char
   that was buffered but filled sbuf.  */
325 char sbuf[C4X_ASCII_LIMIT + 1];
326 int s, l, special, first = 1, onlys;
328 if (len)
329 fprintf (stream, "\t.byte\t");
331 for (s = l = 0; len > 0; --len, ++ptr)
333 onlys = 0;
335 /* Escape " and \ with a \". */
336 special = *ptr == '\"' || *ptr == '\\';
338 /* If printable - add to buff. */
339 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
341 if (special)
342 sbuf[s++] = '\\';
343 sbuf[s++] = *ptr;
344 if (s < C4X_ASCII_LIMIT - 1)
345 continue;
346 onlys = 1;
/* Flush the buffered printable run as a quoted string.  */
348 if (s)
350 if (first)
351 first = 0;
352 else
354 fputc (',', stream);
355 l++;
358 sbuf[s] = 0;
359 fprintf (stream, "\"%s\"", sbuf);
360 l += s + 2;
/* NOTE(review): 80 appears to be the TI assembler's line-length
   limit — confirm before changing.  */
361 if (TARGET_TI && l >= 80 && len > 1)
363 fprintf (stream, "\n\t.byte\t");
364 first = 1;
365 l = 0;
368 s = 0;
370 if (onlys)
371 continue;
/* Non-printable (or TI-special) byte: emit as a decimal value.  */
373 if (first)
374 first = 0;
375 else
377 fputc (',', stream);
378 l++;
381 fprintf (stream, "%d", *ptr);
382 l += 3;
383 if (TARGET_TI && l >= 80 && len > 1)
385 fprintf (stream, "\n\t.byte\t");
386 first = 1;
387 l = 0;
/* Flush any printable run still buffered when the input ran out.  */
390 if (s)
392 if (! first)
393 fputc (',', stream);
395 sbuf[s] = 0;
396 fprintf (stream, "\"%s\"", sbuf);
397 s = 0;
399 fputc ('\n', stream);
/* Return nonzero if hard register REGNO may hold a value of mode MODE
   (backs the HARD_REGNO_MODE_OK target macro).  */
404 c4x_hard_regno_mode_ok (regno, mode)
405 unsigned int regno;
406 enum machine_mode mode;
408 switch (mode)
410 #if Pmode != QImode
411 case Pmode: /* Pointer (24/32 bits). */
412 #endif
413 case QImode: /* Integer (32 bits). */
414 return IS_INT_REGNO (regno);
416 case QFmode: /* Float, Double (32 bits). */
417 case HFmode: /* Long Double (40 bits). */
418 return IS_EXT_REGNO (regno);
420 case CCmode: /* Condition Codes. */
421 case CC_NOOVmode: /* Condition Codes. */
422 return IS_ST_REGNO (regno);
424 case HImode: /* Long Long (64 bits). */
425 /* We need two registers to store long longs. Note that
426 it is much easier to constrain the first register
427 to start on an even boundary. */
428 return IS_INT_REGNO (regno)
429 && IS_INT_REGNO (regno + 1)
430 && (regno & 1) == 0;
432 default:
433 return 0; /* We don't support these modes. */
/* Not reached; keeps older compilers quiet about falling off the end.  */
436 return 0;
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (regno1, regno2)
     unsigned int regno1;
     unsigned int regno2;
{
  /* Call-saved registers preserved in QI mode cannot stand in for
     ones preserved in QF mode, and vice versa.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;

  /* The condition codes are only set for extended (40 bit) registers,
     so an extended register and a standard (32 bit) register may not
     substitute for one another in either direction.  */
  {
    int ext1 = IS_EXT_REGNO (regno1) != 0;
    int ext2 = IS_EXT_REGNO (regno2) != 0;

    if (ext1 != ext2)
      return 0;
  }

  return 1;
}
461 /* The TI C3x C compiler register argument runtime model uses 6 registers,
462 AR2, R2, R3, RC, RS, RE.
464 The first two floating point arguments (float, double, long double)
465 that are found scanning from left to right are assigned to R2 and R3.
467 The remaining integer (char, short, int, long) or pointer arguments
468 are assigned to the remaining registers in the order AR2, R2, R3,
469 RC, RS, RE when scanning left to right, except for the last named
470 argument prior to an ellipsis denoting variable number of
471 arguments. We don't have to worry about the latter condition since
472 function.c treats the last named argument as anonymous (unnamed).
474 All arguments that cannot be passed in registers are pushed onto
475 the stack in reverse order (right to left). GCC handles that for us.
477 c4x_init_cumulative_args() is called at the start, so we can parse
478 the args to see how many floating point arguments and how many
479 integer (or pointer) arguments there are. c4x_function_arg() is
480 then called (sometimes repeatedly) for each argument (parsed left
481 to right) to obtain the register to pass the argument in, or zero
482 if the argument is to be passed on the stack. Once the compiler is
483 happy, c4x_function_arg_advance() is called.
485 Don't use R0 to pass arguments in, we use 0 to indicate a stack
486 argument. */
/* Integer argument registers, indexed first by the number of float
   args already assigned to registers (0-2, which consumes R2/R3) and
   then by the integer argument's ordinal; a 0 entry means "stack".  */
488 static const int c4x_int_reglist[3][6] =
490 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
491 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
492 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
/* Float argument registers, in assignment order.  */
495 static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
498 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
499 function whose data type is FNTYPE.
500 For a library call, FNTYPE is 0. */
502 void
503 c4x_init_cumulative_args (cum, fntype, libname)
504 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
505 tree fntype; /* Tree ptr for function decl. */
506 rtx libname; /* SYMBOL_REF of library name or 0. */
508 tree param, next_param;
/* floats/ints count register-eligible arguments found while scanning
   the prototype; c4x_function_arg later converts them into the
   maxfloats/maxints limits and resets them to zero.  */
510 cum->floats = cum->ints = 0;
511 cum->init = 0;
512 cum->var = 0;
513 cum->args = 0;
515 if (TARGET_DEBUG)
517 fprintf (stderr, "\nc4x_init_cumulative_args (");
518 if (fntype)
520 tree ret_type = TREE_TYPE (fntype);
522 fprintf (stderr, "fntype code = %s, ret code = %s",
523 tree_code_name[(int) TREE_CODE (fntype)],
524 tree_code_name[(int) TREE_CODE (ret_type)]);
526 else
527 fprintf (stderr, "no fntype");
529 if (libname)
530 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
533 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
/* Walk the prototype's argument type list, classifying each arg.  */
535 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
536 param; param = next_param)
538 tree type;
540 next_param = TREE_CHAIN (param);
542 type = TREE_VALUE (param);
543 if (type && type != void_type_node)
545 enum machine_mode mode;
547 /* If the last arg doesn't have void type then we have
548 variable arguments. */
549 if (! next_param)
550 cum->var = 1;
552 if ((mode = TYPE_MODE (type)))
554 if (! MUST_PASS_IN_STACK (mode, type))
556 /* Look for float, double, or long double argument. */
557 if (mode == QFmode || mode == HFmode)
558 cum->floats++;
559 /* Look for integer, enumeral, boolean, char, or pointer
560 argument. */
561 else if (mode == QImode || mode == Pmode)
562 cum->ints++;
565 cum->args++;
569 if (TARGET_DEBUG)
570 fprintf (stderr, "%s%s, args = %d)\n",
571 cum->prototype ? ", prototype" : "",
572 cum->var ? ", variable args" : "",
573 cum->args);
577 /* Update the data in CUM to advance over an argument
578 of mode MODE and data type TYPE.
579 (TYPE is null for libcalls where that information may not be available.) */
581 void
582 c4x_function_arg_advance (cum, mode, type, named)
583 CUMULATIVE_ARGS *cum; /* Current arg information. */
584 enum machine_mode mode; /* Current arg mode. */
585 tree type; /* Type of the arg or 0 if lib support. */
586 int named; /* Whether or not the argument was named. */
588 if (TARGET_DEBUG)
589 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
590 GET_MODE_NAME (mode), named);
/* Mirror the classification done in c4x_function_arg: only named,
   typed, non-stack arguments consume a register slot.  */
591 if (! TARGET_MEMPARM
592 && named
593 && type
594 && ! MUST_PASS_IN_STACK (mode, type))
596 /* Look for float, double, or long double argument. */
597 if (mode == QFmode || mode == HFmode)
598 cum->floats++;
599 /* Look for integer, enumeral, boolean, char, or pointer argument. */
600 else if (mode == QImode || mode == Pmode)
601 cum->ints++;
603 else if (! TARGET_MEMPARM && ! type)
605 /* Handle libcall arguments. */
606 if (mode == QFmode || mode == HFmode)
607 cum->floats++;
608 else if (mode == QImode || mode == Pmode)
609 cum->ints++;
611 return;
615 /* Define where to put the arguments to a function. Value is zero to
616 push the argument on the stack, or a hard register in which to
617 store the argument.
619 MODE is the argument's machine mode.
620 TYPE is the data type of the argument (as a tree).
621 This is null for libcalls where that information may
622 not be available.
623 CUM is a variable of type CUMULATIVE_ARGS which gives info about
624 the preceding args and about the function being called.
625 NAMED is nonzero if this argument is a named parameter
626 (otherwise it is an extra parameter matching an ellipsis). */
628 struct rtx_def *
629 c4x_function_arg (cum, mode, type, named)
630 CUMULATIVE_ARGS *cum; /* Current arg information. */
631 enum machine_mode mode; /* Current arg mode. */
632 tree type; /* Type of the arg or 0 if lib support. */
633 int named; /* != 0 for normal args, == 0 for ... args. */
/* reg == 0 doubles as the "pass on stack" sentinel; R0 is therefore
   never used for argument passing (see the model comment above).  */
635 int reg = 0; /* Default to passing argument on stack. */
/* First call for this function: convert the counts gathered by
   c4x_init_cumulative_args into register-slot limits.  */
637 if (! cum->init)
639 /* We can handle at most 2 floats in R2, R3. */
640 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
642 /* We can handle at most 6 integers minus number of floats passed
643 in registers. */
644 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
645 6 - cum->maxfloats : cum->ints;
647 /* If there is no prototype, assume all the arguments are integers. */
648 if (! cum->prototype)
649 cum->maxints = 6;
651 cum->ints = cum->floats = 0;
652 cum->init = 1;
655 /* This marks the last argument. We don't need to pass this through
656 to the call insn. */
657 if (type == void_type_node)
658 return 0;
660 if (! TARGET_MEMPARM
661 && named
662 && type
663 && ! MUST_PASS_IN_STACK (mode, type))
665 /* Look for float, double, or long double argument. */
666 if (mode == QFmode || mode == HFmode)
668 if (cum->floats < cum->maxfloats)
669 reg = c4x_fp_reglist[cum->floats];
671 /* Look for integer, enumeral, boolean, char, or pointer argument. */
672 else if (mode == QImode || mode == Pmode)
674 if (cum->ints < cum->maxints)
675 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
678 else if (! TARGET_MEMPARM && ! type)
680 /* We could use a different argument calling model for libcalls,
681 since we're only calling functions in libgcc. Thus we could
682 pass arguments for long longs in registers rather than on the
683 stack. In the meantime, use the odd TI format. We make the
684 assumption that we won't have more than two floating point
685 args, six integer args, and that all the arguments are of the
686 same mode. */
687 if (mode == QFmode || mode == HFmode)
688 reg = c4x_fp_reglist[cum->floats];
689 else if (mode == QImode || mode == Pmode)
690 reg = c4x_int_reglist[0][cum->ints];
693 if (TARGET_DEBUG)
695 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
696 GET_MODE_NAME (mode), named);
697 if (reg)
698 fprintf (stderr, ", reg=%s", reg_names[reg]);
699 else
700 fprintf (stderr, ", stack");
701 fprintf (stderr, ")\n");
703 if (reg)
704 return gen_rtx_REG (mode, reg);
705 else
706 return NULL_RTX;
709 /* C[34]x arguments grow in weird ways (downwards) that the standard
710 varargs stuff can't handle.. */
712 c4x_va_arg (valist, type)
713 tree valist, type;
715 tree t;
717 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
718 build_int_2 (int_size_in_bytes (type), 0));
719 TREE_SIDE_EFFECTS (t) = 1;
721 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
725 static int
726 c4x_isr_reg_used_p (regno)
727 unsigned int regno;
729 /* Don't save/restore FP or ST, we handle them separately. */
730 if (regno == FRAME_POINTER_REGNUM
731 || IS_ST_REGNO (regno))
732 return 0;
734 /* We could be a little smarter abut saving/restoring DP.
735 We'll only save if for the big memory model or if
736 we're paranoid. ;-) */
737 if (IS_DP_REGNO (regno))
738 return ! TARGET_SMALL || TARGET_PARANOID;
740 /* Only save/restore regs in leaf function that are used. */
741 if (c4x_leaf_function)
742 return regs_ever_live[regno] && fixed_regs[regno] == 0;
744 /* Only save/restore regs that are used by the ISR and regs
745 that are likely to be used by functions the ISR calls
746 if they are not fixed. */
747 return IS_EXT_REGNO (regno)
748 || ((regs_ever_live[regno] || call_used_regs[regno])
749 && fixed_regs[regno] == 0);
753 static int
754 c4x_leaf_function_p ()
756 /* A leaf function makes no calls, so we only need
757 to save/restore the registers we actually use.
758 For the global variable leaf_function to be set, we need
759 to define LEAF_REGISTERS and all that it entails.
760 Let's check ourselves... */
762 if (lookup_attribute ("leaf_pretend",
763 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
764 return 1;
766 /* Use the leaf_pretend attribute at your own risk. This is a hack
767 to speed up ISRs that call a function infrequently where the
768 overhead of saving and restoring the additional registers is not
769 warranted. You must save and restore the additional registers
770 required by the called function. Caveat emptor. Here's enough
771 rope... */
773 if (leaf_function_p ())
774 return 1;
776 return 0;
780 static int
781 c4x_assembler_function_p ()
783 tree type;
785 type = TREE_TYPE (current_function_decl);
786 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
787 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
792 c4x_interrupt_function_p ()
794 if (lookup_attribute ("interrupt",
795 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
796 return 1;
798 /* Look for TI style c_intnn. */
799 return current_function_name[0] == 'c'
800 && current_function_name[1] == '_'
801 && current_function_name[2] == 'i'
802 && current_function_name[3] == 'n'
803 && current_function_name[4] == 't'
804 && ISDIGIT (current_function_name[5])
805 && ISDIGIT (current_function_name[6]);
/* Emit RTL for the current function's prologue: save what needs
   saving, establish the frame pointer if required, and allocate the
   local frame by adding SIZE to SP.  Interrupt functions get a
   heavier prologue; __assembler__ functions get none at all.  */
808 void
809 c4x_expand_prologue ()
811 unsigned int regno;
812 int size = get_frame_size ();
813 rtx insn;
815 /* In functions where ar3 is not used but frame pointers are still
816 specified, frame pointers are not adjusted (if >= -O2) and this
817 is used so it won't needlessly push the frame pointer. */
818 int dont_push_ar3;
820 /* For __assembler__ function don't build a prologue. */
821 if (c4x_assembler_function_p ())
823 return;
826 /* For __interrupt__ function build specific prologue. */
827 if (c4x_interrupt_function_p ())
829 c4x_leaf_function = c4x_leaf_function_p ();
831 insn = emit_insn (gen_push_st ());
832 RTX_FRAME_RELATED_P (insn) = 1;
833 if (size)
835 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
836 RTX_FRAME_RELATED_P (insn) = 1;
837 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
838 gen_rtx_REG (QImode, SP_REGNO)));
839 RTX_FRAME_RELATED_P (insn) = 1;
840 /* We require that an ISR uses fewer than 32768 words of
841 local variables, otherwise we have to go to lots of
842 effort to save a register, load it with the desired size,
843 adjust the stack pointer, and then restore the modified
844 register. Frankly, I think it is a poor ISR that
845 requires more than 32767 words of local temporary
846 storage! */
847 if (size > 32767)
848 error ("ISR %s requires %d words of local vars, max is 32767",
849 current_function_name, size);
851 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
852 gen_rtx_REG (QImode, SP_REGNO),
853 GEN_INT (size)));
854 RTX_FRAME_RELATED_P (insn) = 1;
/* Save every register the ISR might disturb (see c4x_isr_reg_used_p);
   extended registers are pushed twice, QI then QF, to preserve all
   40 bits.  */
856 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
858 if (c4x_isr_reg_used_p (regno))
860 if (regno == DP_REGNO)
862 insn = emit_insn (gen_push_dp ());
863 RTX_FRAME_RELATED_P (insn) = 1;
865 else
867 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
868 RTX_FRAME_RELATED_P (insn) = 1;
869 if (IS_EXT_REGNO (regno))
871 insn = emit_insn (gen_pushqf
872 (gen_rtx_REG (QFmode, regno)));
873 RTX_FRAME_RELATED_P (insn) = 1;
878 /* We need to clear the repeat mode flag if the ISR is
879 going to use a RPTB instruction or uses the RC, RS, or RE
880 registers. */
881 if (regs_ever_live[RC_REGNO]
882 || regs_ever_live[RS_REGNO]
883 || regs_ever_live[RE_REGNO])
885 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
886 RTX_FRAME_RELATED_P (insn) = 1;
889 /* Reload DP reg if we are paranoid about some turkey
890 violating small memory model rules. */
891 if (TARGET_SMALL && TARGET_PARANOID)
893 insn = emit_insn (gen_set_ldp_prologue
894 (gen_rtx_REG (QImode, DP_REGNO),
895 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
896 RTX_FRAME_RELATED_P (insn) = 1;
899 else
901 if (frame_pointer_needed)
903 if ((size != 0)
904 || (current_function_args_size != 0)
905 || (optimize < 2))
907 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
908 RTX_FRAME_RELATED_P (insn) = 1;
909 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
910 gen_rtx_REG (QImode, SP_REGNO)));
911 RTX_FRAME_RELATED_P (insn) = 1;
912 dont_push_ar3 = 1;
914 else
916 /* Since ar3 is not used, we don't need to push it. */
917 dont_push_ar3 = 1;
920 else
922 /* If we use ar3, we need to push it. */
923 dont_push_ar3 = 0;
924 if ((size != 0) || (current_function_args_size != 0))
926 /* If we are omitting the frame pointer, we still have
927 to make space for it so the offsets are correct
928 unless we don't use anything on the stack at all. */
929 size += 1;
933 if (size > 32767)
935 /* Local vars are too big, it will take multiple operations
936 to increment SP. */
/* Build the large constant in R1: the C3x lacks the insns to load a
   32-bit immediate directly, so shift the high half into place; the
   C4x can OR the two halves together.  */
937 if (TARGET_C3X)
939 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
940 GEN_INT(size >> 16)));
941 RTX_FRAME_RELATED_P (insn) = 1;
942 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
943 gen_rtx_REG (QImode, R1_REGNO),
944 GEN_INT(-16)));
945 RTX_FRAME_RELATED_P (insn) = 1;
947 else
949 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
950 GEN_INT(size & ~0xffff)));
951 RTX_FRAME_RELATED_P (insn) = 1;
953 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
954 gen_rtx_REG (QImode, R1_REGNO),
955 GEN_INT(size & 0xffff)));
956 RTX_FRAME_RELATED_P (insn) = 1;
957 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
958 gen_rtx_REG (QImode, SP_REGNO),
959 gen_rtx_REG (QImode, R1_REGNO)));
960 RTX_FRAME_RELATED_P (insn) = 1;
962 else if (size != 0)
964 /* Local vars take up less than 32767 words, so we can directly
965 add the number. */
966 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
967 gen_rtx_REG (QImode, SP_REGNO),
968 GEN_INT (size)));
969 RTX_FRAME_RELATED_P (insn) = 1;
/* Save the call-saved registers this function clobbers.  */
972 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
974 if (regs_ever_live[regno] && ! call_used_regs[regno])
976 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
978 if (TARGET_PRESERVE_FLOAT)
980 insn = emit_insn (gen_pushqi
981 (gen_rtx_REG (QImode, regno)));
982 RTX_FRAME_RELATED_P (insn) = 1;
984 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
985 RTX_FRAME_RELATED_P (insn) = 1;
987 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
989 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
990 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit RTL for the current function's epilogue: undo the prologue in
   reverse order (restore saved registers, deallocate the frame by
   subtracting from SP, restore the frame pointer) and emit the
   appropriate return insn.  */
998 void
999 c4x_expand_epilogue()
1001 int regno;
/* Nonzero when the return address was preloaded into R2 and we must
   return via an indirect jump (frame-pointer case below).  */
1002 int jump = 0;
1003 int dont_pop_ar3;
1004 rtx insn;
1005 int size = get_frame_size ();
1007 /* For __assembler__ function build no epilogue. */
1008 if (c4x_assembler_function_p ())
1010 insn = emit_jump_insn (gen_return_from_epilogue ());
1011 RTX_FRAME_RELATED_P (insn) = 1;
1012 return;
1015 /* For __interrupt__ function build specific epilogue. */
1016 if (c4x_interrupt_function_p ())
/* Pop in the reverse order of the prologue's pushes.  */
1018 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1020 if (! c4x_isr_reg_used_p (regno))
1021 continue;
1022 if (regno == DP_REGNO)
1024 insn = emit_insn (gen_pop_dp ());
1025 RTX_FRAME_RELATED_P (insn) = 1;
1027 else
1029 /* We have to use unspec because the compiler will delete insns
1030 that are not call-saved. */
1031 if (IS_EXT_REGNO (regno))
1033 insn = emit_insn (gen_popqf_unspec
1034 (gen_rtx_REG (QFmode, regno)));
1035 RTX_FRAME_RELATED_P (insn) = 1;
1037 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1038 RTX_FRAME_RELATED_P (insn) = 1;
1041 if (size)
1043 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1044 gen_rtx_REG (QImode, SP_REGNO),
1045 GEN_INT(size)));
1046 RTX_FRAME_RELATED_P (insn) = 1;
1047 insn = emit_insn (gen_popqi
1048 (gen_rtx_REG (QImode, AR3_REGNO)));
1049 RTX_FRAME_RELATED_P (insn) = 1;
1051 insn = emit_insn (gen_pop_st ());
1052 RTX_FRAME_RELATED_P (insn) = 1;
1053 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1054 RTX_FRAME_RELATED_P (insn) = 1;
1056 else
1058 if (frame_pointer_needed)
1060 if ((size != 0)
1061 || (current_function_args_size != 0)
1062 || (optimize < 2))
/* Fetch the return address from [AR3 - 1] into R2 now, so the
   function can return with an indirect jump after the frame is torn
   down.  */
1064 insn = emit_insn
1065 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1066 gen_rtx_MEM (QImode,
1067 gen_rtx_PLUS
1068 (QImode, gen_rtx_REG (QImode,
1069 AR3_REGNO),
1070 GEN_INT(-1)))));
1071 RTX_FRAME_RELATED_P (insn) = 1;
1073 /* We already have the return value and the fp,
1074 so we need to add those to the stack. */
1075 size += 2;
1076 jump = 1;
1077 dont_pop_ar3 = 1;
1079 else
1081 /* Since ar3 is not used for anything, we don't need to
1082 pop it. */
1083 dont_pop_ar3 = 1;
1086 else
1088 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1089 if (size || current_function_args_size)
1091 /* If we are omitting the frame pointer, we still have
1092 to make space for it so the offsets are correct
1093 unless we don't use anything on the stack at all. */
1094 size += 1;
1098 /* Now restore the saved registers, putting in the delayed branch
1099 where required. */
1100 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1102 if (regs_ever_live[regno] && ! call_used_regs[regno])
1104 if (regno == AR3_REGNO && dont_pop_ar3)
1105 continue;
1107 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1109 insn = emit_insn (gen_popqf_unspec
1110 (gen_rtx_REG (QFmode, regno)));
1111 RTX_FRAME_RELATED_P (insn) = 1;
1112 if (TARGET_PRESERVE_FLOAT)
1114 insn = emit_insn (gen_popqi_unspec
1115 (gen_rtx_REG (QImode, regno)));
1116 RTX_FRAME_RELATED_P (insn) = 1;
1119 else
1121 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1122 RTX_FRAME_RELATED_P (insn) = 1;
1127 if (frame_pointer_needed)
1129 if ((size != 0)
1130 || (current_function_args_size != 0)
1131 || (optimize < 2))
1133 /* Restore the old FP. */
1134 insn = emit_insn
1135 (gen_movqi
1136 (gen_rtx_REG (QImode, AR3_REGNO),
1137 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1139 RTX_FRAME_RELATED_P (insn) = 1;
1143 if (size > 32767)
1145 /* Local vars are too big, it will take multiple operations
1146 to decrement SP. */
/* Build the large constant in R3, mirroring the prologue's use of R1
   (see c4x_expand_prologue for the C3x vs C4x constant-building
   difference).  */
1147 if (TARGET_C3X)
1149 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1150 GEN_INT(size >> 16)));
1151 RTX_FRAME_RELATED_P (insn) = 1;
1152 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1153 gen_rtx_REG (QImode, R3_REGNO),
1154 GEN_INT(-16)));
1155 RTX_FRAME_RELATED_P (insn) = 1;
1157 else
1159 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1160 GEN_INT(size & ~0xffff)));
1161 RTX_FRAME_RELATED_P (insn) = 1;
1163 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1164 gen_rtx_REG (QImode, R3_REGNO),
1165 GEN_INT(size & 0xffff)));
1166 RTX_FRAME_RELATED_P (insn) = 1;
1167 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1168 gen_rtx_REG (QImode, SP_REGNO),
1169 gen_rtx_REG (QImode, R3_REGNO)));
1170 RTX_FRAME_RELATED_P (insn) = 1;
1172 else if (size != 0)
1174 /* Local vars take up less than 32768 words, so we can directly
1175 subtract the number. */
1176 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1177 gen_rtx_REG (QImode, SP_REGNO),
1178 GEN_INT(size)));
1179 RTX_FRAME_RELATED_P (insn) = 1;
1182 if (jump)
1184 insn = emit_jump_insn (gen_return_indirect_internal
1185 (gen_rtx_REG (QImode, R2_REGNO)));
1186 RTX_FRAME_RELATED_P (insn) = 1;
1188 else
1190 insn = emit_jump_insn (gen_return_from_epilogue ());
1191 RTX_FRAME_RELATED_P (insn) = 1;
/* Return 1 if the current function's epilogue would be empty: after
   reload, for a non-assembler, non-interrupt function with no alloca,
   no pushed arguments, no frame, at -O2 or better, and with no
   call-saved register live except possibly AR3.  Otherwise return 0.  */
1198 c4x_null_epilogue_p ()
1200 int regno;
1202 if (reload_completed
1203 && ! c4x_assembler_function_p ()
1204 && ! c4x_interrupt_function_p ()
1205 && ! current_function_calls_alloca
1206 && ! current_function_args_size
1207 && ! (optimize < 2)
1208 && ! get_frame_size ())
1210 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1211 if (regs_ever_live[regno] && ! call_used_regs[regno]
1212 && (regno != AR3_REGNO))
1213 return 0;
1214 return 1;
1216 return 0;
/* Expand a move between OPERANDS[0] and OPERANDS[1] in mode MODE,
   rewriting addresses and constants into forms the c4x move patterns
   accept (forcing to registers/constant pool, exposing the DP register
   for direct addressing, splitting mixed-mode subregs and big
   constants).  Return 1 if the move was emitted completely here;
   return 0 with possibly-adjusted OPERANDS so the caller emits the
   normal move pattern.  */
1221 c4x_emit_move_sequence (operands, mode)
1222 rtx *operands;
1223 enum machine_mode mode;
1225 rtx op0 = operands[0];
1226 rtx op1 = operands[1];
1228 if (! reload_in_progress
1229 && ! REG_P (op0)
1230 && ! REG_P (op1)
1231 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1232 op1 = force_reg (mode, op1);
1234 if (GET_CODE (op1) == LO_SUM
1235 && GET_MODE (op1) == Pmode
1236 && dp_reg_operand (XEXP (op1, 0), mode))
1238 /* expand_increment will sometimes create a LO_SUM immediate
1239 address.  Strip the LO_SUM and use the bare address.  */
1240 op1 = XEXP (op1, 1);
1242 else if (symbolic_address_operand (op1, mode))
1244 if (TARGET_LOAD_ADDRESS)
1246 /* Alias analysis seems to do a better job if we force
1247 constant addresses to memory after reload. */
1248 emit_insn (gen_load_immed_address (op0, op1));
1249 return 1;
1251 else
1253 /* Stick symbol or label address into the constant pool. */
1254 op1 = force_const_mem (Pmode, op1);
1257 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1259 /* We could be a lot smarter about loading some of these
1260 constants... */
1261 op1 = force_const_mem (mode, op1);
1264 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1265 and emit associated (HIGH (SYMREF)) if large memory model.
1266 c4x_legitimize_address could be used to do this,
1267 perhaps by calling validize_address. */
1268 if (TARGET_EXPOSE_LDP
1269 && ! (reload_in_progress || reload_completed)
1270 && GET_CODE (op1) == MEM
1271 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1273 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1274 if (! TARGET_SMALL)
1275 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1276 op1 = change_address (op1, mode,
1277 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
1280 if (TARGET_EXPOSE_LDP
1281 && ! (reload_in_progress || reload_completed)
1282 && GET_CODE (op0) == MEM
1283 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1285 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1286 if (! TARGET_SMALL)
1287 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1288 op0 = change_address (op0, mode,
1289 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1292 if (GET_CODE (op0) == SUBREG
1293 && mixed_subreg_operand (op0, mode))
1295 /* We should only generate these mixed mode patterns
1296 during RTL generation. If we need do it later on
1297 then we'll have to emit patterns that won't clobber CC. */
1298 if (reload_in_progress || reload_completed)
1299 abort ();
1300 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1301 op0 = SUBREG_REG (op0);
1302 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1304 op0 = copy_rtx (op0);
1305 PUT_MODE (op0, QImode);
1307 else
1308 abort ();
1310 if (mode == QFmode)
1311 emit_insn (gen_storeqf_int_clobber (op0, op1));
1312 else
1313 abort ();
1314 return 1;
1317 if (GET_CODE (op1) == SUBREG
1318 && mixed_subreg_operand (op1, mode))
1320 /* We should only generate these mixed mode patterns
1321 during RTL generation. If we need do it later on
1322 then we'll have to emit patterns that won't clobber CC. */
1323 if (reload_in_progress || reload_completed)
1324 abort ();
1325 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1326 op1 = SUBREG_REG (op1);
1327 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1329 op1 = copy_rtx (op1);
1330 PUT_MODE (op1, QImode);
1332 else
1333 abort ();
1335 if (mode == QFmode)
1336 emit_insn (gen_loadqf_int_clobber (op0, op1));
1337 else
1338 abort ();
1339 return 1;
1342 if (mode == QImode
1343 && reg_operand (op0, mode)
1344 && const_int_operand (op1, mode)
1345 && ! IS_INT16_CONST (INTVAL (op1))
1346 && ! IS_HIGH_CONST (INTVAL (op1)))
1348 emit_insn (gen_loadqi_big_constant (op0, op1));
1349 return 1;
1352 if (mode == HImode
1353 && reg_operand (op0, mode)
1354 && const_int_operand (op1, mode))
1356 emit_insn (gen_loadhi_big_constant (op0, op1));
1357 return 1;
1360 /* Adjust operands in case we have modified them. */
1361 operands[0] = op0;
1362 operands[1] = op1;
1364 /* Emit normal pattern. */
1365 return 0;
/* Emit a call to library function LIBCALL, passing the NOPERANDS-1
   source operands (OPERANDS[1..]) in mode SMODE and storing the DMODE
   result in OPERANDS[0].  CODE is the rtx code of the operation the
   call computes; it is used to build the equivalent RTL attached via
   emit_libcall_block so later passes can CSE the call.  Aborts unless
   NOPERANDS is 2 or 3.  */
1369 void
1370 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1371 rtx libcall;
1372 enum rtx_code code;
1373 enum machine_mode dmode;
1374 enum machine_mode smode;
1375 int noperands;
1376 rtx *operands;
1378 rtx ret;
1379 rtx insns;
1380 rtx equiv;
1382 start_sequence ();
1383 switch (noperands)
1385 case 2:
1386 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1387 operands[1], smode);
1388 equiv = gen_rtx (code, dmode, operands[1]);
1389 break;
1391 case 3:
1392 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1393 operands[1], smode, operands[2], smode);
1394 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1395 break;
1397 default:
1398 abort ();
1401 insns = get_insns ();
1402 end_sequence ();
1403 emit_libcall_block (insns, operands[0], ret, equiv);
/* Convenience wrapper: emit a two-source-operand library call where
   the source and destination modes are both MODE.  */
1407 void
1408 c4x_emit_libcall3 (libcall, code, mode, operands)
1409 rtx libcall;
1410 enum rtx_code code;
1411 enum machine_mode mode;
1412 rtx *operands;
1414 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
/* Emit a library call computing the high part of a widening multiply:
   OPERANDS[0] = (MODE) ((CODE-extended OPERANDS[1] * CODE-extended
   OPERANDS[2]) >> 32).  CODE is SIGN_EXTEND or ZERO_EXTEND (presumably;
   it is applied to both sources in the attached equivalence).  */
1418 void
1419 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1420 rtx libcall;
1421 enum rtx_code code;
1422 enum machine_mode mode;
1423 rtx *operands;
1425 rtx ret;
1426 rtx insns;
1427 rtx equiv;
1429 start_sequence ();
1430 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1431 operands[1], mode, operands[2], mode);
1432 equiv = gen_rtx_TRUNCATE (mode,
1433 gen_rtx_LSHIFTRT (HImode,
1434 gen_rtx_MULT (HImode,
1435 gen_rtx (code, HImode, operands[1]),
1436 gen_rtx (code, HImode, operands[2])),
1437 GEN_INT (32)));
1438 insns = get_insns ();
1439 end_sequence ();
1440 emit_libcall_block (insns, operands[0], ret, equiv);
1444 /* Set the SYMBOL_REF_FLAG for a function decl. However, we do not
1445 yet use this info. */
1447 static void
1448 c4x_encode_section_info (decl, first)
1449 tree decl;
1450 int first ATTRIBUTE_UNUSED;
1452 if (TREE_CODE (decl) == FUNCTION_DECL)
1453 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
/* GO_IF_LEGITIMATE_ADDRESS worker: return 1 if ADDR is a valid memory
   address for mode MODE, else 0.  STRICT nonzero means base/index
   registers must be hard registers of the proper class; when zero,
   pseudos are also accepted.  The address is decomposed into an
   optional base register, index register, and displacement, which are
   then validated individually at the end.  */
1458 c4x_check_legit_addr (mode, addr, strict)
1459 enum machine_mode mode;
1460 rtx addr;
1461 int strict;
1463 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1464 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1465 rtx disp = NULL_RTX; /* Displacement. */
1466 enum rtx_code code;
1468 code = GET_CODE (addr);
1469 switch (code)
1471 /* Register indirect with auto increment/decrement. We don't
1472 allow SP here---push_operand should recognize an operand
1473 being pushed on the stack. */
1475 case PRE_DEC:
1476 case PRE_INC:
1477 case POST_DEC:
1478 if (mode != QImode && mode != QFmode)
1479 return 0;
/* Fall through: all four share the base-register check below.  */
1481 case POST_INC:
1482 base = XEXP (addr, 0);
1483 if (! REG_P (base))
1484 return 0;
1485 break;
1487 case PRE_MODIFY:
1488 case POST_MODIFY:
1490 rtx op0 = XEXP (addr, 0);
1491 rtx op1 = XEXP (addr, 1);
1493 if (mode != QImode && mode != QFmode)
1494 return 0;
1496 if (! REG_P (op0)
1497 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1498 return 0;
1499 base = XEXP (op1, 0);
1500 if (base != op0)
1501 return 0;
1502 if (REG_P (XEXP (op1, 1)))
1503 indx = XEXP (op1, 1);
1504 else
1505 disp = XEXP (op1, 1);
1507 break;
1509 /* Register indirect. */
1510 case REG:
1511 base = addr;
1512 break;
1514 /* Register indirect with displacement or index. */
1515 case PLUS:
1517 rtx op0 = XEXP (addr, 0);
1518 rtx op1 = XEXP (addr, 1);
1519 enum rtx_code code0 = GET_CODE (op0);
1521 switch (code0)
1523 case REG:
1524 if (REG_P (op1))
1526 base = op0; /* Base + index. */
1527 indx = op1;
1528 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1530 base = op1;
1531 indx = op0;
1534 else
1536 base = op0; /* Base + displacement. */
1537 disp = op1;
1539 break;
1541 default:
1542 return 0;
1545 break;
1547 /* Direct addressing with DP register. */
1548 case LO_SUM:
1550 rtx op0 = XEXP (addr, 0);
1551 rtx op1 = XEXP (addr, 1);
1553 /* HImode and HFmode direct memory references aren't truly
1554 offsettable (consider case at end of data page). We
1555 probably get better code by loading a pointer and using an
1556 indirect memory reference. */
1557 if (mode == HImode || mode == HFmode)
1558 return 0;
1560 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1561 return 0;
1563 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1564 return 1;
1566 if (GET_CODE (op1) == CONST)
1567 return 1;
1568 return 0;
1570 break;
1572 /* Direct addressing with some work for the assembler... */
1573 case CONST:
1574 /* Direct addressing. */
1575 case LABEL_REF:
1576 case SYMBOL_REF:
1577 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1578 return 1;
1579 /* These need to be converted to a LO_SUM (...).
1580 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1581 return 0;
1583 /* Do not allow direct memory access to absolute addresses.
1584 This is more pain than it's worth, especially for the
1585 small memory model where we can't guarantee that
1586 this address is within the data page---we don't want
1587 to modify the DP register in the small memory model,
1588 even temporarily, since an interrupt can sneak in.... */
1589 case CONST_INT:
1590 return 0;
1592 /* Indirect indirect addressing. */
1593 case MEM:
1594 return 0;
1596 case CONST_DOUBLE:
1597 fatal_insn ("using CONST_DOUBLE for address", addr);
1599 default:
1600 return 0;
1603 /* Validate the base register. */
1604 if (base)
1606 /* Check that the address is offsettable for HImode and HFmode. */
1607 if (indx && (mode == HImode || mode == HFmode))
1608 return 0;
1610 /* Handle DP based stuff. */
1611 if (REGNO (base) == DP_REGNO)
1612 return 1;
1613 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1614 return 0;
1615 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1616 return 0;
1619 /* Now validate the index register. */
1620 if (indx)
1622 if (GET_CODE (indx) != REG)
1623 return 0;
1624 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1625 return 0;
1626 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1627 return 0;
1630 /* Validate displacement. */
1631 if (disp)
1633 if (GET_CODE (disp) != CONST_INT)
1634 return 0;
1635 if (mode == HImode || mode == HFmode)
1637 /* The offset displacement must be legitimate. */
1638 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1639 return 0;
1641 else
1643 if (! IS_DISP8_CONST (INTVAL (disp)))
1644 return 0;
1646 /* Can't add an index with a disp. */
1647 if (indx)
1648 return 0;
1650 return 1;
/* LEGITIMIZE_ADDRESS worker: try to rewrite symbolic address ORIG into
   a legitimate form for mode MODE.  For HImode/HFmode the address is
   forced into a register so it is offsettable; otherwise a
   (LO_SUM DP orig) is built, loading DP first in the large memory
   model.  Returns the new address, or NULL_RTX if no rewrite applies.  */
1655 c4x_legitimize_address (orig, mode)
1656 rtx orig ATTRIBUTE_UNUSED;
1657 enum machine_mode mode ATTRIBUTE_UNUSED;
1659 if (GET_CODE (orig) == SYMBOL_REF
1660 || GET_CODE (orig) == LABEL_REF)
1662 if (mode == HImode || mode == HFmode)
1664 /* We need to force the address into
1665 a register so that it is offsettable. */
1666 rtx addr_reg = gen_reg_rtx (Pmode);
1667 emit_move_insn (addr_reg, orig)
1668 return addr_reg;
1670 else
1672 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1674 if (! TARGET_SMALL)
1675 emit_insn (gen_set_ldp (dp_reg, orig));
1677 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1681 return NULL_RTX;
1685 /* Provide the costs of an addressing mode that contains ADDR.
1686 If ADDR is not a valid address, its cost is irrelevant.
1687 This is used in cse and loop optimisation to determine
1688 if it is worthwhile storing a common address into a register.
1689 Unfortunately, the C4x address cost depends on other operands.
   Smaller return values mean cheaper addresses; plain register and
   autoincrement forms are cheapest, symbolic forms most expensive.  */
1691 int
1692 c4x_address_cost (addr)
1693 rtx addr;
1695 switch (GET_CODE (addr))
1697 case REG:
1698 return 1;
1700 case POST_INC:
1701 case POST_DEC:
1702 case PRE_INC:
1703 case PRE_DEC:
1704 return 1;
1706 /* These shouldn't be directly generated. */
1707 case SYMBOL_REF:
1708 case LABEL_REF:
1709 case CONST:
1710 return 10;
1712 case LO_SUM:
1714 rtx op1 = XEXP (addr, 1);
1716 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1717 return TARGET_SMALL ? 3 : 4;
1719 if (GET_CODE (op1) == CONST)
1721 rtx offset = const0_rtx;
1723 op1 = eliminate_constant_term (op1, &offset);
1725 /* ??? These costs need rethinking... */
1726 if (GET_CODE (op1) == LABEL_REF)
1727 return 3;
1729 if (GET_CODE (op1) != SYMBOL_REF)
1730 return 4;
1732 if (INTVAL (offset) == 0)
1733 return 3;
1735 return 4;
1737 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1739 break;
1741 case PLUS:
1743 register rtx op0 = XEXP (addr, 0);
1744 register rtx op1 = XEXP (addr, 1);
1746 if (GET_CODE (op0) != REG)
1747 break;
1749 switch (GET_CODE (op1))
1751 default:
1752 break;
1754 case REG:
1755 /* This cost for REG+REG must be greater than the cost
1756 for REG if we want autoincrement addressing modes. */
1757 return 2;
1759 case CONST_INT:
1760 /* The following tries to improve GIV combination
1761 in strength reduce but appears not to help. */
1762 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1763 return 1;
1765 if (IS_DISP1_CONST (INTVAL (op1)))
1766 return 1;
1768 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
1769 return 2;
1771 return 3;
1774 default:
1775 break;
1778 return 4;
/* Emit a comparison of X and Y that sets the status register, and
   return the CC register rtx in the mode selected for comparison CODE.
   Returns NULL_RTX for signed inequalities in CC_NOOVmode, which the
   machine cannot test reliably without the overflow flag.  */
1783 c4x_gen_compare_reg (code, x, y)
1784 enum rtx_code code;
1785 rtx x, y;
1787 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1788 rtx cc_reg;
1790 if (mode == CC_NOOVmode
1791 && (code == LE || code == GE || code == LT || code == GT))
1792 return NULL_RTX;
1794 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1795 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1796 gen_rtx_COMPARE (mode, x, y)));
1797 return cc_reg;
1800 char *
1801 c4x_output_cbranch (form, seq)
1802 const char *form;
1803 rtx seq;
1805 int delayed = 0;
1806 int annultrue = 0;
1807 int annulfalse = 0;
1808 rtx delay;
1809 char *cp;
1810 static char str[100];
1812 if (final_sequence)
1814 delay = XVECEXP (final_sequence, 0, 1);
1815 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1816 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1817 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1819 strcpy (str, form);
1820 cp = &str [strlen (str)];
1821 if (delayed)
1823 *cp++ = '%';
1824 *cp++ = '#';
1826 if (annultrue)
1828 *cp++ = 'a';
1829 *cp++ = 't';
1831 if (annulfalse)
1833 *cp++ = 'a';
1834 *cp++ = 'f';
1836 *cp++ = '\t';
1837 *cp++ = '%';
1838 *cp++ = 'l';
1839 *cp++ = '1';
1840 *cp = 0;
1841 return str;
/* PRINT_OPERAND worker: output operand OP to FILE for the %LETTER
   operand modifier (LETTER == 0 for a plain operand).  The first
   switch handles modifiers that need no operand (%#); the second
   handles per-operand modifiers; the final switch prints the operand
   itself (register, memory, constant, or condition-code mnemonic).  */
1844 void
1845 c4x_print_operand (file, op, letter)
1846 FILE *file; /* File to write to. */
1847 rtx op; /* Operand to print. */
1848 int letter; /* %<letter> or 0. */
1850 rtx op1;
1851 enum rtx_code code;
1853 switch (letter)
1855 case '#': /* Delayed. */
1856 if (final_sequence)
1857 fprintf (file, "d");
1858 return;
1861 code = GET_CODE (op);
1862 switch (letter)
1864 case 'A': /* Direct address. */
1865 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1866 fprintf (file, "@");
1867 break;
1869 case 'H': /* Sethi. */
1870 output_addr_const (file, op);
1871 return;
1873 case 'I': /* Reversed condition. */
1874 code = reverse_condition (code);
1875 break;
1877 case 'L': /* Log 2 of constant. */
1878 if (code != CONST_INT)
1879 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1880 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1881 return;
1883 case 'N': /* Ones complement of small constant. */
1884 if (code != CONST_INT)
1885 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1886 fprintf (file, "%d", ~INTVAL (op));
1887 return;
1889 case 'K': /* Generate ldp(k) if direct address. */
1890 if (! TARGET_SMALL
1891 && code == MEM
1892 && GET_CODE (XEXP (op, 0)) == LO_SUM
1893 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1894 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1896 op1 = XEXP (XEXP (op, 0), 1);
1897 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1899 fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1900 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1901 fprintf (file, "\n");
1904 return;
1906 case 'M': /* Generate ldp(k) if direct address. */
1907 if (! TARGET_SMALL /* Only used in asm statements. */
1908 && code == MEM
1909 && (GET_CODE (XEXP (op, 0)) == CONST
1910 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1912 fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1913 output_address (XEXP (op, 0));
1914 fprintf (file, "\n\t");
1916 return;
1918 case 'O': /* Offset address. */
1919 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1920 break;
1921 else if (code == MEM)
1922 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1923 else if (code == REG)
1924 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1925 else
1926 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1927 return;
1929 case 'C': /* Call. */
1930 break;
1932 case 'U': /* Call/callu. */
1933 if (code != SYMBOL_REF)
1934 fprintf (file, "u");
1935 return;
1937 default:
1938 break;
1941 switch (code)
1943 case REG:
1944 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1945 && ! TARGET_TI)
1946 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1947 else
1948 fprintf (file, "%s", reg_names[REGNO (op)]);
1949 break;
1951 case MEM:
1952 output_address (XEXP (op, 0));
1953 break;
1955 case CONST_DOUBLE:
1957 char str[30];
1958 REAL_VALUE_TYPE r;
1960 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
1961 REAL_VALUE_TO_DECIMAL (r, str, -1);
1962 fprintf (file, "%s", str);
1964 break;
1966 case CONST_INT:
1967 fprintf (file, "%d", INTVAL (op));
1968 break;
1970 case NE:
1971 fprintf (file, "ne");
1972 break;
1974 case EQ:
1975 fprintf (file, "eq");
1976 break;
1978 case GE:
1979 fprintf (file, "ge");
1980 break;
1982 case GT:
1983 fprintf (file, "gt");
1984 break;
1986 case LE:
1987 fprintf (file, "le");
1988 break;
1990 case LT:
1991 fprintf (file, "lt");
1992 break;
1994 case GEU:
1995 fprintf (file, "hs");
1996 break;
1998 case GTU:
1999 fprintf (file, "hi");
2000 break;
2002 case LEU:
2003 fprintf (file, "ls");
2004 break;
2006 case LTU:
2007 fprintf (file, "lo");
2008 break;
2010 case SYMBOL_REF:
2011 output_addr_const (file, op);
2012 break;
2014 case CONST:
2015 output_addr_const (file, XEXP (op, 0));
2016 break;
2018 case CODE_LABEL:
2019 break;
2021 default:
2022 fatal_insn ("c4x_print_operand: Bad operand case", op);
2023 break;
/* PRINT_OPERAND_ADDRESS worker: output memory address ADDR to FILE
   using TI assembler syntax ("*ARn", "*++ARn(d)", "@sym", etc.).
   Aborts via fatal_insn on address forms that should never reach
   final output (e.g. bare CONST_INT addresses).  */
2028 void
2029 c4x_print_operand_address (file, addr)
2030 FILE *file;
2031 rtx addr;
2033 switch (GET_CODE (addr))
2035 case REG:
2036 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2037 break;
2039 case PRE_DEC:
2040 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2041 break;
2043 case POST_INC:
2044 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2045 break;
2047 case POST_MODIFY:
2049 rtx op0 = XEXP (XEXP (addr, 1), 0);
2050 rtx op1 = XEXP (XEXP (addr, 1), 1);
2052 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2053 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2054 reg_names[REGNO (op1)]);
2055 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2056 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2057 INTVAL (op1));
2058 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2059 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2060 -INTVAL (op1));
2061 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2062 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2063 reg_names[REGNO (op1)]);
2064 else
2065 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2067 break;
2069 case PRE_MODIFY:
2071 rtx op0 = XEXP (XEXP (addr, 1), 0);
2072 rtx op1 = XEXP (XEXP (addr, 1), 1);
2074 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2075 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2076 reg_names[REGNO (op1)]);
2077 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2078 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2079 INTVAL (op1));
2080 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2081 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2082 -INTVAL (op1));
2083 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2084 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2085 reg_names[REGNO (op1)]);
2086 else
2087 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2089 break;
2091 case PRE_INC:
2092 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2093 break;
2095 case POST_DEC:
2096 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2097 break;
2099 case PLUS: /* Indirect with displacement. */
2101 rtx op0 = XEXP (addr, 0);
2102 rtx op1 = XEXP (addr, 1);
2104 if (REG_P (op0))
2106 if (REG_P (op1))
2108 if (IS_INDEX_REG (op0))
2110 fprintf (file, "*+%s(%s)",
2111 reg_names[REGNO (op1)],
2112 reg_names[REGNO (op0)]); /* Index + base. */
2114 else
2116 fprintf (file, "*+%s(%s)",
2117 reg_names[REGNO (op0)],
2118 reg_names[REGNO (op1)]); /* Base + index. */
2121 else if (INTVAL (op1) < 0)
2123 fprintf (file, "*-%s(%d)",
2124 reg_names[REGNO (op0)],
2125 -INTVAL (op1)); /* Base - displacement. */
2127 else
2129 fprintf (file, "*+%s(%d)",
2130 reg_names[REGNO (op0)],
2131 INTVAL (op1)); /* Base + displacement. */
2134 else
2135 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2137 break;
2139 case LO_SUM:
2141 rtx op0 = XEXP (addr, 0);
2142 rtx op1 = XEXP (addr, 1);
2144 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2145 c4x_print_operand_address (file, op1);
2146 else
2147 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2149 break;
2151 case CONST:
2152 case SYMBOL_REF:
2153 case LABEL_REF:
2154 fprintf (file, "@");
2155 output_addr_const (file, addr);
2156 break;
2158 /* We shouldn't access CONST_INT addresses. */
2159 case CONST_INT:
2161 default:
2162 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2163 break;
2168 /* Return nonzero if the floating point operand will fit
2169 in the immediate field.
   The value is converted to the target format; it fits when the
   sign-extended 8-bit exponent is in [-7, 7] (or is -128, i.e. 0.0)
   and the low mantissa bits that the immediate field cannot hold
   are all zero.  */
2171 static int
2172 c4x_immed_float_p (op)
2173 rtx op;
2175 long convval[2];
2176 int exponent;
2177 REAL_VALUE_TYPE r;
2179 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2180 if (GET_MODE (op) == HFmode)
2181 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2182 else
2184 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2185 convval[1] = 0;
2188 /* Sign extend exponent. */
2189 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2190 if (exponent == -128)
2191 return 1; /* 0.0 */
2192 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2193 return 0; /* Precision doesn't fit. */
2194 return (exponent <= 7) /* Positive exp. */
2195 && (exponent >= -7); /* Negative exp. */
2199 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
2200 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2202 None of the last four instructions from the bottom of the block can
2203 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2204 BcondAT or RETIcondD.
2206 This routine scans the four previous insns for a jump insn, and if
2207 one is found, returns 1 so that we bung in a nop instruction.
2208 This simple minded strategy will add a nop, when it may not
2209 be required. Say when there is a JUMP_INSN near the end of the
2210 block that doesn't get converted into a delayed branch.
2212 Note that we cannot have a call insn, since we don't generate
2213 repeat loops with calls in them (although I suppose we could, but
2214 there's no benefit.)
2216 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2219 c4x_rptb_nop_p (insn)
2220 rtx insn;
2222 rtx start_label;
2223 int i;
2225 /* Extract the start label from the jump pattern (rptb_end). */
2226 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2228 /* If there is a label at the end of the loop we must insert
2229 a NOP. */
2230 do {
2231 insn = previous_insn (insn);
2232 } while (GET_CODE (insn) == NOTE
2233 || GET_CODE (insn) == USE
2234 || GET_CODE (insn) == CLOBBER);
2235 if (GET_CODE (insn) == CODE_LABEL)
2236 return 1;
2238 for (i = 0; i < 4; i++)
2240 /* Search back for prev non-note and non-label insn. */
2241 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2242 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2244 if (insn == start_label)
2245 return i == 0;
2247 insn = previous_insn (insn);
2250 /* If we have a jump instruction we should insert a NOP. If we
2251 hit repeat block top we should only insert a NOP if the loop
2252 is empty. */
2253 if (GET_CODE (insn) == JUMP_INSN)
2254 return 1;
2255 insn = previous_insn (insn);
2257 return 0;
2261 /* The C4x looping instruction needs to be emitted at the top of the
2262 loop. Emitting the true RTL for a looping instruction at the top of
2263 the loop can cause problems with flow analysis. So instead, a dummy
2264 doloop insn is emitted at the end of the loop. This routine checks
2265 for the presence of this doloop insn and then searches back to the
2266 top of the loop, where it inserts the true looping insn (provided
2267 there are no instructions in the loop which would cause problems).
2268 Any additional labels can be emitted at this point. In addition, if
2269 the desired loop count register was not allocated, this routine does
2270 nothing.
2272 Before we can create a repeat block looping instruction we have to
2273 verify that there are no jumps outside the loop and no jumps outside
2274 the loop go into this loop. This can happen in the basic blocks reorder
2275 pass. The C4x cpu can not handle this. */
/* Return 1 if rtx X (or any subexpression, searched recursively over
   the rtx format string) contains a LABEL_REF to CODE_LABEL.  */
2277 static int
2278 c4x_label_ref_used_p (x, code_label)
2279 rtx x, code_label;
2281 enum rtx_code code;
2282 int i, j;
2283 const char *fmt;
2285 if (x == 0)
2286 return 0;
2288 code = GET_CODE (x);
2289 if (code == LABEL_REF)
2290 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2292 fmt = GET_RTX_FORMAT (code);
2293 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2295 if (fmt[i] == 'e')
2297 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2298 return 1;
2300 else if (fmt[i] == 'E')
2301 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2302 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2303 return 1;
2305 return 0;
/* Return 1 if the loop running from START_LABEL to the rptb_end insn
   INSN may be turned into a repeat block: START_LABEL must precede
   INSN, no jump from outside may target a label inside the block, and
   no jump inside the block may target a label outside it.  */
2309 static int
2310 c4x_rptb_valid_p (insn, start_label)
2311 rtx insn, start_label;
2313 rtx end = insn;
2314 rtx start;
2315 rtx tmp;
2317 /* Find the start label. */
2318 for (; insn; insn = PREV_INSN (insn))
2319 if (insn == start_label)
2320 break;
2322 /* Not found, so we can not use a rptb or rpts. The label was
2323 probably moved by the basic block reorder pass. */
2324 if (! insn)
2325 return 0;
2327 start = insn;
2328 /* If any jump jumps inside this block then we must fail. */
2329 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2331 if (GET_CODE (insn) == CODE_LABEL)
2333 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2334 if (GET_CODE (tmp) == JUMP_INSN
2335 && c4x_label_ref_used_p (tmp, insn))
2336 return 0;
2339 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2341 if (GET_CODE (insn) == CODE_LABEL)
2343 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2344 if (GET_CODE (tmp) == JUMP_INSN
2345 && c4x_label_ref_used_p (tmp, insn))
2346 return 0;
2349 /* If any jump jumps outside this block then we must fail. */
2350 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2352 if (GET_CODE (insn) == CODE_LABEL)
2354 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2355 if (GET_CODE (tmp) == JUMP_INSN
2356 && c4x_label_ref_used_p (tmp, insn))
2357 return 0;
2358 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2359 if (GET_CODE (tmp) == JUMP_INSN
2360 && c4x_label_ref_used_p (tmp, insn))
2361 return 0;
2365 /* All checks OK. */
2366 return 1;
/* Given the dummy rptb_end insn INSN at the bottom of a loop, insert
   the real repeat-block (RPTB/RPTS) looping insn at the loop top, or
   fall back to an explicit decrement-compare-branch sequence when the
   count register is not RC or the block is not rptb-safe.  */
2370 void
2371 c4x_rptb_insert (insn)
2372 rtx insn;
2374 rtx end_label;
2375 rtx start_label;
2376 rtx new_start_label;
2377 rtx count_reg;
2379 /* If the count register has not been allocated to RC, say if
2380 there is a movstr pattern in the loop, then do not insert a
2381 RPTB instruction. Instead we emit a decrement and branch
2382 at the end of the loop. */
2383 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2384 if (REGNO (count_reg) != RC_REGNO)
2385 return;
2387 /* Extract the start label from the jump pattern (rptb_end). */
2388 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2390 if (! c4x_rptb_valid_p (insn, start_label))
2392 /* We can not use the rptb insn. Replace it so reorg can use
2393 the delay slots of the jump insn. */
2394 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2395 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2396 emit_insn_before (gen_bge (start_label), insn);
2397 LABEL_NUSES (start_label)++;
2398 delete_insn (insn);
2399 return;
2402 end_label = gen_label_rtx ();
2403 LABEL_NUSES (end_label)++;
2404 emit_label_after (end_label, insn);
2406 new_start_label = gen_label_rtx ();
2407 LABEL_NUSES (new_start_label)++;
2409 for (; insn; insn = PREV_INSN (insn))
2411 if (insn == start_label)
2412 break;
2413 if (GET_CODE (insn) == JUMP_INSN &&
2414 JUMP_LABEL (insn) == start_label)
2415 redirect_jump (insn, new_start_label, 0);
2417 if (! insn)
2418 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2420 emit_label_after (new_start_label, insn);
2422 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2423 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2424 else
2425 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2426 if (LABEL_NUSES (start_label) == 0)
2427 delete_insn (start_label);
2431 /* This function is a C4x special called immediately before delayed
2432 branch scheduling. We fix up RTPB style loops that didn't get RC
2433 allocated as the loop counter.
   It also splits every recognizable insn so force_const_mem works
   for load_immed_address, turning fully-deleted insns into notes.  */
2435 void
2436 c4x_process_after_reload (first)
2437 rtx first;
2439 rtx insn;
2441 for (insn = first; insn; insn = NEXT_INSN (insn))
2443 /* Look for insn. */
2444 if (INSN_P (insn))
2446 int insn_code_number;
2447 rtx old;
2449 insn_code_number = recog_memoized (insn);
2451 if (insn_code_number < 0)
2452 continue;
2454 /* Insert the RTX for RPTB at the top of the loop
2455 and a label at the end of the loop. */
2456 if (insn_code_number == CODE_FOR_rptb_end)
2457 c4x_rptb_insert(insn);
2459 /* We need to split the insn here. Otherwise the calls to
2460 force_const_mem will not work for load_immed_address. */
2461 old = insn;
2463 /* Don't split the insn if it has been deleted. */
2464 if (! INSN_DELETED_P (old))
2465 insn = try_split (PATTERN(old), old, 1);
2467 /* When not optimizing, the old insn will be still left around
2468 with only the 'deleted' bit set. Transform it into a note
2469 to avoid confusion of subsequent processing. */
2470 if (INSN_DELETED_P (old))
2472 PUT_CODE (old, NOTE);
2473 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2474 NOTE_SOURCE_FILE (old) = 0;
/* Return nonzero if OP is an address register or a pseudo that may
   become one.  */
2482 static int
2483 c4x_a_register (op)
2484 rtx op;
2485 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
/* Return nonzero if OP is an index register or a pseudo that may
   become one.  */
2489 static int
2490 c4x_x_register (op)
2491 rtx op;
2493 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2497 static int
2498 c4x_immed_int_constant (op)
2499 rtx op;
2501 if (GET_CODE (op) != CONST_INT)
2502 return 0;
2504 return GET_MODE (op) == VOIDmode
2505 || GET_MODE_CLASS (op) == MODE_INT
2506 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
/* Return nonzero if OP is an immediate floating point constant
   (a CONST_DOUBLE in QFmode or HFmode).  */
2510 static int
2511 c4x_immed_float_constant (op)
2512 rtx op;
2514 if (GET_CODE (op) != CONST_DOUBLE)
2515 return 0;
2517 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2518 present this only means that a MEM rtx has been generated. It does
2519 not mean the rtx is really in memory. */
2521 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
/* If the constant OP can be synthesized by loading a 16-bit immediate
   and shifting it left, return the required shift count; otherwise
   return -1.  */
2526 c4x_shiftable_constant (op)
2527 rtx op;
2529 int i;
2530 int mask;
2531 int val = INTVAL (op);
/* Find the lowest set bit within the bottom 16 bits; that is the
   candidate shift count.  */
2533 for (i = 0; i < 16; i++)
2535 if (val & (1 << i))
2536 break;
2538 mask = ((0xffff >> i) << 16) | 0xffff;
/* NOTE(review): (1 << 31) on a signed int tests the sign bit here;
   presumably intended as the top-bit mask -- confirm.  */
2539 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2540 : (val >> i) & mask))
2541 return i;
2542 return -1;
/* `H' constraint: an immediate float constant the C4x can encode.  */
2547 c4x_H_constant (op)
2548 rtx op;
2550 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
/* `I' constraint: a 16-bit signed immediate integer constant.  */
2555 c4x_I_constant (op)
2556 rtx op;
2558 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
/* `J' constraint: an 8-bit signed immediate integer constant.
   Only available on the C4x, not the C3x.  */
2563 c4x_J_constant (op)
2564 rtx op;
2566 if (TARGET_C3X)
2567 return 0;
2568 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
/* `K' constraint: a 5-bit signed immediate integer constant
   (C4x only; used by the STIK instruction).  */
2572 static int
2573 c4x_K_constant (op)
2574 rtx op;
2576 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2577 return 0;
2578 return IS_INT5_CONST (INTVAL (op));
/* `L' constraint: a 16-bit unsigned immediate integer constant.  */
2583 c4x_L_constant (op)
2584 rtx op;
2586 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
/* `N' constraint: the ones complement of a 16-bit unsigned constant
   (usable by inverting the operation, e.g. AND -> ANDN).  */
2590 static int
2591 c4x_N_constant (op)
2592 rtx op;
2594 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
/* `O' constraint: a constant with only the high 16 bits set
   (loadable with LDHI).  */
2598 static int
2599 c4x_O_constant (op)
2600 rtx op;
2602 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2606 /* The constraints do not have to check the register class,
2607 except when needed to discriminate between the constraints.
2608 The operand has been checked by the predicates to be valid. */
2610 /* ARx + 9-bit signed const or IRn
2611 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2612 We don't include the pre/post inc/dec forms here since
2613 they are handled by the <> constraints. */
/* Return nonzero if OP is a MEM matching the `Q' constraint described
   in the comment above (base address register, optional index register
   or 8-bit signed displacement).  */
2616 c4x_Q_constraint (op)
2617 rtx op;
2619 enum machine_mode mode = GET_MODE (op);
2621 if (GET_CODE (op) != MEM)
2622 return 0;
2623 op = XEXP (op, 0);
2624 switch (GET_CODE (op))
2626 case REG:
2627 return 1;
2629 case PLUS:
2631 rtx op0 = XEXP (op, 0);
2632 rtx op1 = XEXP (op, 1);
2634 if (! REG_P (op0))
2635 return 0;
2637 if (REG_P (op1))
2638 return 1;
2640 if (GET_CODE (op1) != CONST_INT)
2641 return 0;
2643 /* HImode and HFmode must be offsettable. */
2644 if (mode == HImode || mode == HFmode)
2645 return IS_DISP8_OFF_CONST (INTVAL (op1));
2647 return IS_DISP8_CONST (INTVAL (op1));
2649 break;
2651 default:
2652 break;
2654 return 0;
2658 /* ARx + 5-bit unsigned const
2659 *ARx, *+ARx(n) for n < 32. */
/* Return nonzero if OP is a MEM matching the `R' constraint above.
   Not available on the C3x.  */
2662 c4x_R_constraint (op)
2663 rtx op;
2665 enum machine_mode mode = GET_MODE (op);
2667 if (TARGET_C3X)
2668 return 0;
2669 if (GET_CODE (op) != MEM)
2670 return 0;
2671 op = XEXP (op, 0);
2672 switch (GET_CODE (op))
2674 case REG:
2675 return 1;
2677 case PLUS:
2679 rtx op0 = XEXP (op, 0);
2680 rtx op1 = XEXP (op, 1);
2682 if (! REG_P (op0))
2683 return 0;
2685 if (GET_CODE (op1) != CONST_INT)
2686 return 0;
2688 /* HImode and HFmode must be offsettable. */
2689 if (mode == HImode || mode == HFmode)
2690 return IS_UINT5_CONST (INTVAL (op1) + 1);
2692 return IS_UINT5_CONST (INTVAL (op1));
2694 break;
2696 default:
2697 break;
2699 return 0;
/* Like c4x_R_constraint but additionally requires the base to be an
   address register (or pseudo), so the address is valid for C4x
   R-class indirect addressing.  */
2703 static int
2704 c4x_R_indirect (op)
2705 rtx op;
2707 enum machine_mode mode = GET_MODE (op);
2709 if (TARGET_C3X || GET_CODE (op) != MEM)
2710 return 0;
2712 op = XEXP (op, 0);
2713 switch (GET_CODE (op))
2715 case REG:
2716 return IS_ADDR_OR_PSEUDO_REG (op);
2718 case PLUS:
2720 rtx op0 = XEXP (op, 0);
2721 rtx op1 = XEXP (op, 1);
2723 /* HImode and HFmode must be offsettable. */
2724 if (mode == HImode || mode == HFmode)
2725 return IS_ADDR_OR_PSEUDO_REG (op0)
2726 && GET_CODE (op1) == CONST_INT
2727 && IS_UINT5_CONST (INTVAL (op1) + 1);
2729 return REG_P (op0)
2730 && IS_ADDR_OR_PSEUDO_REG (op0)
2731 && GET_CODE (op1) == CONST_INT
2732 && IS_UINT5_CONST (INTVAL (op1));
2734 break;
2736 default:
2737 break;
2739 return 0;
2743 /* ARx + 1-bit unsigned const or IRn
2744 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2745 We don't include the pre/post inc/dec forms here since
2746 they are handled by the <> constraints. */
/* Return nonzero if OP is a MEM matching the `S' constraint described
   in the comment above (1-bit displacement or index register, plus
   pre/post-modify by a register).  */
2749 c4x_S_constraint (op)
2750 rtx op;
2752 enum machine_mode mode = GET_MODE (op);
2753 if (GET_CODE (op) != MEM)
2754 return 0;
2755 op = XEXP (op, 0);
2756 switch (GET_CODE (op))
2758 case REG:
2759 return 1;
2761 case PRE_MODIFY:
2762 case POST_MODIFY:
2764 rtx op0 = XEXP (op, 0);
2765 rtx op1 = XEXP (op, 1);
/* The modify expression must be (plus/minus base step) with the same
   base register as the one being modified.  */
2767 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2768 || (op0 != XEXP (op1, 0)))
2769 return 0;
2771 op0 = XEXP (op1, 0);
2772 op1 = XEXP (op1, 1);
2773 return REG_P (op0) && REG_P (op1);
2774 /* Pre or post_modify with a displacement of 0 or 1
2775 should not be generated. */
2777 break;
2779 case PLUS:
2781 rtx op0 = XEXP (op, 0);
2782 rtx op1 = XEXP (op, 1);
2784 if (!REG_P (op0))
2785 return 0;
2787 if (REG_P (op1))
2788 return 1;
2790 if (GET_CODE (op1) != CONST_INT)
2791 return 0;
2793 /* HImode and HFmode must be offsettable. */
2794 if (mode == HImode || mode == HFmode)
2795 return IS_DISP1_OFF_CONST (INTVAL (op1));
2797 return IS_DISP1_CONST (INTVAL (op1));
2799 break;
2801 default:
2802 break;
2804 return 0;
/* Like c4x_S_constraint but additionally requires the base (and any
   index) to be address/index registers or pseudos; also accepts the
   auto inc/dec forms.  */
2808 static int
2809 c4x_S_indirect (op)
2810 rtx op;
2812 enum machine_mode mode = GET_MODE (op);
2813 if (GET_CODE (op) != MEM)
2814 return 0;
2816 op = XEXP (op, 0);
2817 switch (GET_CODE (op))
2819 case PRE_DEC:
2820 case POST_DEC:
/* Decrement forms are only valid for single-word modes.  */
2821 if (mode != QImode && mode != QFmode)
2822 return 0;
/* Fall through.  */
2823 case PRE_INC:
2824 case POST_INC:
2825 op = XEXP (op, 0);
/* Fall through to validate the base register.  */
2827 case REG:
2828 return IS_ADDR_OR_PSEUDO_REG (op);
2830 case PRE_MODIFY:
2831 case POST_MODIFY:
2833 rtx op0 = XEXP (op, 0);
2834 rtx op1 = XEXP (op, 1);
2836 if (mode != QImode && mode != QFmode)
2837 return 0;
2839 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2840 || (op0 != XEXP (op1, 0)))
2841 return 0;
2843 op0 = XEXP (op1, 0);
2844 op1 = XEXP (op1, 1);
2845 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2846 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2847 /* Pre or post_modify with a displacement of 0 or 1
2848 should not be generated. */
2851 case PLUS:
2853 rtx op0 = XEXP (op, 0);
2854 rtx op1 = XEXP (op, 1);
2856 if (REG_P (op0))
2858 /* HImode and HFmode must be offsettable. */
2859 if (mode == HImode || mode == HFmode)
2860 return IS_ADDR_OR_PSEUDO_REG (op0)
2861 && GET_CODE (op1) == CONST_INT
2862 && IS_DISP1_OFF_CONST (INTVAL (op1));
2864 if (REG_P (op1))
2865 return (IS_INDEX_OR_PSEUDO_REG (op1)
2866 && IS_ADDR_OR_PSEUDO_REG (op0))
2867 || (IS_ADDR_OR_PSEUDO_REG (op1)
2868 && IS_INDEX_OR_PSEUDO_REG (op0));
2870 return IS_ADDR_OR_PSEUDO_REG (op0)
2871 && GET_CODE (op1) == CONST_INT
2872 && IS_DISP1_CONST (INTVAL (op1));
2875 break;
2877 default:
2878 break;
2880 return 0;
2884 /* Direct memory operand. */
2887 c4x_T_constraint (op)
2888 rtx op;
2890 if (GET_CODE (op) != MEM)
2891 return 0;
2892 op = XEXP (op, 0);
2894 if (GET_CODE (op) != LO_SUM)
2896 /* Allow call operands. */
2897 return GET_CODE (op) == SYMBOL_REF
2898 && GET_MODE (op) == Pmode
2899 && SYMBOL_REF_FLAG (op);
2902 /* HImode and HFmode are not offsettable. */
2903 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2904 return 0;
2906 if ((GET_CODE (XEXP (op, 0)) == REG)
2907 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2908 return c4x_U_constraint (XEXP (op, 1));
2910 return 0;
2914 /* Symbolic operand. */
/* Return nonzero if OP is a symbolic address (CONST, SYMBOL_REF or
   LABEL_REF) suitable for direct addressing.  */
2917 c4x_U_constraint (op)
2918 rtx op;
2920 /* Don't allow direct addressing to an arbitrary constant. */
2921 return GET_CODE (op) == CONST
2922 || GET_CODE (op) == SYMBOL_REF
2923 || GET_CODE (op) == LABEL_REF;
/* Return nonzero if OP is a MEM whose address uses any of the
   auto-increment/decrement or pre/post-modify side-effect forms.  */
2928 c4x_autoinc_operand (op, mode)
2929 rtx op;
2930 enum machine_mode mode ATTRIBUTE_UNUSED;
2932 if (GET_CODE (op) == MEM)
2934 enum rtx_code code = GET_CODE (XEXP (op, 0));
2936 if (code == PRE_INC
2937 || code == PRE_DEC
2938 || code == POST_INC
2939 || code == POST_DEC
2940 || code == PRE_MODIFY
2941 || code == POST_MODIFY
2943 return 1;
2945 return 0;
2949 /* Match any operand. */
/* Unconditional predicate: accepts everything.  */
2952 any_operand (op, mode)
2953 register rtx op ATTRIBUTE_UNUSED;
2954 enum machine_mode mode ATTRIBUTE_UNUSED;
2956 return 1;
2960 /* Nonzero if OP is a floating point value with value 0.0. */
2963 fp_zero_operand (op, mode)
2964 rtx op;
2965 enum machine_mode mode ATTRIBUTE_UNUSED;
2967 REAL_VALUE_TYPE r;
2969 if (GET_CODE (op) != CONST_DOUBLE)
2970 return 0;
/* Compare the CONST_DOUBLE's value against the cached 0.0.  */
2971 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2972 return REAL_VALUES_EQUAL (r, dconst0);
/* Return nonzero if OP is a constant the C4x can load directly in
   MODE: an encodable immediate float for QFmode/HFmode, or a 16-bit
   signed / high-half constant for QImode (and Pmode).  */
2977 const_operand (op, mode)
2978 register rtx op;
2979 register enum machine_mode mode;
2981 switch (mode)
2983 case QFmode:
2984 case HFmode:
2985 if (GET_CODE (op) != CONST_DOUBLE
2986 || GET_MODE (op) != mode
2987 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2988 return 0;
2990 return c4x_immed_float_p (op);
2992 #if Pmode != QImode
2993 case Pmode:
2994 #endif
2995 case QImode:
2996 if (GET_CODE (op) == CONSTANT_P_RTX)
2997 return 1;
2999 if (GET_CODE (op) != CONST_INT
3000 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3001 || GET_MODE_CLASS (mode) != MODE_INT)
3002 return 0;
3004 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
/* HImode constants always need to be loaded in pieces.  */
3006 case HImode:
3007 return 0;
3009 default:
3010 return 0;
/* Predicate for the STIK instruction's 5-bit signed immediate.  */
3016 stik_const_operand (op, mode)
3017 rtx op;
3018 enum machine_mode mode ATTRIBUTE_UNUSED;
3020 return c4x_K_constant (op);
/* Predicate for constants whose ones complement is a 16-bit
   unsigned constant.  */
3025 not_const_operand (op, mode)
3026 rtx op;
3027 enum machine_mode mode ATTRIBUTE_UNUSED;
3029 return c4x_N_constant (op);
/* Like register_operand, but reject QFmode SUBREGs (see
   mixed_subreg_operand).  */
3034 reg_operand (op, mode)
3035 rtx op;
3036 enum machine_mode mode;
3038 if (GET_CODE (op) == SUBREG
3039 && GET_MODE (op) == QFmode)
3040 return 0;
3041 return register_operand (op, mode);
/* Return nonzero if OP is a mode-mixing SUBREG that the insn patterns
   must reject.  */
3046 mixed_subreg_operand (op, mode)
3047 rtx op;
3048 enum machine_mode mode ATTRIBUTE_UNUSED;
3050 /* Reject a QFmode SUBREG of an integer register, as can be generated
3051 for a union of an int and a long double.  NOTE(review): an earlier
comment mentioned (subreg:HF (reg:HI)) but the code tests QFmode --
confirm which is intended.  */
3052 if (GET_CODE (op) == SUBREG
3053 && (GET_MODE (op) == QFmode)
3054 && (GET_MODE (SUBREG_REG (op)) == QImode
3055 || GET_MODE (SUBREG_REG (op)) == HImode))
3056 return 1;
3057 return 0;
/* Return nonzero if OP is a register or any constant.  */
3062 reg_imm_operand (op, mode)
3063 rtx op;
3064 enum machine_mode mode ATTRIBUTE_UNUSED;
3066 if (REG_P (op) || CONSTANT_P (op))
3067 return 1;
3068 return 0;
/* Return nonzero if OP is an operand without a side-effect address
   (no auto increment/decrement or pre/post modify).  */
3073 not_modify_reg (op, mode)
3074 rtx op;
3075 enum machine_mode mode ATTRIBUTE_UNUSED;
3077 if (REG_P (op) || CONSTANT_P (op))
3078 return 1;
3079 if (GET_CODE (op) != MEM)
3080 return 0;
3081 op = XEXP (op, 0);
3082 switch (GET_CODE (op))
3084 case REG:
3085 return 1;
3087 case PLUS:
3089 rtx op0 = XEXP (op, 0);
3090 rtx op1 = XEXP (op, 1);
3092 if (! REG_P (op0))
3093 return 0;
3095 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3096 return 1;
/* NOTE(review): there is no break here, so a PLUS that did not return
   above falls through to the LO_SUM checks below -- confirm this is
   intentional rather than a missing break.  */
3099 case LO_SUM:
3101 rtx op0 = XEXP (op, 0);
3103 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3104 return 1;
3106 break;
3108 case CONST:
3109 case SYMBOL_REF:
3110 case LABEL_REF:
3111 return 1;
3113 default:
3114 break;
3116 return 0;
/* Return nonzero if OP is anything except the RC (repeat count)
   register.  */
3121 not_rc_reg (op, mode)
3122 rtx op;
3123 enum machine_mode mode ATTRIBUTE_UNUSED;
3125 if (REG_P (op) && REGNO (op) == RC_REGNO)
3126 return 0;
3127 return 1;
3131 /* Extended precision register R0-R1. */
3134 r0r1_reg_operand (op, mode)
3135 rtx op;
3136 enum machine_mode mode;
3138 if (! reg_operand (op, mode))
3139 return 0;
/* Look through a SUBREG to the underlying hard/pseudo register.  */
3140 if (GET_CODE (op) == SUBREG)
3141 op = SUBREG_REG (op);
3142 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3146 /* Extended precision register R2-R3. */
3149 r2r3_reg_operand (op, mode)
3150 rtx op;
3151 enum machine_mode mode;
3153 if (! reg_operand (op, mode))
3154 return 0;
/* Look through a SUBREG to the underlying hard/pseudo register.  */
3155 if (GET_CODE (op) == SUBREG)
3156 op = SUBREG_REG (op);
3157 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3161 /* Low extended precision register R0-R7. */
3164 ext_low_reg_operand (op, mode)
3165 rtx op;
3166 enum machine_mode mode;
3168 if (! reg_operand (op, mode))
3169 return 0;
/* Look through a SUBREG to the underlying hard/pseudo register.  */
3170 if (GET_CODE (op) == SUBREG)
3171 op = SUBREG_REG (op);
3172 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3176 /* Extended precision register. */
3179 ext_reg_operand (op, mode)
3180 rtx op;
3181 enum machine_mode mode;
3183 if (! reg_operand (op, mode))
3184 return 0;
/* Look through a SUBREG to the underlying hard/pseudo register.  */
3185 if (GET_CODE (op) == SUBREG)
3186 op = SUBREG_REG (op);
3187 if (! REG_P (op))
3188 return 0;
3189 return IS_EXT_OR_PSEUDO_REG (op);
3193 /* Standard precision register. */
3196 std_reg_operand (op, mode)
3197 rtx op;
3198 enum machine_mode mode;
3200 if (! reg_operand (op, mode))
3201 return 0;
/* Look through a SUBREG to the underlying hard/pseudo register.  */
3202 if (GET_CODE (op) == SUBREG)
3203 op = SUBREG_REG (op);
3204 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3207 /* Standard precision or normal register. */
/* During reload only standard precision registers are acceptable;
   otherwise any register will do.  */
3210 std_or_reg_operand (op, mode)
3211 rtx op;
3212 enum machine_mode mode;
3214 if (reload_in_progress)
3215 return std_reg_operand (op, mode);
3216 return reg_operand (op, mode);
3219 /* Address register. */
3222 addr_reg_operand (op, mode)
3223 rtx op;
3224 enum machine_mode mode;
3226 if (! reg_operand (op, mode))
3227 return 0;
3228 return c4x_a_register (op);
3232 /* Index register. */
3235 index_reg_operand (op, mode)
3236 rtx op;
3237 enum machine_mode mode;
3239 if (! reg_operand (op, mode))
3240 return 0;
/* Look through a SUBREG to the underlying hard/pseudo register.  */
3241 if (GET_CODE (op) == SUBREG)
3242 op = SUBREG_REG (op);
3243 return c4x_x_register (op);
3247 /* DP register. */
3250 dp_reg_operand (op, mode)
3251 rtx op;
3252 enum machine_mode mode ATTRIBUTE_UNUSED;
3254 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3258 /* SP register. */
3261 sp_reg_operand (op, mode)
3262 rtx op;
3263 enum machine_mode mode ATTRIBUTE_UNUSED;
3265 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3269 /* ST register. */
3272 st_reg_operand (op, mode)
3273 register rtx op;
3274 enum machine_mode mode ATTRIBUTE_UNUSED;
3276 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3280 /* RC register. */
3283 rc_reg_operand (op, mode)
3284 register rtx op;
3285 enum machine_mode mode ATTRIBUTE_UNUSED;
3287 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
/* Return nonzero if OP is valid as the address of a call: either a
   register or a symbolic address.  */
3292 call_address_operand (op, mode)
3293 rtx op;
3294 enum machine_mode mode ATTRIBUTE_UNUSED;
3296 return (REG_P (op) || symbolic_address_operand (op, mode));
3300 /* Symbolic address operand. */
3303 symbolic_address_operand (op, mode)
3304 register rtx op;
3305 enum machine_mode mode ATTRIBUTE_UNUSED;
3307 switch (GET_CODE (op))
3309 case CONST:
3310 case SYMBOL_REF:
3311 case LABEL_REF:
3312 return 1;
3313 default:
3314 return 0;
3319 /* Check dst operand of a move instruction. */
3322 dst_operand (op, mode)
3323 rtx op;
3324 enum machine_mode mode;
/* Reject mode-mixing SUBREGs the patterns cannot handle.  */
3326 if (GET_CODE (op) == SUBREG
3327 && mixed_subreg_operand (op, mode))
3328 return 0;
3330 if (REG_P (op))
3331 return reg_operand (op, mode);
3333 return nonimmediate_operand (op, mode);
3337 /* Check src operand of two operand arithmetic instructions. */
3340 src_operand (op, mode)
3341 rtx op;
3342 enum machine_mode mode;
/* Reject mode-mixing SUBREGs the patterns cannot handle.  */
3344 if (GET_CODE (op) == SUBREG
3345 && mixed_subreg_operand (op, mode))
3346 return 0;
3348 if (REG_P (op))
3349 return reg_operand (op, mode);
3351 if (mode == VOIDmode)
3352 mode = GET_MODE (op);
/* Integer immediates must fit in 16 bits.  */
3354 if (GET_CODE (op) == CONST_INT)
3355 return (mode == QImode || mode == Pmode || mode == HImode)
3356 && c4x_I_constant (op);
3358 /* We don't like CONST_DOUBLE integers. */
3359 if (GET_CODE (op) == CONST_DOUBLE)
3360 return c4x_H_constant (op);
3362 /* Disallow symbolic addresses. Only the predicate
3363 symbolic_address_operand will match these. */
3364 if (GET_CODE (op) == SYMBOL_REF
3365 || GET_CODE (op) == LABEL_REF
3366 || GET_CODE (op) == CONST)
3367 return 0;
3369 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3370 access to symbolic addresses. These operands will get forced
3371 into a register and the movqi expander will generate a
3372 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3373 if (GET_CODE (op) == MEM
3374 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3375 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3376 || GET_CODE (XEXP (op, 0)) == CONST)))
3377 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3379 return general_operand (op, mode);
/* Like src_operand, but additionally accept a high (LDHI-loadable)
   constant.  */
3384 src_hi_operand (op, mode)
3385 rtx op;
3386 enum machine_mode mode;
3388 if (c4x_O_constant (op))
3389 return 1;
3390 return src_operand (op, mode);
3394 /* Check src operand of two operand logical instructions. */
3397 lsrc_operand (op, mode)
3398 rtx op;
3399 enum machine_mode mode;
3401 if (mode == VOIDmode)
3402 mode = GET_MODE (op);
3404 if (mode != QImode && mode != Pmode)
3405 fatal_insn ("mode not QImode", op);
/* Logical immediates: 16-bit unsigned, or 8-bit signed on the C4x.  */
3407 if (GET_CODE (op) == CONST_INT)
3408 return c4x_L_constant (op) || c4x_J_constant (op);
3410 return src_operand (op, mode);
3414 /* Check src operand of two operand tricky instructions. */
3417 tsrc_operand (op, mode)
3418 rtx op;
3419 enum machine_mode mode;
3421 if (mode == VOIDmode)
3422 mode = GET_MODE (op);
3424 if (mode != QImode && mode != Pmode)
3425 fatal_insn ("mode not QImode", op);
/* Also allow the complemented (N) form of immediates.  */
3427 if (GET_CODE (op) == CONST_INT)
3428 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3430 return src_operand (op, mode);
3434 /* Check src operand of two operand non immediate instructions. */
/* Like src_operand, but reject all immediate constants.  */
3437 nonimmediate_src_operand (op, mode)
3438 rtx op;
3439 enum machine_mode mode;
3441 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3442 return 0;
3444 return src_operand (op, mode);
3448 /* Check logical src operand of two operand non immediate instructions. */
/* Like lsrc_operand, but reject all immediate constants.  */
3451 nonimmediate_lsrc_operand (op, mode)
3452 rtx op;
3453 enum machine_mode mode;
3455 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3456 return 0;
3458 return lsrc_operand (op, mode);
/* Return nonzero if OP is a register or a loadable constant.  */
3463 reg_or_const_operand (op, mode)
3464 rtx op;
3465 enum machine_mode mode;
3467 return reg_operand (op, mode) || const_operand (op, mode);
3471 /* Check for indirect operands allowable in parallel instruction. */
3474 par_ind_operand (op, mode)
3475 rtx op;
3476 enum machine_mode mode;
3478 if (mode != VOIDmode && mode != GET_MODE (op))
3479 return 0;
3481 return c4x_S_indirect (op);
3485 /* Check for operands allowable in parallel instruction. */
3488 parallel_operand (op, mode)
3489 rtx op;
3490 enum machine_mode mode;
3492 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
/* Decompose the address of MEM OP into its components: *BASE and
   *INDEX receive register numbers (0 if absent), *DISP the constant
   displacement, and *INCDEC is set when the address has a side
   effect.  Calls fatal_insn for addresses that are not valid S-class
   indirect addresses.  */
3496 static void
3497 c4x_S_address_parse (op, base, incdec, index, disp)
3498 rtx op;
3499 int *base;
3500 int *incdec;
3501 int *index;
3502 int *disp;
3504 *base = 0;
3505 *incdec = 0;
3506 *index = 0;
3507 *disp = 0;
3509 if (GET_CODE (op) != MEM)
3510 fatal_insn ("invalid indirect memory address", op);
3512 op = XEXP (op, 0);
3513 switch (GET_CODE (op))
3515 case PRE_DEC:
3516 *base = REGNO (XEXP (op, 0));
3517 *incdec = 1;
3518 *disp = -1;
3519 return;
3521 case POST_DEC:
3522 *base = REGNO (XEXP (op, 0));
3523 *incdec = 1;
3524 *disp = 0;
3525 return;
3527 case PRE_INC:
3528 *base = REGNO (XEXP (op, 0));
3529 *incdec = 1;
3530 *disp = 1;
3531 return;
3533 case POST_INC:
3534 *base = REGNO (XEXP (op, 0));
3535 *incdec = 1;
3536 *disp = 0;
3537 return;
3539 case POST_MODIFY:
3540 *base = REGNO (XEXP (op, 0));
3541 if (REG_P (XEXP (XEXP (op, 1), 1)))
3543 *index = REGNO (XEXP (XEXP (op, 1), 1));
3544 *disp = 0; /* ??? */
3546 else
3547 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3548 *incdec = 1;
3549 return;
3551 case PRE_MODIFY:
3552 *base = REGNO (XEXP (op, 0));
3553 if (REG_P (XEXP (XEXP (op, 1), 1)))
3555 *index = REGNO (XEXP (XEXP (op, 1), 1));
3556 *disp = 1; /* ??? */
3558 else
3559 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3560 *incdec = 1;
3562 return;
3564 case REG:
3565 *base = REGNO (op);
3566 return;
3568 case PLUS:
3570 rtx op0 = XEXP (op, 0);
3571 rtx op1 = XEXP (op, 1);
3573 if (c4x_a_register (op0))
3575 if (c4x_x_register (op1))
3577 *base = REGNO (op0);
3578 *index = REGNO (op1);
3579 return;
3581 else if ((GET_CODE (op1) == CONST_INT
3582 && IS_DISP1_CONST (INTVAL (op1))))
3584 *base = REGNO (op0);
3585 *disp = INTVAL (op1);
3586 return;
3589 else if (c4x_x_register (op0) && c4x_a_register (op1))
3591 *base = REGNO (op1);
3592 *index = REGNO (op0);
3593 return;
3596 /* Fallthrough. */
3598 default:
3599 fatal_insn ("invalid indirect (S) memory address", op);
3605 c4x_address_conflict (op0, op1, store0, store1)
3606 rtx op0;
3607 rtx op1;
3608 int store0;
3609 int store1;
3611 int base0;
3612 int base1;
3613 int incdec0;
3614 int incdec1;
3615 int index0;
3616 int index1;
3617 int disp0;
3618 int disp1;
3620 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3621 return 1;
3623 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3624 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3626 if (store0 && store1)
3628 /* If we have two stores in parallel to the same address, then
3629 the C4x only executes one of the stores. This is unlikely to
3630 cause problems except when writing to a hardware device such
3631 as a FIFO since the second write will be lost. The user
3632 should flag the hardware location as being volatile so that
3633 we don't do this optimisation. While it is unlikely that we
3634 have an aliased address if both locations are not marked
3635 volatile, it is probably safer to flag a potential conflict
3636 if either location is volatile. */
3637 if (! flag_argument_noalias)
3639 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3640 return 1;
3644 /* If have a parallel load and a store to the same address, the load
3645 is performed first, so there is no conflict. Similarly, there is
3646 no conflict if have parallel loads from the same address. */
3648 /* Cannot use auto increment or auto decrement twice for same
3649 base register. */
3650 if (base0 == base1 && incdec0 && incdec0)
3651 return 1;
3653 /* It might be too confusing for GCC if we have use a base register
3654 with a side effect and a memory reference using the same register
3655 in parallel. */
3656 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3657 return 1;
3659 /* We can not optimize the case where op1 and op2 refer to the same
3660 address. */
3661 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3662 return 1;
3664 /* No conflict. */
3665 return 0;
3669 /* Check for while loop inside a decrement and branch loop. */
/* Walk backwards from INSN; return 1 if the label JUMP targets is
   seen before the label DB targets (or if neither is found), 0 if the
   DB label is seen first.  */
3672 c4x_label_conflict (insn, jump, db)
3673 rtx insn;
3674 rtx jump;
3675 rtx db;
3677 while (insn)
3679 if (GET_CODE (insn) == CODE_LABEL)
3681 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3682 return 1;
3683 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3684 return 0;
3686 insn = PREV_INSN (insn);
3688 return 1;
3692 /* Validate combination of operands for parallel load/store instructions. */
3695 valid_parallel_load_store (operands, mode)
3696 rtx *operands;
3697 enum machine_mode mode ATTRIBUTE_UNUSED;
3699 rtx op0 = operands[0];
3700 rtx op1 = operands[1];
3701 rtx op2 = operands[2];
3702 rtx op3 = operands[3];
/* Strip SUBREGs so the REG/MEM tests below see the real operands.  */
3704 if (GET_CODE (op0) == SUBREG)
3705 op0 = SUBREG_REG (op0);
3706 if (GET_CODE (op1) == SUBREG)
3707 op1 = SUBREG_REG (op1);
3708 if (GET_CODE (op2) == SUBREG)
3709 op2 = SUBREG_REG (op2);
3710 if (GET_CODE (op3) == SUBREG)
3711 op3 = SUBREG_REG (op3);
3713 /* The patterns should only allow ext_low_reg_operand() or
3714 par_ind_operand() operands. Thus of the 4 operands, only 2
3715 should be REGs and the other 2 should be MEMs. */
3717 /* This test prevents the multipack pass from using this pattern if
3718 op0 is used as an index or base register in op2 or op3, since
3719 this combination will require reloading. */
3720 if (GET_CODE (op0) == REG
3721 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3722 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3723 return 0;
3725 /* LDI||LDI. */
3726 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3727 return (REGNO (op0) != REGNO (op2))
3728 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3729 && ! c4x_address_conflict (op1, op3, 0, 0);
3731 /* STI||STI. */
3732 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3733 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3734 && ! c4x_address_conflict (op0, op2, 1, 1);
3736 /* LDI||STI. */
3737 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3738 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3739 && ! c4x_address_conflict (op1, op2, 0, 1);
3741 /* STI||LDI. */
3742 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3743 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3744 && ! c4x_address_conflict (op0, op3, 1, 0);
3746 return 0;
/* Validate the operands of a 4 operand parallel instruction pair.  */
3751 valid_parallel_operands_4 (operands, mode)
3752 rtx *operands;
3753 enum machine_mode mode ATTRIBUTE_UNUSED;
3755 rtx op0 = operands[0];
3756 rtx op2 = operands[2];
3758 if (GET_CODE (op0) == SUBREG)
3759 op0 = SUBREG_REG (op0);
3760 if (GET_CODE (op2) == SUBREG)
3761 op2 = SUBREG_REG (op2);
3763 /* This test prevents the multipack pass from using this pattern if
3764 op0 is used as an index or base register in op2, since this combination
3765 will require reloading. */
3766 if (GET_CODE (op0) == REG
3767 && GET_CODE (op2) == MEM
3768 && reg_mentioned_p (op0, XEXP (op2, 0)))
3769 return 0;
3771 return 1;
/* Validate the operands of a 5 operand parallel instruction pair:
   exactly one of the two commutative source operands may be a
   register.  */
3776 valid_parallel_operands_5 (operands, mode)
3777 rtx *operands;
3778 enum machine_mode mode ATTRIBUTE_UNUSED;
3780 int regs = 0;
3781 rtx op0 = operands[0];
3782 rtx op1 = operands[1];
3783 rtx op2 = operands[2];
3784 rtx op3 = operands[3];
3786 if (GET_CODE (op0) == SUBREG)
3787 op0 = SUBREG_REG (op0);
3788 if (GET_CODE (op1) == SUBREG)
3789 op1 = SUBREG_REG (op1);
3790 if (GET_CODE (op2) == SUBREG)
3791 op2 = SUBREG_REG (op2);
3793 /* The patterns should only allow ext_low_reg_operand() or
3794 par_ind_operand() operands. Operands 1 and 2 may be commutative
3795 but only one of them can be a register. */
3796 if (GET_CODE (op1) == REG)
3797 regs++;
3798 if (GET_CODE (op2) == REG)
3799 regs++;
3801 if (regs != 1)
3802 return 0;
3804 /* This test prevents the multipack pass from using this pattern if
3805 op0 is used as an index or base register in op3, since this combination
3806 will require reloading. */
3807 if (GET_CODE (op0) == REG
3808 && GET_CODE (op3) == MEM
3809 && reg_mentioned_p (op0, XEXP (op3, 0)))
3810 return 0;
3812 return 1;
/* Validate the operands of a 6 operand parallel instruction pair:
   of the four source operands exactly two must be registers.  */
3817 valid_parallel_operands_6 (operands, mode)
3818 rtx *operands;
3819 enum machine_mode mode ATTRIBUTE_UNUSED;
3821 int regs = 0;
3822 rtx op0 = operands[0];
3823 rtx op1 = operands[1];
3824 rtx op2 = operands[2];
3825 rtx op4 = operands[4];
3826 rtx op5 = operands[5];
3828 if (GET_CODE (op1) == SUBREG)
3829 op1 = SUBREG_REG (op1);
3830 if (GET_CODE (op2) == SUBREG)
3831 op2 = SUBREG_REG (op2);
3832 if (GET_CODE (op4) == SUBREG)
3833 op4 = SUBREG_REG (op4);
3834 if (GET_CODE (op5) == SUBREG)
3835 op5 = SUBREG_REG (op5);
3837 /* The patterns should only allow ext_low_reg_operand() or
3838 par_ind_operand() operands. Thus of the 4 input operands, only 2
3839 should be REGs and the other 2 should be MEMs. */
3841 if (GET_CODE (op1) == REG)
3842 regs++;
3843 if (GET_CODE (op2) == REG)
3844 regs++;
3845 if (GET_CODE (op4) == REG)
3846 regs++;
3847 if (GET_CODE (op5) == REG)
3848 regs++;
3850 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3851 Perhaps we should count the MEMs as well? */
3852 if (regs != 2)
3853 return 0;
3855 /* This test prevents the multipack pass from using this pattern if
3856 op0 is used as an index or base register in op4 or op5, since
3857 this combination will require reloading. */
3858 if (GET_CODE (op0) == REG
3859 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3860 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3861 return 0;
3863 return 1;
3867 /* Validate combination of src operands. Note that the operands have
3868 been screened by the src_operand predicate. We just have to check
3869 that the combination of operands is valid. If FORCE is set, ensure
3870 that the destination regno is valid if we have a 2 operand insn. */
3872 static int
3873 c4x_valid_operands (code, operands, mode, force)
3874 enum rtx_code code;
3875 rtx *operands;
3876 enum machine_mode mode ATTRIBUTE_UNUSED;
3877 int force;
3879 rtx op1;
3880 rtx op2;
3881 enum rtx_code code1;
3882 enum rtx_code code2;
/* COMPARE insns have their sources in operands 0 and 1; all other
   binary operations keep the destination in operand 0.  */
3884 if (code == COMPARE)
3886 op1 = operands[0];
3887 op2 = operands[1];
3889 else
3891 op1 = operands[1];
3892 op2 = operands[2];
3895 if (GET_CODE (op1) == SUBREG)
3896 op1 = SUBREG_REG (op1);
3897 if (GET_CODE (op2) == SUBREG)
3898 op2 = SUBREG_REG (op2);
3900 code1 = GET_CODE (op1);
3901 code2 = GET_CODE (op2);
3903 if (code1 == REG && code2 == REG)
3904 return 1;
3906 if (code1 == MEM && code2 == MEM)
/* Two memory operands: both must be S-class, or both R-class.  */
3908 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3909 return 1;
3910 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
/* At this point mixed operand kinds only; identical non-REG/MEM
   kinds (e.g. two constants) are invalid.  */
3913 if (code1 == code2)
3914 return 0;
3916 if (code1 == REG)
3918 switch (code2)
3920 case CONST_INT:
3921 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3922 return 1;
3923 break;
3925 case CONST_DOUBLE:
3926 if (! c4x_H_constant (op2))
3927 return 0;
3928 break;
3930 /* Any valid memory operand screened by src_operand is OK. */
3931 case MEM:
3933 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3934 into a stack slot memory address comprising a PLUS and a
3935 constant. */
3936 case ADDRESSOF:
3937 break;
3939 default:
3940 fatal_insn ("c4x_valid_operands: Internal error", op2);
3941 break;
3944 /* Check that we have a valid destination register for a two operand
3945 instruction. */
3946 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3949 /* We assume MINUS is commutative since the subtract patterns
3950 also support the reverse subtract instructions. Since op1
3951 is not a register, and op2 is a register, op1 can only
3952 be a restricted memory operand for a shift instruction. */
3953 if (code == ASHIFTRT || code == LSHIFTRT
3954 || code == ASHIFT || code == COMPARE)
3955 return code2 == REG
3956 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3958 switch (code1)
3960 case CONST_INT:
3961 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3962 return 1;
3963 break;
3965 case CONST_DOUBLE:
3966 if (! c4x_H_constant (op1))
3967 return 0;
3968 break;
3970 /* Any valid memory operand screened by src_operand is OK. */
3971 case MEM:
3972 #if 0
3973 if (code2 != REG)
3974 return 0;
3975 #endif
3976 break;
3978 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3979 into a stack slot memory address comprising a PLUS and a
3980 constant. */
3981 case ADDRESSOF:
3982 break;
3984 default:
3985 abort ();
3986 break;
3989 /* Check that we have a valid destination register for a two operand
3990 instruction. */
3991 return ! force || REGNO (op1) == REGNO (operands[0]);
/* Public wrapper for c4x_valid_operands without the destination
   register check.  */
3995 int valid_operands (code, operands, mode)
3996 enum rtx_code code;
3997 rtx *operands;
3998 enum machine_mode mode;
4001 /* If we are not optimizing then we have to let anything go and let
4002 reload fix things up. instantiate_decl in function.c can produce
4003 invalid insns by changing the offset of a memory operand from a
4004 valid one into an invalid one, when the second operand is also a
4005 memory operand. The alternative is not to allow two memory
4006 operands for an insn when not optimizing. The problem only rarely
4007 occurs, for example with the C-torture program DFcmp.c. */
4009 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
/* Fix up the OPERANDS of a CODE operation in MODE so that they form a
   valid combination, forcing operands into registers where required.
   Always returns 1.  Called from the expanders in c4x.md.  */

int
legitimize_operands (code, operands, mode)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode;
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
	 they can get hoisted out of loops.  This will tie up an extra
	 register but can save an extra cycle.  Only do this if loop
	 optimization is enabled.  (We cannot pull this trick for add and
	 sub instructions since the flow pass won't find
	 autoincrements etc.)  This allows us to generate compare
	 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
	 of LDI *AR0++, R0; CMPI 42, R0.

	 Note that expand_binops will try to load an expensive constant
	 into a register if it is used within a loop.  Unfortunately,
	 the cost mechanism doesn't allow us to look at the other
	 operand to decide whether the constant is expensive.  */
      if (! reload_in_progress
	  && TARGET_HOIST
	  && optimize > 0
	  && GET_CODE (operands[1]) == CONST_INT
	  && preserve_subexpressions_p ()
	  && rtx_cost (operands[1], code) > 1)
	operands[1] = force_reg (mode, operands[1]);

      if (! reload_in_progress
	  && ! c4x_valid_operands (code, operands, mode, 0))
	operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && preserve_subexpressions_p ()
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register, but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */
  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
	 register, preventing reload from having to do this at a
	 later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
	{
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = copy_rtx (operands[0]);
	}
      else
	{
	  /* Just in case...  */
	  if (! c4x_valid_operands (code, operands, mode, 0))
	    operands[2] = force_reg (mode, operands[2]);
	}
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));

  return 1;
}
4102 /* The following predicates are used for instruction scheduling. */
4105 group1_reg_operand (op, mode)
4106 rtx op;
4107 enum machine_mode mode;
4109 if (mode != VOIDmode && mode != GET_MODE (op))
4110 return 0;
4111 if (GET_CODE (op) == SUBREG)
4112 op = SUBREG_REG (op);
4113 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4118 group1_mem_operand (op, mode)
4119 rtx op;
4120 enum machine_mode mode;
4122 if (mode != VOIDmode && mode != GET_MODE (op))
4123 return 0;
4125 if (GET_CODE (op) == MEM)
4127 op = XEXP (op, 0);
4128 if (GET_CODE (op) == PLUS)
4130 rtx op0 = XEXP (op, 0);
4131 rtx op1 = XEXP (op, 1);
4133 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4134 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4135 return 1;
4137 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4138 return 1;
4141 return 0;
/* Return true if OP is any one of the address registers.  */
4148 arx_reg_operand (op, mode)
4149 rtx op;
4150 enum machine_mode mode;
4152 if (mode != VOIDmode && mode != GET_MODE (op))
4153 return 0;
4154 if (GET_CODE (op) == SUBREG)
4155 op = SUBREG_REG (op);
4156 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4160 static int
4161 c4x_arn_reg_operand (op, mode, regno)
4162 rtx op;
4163 enum machine_mode mode;
4164 unsigned int regno;
4166 if (mode != VOIDmode && mode != GET_MODE (op))
4167 return 0;
4168 if (GET_CODE (op) == SUBREG)
4169 op = SUBREG_REG (op);
4170 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
/* Common helper for the arN/irN memory predicates: nonzero if OP is a
   memory reference whose address involves hard register REGNO (after
   reload; before reload any register satisfies the check).  Handles
   plain register addresses, autoincrement/decrement forms,
   PRE/POST_MODIFY, and PLUS addresses.  */

static int
c4x_arn_mem_operand (op, mode, regno)
     rtx op;
     enum machine_mode mode;
     unsigned int regno;
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
	{
	case PRE_DEC:
	case POST_DEC:
	case PRE_INC:
	case POST_INC:
	  /* Strip the autoincrement and check the register beneath.  */
	  op = XEXP (op, 0);
	  /* Fall through.  */
	case REG:
	  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

	case PRE_MODIFY:
	case POST_MODIFY:
	  /* Check the modified register itself...  */
	  if (REG_P (XEXP (op, 0)) && (! reload_completed
				       || (REGNO (XEXP (op, 0)) == regno)))
	    return 1;
	  /* ...and the index register of the modification expression.  */
	  if (REG_P (XEXP (XEXP (op, 1), 1))
	      && (! reload_completed
		  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
	    return 1;
	  break;

	case PLUS:
	  {
	    rtx op0 = XEXP (op, 0);
	    rtx op1 = XEXP (op, 1);

	    /* Either side of the PLUS may be the register of interest.  */
	    if ((REG_P (op0) && (! reload_completed
				 || (REGNO (op0) == regno)))
		|| (REG_P (op1) && (! reload_completed
				    || (REGNO (op1) == regno))))
	      return 1;
	  }
	  break;

	default:
	  break;
	}
    }
  return 0;
}
/* Per-register operand predicates.  Each arN_reg_operand /
   irN_reg_operand accepts (a subreg of) the given address or index
   register, and each arN_mem_operand / irN_mem_operand accepts a
   memory reference whose address uses that register.  They all defer
   to the common helpers above.  */

int
ar0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}


int
ar0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}


int
ar1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}


int
ar1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}


int
ar2_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}


int
ar2_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}


int
ar3_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}


int
ar3_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}


int
ar4_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}


int
ar4_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}


int
ar5_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}


int
ar5_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}


int
ar6_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}


int
ar6_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}


int
ar7_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}


int
ar7_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}


int
ir0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}


int
ir0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}


int
ir1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}


int
ir1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
/* This is similar to operand_subword but allows autoincrement
   addressing.  Return subword I of OP (a HImode or HFmode operand),
   or abort via fatal_insn for address forms that cannot be split.  */

rtx
c4x_operand_subword (op, i, validate_address, mode)
     rtx op;
     int i;
     int validate_address;
     enum machine_mode mode;
{
  if (mode != HImode && mode != HFmode)
    fatal_insn ("c4x_operand_subword: invalid mode", op);

  /* HFmode values never live in a single register, so a REG here is
     malformed.  */
  if (mode == HFmode && REG_P (op))
    fatal_insn ("c4x_operand_subword: invalid operand", op);

  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));
      /* NOTE(review): this inner declaration shadows the MODE
	 parameter and holds the mode of the *address*, not of OP, so
	 the HImode/HFmode tests below see the address mode — looks
	 suspicious; confirm intent before changing.  */
      enum machine_mode mode = GET_MODE (XEXP (op, 0));
      enum machine_mode submode;

      submode = mode;
      if (mode == HImode)
	submode = QImode;
      else if (mode == HFmode)
	submode = QFmode;

      switch (code)
	{
	case POST_INC:
	case PRE_INC:
	  /* The autoincrement itself produces the second word, so
	     reuse the address unchanged in the narrower mode.  */
	  return gen_rtx_MEM (submode, XEXP (op, 0));

	case POST_DEC:
	case PRE_DEC:
	case PRE_MODIFY:
	case POST_MODIFY:
	  /* We could handle these with some difficulty.
	     e.g., *p-- => *(p-=2); *(p+1).  */
	  fatal_insn ("c4x_operand_subword: invalid autoincrement", op);

	case SYMBOL_REF:
	case LABEL_REF:
	case CONST:
	case CONST_INT:
	  fatal_insn ("c4x_operand_subword: invalid address", op);

	  /* Even though offsettable_address_p considers (MEM
	     (LO_SUM)) to be offsettable, it is not safe if the
	     address is at the end of the data page since we also have
	     to fix up the associated high PART.  In this case where
	     we are trying to split a HImode or HFmode memory
	     reference, we would have to emit another insn to reload a
	     new HIGH value.  It's easier to disable LO_SUM memory references
	     in HImode or HFmode and we probably get better code.  */
	case LO_SUM:
	  fatal_insn ("c4x_operand_subword: address not offsettable", op);

	default:
	  break;
	}
    }

  /* Offsettable or register cases: let the generic routine do it.  */
  return operand_subword (op, i, validate_address, mode);
}
/* Singly-linked list node used to track symbol names emitted by this
   file.  The NAME pointer is borrowed from the caller, not copied.  */
struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Symbols declared global so far.  */
static struct name_list *global_head;
/* Symbols referenced externally so far (emitted as .ref at file end).  */
static struct name_list *extern_head;
4486 /* Add NAME to list of global symbols and remove from external list if
4487 present on external list. */
4489 void
4490 c4x_global_label (name)
4491 const char *name;
4493 struct name_list *p, *last;
4495 /* Do not insert duplicate names, so linearly search through list of
4496 existing names. */
4497 p = global_head;
4498 while (p)
4500 if (strcmp (p->name, name) == 0)
4501 return;
4502 p = p->next;
4504 p = (struct name_list *) xmalloc (sizeof *p);
4505 p->next = global_head;
4506 p->name = name;
4507 global_head = p;
4509 /* Remove this name from ref list if present. */
4510 last = NULL;
4511 p = extern_head;
4512 while (p)
4514 if (strcmp (p->name, name) == 0)
4516 if (last)
4517 last->next = p->next;
4518 else
4519 extern_head = p->next;
4520 break;
4522 last = p;
4523 p = p->next;
4528 /* Add NAME to list of external symbols. */
4530 void
4531 c4x_external_ref (name)
4532 const char *name;
4534 struct name_list *p;
4536 /* Do not insert duplicate names. */
4537 p = extern_head;
4538 while (p)
4540 if (strcmp (p->name, name) == 0)
4541 return;
4542 p = p->next;
4545 /* Do not insert ref if global found. */
4546 p = global_head;
4547 while (p)
4549 if (strcmp (p->name, name) == 0)
4550 return;
4551 p = p->next;
4553 p = (struct name_list *) xmalloc (sizeof *p);
4554 p->next = extern_head;
4555 p->name = name;
4556 extern_head = p;
4560 void
4561 c4x_file_end (fp)
4562 FILE *fp;
4564 struct name_list *p;
4566 /* Output all external names that are not global. */
4567 p = extern_head;
4568 while (p)
4570 fprintf (fp, "\t.ref\t");
4571 assemble_name (fp, p->name);
4572 fprintf (fp, "\n");
4573 p = p->next;
4575 fprintf (fp, "\t.end\n");
4579 static void
4580 c4x_check_attribute (attrib, list, decl, attributes)
4581 const char *attrib;
4582 tree list, decl, *attributes;
4584 while (list != NULL_TREE
4585 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4586 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4587 list = TREE_CHAIN (list);
4588 if (list)
4589 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4590 *attributes);
4594 static void
4595 c4x_insert_attributes (decl, attributes)
4596 tree decl, *attributes;
4598 switch (TREE_CODE (decl))
4600 case FUNCTION_DECL:
4601 c4x_check_attribute ("section", code_tree, decl, attributes);
4602 c4x_check_attribute ("const", pure_tree, decl, attributes);
4603 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4604 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4605 break;
4607 case VAR_DECL:
4608 c4x_check_attribute ("section", data_tree, decl, attributes);
4609 break;
4611 default:
4612 break;
/* Table of valid machine attributes.  All three attributes apply to
   function types and share one handler that rejects non-functions.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  /* FIXME: code elsewhere in this file treats "naked" as a synonym of
     "interrupt"; should it be accepted here?  */
  { "assembler",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  /* Sentinel terminating the table.  */
  { NULL,           0, 0, false, false, false, NULL }
};
4628 /* Handle an attribute requiring a FUNCTION_TYPE;
4629 arguments as in struct attribute_spec.handler. */
4630 static tree
4631 c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4632 tree *node;
4633 tree name;
4634 tree args ATTRIBUTE_UNUSED;
4635 int flags ATTRIBUTE_UNUSED;
4636 bool *no_add_attrs;
4638 if (TREE_CODE (*node) != FUNCTION_TYPE)
4640 warning ("`%s' attribute only applies to functions",
4641 IDENTIFIER_POINTER (name));
4642 *no_add_attrs = true;
4645 return NULL_TREE;
/* !!! FIXME to emit RPTS correctly.  */

/* Decide whether the repeat block started by INSN may be emitted as a
   single-instruction repeat (the loop body must be exactly one insn
   followed by the rptb_end pattern).  OP is the repeat count rtx.
   Nonzero means RPTS is acceptable.  */

int
c4x_rptb_rpts_p (insn, op)
     rtx insn, op;
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
	 and the top label... They were probably destined to
	 be moved out of the loop.  For now, let's leave them
	 where they are and print a warning.  We should
	 probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
	fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
		   insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  if (! INSN_P (insn))
    return 0;

  /* The second real insn must be the rptb_end marker, i.e. the loop
     body is a single instruction.  */
  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  if (TARGET_RPTS)
    return 1;

  /* Without -mrpts, only allow RPTS for a constant repeat count that
     TARGET_RPTS_CYCLES accepts.  */
  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
4694 /* Check if register r11 is used as the destination of an insn. */
4696 static int
4697 c4x_r11_set_p(x)
4698 rtx x;
4700 rtx set;
4701 int i, j;
4702 const char *fmt;
4704 if (x == 0)
4705 return 0;
4707 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4708 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4710 if (INSN_P (x) && (set = single_set (x)))
4711 x = SET_DEST (set);
4713 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4714 return 1;
4716 fmt = GET_RTX_FORMAT (GET_CODE (x));
4717 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4719 if (fmt[i] == 'e')
4721 if (c4x_r11_set_p (XEXP (x, i)))
4722 return 1;
4724 else if (fmt[i] == 'E')
4725 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4726 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4727 return 1;
4729 return 0;
4733 /* The c4x sometimes has a problem when the insn before the laj insn
4734 sets the r11 register. Check for this situation. */
4737 c4x_check_laj_p (insn)
4738 rtx insn;
4740 insn = prev_nonnote_insn (insn);
4742 /* If this is the start of the function no nop is needed. */
4743 if (insn == 0)
4744 return 0;
4746 /* If the previous insn is a code label we have to insert a nop. This
4747 could be a jump or table jump. We can find the normal jumps by
4748 scanning the function but this will not find table jumps. */
4749 if (GET_CODE (insn) == CODE_LABEL)
4750 return 1;
4752 /* If the previous insn sets register r11 we have to insert a nop. */
4753 if (c4x_r11_set_p (insn))
4754 return 1;
4756 /* No nop needed. */
4757 return 0;
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.
   A set of an address register followed by a use incurs a 2 cycle
   stall (reduced to a single cycle on the c40 using LDA), while
   a read of an address register followed by a use incurs a single cycle.  */
4767 #define SET_USE_COST 3
4768 #define SETLDA_USE_COST 2
4769 #define READ_USE_COST 2
/* Implementation of the scheduler cost hook: return the adjusted cost
   of the dependency LINK of INSN on DEP_INSN, based on which address
   or index registers DEP_INSN sets and INSN uses.  */

static int
c4x_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
	 cycles later.  Take the worst-case stall over all register
	 classes involved.  */
      if (TARGET_C3X)
	{
	  /* The C3x only distinguishes the group1 registers.  */
	  if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;
	}
      else
	{
	  /* This could be significantly optimized. We should look
	     to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
	     insn uses ar0-ar7.  We then test if the same register
	     is used.  The tricky bit is that some operands will
	     use several registers...  */
	  if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

	  if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	}

      if (max)
	cost = max;

      /* For other data dependencies, the default cost specified in the
	 md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
/* Register the machine-specific builtin functions: fast_ftoi and
   ansi_ftoi always; fast_imult on the C3x only; toieee, frieee and
   fast_invf on the C4x only.  */

void
c4x_init_builtins ()
{
  tree endlink = void_list_node;

  /* int fast_ftoi (double);  */
  builtin_function ("fast_ftoi",
		    build_function_type 
		    (integer_type_node,
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
  /* int ansi_ftoi (double);  */
  builtin_function ("ansi_ftoi",
		    build_function_type 
		    (integer_type_node, 
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
  if (TARGET_C3X)
    /* int fast_imult (int, int); -- C3x only.  */
    builtin_function ("fast_imult",
		      build_function_type
		      (integer_type_node, 
		       tree_cons (NULL_TREE, integer_type_node,
				  tree_cons (NULL_TREE,
					     integer_type_node, endlink))),
		      C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
  else
    {
      /* double toieee (double); -- C4x only.  */
      builtin_function ("toieee",
			build_function_type 
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
      /* double frieee (double); -- C4x only.  */
      builtin_function ("frieee",
			build_function_type
			(double_type_node, 
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
      /* double fast_invf (double); -- C4x only.  */
      builtin_function ("fast_invf",
			build_function_type 
			(double_type_node, 
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
    }
}
/* Expand a call EXP to one of the builtins registered by
   c4x_init_builtins, emitting the corresponding insn.  TARGET is a
   suggested result register; a fresh one is allocated if it is absent
   or unsuitable.  Returns the result rtx, or NULL_RTX when the
   builtin is not valid for the current (sub)target.  */

rtx
c4x_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      /* Only registered on the C3x; reject elsewhere.  */
      if (! TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      r0 = protect_from_queue (r0, 0);
      r1 = protect_from_queue (r1, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      /* C4x only.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      /* C4x only.  The frieee insn wants a memory operand, so a
	 register argument is spilled to a fresh stack slot first.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
	put_var_into_stack (arg0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (register_operand (r0, QFmode))
	{
	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
	  emit_move_insn (r1, r0);
	  r0 = r1;
	}
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      /* C4x only.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  return NULL_RTX;
}
/* Hook: switch the assembly output to the named section NAME using the
   c4x assembler's .sect directive.  Section FLAGS are ignored.  */

static void
c4x_asm_named_section (name, flags)
     const char *name;
     unsigned int flags ATTRIBUTE_UNUSED;
{
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}
/* Hook: emit the standard directive making NAME global on STREAM, and
   record NAME on this file's global-symbol list (which also removes
   any pending external reference for it).  */

static void
c4x_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}