* target.h (struct gcc_target): Add insert_attributes.
[official-gcc.git] / gcc / config / c4x / c4x.c
blobc0500bef2f43d938e7d2d50e9feb547fd60577ef
1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
26 #include "config.h"
27 #include "system.h"
28 #include "toplev.h"
29 #include "rtl.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "tree.h"
39 #include "function.h"
40 #include "expr.h"
41 #include "flags.h"
42 #include "loop.h"
43 #include "recog.h"
44 #include "c-tree.h"
45 #include "ggc.h"
46 #include "cpplib.h"
47 #include "c-lex.h"
48 #include "c-pragma.h"
49 #include "c4x-protos.h"
50 #include "target.h"
51 #include "target-def.h"
/* SYMBOL_REF rtxs for the library functions this back end calls for
   operations without native instruction patterns (initialized elsewhere;
   registered as GC roots in c4x_add_gc_roots below).  */
53 rtx smulhi3_libfunc;
54 rtx umulhi3_libfunc;
55 rtx fix_truncqfhi2_libfunc;
56 rtx fixuns_truncqfhi2_libfunc;
57 rtx fix_trunchfhi2_libfunc;
58 rtx fixuns_trunchfhi2_libfunc;
59 rtx floathiqf2_libfunc;
60 rtx floatunshiqf2_libfunc;
61 rtx floathihf2_libfunc;
62 rtx floatunshihf2_libfunc;
/* Non-zero when the current function may be treated as a leaf
   (set in c4x_expand_prologue, tested in c4x_isr_reg_used_p).  */
64 static int c4x_leaf_function;
66 static const char *float_reg_names[] = FLOAT_REGISTER_NAMES;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
/* NOTE(review): this extract appears to have lost brace-only lines
   (the array's opening/closing braces) — verify against the original
   file before building.  Columns: class, register, modes, call-saved.  */
73 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
75 /* Reg Modes Saved. */
76 R0R1_REGS, /* R0 QI, QF, HF No. */
77 R0R1_REGS, /* R1 QI, QF, HF No. */
78 R2R3_REGS, /* R2 QI, QF, HF No. */
79 R2R3_REGS, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
84 ADDR_REGS, /* AR0 QI No. */
85 ADDR_REGS, /* AR1 QI No. */
86 ADDR_REGS, /* AR2 QI No. */
87 ADDR_REGS, /* AR3 QI QI. */
88 ADDR_REGS, /* AR4 QI QI. */
89 ADDR_REGS, /* AR5 QI QI. */
90 ADDR_REGS, /* AR6 QI QI. */
91 ADDR_REGS, /* AR7 QI QI. */
92 DP_REG, /* DP QI No. */
93 INDEX_REGS, /* IR0 QI No. */
94 INDEX_REGS, /* IR1 QI No. */
95 BK_REG, /* BK QI QI. */
96 SP_REG, /* SP QI No. */
97 ST_REG, /* ST CC No. */
98 NO_REGS, /* DIE/IE No. */
99 NO_REGS, /* IIE/IF No. */
100 NO_REGS, /* IIF/IOF No. */
101 INT_REGS, /* RS QI No. */
102 INT_REGS, /* RE QI No. */
103 RC_REG, /* RC QI No. */
104 EXT_REGS, /* R8 QI, QF, HF QI. */
105 EXT_REGS, /* R9 QI, QF, HF No. */
106 EXT_REGS, /* R10 QI, QF, HF No. */
107 EXT_REGS, /* R11 QI, QF, HF No. */
/* Mode to use when caller-saving each hard register; VOIDmode marks
   registers that are never caller-saved this way.  Parallels
   c4x_regclass_map above, one entry per hard register.  */
110 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
112 /* Reg Modes Saved. */
113 HFmode, /* R0 QI, QF, HF No. */
114 HFmode, /* R1 QI, QF, HF No. */
115 HFmode, /* R2 QI, QF, HF No. */
116 HFmode, /* R3 QI, QF, HF No. */
117 QFmode, /* R4 QI, QF, HF QI. */
118 QFmode, /* R5 QI, QF, HF QI. */
119 QImode, /* R6 QI, QF, HF QF. */
120 QImode, /* R7 QI, QF, HF QF. */
121 QImode, /* AR0 QI No. */
122 QImode, /* AR1 QI No. */
123 QImode, /* AR2 QI No. */
124 QImode, /* AR3 QI QI. */
125 QImode, /* AR4 QI QI. */
126 QImode, /* AR5 QI QI. */
127 QImode, /* AR6 QI QI. */
128 QImode, /* AR7 QI QI. */
129 VOIDmode, /* DP QI No. */
130 QImode, /* IR0 QI No. */
131 QImode, /* IR1 QI No. */
132 QImode, /* BK QI QI. */
133 VOIDmode, /* SP QI No. */
134 VOIDmode, /* ST CC No. */
135 VOIDmode, /* DIE/IE No. */
136 VOIDmode, /* IIE/IF No. */
137 VOIDmode, /* IIF/IOF No. */
138 QImode, /* RS QI No. */
139 QImode, /* RE QI No. */
140 VOIDmode, /* RC QI No. */
141 QFmode, /* R8 QI, QF, HF QI. */
142 HFmode, /* R9 QI, QF, HF No. */
143 HFmode, /* R10 QI, QF, HF No. */
144 HFmode, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
151 struct rtx_def *c4x_compare_op0 = NULL_RTX;
152 struct rtx_def *c4x_compare_op1 = NULL_RTX;
/* Command-line option state: raw strings captured by option parsing,
   and the values decoded from them in c4x_override_options.  */
154 const char *c4x_rpts_cycles_string;
155 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
156 const char *c4x_cpu_version_string;
157 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
/* Attribute trees accumulated by pragma handling; consulted when
   attributes are inserted on declarations (see c4x_insert_attributes).  */
161 static tree code_tree = NULL_TREE;
162 static tree data_tree = NULL_TREE;
163 static tree pure_tree = NULL_TREE;
164 static tree noreturn_tree = NULL_TREE;
165 static tree interrupt_tree = NULL_TREE;
167 /* Forward declarations */
168 static void c4x_add_gc_roots PARAMS ((void));
169 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
170 static int c4x_leaf_function_p PARAMS ((void));
171 static int c4x_assembler_function_p PARAMS ((void));
172 static int c4x_immed_float_p PARAMS ((rtx));
173 static int c4x_a_register PARAMS ((rtx));
174 static int c4x_x_register PARAMS ((rtx));
175 static int c4x_immed_int_constant PARAMS ((rtx));
176 static int c4x_immed_float_constant PARAMS ((rtx));
177 static int c4x_K_constant PARAMS ((rtx));
178 static int c4x_N_constant PARAMS ((rtx));
179 static int c4x_O_constant PARAMS ((rtx));
180 static int c4x_R_indirect PARAMS ((rtx));
181 static int c4x_S_indirect PARAMS ((rtx));
182 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
183 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
184 enum machine_mode, int));
185 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
186 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
187 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
188 static int c4x_parse_pragma PARAMS ((const char *, tree *, tree *));
189 static int c4x_r11_set_p PARAMS ((rtx));
190 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
191 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
192 static int c4x_valid_type_attribute_p PARAMS ((tree, tree, tree, tree));
193 static void c4x_insert_attributes PARAMS ((tree, tree *));
195 /* Initialize the GCC target structure. */
/* Hook the target-specific attribute validation and insertion routines
   into the target vector; all other hooks take TARGET_INITIALIZER
   defaults.  */
196 #undef TARGET_VALID_TYPE_ATTRIBUTE
197 #define TARGET_VALID_TYPE_ATTRIBUTE c4x_valid_type_attribute_p
199 #undef TARGET_INSERT_ATTRIBUTES
200 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
202 struct gcc_target targetm = TARGET_INITIALIZER;
204 /* Called to register all of our global variables with the garbage
205 collector. */
207 static void
208 c4x_add_gc_roots ()
210 ggc_add_rtx_root (&c4x_compare_op0, 1);
211 ggc_add_rtx_root (&c4x_compare_op1, 1);
212 ggc_add_tree_root (&code_tree, 1);
213 ggc_add_tree_root (&data_tree, 1);
214 ggc_add_tree_root (&pure_tree, 1);
215 ggc_add_tree_root (&noreturn_tree, 1);
216 ggc_add_tree_root (&interrupt_tree, 1);
217 ggc_add_rtx_root (&smulhi3_libfunc, 1);
218 ggc_add_rtx_root (&umulhi3_libfunc, 1);
219 ggc_add_rtx_root (&fix_truncqfhi2_libfunc, 1);
220 ggc_add_rtx_root (&fixuns_truncqfhi2_libfunc, 1);
221 ggc_add_rtx_root (&fix_trunchfhi2_libfunc, 1);
222 ggc_add_rtx_root (&fixuns_trunchfhi2_libfunc, 1);
223 ggc_add_rtx_root (&floathiqf2_libfunc, 1);
224 ggc_add_rtx_root (&floatunshiqf2_libfunc, 1);
225 ggc_add_rtx_root (&floathihf2_libfunc, 1);
226 ggc_add_rtx_root (&floatunshihf2_libfunc, 1);
230 /* Override command line options.
231 Called once after all options have been parsed.
232 Mostly we process the processor
233 type and sometimes adjust other TARGET_ options. */
235 void
236 c4x_override_options ()
238 if (c4x_rpts_cycles_string)
239 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
240 else
241 c4x_rpts_cycles = 0;
243 if (TARGET_C30)
244 c4x_cpu_version = 30;
245 else if (TARGET_C31)
246 c4x_cpu_version = 31;
247 else if (TARGET_C32)
248 c4x_cpu_version = 32;
249 else if (TARGET_C33)
250 c4x_cpu_version = 33;
251 else if (TARGET_C40)
252 c4x_cpu_version = 40;
253 else if (TARGET_C44)
254 c4x_cpu_version = 44;
255 else
256 c4x_cpu_version = 40;
258 /* -mcpu=xx overrides -m40 etc. */
259 if (c4x_cpu_version_string)
261 const char *p = c4x_cpu_version_string;
263 /* Also allow -mcpu=c30 etc. */
264 if (*p == 'c' || *p == 'C')
265 p++;
266 c4x_cpu_version = atoi (p);
269 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
270 C40_FLAG | C44_FLAG);
272 switch (c4x_cpu_version)
274 case 30: target_flags |= C30_FLAG; break;
275 case 31: target_flags |= C31_FLAG; break;
276 case 32: target_flags |= C32_FLAG; break;
277 case 33: target_flags |= C33_FLAG; break;
278 case 40: target_flags |= C40_FLAG; break;
279 case 44: target_flags |= C44_FLAG; break;
280 default:
281 warning ("Unknown CPU version %d, using 40.\n", c4x_cpu_version);
282 c4x_cpu_version = 40;
283 target_flags |= C40_FLAG;
286 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
287 target_flags |= C3X_FLAG;
288 else
289 target_flags &= ~C3X_FLAG;
291 /* Convert foo / 8.0 into foo * 0.125, etc. */
292 set_fast_math_flags();
294 /* We should phase out the following at some stage.
295 This provides compatibility with the old -mno-aliases option. */
296 if (! TARGET_ALIASES && ! flag_argument_noalias)
297 flag_argument_noalias = 1;
299 /* Register global variables with the garbage collector. */
300 c4x_add_gc_roots ();
304 /* This is called before c4x_override_options. */
306 void
307 c4x_optimization_options (level, size)
308 int level ATTRIBUTE_UNUSED;
309 int size ATTRIBUTE_UNUSED;
311 /* Scheduling before register allocation can screw up global
312 register allocation, especially for functions that use MPY||ADD
313 instructions. The benefit we gain we get by scheduling before
314 register allocation is probably marginal anyhow. */
315 flag_schedule_insns = 0;
319 /* Write an ASCII string. */
321 #define C4X_ASCII_LIMIT 40
/* Emit LEN bytes at PTR to STREAM as one or more .byte directives.
   Printable characters are batched into quoted strings of at most
   C4X_ASCII_LIMIT characters; other bytes are emitted numerically.
   For TARGET_TI assemblers, output lines are broken at ~80 columns
   (tracked in L) and " and \ are never placed inside a string.  */
323 void
324 c4x_output_ascii (stream, ptr, len)
325 FILE *stream;
326 const char *ptr;
327 int len;
329 char sbuf[C4X_ASCII_LIMIT + 1];
/* S = chars buffered in sbuf, L = current output column estimate,
   FIRST = no item emitted yet on this line, ONLYS = flush-only pass.  */
330 int s, l, special, first = 1, onlys;
332 if (len)
333 fprintf (stream, "\t.byte\t");
335 for (s = l = 0; len > 0; --len, ++ptr)
337 onlys = 0;
339 /* Escape " and \ with a \". */
340 special = *ptr == '\"' || *ptr == '\\';
342 /* If printable - add to buff. */
343 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
345 if (special)
346 sbuf[s++] = '\\';
347 sbuf[s++] = *ptr;
348 if (s < C4X_ASCII_LIMIT - 1)
349 continue;
/* Buffer full: fall through to flush it, but skip the numeric path.  */
350 onlys = 1;
/* Flush any buffered printable characters as a quoted string.  */
352 if (s)
354 if (first)
355 first = 0;
356 else
358 fputc (',', stream);
359 l++;
362 sbuf[s] = 0;
363 fprintf (stream, "\"%s\"", sbuf);
364 l += s + 2;
365 if (TARGET_TI && l >= 80 && len > 1)
367 fprintf (stream, "\n\t.byte\t");
368 first = 1;
369 l = 0;
372 s = 0;
374 if (onlys)
375 continue;
/* Emit the current (non-printable or TI-special) byte numerically.  */
377 if (first)
378 first = 0;
379 else
381 fputc (',', stream);
382 l++;
385 fprintf (stream, "%d", *ptr);
386 l += 3;
387 if (TARGET_TI && l >= 80 && len > 1)
389 fprintf (stream, "\n\t.byte\t");
390 first = 1;
391 l = 0;
/* Final flush of any remaining buffered characters.  */
394 if (s)
396 if (! first)
397 fputc (',', stream);
399 sbuf[s] = 0;
400 fprintf (stream, "\"%s\"", sbuf);
401 s = 0;
403 fputc ('\n', stream);
408 c4x_hard_regno_mode_ok (regno, mode)
409 unsigned int regno;
410 enum machine_mode mode;
412 switch (mode)
414 #if Pmode != QImode
415 case Pmode: /* Pointer (24/32 bits). */
416 #endif
417 case QImode: /* Integer (32 bits). */
418 return IS_INT_REGNO (regno);
420 case QFmode: /* Float, Double (32 bits). */
421 case HFmode: /* Long Double (40 bits). */
422 return IS_EXT_REGNO (regno);
424 case CCmode: /* Condition Codes. */
425 case CC_NOOVmode: /* Condition Codes. */
426 return IS_ST_REGNO (regno);
428 case HImode: /* Long Long (64 bits). */
429 /* We need two registers to store long longs. Note that
430 it is much easier to constrain the first register
431 to start on an even boundary. */
432 return IS_INT_REGNO (regno)
433 && IS_INT_REGNO (regno + 1)
434 && (regno & 1) == 0;
436 default:
437 return 0; /* We don't support these modes. */
440 return 0;
/* Return non-zero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (regno1, regno2)
     unsigned int regno1;
     unsigned int regno2;
{
  /* We can not copy call saved registers from mode QI into QF or
     from mode QF into QI.  */
  if ((IS_FLOAT_CALL_SAVED_REGNO (regno1)
       && IS_INT_CALL_SAVED_REGNO (regno2))
      || (IS_INT_CALL_SAVED_REGNO (regno1)
	  && IS_FLOAT_CALL_SAVED_REGNO (regno2)))
    return 0;

  /* We cannot copy between an extended (40 bit) register and a
     standard (32 bit) register in either direction because we only
     set the condition codes for extended registers.  */
  if (! IS_EXT_REGNO (regno1) != ! IS_EXT_REGNO (regno2))
    return 0;

  return 1;
}
465 /* The TI C3x C compiler register argument runtime model uses 6 registers,
466 AR2, R2, R3, RC, RS, RE.
468 The first two floating point arguments (float, double, long double)
469 that are found scanning from left to right are assigned to R2 and R3.
471 The remaining integer (char, short, int, long) or pointer arguments
472 are assigned to the remaining registers in the order AR2, R2, R3,
473 RC, RS, RE when scanning left to right, except for the last named
474 argument prior to an ellipsis denoting variable number of
475 arguments. We don't have to worry about the latter condition since
476 function.c treats the last named argument as anonymous (unnamed).
478 All arguments that cannot be passed in registers are pushed onto
479 the stack in reverse order (right to left). GCC handles that for us.
481 c4x_init_cumulative_args() is called at the start, so we can parse
482 the args to see how many floating point arguments and how many
483 integer (or pointer) arguments there are. c4x_function_arg() is
484 then called (sometimes repeatedly) for each argument (parsed left
485 to right) to obtain the register to pass the argument in, or zero
486 if the argument is to be passed on the stack. Once the compiler is
487 happy, c4x_function_arg_advance() is called.
489 Don't use R0 to pass arguments in, we use 0 to indicate a stack
490 argument. */
/* Row index = number of arguments passed in float registers (0..2,
   cum->maxfloats in c4x_function_arg); column index = integer argument
   ordinal.  A 0 entry means the argument goes on the stack.  */
492 static int c4x_int_reglist[3][6] =
494 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
495 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
496 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
499 static int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
502 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
503 function whose data type is FNTYPE.
504 For a library call, FNTYPE is 0. */
506 void
507 c4x_init_cumulative_args (cum, fntype, libname)
508 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
509 tree fntype; /* Tree ptr for function decl. */
510 rtx libname; /* SYMBOL_REF of library name or 0. */
512 tree param, next_param;
/* Start with empty counts; c4x_function_arg finishes initialization
   (cum->init) on its first call.  */
514 cum->floats = cum->ints = 0;
515 cum->init = 0;
516 cum->var = 0;
517 cum->args = 0;
519 if (TARGET_DEBUG)
521 fprintf (stderr, "\nc4x_init_cumulative_args (");
522 if (fntype)
524 tree ret_type = TREE_TYPE (fntype);
526 fprintf (stderr, "fntype code = %s, ret code = %s",
527 tree_code_name[(int) TREE_CODE (fntype)],
528 tree_code_name[(int) TREE_CODE (ret_type)]);
530 else
531 fprintf (stderr, "no fntype");
533 if (libname)
534 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
537 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
/* Scan the declared parameter types, counting float-like and
   integer/pointer-like arguments that could go in registers.  */
539 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
540 param; param = next_param)
542 tree type;
544 next_param = TREE_CHAIN (param);
546 type = TREE_VALUE (param);
547 if (type && type != void_type_node)
549 enum machine_mode mode;
551 /* If the last arg doesn't have void type then we have
552 variable arguments. */
553 if (! next_param)
554 cum->var = 1;
556 if ((mode = TYPE_MODE (type)))
558 if (! MUST_PASS_IN_STACK (mode, type))
560 /* Look for float, double, or long double argument. */
561 if (mode == QFmode || mode == HFmode)
562 cum->floats++;
563 /* Look for integer, enumeral, boolean, char, or pointer
564 argument. */
565 else if (mode == QImode || mode == Pmode)
566 cum->ints++;
569 cum->args++;
573 if (TARGET_DEBUG)
574 fprintf (stderr, "%s%s, args = %d)\n",
575 cum->prototype ? ", prototype" : "",
576 cum->var ? ", variable args" : "",
577 cum->args);
581 /* Update the data in CUM to advance over an argument
582 of mode MODE and data type TYPE.
583 (TYPE is null for libcalls where that information may not be available.) */
585 void
586 c4x_function_arg_advance (cum, mode, type, named)
587 CUMULATIVE_ARGS *cum; /* Current arg information. */
588 enum machine_mode mode; /* Current arg mode. */
589 tree type; /* Type of the arg or 0 if lib support. */
590 int named; /* Whether or not the argument was named. */
592 if (TARGET_DEBUG)
593 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
594 GET_MODE_NAME (mode), named);
595 if (! TARGET_MEMPARM
596 && named
597 && type
598 && ! MUST_PASS_IN_STACK (mode, type))
600 /* Look for float, double, or long double argument. */
601 if (mode == QFmode || mode == HFmode)
602 cum->floats++;
603 /* Look for integer, enumeral, boolean, char, or pointer argument. */
604 else if (mode == QImode || mode == Pmode)
605 cum->ints++;
607 else if (! TARGET_MEMPARM && ! type)
609 /* Handle libcall arguments. */
610 if (mode == QFmode || mode == HFmode)
611 cum->floats++;
612 else if (mode == QImode || mode == Pmode)
613 cum->ints++;
615 return;
619 /* Define where to put the arguments to a function. Value is zero to
620 push the argument on the stack, or a hard register in which to
621 store the argument.
623 MODE is the argument's machine mode.
624 TYPE is the data type of the argument (as a tree).
625 This is null for libcalls where that information may
626 not be available.
627 CUM is a variable of type CUMULATIVE_ARGS which gives info about
628 the preceding args and about the function being called.
629 NAMED is nonzero if this argument is a named parameter
630 (otherwise it is an extra parameter matching an ellipsis). */
632 struct rtx_def *
633 c4x_function_arg (cum, mode, type, named)
634 CUMULATIVE_ARGS *cum; /* Current arg information. */
635 enum machine_mode mode; /* Current arg mode. */
636 tree type; /* Type of the arg or 0 if lib support. */
637 int named; /* != 0 for normal args, == 0 for ... args. */
639 int reg = 0; /* Default to passing argument on stack. */
/* First call for this function: convert the counts gathered by
   c4x_init_cumulative_args into register budgets, then reset the
   running counts.  */
641 if (! cum->init)
643 /* We can handle at most 2 floats in R2, R3. */
644 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
646 /* We can handle at most 6 integers minus number of floats passed
647 in registers. */
648 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
649 6 - cum->maxfloats : cum->ints;
651 /* If there is no prototype, assume all the arguments are integers. */
652 if (! cum->prototype)
653 cum->maxints = 6;
655 cum->ints = cum->floats = 0;
656 cum->init = 1;
659 /* This marks the last argument. We don't need to pass this through
660 to the call insn. */
661 if (type == void_type_node)
662 return 0;
664 if (! TARGET_MEMPARM
665 && named
666 && type
667 && ! MUST_PASS_IN_STACK (mode, type))
669 /* Look for float, double, or long double argument. */
670 if (mode == QFmode || mode == HFmode)
672 if (cum->floats < cum->maxfloats)
673 reg = c4x_fp_reglist[cum->floats];
675 /* Look for integer, enumeral, boolean, char, or pointer argument. */
676 else if (mode == QImode || mode == Pmode)
678 if (cum->ints < cum->maxints)
679 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
682 else if (! TARGET_MEMPARM && ! type)
684 /* We could use a different argument calling model for libcalls,
685 since we're only calling functions in libgcc. Thus we could
686 pass arguments for long longs in registers rather than on the
687 stack. In the meantime, use the odd TI format. We make the
688 assumption that we won't have more than two floating point
689 args, six integer args, and that all the arguments are of the
690 same mode. */
691 if (mode == QFmode || mode == HFmode)
692 reg = c4x_fp_reglist[cum->floats];
693 else if (mode == QImode || mode == Pmode)
694 reg = c4x_int_reglist[0][cum->ints];
697 if (TARGET_DEBUG)
699 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
700 GET_MODE_NAME (mode), named);
701 if (reg)
702 fprintf (stderr, ", reg=%s", reg_names[reg]);
703 else
704 fprintf (stderr, ", stack");
705 fprintf (stderr, ")\n");
/* reg == 0 (R0 is never used for args) means pass on the stack.  */
707 if (reg)
708 return gen_rtx_REG (mode, reg);
709 else
710 return NULL_RTX;
714 void
715 c4x_va_start (stdarg_p, valist, nextarg)
716 int stdarg_p;
717 tree valist;
718 rtx nextarg;
720 nextarg = plus_constant (nextarg, stdarg_p ? 0 : UNITS_PER_WORD * 2);
722 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
726 /* C[34]x arguments grow in weird ways (downwards) that the standard
727 varargs stuff can't handle.. */
729 c4x_va_arg (valist, type)
730 tree valist, type;
732 tree t;
734 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
735 build_int_2 (int_size_in_bytes (type), 0));
736 TREE_SIDE_EFFECTS (t) = 1;
738 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
742 static int
743 c4x_isr_reg_used_p (regno)
744 unsigned int regno;
746 /* Don't save/restore FP or ST, we handle them separately. */
747 if (regno == FRAME_POINTER_REGNUM
748 || IS_ST_REGNO (regno))
749 return 0;
751 /* We could be a little smarter abut saving/restoring DP.
752 We'll only save if for the big memory model or if
753 we're paranoid. ;-) */
754 if (IS_DP_REGNO (regno))
755 return ! TARGET_SMALL || TARGET_PARANOID;
757 /* Only save/restore regs in leaf function that are used. */
758 if (c4x_leaf_function)
759 return regs_ever_live[regno] && fixed_regs[regno] == 0;
761 /* Only save/restore regs that are used by the ISR and regs
762 that are likely to be used by functions the ISR calls
763 if they are not fixed. */
764 return IS_EXT_REGNO (regno)
765 || ((regs_ever_live[regno] || call_used_regs[regno])
766 && fixed_regs[regno] == 0);
770 static int
771 c4x_leaf_function_p ()
773 /* A leaf function makes no calls, so we only need
774 to save/restore the registers we actually use.
775 For the global variable leaf_function to be set, we need
776 to define LEAF_REGISTERS and all that it entails.
777 Let's check ourselves... */
779 if (lookup_attribute ("leaf_pretend",
780 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
781 return 1;
783 /* Use the leaf_pretend attribute at your own risk. This is a hack
784 to speed up ISRs that call a function infrequently where the
785 overhead of saving and restoring the additional registers is not
786 warranted. You must save and restore the additional registers
787 required by the called function. Caveat emptor. Here's enough
788 rope... */
790 if (leaf_function_p ())
791 return 1;
793 return 0;
797 static int
798 c4x_assembler_function_p ()
800 tree type;
802 type = TREE_TYPE (current_function_decl);
803 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
804 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
809 c4x_interrupt_function_p ()
811 if (lookup_attribute ("interrupt",
812 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
813 return 1;
815 /* Look for TI style c_intnn. */
816 return current_function_name[0] == 'c'
817 && current_function_name[1] == '_'
818 && current_function_name[2] == 'i'
819 && current_function_name[3] == 'n'
820 && current_function_name[4] == 't'
821 && ISDIGIT (current_function_name[5])
822 && ISDIGIT (current_function_name[6]);
/* Emit RTL for the function prologue.  Three shapes: none for
   "assembler"/"naked" functions, a save-everything-used variant for
   interrupt handlers, and the normal frame setup (push AR3, copy SP,
   bump SP by the frame size, push call-saved registers).  Each emitted
   insn is tagged RTX_FRAME_RELATED_P for unwind/debug info.
   NOTE(review): this extract appears to have lost brace-only lines;
   verify the block structure against the original file.  */
825 void
826 c4x_expand_prologue ()
828 unsigned int regno;
829 int size = get_frame_size ();
830 rtx insn;
832 /* In functions where ar3 is not used but frame pointers are still
833 specified, frame pointers are not adjusted (if >= -O2) and this
834 is used so it won't needlessly push the frame pointer. */
835 int dont_push_ar3;
837 /* For __assembler__ function don't build a prologue. */
838 if (c4x_assembler_function_p ())
840 return;
843 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
844 if (profile_block_flag == 2)
846 FUNCTION_BLOCK_PROFILER_EXIT
848 #endif
850 /* For __interrupt__ function build specific prologue. */
851 if (c4x_interrupt_function_p ())
853 c4x_leaf_function = c4x_leaf_function_p ();
/* Save the status register first, then establish the frame.  */
855 insn = emit_insn (gen_push_st ());
856 RTX_FRAME_RELATED_P (insn) = 1;
857 if (size)
859 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
860 RTX_FRAME_RELATED_P (insn) = 1;
861 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
862 gen_rtx_REG (QImode, SP_REGNO)));
863 RTX_FRAME_RELATED_P (insn) = 1;
864 /* We require that an ISR uses fewer than 32768 words of
865 local variables, otherwise we have to go to lots of
866 effort to save a register, load it with the desired size,
867 adjust the stack pointer, and then restore the modified
868 register. Frankly, I think it is a poor ISR that
869 requires more than 32767 words of local temporary
870 storage! */
871 if (size > 32767)
872 error ("ISR %s requires %d words of local vars, max is 32767.",
873 current_function_name, size);
875 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
876 gen_rtx_REG (QImode, SP_REGNO),
877 GEN_INT (size)));
878 RTX_FRAME_RELATED_P (insn) = 1;
/* Save every register the ISR (or its callees) might clobber; see
   c4x_isr_reg_used_p for the selection policy.  */
880 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
882 if (c4x_isr_reg_used_p (regno))
884 if (regno == DP_REGNO)
886 insn = emit_insn (gen_push_dp ());
887 RTX_FRAME_RELATED_P (insn) = 1;
889 else
891 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
892 RTX_FRAME_RELATED_P (insn) = 1;
893 if (IS_EXT_REGNO (regno))
/* Extended registers are 40 bits: push the float part too.  */
895 insn = emit_insn (gen_pushqf
896 (gen_rtx_REG (QFmode, regno)));
897 RTX_FRAME_RELATED_P (insn) = 1;
902 /* We need to clear the repeat mode flag if the ISR is
903 going to use a RPTB instruction or uses the RC, RS, or RE
904 registers. */
905 if (regs_ever_live[RC_REGNO]
906 || regs_ever_live[RS_REGNO]
907 || regs_ever_live[RE_REGNO])
909 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
910 RTX_FRAME_RELATED_P (insn) = 1;
913 /* Reload DP reg if we are paranoid about some turkey
914 violating small memory model rules. */
915 if (TARGET_SMALL && TARGET_PARANOID)
917 insn = emit_insn (gen_set_ldp_prologue
918 (gen_rtx_REG (QImode, DP_REGNO),
919 gen_rtx_SYMBOL_REF (QImode, "data_sec")))
920 RTX_FRAME_RELATED_P (insn) = 1;
923 else
/* Normal (non-ISR) prologue.  */
925 if (frame_pointer_needed)
927 if ((size != 0)
928 || (current_function_args_size != 0)
929 || (optimize < 2))
931 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
932 RTX_FRAME_RELATED_P (insn) = 1;
933 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
934 gen_rtx_REG (QImode, SP_REGNO)));
935 RTX_FRAME_RELATED_P (insn) = 1;
936 dont_push_ar3 = 1;
938 else
940 /* Since ar3 is not used, we don't need to push it. */
941 dont_push_ar3 = 1;
944 else
946 /* If we use ar3, we need to push it. */
947 dont_push_ar3 = 0;
948 if ((size != 0) || (current_function_args_size != 0))
950 /* If we are omitting the frame pointer, we still have
951 to make space for it so the offsets are correct
952 unless we don't use anything on the stack at all. */
953 size += 1;
957 if (size > 32767)
959 /* Local vars are too big, it will take multiple operations
960 to increment SP. */
961 if (TARGET_C3X)
/* C3x has no 32-bit immediate load: build the high part via
   load + left shift (lshrqi3 with a negative count).  */
963 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
964 GEN_INT(size >> 16)));
965 RTX_FRAME_RELATED_P (insn) = 1;
966 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
967 gen_rtx_REG (QImode, R1_REGNO),
968 GEN_INT(-16)));
969 RTX_FRAME_RELATED_P (insn) = 1;
971 else
973 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
974 GEN_INT(size & ~0xffff)));
975 RTX_FRAME_RELATED_P (insn) = 1;
977 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
978 gen_rtx_REG (QImode, R1_REGNO),
979 GEN_INT(size & 0xffff)));
980 RTX_FRAME_RELATED_P (insn) = 1;
981 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
982 gen_rtx_REG (QImode, SP_REGNO),
983 gen_rtx_REG (QImode, R1_REGNO)));
984 RTX_FRAME_RELATED_P (insn) = 1;
986 else if (size != 0)
988 /* Local vars take up less than 32767 words, so we can directly
989 add the number. */
990 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
991 gen_rtx_REG (QImode, SP_REGNO),
992 GEN_INT (size)));
993 RTX_FRAME_RELATED_P (insn) = 1;
/* Push the call-saved registers this function uses.  */
996 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
998 if (regs_ever_live[regno] && ! call_used_regs[regno])
1000 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1002 if (TARGET_PRESERVE_FLOAT)
1004 insn = emit_insn (gen_pushqi
1005 (gen_rtx_REG (QImode, regno)));
1006 RTX_FRAME_RELATED_P (insn) = 1;
1008 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
1009 RTX_FRAME_RELATED_P (insn) = 1;
1011 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
1013 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1014 RTX_FRAME_RELATED_P (insn) = 1;
1022 void
1023 c4x_expand_epilogue()
1025 int regno;
1026 int jump = 0;
1027 int dont_pop_ar3;
1028 rtx insn;
1029 int size = get_frame_size ();
1031 /* For __assembler__ function build no epilogue. */
1032 if (c4x_assembler_function_p ())
1034 insn = emit_jump_insn (gen_return_from_epilogue ());
1035 RTX_FRAME_RELATED_P (insn) = 1;
1036 return;
1039 /* For __interrupt__ function build specific epilogue. */
1040 if (c4x_interrupt_function_p ())
1042 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1044 if (! c4x_isr_reg_used_p (regno))
1045 continue;
1046 if (regno == DP_REGNO)
1048 insn = emit_insn (gen_pop_dp ());
1049 RTX_FRAME_RELATED_P (insn) = 1;
1051 else
1053 /* We have to use unspec because the compiler will delete insns
1054 that are not call-saved. */
1055 if (IS_EXT_REGNO (regno))
1057 insn = emit_insn (gen_popqf_unspec
1058 (gen_rtx_REG (QFmode, regno)));
1059 RTX_FRAME_RELATED_P (insn) = 1;
1061 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1062 RTX_FRAME_RELATED_P (insn) = 1;
1065 if (size)
1067 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1068 gen_rtx_REG (QImode, SP_REGNO),
1069 GEN_INT(size)));
1070 RTX_FRAME_RELATED_P (insn) = 1;
1071 insn = emit_insn (gen_popqi
1072 (gen_rtx_REG (QImode, AR3_REGNO)));
1073 RTX_FRAME_RELATED_P (insn) = 1;
1075 insn = emit_insn (gen_pop_st ());
1076 RTX_FRAME_RELATED_P (insn) = 1;
1077 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1078 RTX_FRAME_RELATED_P (insn) = 1;
1080 else
1082 if (frame_pointer_needed)
1084 if ((size != 0)
1085 || (current_function_args_size != 0)
1086 || (optimize < 2))
1088 insn = emit_insn
1089 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1090 gen_rtx_MEM (QImode,
1091 gen_rtx_PLUS
1092 (QImode, gen_rtx_REG (QImode,
1093 AR3_REGNO),
1094 GEN_INT(-1)))));
1095 RTX_FRAME_RELATED_P (insn) = 1;
1097 /* We already have the return value and the fp,
1098 so we need to add those to the stack. */
1099 size += 2;
1100 jump = 1;
1101 dont_pop_ar3 = 1;
1103 else
1105 /* Since ar3 is not used for anything, we don't need to
1106 pop it. */
1107 dont_pop_ar3 = 1;
1110 else
1112 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1113 if (size || current_function_args_size)
1115 /* If we are ommitting the frame pointer, we still have
1116 to make space for it so the offsets are correct
1117 unless we don't use anything on the stack at all. */
1118 size += 1;
1122 /* Now restore the saved registers, putting in the delayed branch
1123 where required. */
1124 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1126 if (regs_ever_live[regno] && ! call_used_regs[regno])
1128 if (regno == AR3_REGNO && dont_pop_ar3)
1129 continue;
1131 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1133 insn = emit_insn (gen_popqf_unspec
1134 (gen_rtx_REG (QFmode, regno)));
1135 RTX_FRAME_RELATED_P (insn) = 1;
1136 if (TARGET_PRESERVE_FLOAT)
1138 insn = emit_insn (gen_popqi_unspec
1139 (gen_rtx_REG (QImode, regno)));
1140 RTX_FRAME_RELATED_P (insn) = 1;
1143 else
1145 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1146 RTX_FRAME_RELATED_P (insn) = 1;
1151 if (frame_pointer_needed)
1153 if ((size != 0)
1154 || (current_function_args_size != 0)
1155 || (optimize < 2))
1157 /* Restore the old FP. */
1158 insn = emit_insn
1159 (gen_movqi
1160 (gen_rtx_REG (QImode, AR3_REGNO),
1161 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1163 RTX_FRAME_RELATED_P (insn) = 1;
1167 if (size > 32767)
1169 /* Local vars are too big, it will take multiple operations
1170 to decrement SP. */
1171 if (TARGET_C3X)
1173 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1174 GEN_INT(size >> 16)));
1175 RTX_FRAME_RELATED_P (insn) = 1;
1176 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1177 gen_rtx_REG (QImode, R3_REGNO),
1178 GEN_INT(-16)));
1179 RTX_FRAME_RELATED_P (insn) = 1;
1181 else
1183 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1184 GEN_INT(size & ~0xffff)));
1185 RTX_FRAME_RELATED_P (insn) = 1;
1187 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1188 gen_rtx_REG (QImode, R3_REGNO),
1189 GEN_INT(size & 0xffff)));
1190 RTX_FRAME_RELATED_P (insn) = 1;
1191 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1192 gen_rtx_REG (QImode, SP_REGNO),
1193 gen_rtx_REG (QImode, R3_REGNO)));
1194 RTX_FRAME_RELATED_P (insn) = 1;
1196 else if (size != 0)
1198 /* Local vars take up less than 32768 words, so we can directly
1199 subtract the number. */
1200 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1201 gen_rtx_REG (QImode, SP_REGNO),
1202 GEN_INT(size)));
1203 RTX_FRAME_RELATED_P (insn) = 1;
1206 if (jump)
1208 insn = emit_jump_insn (gen_return_indirect_internal
1209 (gen_rtx_REG (QImode, R2_REGNO)));
1210 RTX_FRAME_RELATED_P (insn) = 1;
1212 else
1214 insn = emit_jump_insn (gen_return_from_epilogue ());
1215 RTX_FRAME_RELATED_P (insn) = 1;
1222 c4x_null_epilogue_p ()
1224 int regno;
1226 if (reload_completed
1227 && ! c4x_assembler_function_p ()
1228 && ! c4x_interrupt_function_p ()
1229 && ! current_function_calls_alloca
1230 && ! current_function_args_size
1231 && ! (profile_block_flag == 2)
1232 && ! (optimize < 2)
1233 && ! get_frame_size ())
1235 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1236 if (regs_ever_live[regno] && ! call_used_regs[regno]
1237 && (regno != AR3_REGNO))
1238 return 0;
1239 return 1;
1241 return 0;
1246 c4x_emit_move_sequence (operands, mode)
1247 rtx *operands;
1248 enum machine_mode mode;
1250 rtx op0 = operands[0];
1251 rtx op1 = operands[1];
1253 if (! reload_in_progress
1254 && ! REG_P (op0)
1255 && ! REG_P (op1)
1256 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1257 op1 = force_reg (mode, op1);
1259 if (GET_CODE (op1) == LO_SUM
1260 && GET_MODE (op1) == Pmode
1261 && dp_reg_operand (XEXP (op1, 0), mode))
1263 /* expand_increment will sometimes create a LO_SUM immediate
1264 address. */
1265 op1 = XEXP (op1, 1);
1267 else if (symbolic_address_operand (op1, mode))
1269 if (TARGET_LOAD_ADDRESS)
1271 /* Alias analysis seems to do a better job if we force
1272 constant addresses to memory after reload. */
1273 emit_insn (gen_load_immed_address (op0, op1));
1274 return 1;
1276 else
1278 /* Stick symbol or label address into the constant pool. */
1279 op1 = force_const_mem (Pmode, op1);
1282 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1284 /* We could be a lot smarter about loading some of these
1285 constants... */
1286 op1 = force_const_mem (mode, op1);
1289 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1290 and emit associated (HIGH (SYMREF)) if large memory model.
1291 c4x_legitimize_address could be used to do this,
1292 perhaps by calling validize_address. */
1293 if (TARGET_EXPOSE_LDP
1294 && ! (reload_in_progress || reload_completed)
1295 && GET_CODE (op1) == MEM
1296 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1298 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1299 if (! TARGET_SMALL)
1300 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1301 op1 = change_address (op1, mode,
1302 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
1305 if (TARGET_EXPOSE_LDP
1306 && ! (reload_in_progress || reload_completed)
1307 && GET_CODE (op0) == MEM
1308 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1310 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1311 if (! TARGET_SMALL)
1312 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1313 op0 = change_address (op0, mode,
1314 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1317 if (GET_CODE (op0) == SUBREG
1318 && mixed_subreg_operand (op0, mode))
1320 /* We should only generate these mixed mode patterns
1321 during RTL generation. If we need do it later on
1322 then we'll have to emit patterns that won't clobber CC. */
1323 if (reload_in_progress || reload_completed)
1324 abort ();
1325 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1326 op0 = SUBREG_REG (op0);
1327 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1329 op0 = copy_rtx (op0);
1330 PUT_MODE (op0, QImode);
1332 else
1333 abort ();
1335 if (mode == QFmode)
1336 emit_insn (gen_storeqf_int_clobber (op0, op1));
1337 else
1338 abort ();
1339 return 1;
1342 if (GET_CODE (op1) == SUBREG
1343 && mixed_subreg_operand (op1, mode))
1345 /* We should only generate these mixed mode patterns
1346 during RTL generation. If we need do it later on
1347 then we'll have to emit patterns that won't clobber CC. */
1348 if (reload_in_progress || reload_completed)
1349 abort ();
1350 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1351 op1 = SUBREG_REG (op1);
1352 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1354 op1 = copy_rtx (op1);
1355 PUT_MODE (op1, QImode);
1357 else
1358 abort ();
1360 if (mode == QFmode)
1361 emit_insn (gen_loadqf_int_clobber (op0, op1));
1362 else
1363 abort ();
1364 return 1;
1367 if (mode == QImode
1368 && reg_operand (op0, mode)
1369 && const_int_operand (op1, mode)
1370 && ! IS_INT16_CONST (INTVAL (op1))
1371 && ! IS_HIGH_CONST (INTVAL (op1)))
1373 emit_insn (gen_loadqi_big_constant (op0, op1));
1374 return 1;
1377 if (mode == HImode
1378 && reg_operand (op0, mode)
1379 && const_int_operand (op1, mode))
1381 emit_insn (gen_loadhi_big_constant (op0, op1));
1382 return 1;
1385 /* Adjust operands in case we have modified them. */
1386 operands[0] = op0;
1387 operands[1] = op1;
1389 /* Emit normal pattern. */
1390 return 0;
1394 void
1395 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1396 rtx libcall;
1397 enum rtx_code code;
1398 enum machine_mode dmode;
1399 enum machine_mode smode;
1400 int noperands;
1401 rtx *operands;
1403 rtx ret;
1404 rtx insns;
1405 rtx equiv;
1407 start_sequence ();
1408 switch (noperands)
1410 case 2:
1411 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1412 operands[1], smode);
1413 equiv = gen_rtx (code, dmode, operands[1]);
1414 break;
1416 case 3:
1417 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1418 operands[1], smode, operands[2], smode);
1419 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1420 break;
1422 default:
1423 abort ();
1426 insns = get_insns ();
1427 end_sequence ();
1428 emit_libcall_block (insns, operands[0], ret, equiv);
1432 void
1433 c4x_emit_libcall3 (libcall, code, mode, operands)
1434 rtx libcall;
1435 enum rtx_code code;
1436 enum machine_mode mode;
1437 rtx *operands;
1439 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
1443 void
1444 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1445 rtx libcall;
1446 enum rtx_code code;
1447 enum machine_mode mode;
1448 rtx *operands;
1450 rtx ret;
1451 rtx insns;
1452 rtx equiv;
1454 start_sequence ();
1455 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1456 operands[1], mode, operands[2], mode);
1457 equiv = gen_rtx_TRUNCATE (mode,
1458 gen_rtx_LSHIFTRT (HImode,
1459 gen_rtx_MULT (HImode,
1460 gen_rtx (code, HImode, operands[1]),
1461 gen_rtx (code, HImode, operands[2])),
1462 GEN_INT (32)));
1463 insns = get_insns ();
1464 end_sequence ();
1465 emit_libcall_block (insns, operands[0], ret, equiv);
1469 /* Set the SYMBOL_REF_FLAG for a function decl. However, wo do not
1470 yet use this info. */
1471 void
1472 c4x_encode_section_info (decl)
1473 tree decl;
1475 #if 0
1476 if (TREE_CODE (TREE_TYPE (decl)) == FUNCTION_TYPE)
1477 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1478 #else
1479 if (TREE_CODE (decl) == FUNCTION_DECL)
1480 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1481 #endif
1486 c4x_check_legit_addr (mode, addr, strict)
1487 enum machine_mode mode;
1488 rtx addr;
1489 int strict;
1491 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1492 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1493 rtx disp = NULL_RTX; /* Displacement. */
1494 enum rtx_code code;
1496 code = GET_CODE (addr);
1497 switch (code)
1499 /* Register indirect with auto increment/decrement. We don't
1500 allow SP here---push_operand should recognise an operand
1501 being pushed on the stack. */
1503 case PRE_DEC:
1504 case PRE_INC:
1505 case POST_DEC:
1506 if (mode != QImode && mode != QFmode)
1507 return 0;
1509 case POST_INC:
1510 base = XEXP (addr, 0);
1511 if (! REG_P (base))
1512 return 0;
1513 break;
1515 case PRE_MODIFY:
1516 case POST_MODIFY:
1518 rtx op0 = XEXP (addr, 0);
1519 rtx op1 = XEXP (addr, 1);
1521 if (mode != QImode && mode != QFmode)
1522 return 0;
1524 if (! REG_P (op0)
1525 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1526 return 0;
1527 base = XEXP (op1, 0);
1528 if (base != op0)
1529 return 0;
1530 if (REG_P (XEXP (op1, 1)))
1531 indx = XEXP (op1, 1);
1532 else
1533 disp = XEXP (op1, 1);
1535 break;
1537 /* Register indirect. */
1538 case REG:
1539 base = addr;
1540 break;
1542 /* Register indirect with displacement or index. */
1543 case PLUS:
1545 rtx op0 = XEXP (addr, 0);
1546 rtx op1 = XEXP (addr, 1);
1547 enum rtx_code code0 = GET_CODE (op0);
1549 switch (code0)
1551 case REG:
1552 if (REG_P (op1))
1554 base = op0; /* Base + index. */
1555 indx = op1;
1556 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1558 base = op1;
1559 indx = op0;
1562 else
1564 base = op0; /* Base + displacement. */
1565 disp = op1;
1567 break;
1569 default:
1570 return 0;
1573 break;
1575 /* Direct addressing with DP register. */
1576 case LO_SUM:
1578 rtx op0 = XEXP (addr, 0);
1579 rtx op1 = XEXP (addr, 1);
1581 /* HImode and HFmode direct memory references aren't truly
1582 offsettable (consider case at end of data page). We
1583 probably get better code by loading a pointer and using an
1584 indirect memory reference. */
1585 if (mode == HImode || mode == HFmode)
1586 return 0;
1588 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1589 return 0;
1591 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1592 return 1;
1594 if (GET_CODE (op1) == CONST)
1595 return 1;
1596 return 0;
1598 break;
1600 /* Direct addressing with some work for the assembler... */
1601 case CONST:
1602 /* Direct addressing. */
1603 case LABEL_REF:
1604 case SYMBOL_REF:
1605 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1606 return 1;
1607 /* These need to be converted to a LO_SUM (...).
1608 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1609 return 0;
1611 /* Do not allow direct memory access to absolute addresses.
1612 This is more pain than it's worth, especially for the
1613 small memory model where we can't guarantee that
1614 this address is within the data page---we don't want
1615 to modify the DP register in the small memory model,
1616 even temporarily, since an interrupt can sneak in.... */
1617 case CONST_INT:
1618 return 0;
1620 /* Indirect indirect addressing. */
1621 case MEM:
1622 return 0;
1624 case CONST_DOUBLE:
1625 fatal_insn ("Using CONST_DOUBLE for address", addr);
1627 default:
1628 return 0;
1631 /* Validate the base register. */
1632 if (base)
1634 /* Check that the address is offsettable for HImode and HFmode. */
1635 if (indx && (mode == HImode || mode == HFmode))
1636 return 0;
1638 /* Handle DP based stuff. */
1639 if (REGNO (base) == DP_REGNO)
1640 return 1;
1641 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1642 return 0;
1643 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1644 return 0;
1647 /* Now validate the index register. */
1648 if (indx)
1650 if (GET_CODE (indx) != REG)
1651 return 0;
1652 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1653 return 0;
1654 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1655 return 0;
1658 /* Validate displacement. */
1659 if (disp)
1661 if (GET_CODE (disp) != CONST_INT)
1662 return 0;
1663 if (mode == HImode || mode == HFmode)
1665 /* The offset displacement must be legitimate. */
1666 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1667 return 0;
1669 else
1671 if (! IS_DISP8_CONST (INTVAL (disp)))
1672 return 0;
1674 /* Can't add an index with a disp. */
1675 if (indx)
1676 return 0;
1678 return 1;
1683 c4x_legitimize_address (orig, mode)
1684 rtx orig ATTRIBUTE_UNUSED;
1685 enum machine_mode mode ATTRIBUTE_UNUSED;
1687 if (GET_CODE (orig) == SYMBOL_REF
1688 || GET_CODE (orig) == LABEL_REF)
1690 if (mode == HImode || mode == HFmode)
1692 /* We need to force the address into
1693 a register so that it is offsettable. */
1694 rtx addr_reg = gen_reg_rtx (Pmode);
1695 emit_move_insn (addr_reg, orig);
1696 return addr_reg;
1698 else
1700 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1702 if (! TARGET_SMALL)
1703 emit_insn (gen_set_ldp (dp_reg, orig));
1705 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1709 return NULL_RTX;
1713 /* Provide the costs of an addressing mode that contains ADDR.
1714 If ADDR is not a valid address, its cost is irrelevant.
1715 This is used in cse and loop optimisation to determine
1716 if it is worthwhile storing a common address into a register.
1717 Unfortunately, the C4x address cost depends on other operands. */
1719 int
1720 c4x_address_cost (addr)
1721 rtx addr;
1723 switch (GET_CODE (addr))
1725 case REG:
1726 return 1;
1728 case POST_INC:
1729 case POST_DEC:
1730 case PRE_INC:
1731 case PRE_DEC:
1732 return 1;
1734 /* These shouldn't be directly generated. */
1735 case SYMBOL_REF:
1736 case LABEL_REF:
1737 case CONST:
1738 return 10;
1740 case LO_SUM:
1742 rtx op1 = XEXP (addr, 1);
1744 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1745 return TARGET_SMALL ? 3 : 4;
1747 if (GET_CODE (op1) == CONST)
1749 rtx offset = const0_rtx;
1751 op1 = eliminate_constant_term (op1, &offset);
1753 /* ??? These costs need rethinking... */
1754 if (GET_CODE (op1) == LABEL_REF)
1755 return 3;
1757 if (GET_CODE (op1) != SYMBOL_REF)
1758 return 4;
1760 if (INTVAL (offset) == 0)
1761 return 3;
1763 return 4;
1765 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1767 break;
1769 case PLUS:
1771 register rtx op0 = XEXP (addr, 0);
1772 register rtx op1 = XEXP (addr, 1);
1774 if (GET_CODE (op0) != REG)
1775 break;
1777 switch (GET_CODE (op1))
1779 default:
1780 break;
1782 case REG:
1783 /* This cost for REG+REG must be greater than the cost
1784 for REG if we want autoincrement addressing modes. */
1785 return 2;
1787 case CONST_INT:
1788 /* The following tries to improve GIV combination
1789 in strength reduce but appears not to help. */
1790 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1791 return 1;
1793 if (IS_DISP1_CONST (INTVAL (op1)))
1794 return 1;
1796 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
1797 return 2;
1799 return 3;
1802 default:
1803 break;
1806 return 4;
1811 c4x_gen_compare_reg (code, x, y)
1812 enum rtx_code code;
1813 rtx x, y;
1815 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1816 rtx cc_reg;
1818 if (mode == CC_NOOVmode
1819 && (code == LE || code == GE || code == LT || code == GT))
1820 return NULL_RTX;
1822 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1823 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1824 gen_rtx_COMPARE (mode, x, y)));
1825 return cc_reg;
1828 char *
1829 c4x_output_cbranch (form, seq)
1830 const char *form;
1831 rtx seq;
1833 int delayed = 0;
1834 int annultrue = 0;
1835 int annulfalse = 0;
1836 rtx delay;
1837 char *cp;
1838 static char str[100];
1840 if (final_sequence)
1842 delay = XVECEXP (final_sequence, 0, 1);
1843 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1844 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1845 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1847 strcpy (str, form);
1848 cp = &str [strlen (str)];
1849 if (delayed)
1851 *cp++ = '%';
1852 *cp++ = '#';
1854 if (annultrue)
1856 *cp++ = 'a';
1857 *cp++ = 't';
1859 if (annulfalse)
1861 *cp++ = 'a';
1862 *cp++ = 'f';
1864 *cp++ = '\t';
1865 *cp++ = '%';
1866 *cp++ = 'l';
1867 *cp++ = '1';
1868 *cp = 0;
1869 return str;
1872 void
1873 c4x_print_operand (file, op, letter)
1874 FILE *file; /* File to write to. */
1875 rtx op; /* Operand to print. */
1876 int letter; /* %<letter> or 0. */
1878 rtx op1;
1879 enum rtx_code code;
1881 switch (letter)
1883 case '#': /* Delayed. */
1884 if (final_sequence)
1885 asm_fprintf (file, "d");
1886 return;
1889 code = GET_CODE (op);
1890 switch (letter)
1892 case 'A': /* Direct address. */
1893 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1894 asm_fprintf (file, "@");
1895 break;
1897 case 'H': /* Sethi. */
1898 output_addr_const (file, op);
1899 return;
1901 case 'I': /* Reversed condition. */
1902 code = reverse_condition (code);
1903 break;
1905 case 'L': /* Log 2 of constant. */
1906 if (code != CONST_INT)
1907 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1908 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1909 return;
1911 case 'N': /* Ones complement of small constant. */
1912 if (code != CONST_INT)
1913 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1914 fprintf (file, "%d", ~INTVAL (op));
1915 return;
1917 case 'K': /* Generate ldp(k) if direct address. */
1918 if (! TARGET_SMALL
1919 && code == MEM
1920 && GET_CODE (XEXP (op, 0)) == LO_SUM
1921 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1922 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1924 op1 = XEXP (XEXP (op, 0), 1);
1925 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1927 asm_fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1928 output_address (XEXP (adjust_address (op, VOIDmodem, 1), 0));
1929 asm_fprintf (file, "\n");
1932 return;
1934 case 'M': /* Generate ldp(k) if direct address. */
1935 if (! TARGET_SMALL /* Only used in asm statements. */
1936 && code == MEM
1937 && (GET_CODE (XEXP (op, 0)) == CONST
1938 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1940 asm_fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1941 output_address (XEXP (op, 0));
1942 asm_fprintf (file, "\n\t");
1944 return;
1946 case 'O': /* Offset address. */
1947 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1948 break;
1949 else if (code == MEM)
1950 output_address (XEXP (adjust_address (op, 1), VOIDmode, 0));
1951 else if (code == REG)
1952 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1953 else
1954 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1955 return;
1957 case 'C': /* Call. */
1958 break;
1960 case 'U': /* Call/callu. */
1961 if (code != SYMBOL_REF)
1962 asm_fprintf (file, "u");
1963 return;
1965 default:
1966 break;
1969 switch (code)
1971 case REG:
1972 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1973 && ! TARGET_TI)
1974 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1975 else
1976 fprintf (file, "%s", reg_names[REGNO (op)]);
1977 break;
1979 case MEM:
1980 output_address (XEXP (op, 0));
1981 break;
1983 case CONST_DOUBLE:
1985 char str[30];
1986 REAL_VALUE_TYPE r;
1988 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
1989 REAL_VALUE_TO_DECIMAL (r, "%20f", str);
1990 fprintf (file, "%s", str);
1992 break;
1994 case CONST_INT:
1995 fprintf (file, "%d", INTVAL (op));
1996 break;
1998 case NE:
1999 asm_fprintf (file, "ne");
2000 break;
2002 case EQ:
2003 asm_fprintf (file, "eq");
2004 break;
2006 case GE:
2007 asm_fprintf (file, "ge");
2008 break;
2010 case GT:
2011 asm_fprintf (file, "gt");
2012 break;
2014 case LE:
2015 asm_fprintf (file, "le");
2016 break;
2018 case LT:
2019 asm_fprintf (file, "lt");
2020 break;
2022 case GEU:
2023 asm_fprintf (file, "hs");
2024 break;
2026 case GTU:
2027 asm_fprintf (file, "hi");
2028 break;
2030 case LEU:
2031 asm_fprintf (file, "ls");
2032 break;
2034 case LTU:
2035 asm_fprintf (file, "lo");
2036 break;
2038 case SYMBOL_REF:
2039 output_addr_const (file, op);
2040 break;
2042 case CONST:
2043 output_addr_const (file, XEXP (op, 0));
2044 break;
2046 case CODE_LABEL:
2047 break;
2049 default:
2050 fatal_insn ("c4x_print_operand: Bad operand case", op);
2051 break;
2056 void
2057 c4x_print_operand_address (file, addr)
2058 FILE *file;
2059 rtx addr;
2061 switch (GET_CODE (addr))
2063 case REG:
2064 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2065 break;
2067 case PRE_DEC:
2068 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2069 break;
2071 case POST_INC:
2072 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2073 break;
2075 case POST_MODIFY:
2077 rtx op0 = XEXP (XEXP (addr, 1), 0);
2078 rtx op1 = XEXP (XEXP (addr, 1), 1);
2080 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2081 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2082 reg_names[REGNO (op1)]);
2083 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2084 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2085 INTVAL (op1));
2086 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2087 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2088 -INTVAL (op1));
2089 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2090 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2091 reg_names[REGNO (op1)]);
2092 else
2093 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2095 break;
2097 case PRE_MODIFY:
2099 rtx op0 = XEXP (XEXP (addr, 1), 0);
2100 rtx op1 = XEXP (XEXP (addr, 1), 1);
2102 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2103 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2104 reg_names[REGNO (op1)]);
2105 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2106 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2107 INTVAL (op1));
2108 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2109 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2110 -INTVAL (op1));
2111 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2112 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2113 reg_names[REGNO (op1)]);
2114 else
2115 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2117 break;
2119 case PRE_INC:
2120 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2121 break;
2123 case POST_DEC:
2124 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2125 break;
2127 case PLUS: /* Indirect with displacement. */
2129 rtx op0 = XEXP (addr, 0);
2130 rtx op1 = XEXP (addr, 1);
2132 if (REG_P (op0))
2134 if (REG_P (op1))
2136 if (IS_INDEX_REG (op0))
2138 fprintf (file, "*+%s(%s)",
2139 reg_names[REGNO (op1)],
2140 reg_names[REGNO (op0)]); /* Index + base. */
2142 else
2144 fprintf (file, "*+%s(%s)",
2145 reg_names[REGNO (op0)],
2146 reg_names[REGNO (op1)]); /* Base + index. */
2149 else if (INTVAL (op1) < 0)
2151 fprintf (file, "*-%s(%d)",
2152 reg_names[REGNO (op0)],
2153 -INTVAL (op1)); /* Base - displacement. */
2155 else
2157 fprintf (file, "*+%s(%d)",
2158 reg_names[REGNO (op0)],
2159 INTVAL (op1)); /* Base + displacement. */
2162 else
2163 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2165 break;
2167 case LO_SUM:
2169 rtx op0 = XEXP (addr, 0);
2170 rtx op1 = XEXP (addr, 1);
2172 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2173 c4x_print_operand_address (file, op1);
2174 else
2175 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2177 break;
2179 case CONST:
2180 case SYMBOL_REF:
2181 case LABEL_REF:
2182 fprintf (file, "@");
2183 output_addr_const (file, addr);
2184 break;
2186 /* We shouldn't access CONST_INT addresses. */
2187 case CONST_INT:
2189 default:
2190 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2191 break;
2196 /* Return nonzero if the floating point operand will fit
2197 in the immediate field. */
2199 static int
2200 c4x_immed_float_p (op)
2201 rtx op;
2203 long convval[2];
2204 int exponent;
2205 REAL_VALUE_TYPE r;
2207 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2208 if (GET_MODE (op) == HFmode)
2209 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2210 else
2212 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2213 convval[1] = 0;
2216 /* Sign extend exponent. */
2217 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2218 if (exponent == -128)
2219 return 1; /* 0.0 */
2220 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2221 return 0; /* Precision doesn't fit. */
2222 return (exponent <= 7) /* Positive exp. */
2223 && (exponent >= -7); /* Negative exp. */
2227 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2228 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2230 None of the last four instructions from the bottom of the block can
2231 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2232 BcondAT or RETIcondD.
2234 This routine scans the four previous insns for a jump insn, and if
2235 one is found, returns 1 so that we bung in a nop instruction.
2236 This simple minded strategy will add a nop, when it may not
2237 be required. Say when there is a JUMP_INSN near the end of the
2238 block that doesn't get converted into a delayed branch.
2240 Note that we cannot have a call insn, since we don't generate
2241 repeat loops with calls in them (although I suppose we could, but
2242 there's no benefit.)
2244 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2247 c4x_rptb_nop_p (insn)
2248 rtx insn;
2250 rtx start_label;
2251 int i;
2253 /* Extract the start label from the jump pattern (rptb_end). */
2254 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2256 /* If there is a label at the end of the loop we must insert
2257 a NOP. */
2258 do {
2259 insn = previous_insn (insn);
2260 } while (GET_CODE (insn) == NOTE
2261 || GET_CODE (insn) == USE
2262 || GET_CODE (insn) == CLOBBER);
2263 if (GET_CODE (insn) == CODE_LABEL)
2264 return 1;
2266 for (i = 0; i < 4; i++)
2268 /* Search back for prev non-note and non-label insn. */
2269 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2270 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2272 if (insn == start_label)
2273 return i == 0;
2275 insn = previous_insn (insn);
2278 /* If we have a jump instruction we should insert a NOP. If we
2279 hit repeat block top we should only insert a NOP if the loop
2280 is empty. */
2281 if (GET_CODE (insn) == JUMP_INSN)
2282 return 1;
2283 insn = previous_insn (insn);
2285 return 0;
2289 /* The C4x looping instruction needs to be emitted at the top of the
2290 loop. Emitting the true RTL for a looping instruction at the top of
2291 the loop can cause problems with flow analysis. So instead, a dummy
2292 doloop insn is emitted at the end of the loop. This routine checks
2293 for the presence of this doloop insn and then searches back to the
2294 top of the loop, where it inserts the true looping insn (provided
2295 there are no instructions in the loop which would cause problems).
2296 Any additional labels can be emitted at this point. In addition, if
2297 the desired loop count register was not allocated, this routine does
2298 nothing.
2300 Before we can create a repeat block looping instruction we have to
2301 verify that there are no jumps outside the loop and no jumps outside
2302 the loop go into this loop. This can happen in the basic blocks reorder
2303 pass. The C4x cpu can not handle this. */
2305 static int
2306 c4x_label_ref_used_p (x, code_label)
2307 rtx x, code_label;
2309 enum rtx_code code;
2310 int i, j;
2311 const char *fmt;
2313 if (x == 0)
2314 return 0;
2316 code = GET_CODE (x);
2317 if (code == LABEL_REF)
2318 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2320 fmt = GET_RTX_FORMAT (code);
2321 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2323 if (fmt[i] == 'e')
2325 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2326 return 1;
2328 else if (fmt[i] == 'E')
2329 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2330 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2331 return 1;
2333 return 0;
2337 static int
2338 c4x_rptb_valid_p (insn, start_label)
2339 rtx insn, start_label;
2341 rtx end = insn;
2342 rtx start;
2343 rtx tmp;
2345 /* Find the start label. */
2346 for (; insn; insn = PREV_INSN (insn))
2347 if (insn == start_label)
2348 break;
2350 /* Note found then we can not use a rptb or rpts. The label was
2351 probably moved by the basic block reorder pass. */
2352 if (! insn)
2353 return 0;
2355 start = insn;
2356 /* If any jump jumps inside this block then we must fail. */
2357 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2359 if (GET_CODE (insn) == CODE_LABEL)
2361 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2362 if (GET_CODE (tmp) == JUMP_INSN
2363 && c4x_label_ref_used_p (tmp, insn))
2364 return 0;
2367 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2369 if (GET_CODE (insn) == CODE_LABEL)
2371 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2372 if (GET_CODE (tmp) == JUMP_INSN
2373 && c4x_label_ref_used_p (tmp, insn))
2374 return 0;
2377 /* If any jump jumps outside this block then we must fail. */
2378 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2380 if (GET_CODE (insn) == CODE_LABEL)
2382 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2383 if (GET_CODE (tmp) == JUMP_INSN
2384 && c4x_label_ref_used_p (tmp, insn))
2385 return 0;
2386 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2387 if (GET_CODE (tmp) == JUMP_INSN
2388 && c4x_label_ref_used_p (tmp, insn))
2389 return 0;
2393 /* All checks OK. */
2394 return 1;
/* Insert a repeat block (RPTB/RPTS) for the loop ending at INSN, the
   rptb_end jump insn.  If the loop counter was not allocated to RC,
   or the loop body disqualifies the repeat-block form, fall back to
   an explicit decrement-compare-branch sequence instead.  */

void
c4x_rptb_insert (insn)
     rtx insn;
{
  rtx end_label;
  rtx start_label;
  rtx new_start_label;
  rtx count_reg;

  /* If the count register has not been allocated to RC, say if
     there is a movstr pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop.  */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)
    return;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))
    {
      /* We can not use the rptb insn.  Replace it so reorg can use
	 the delay slots of the jump insn.  */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
      emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;
      delete_insn (insn);
      return;
    }

  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  /* Walk backwards to the original start label, redirecting any jump
     that targets it to the new start label on the way.  */
  for (; insn; insn = PREV_INSN (insn))
    {
      if (insn == start_label)
	break;
      if (GET_CODE (insn) == JUMP_INSN &&
	  JUMP_LABEL (insn) == start_label)
	redirect_jump (insn, new_start_label, 0);
    }
  if (! insn)
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  /* Prefer the single-instruction RPTS form when the loop qualifies.  */
  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
  else
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  /* The old start label may now be dead if every jump was redirected.  */
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
}
/* This function is a C4x special called immediately before delayed
   branch scheduling.  We fix up RTPB style loops that didn't get RC
   allocated as the loop counter.  */

void
c4x_process_after_reload (first)
     rtx first;
{
  rtx insn;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      /* Look for insn.  */
      if (INSN_P (insn))
	{
	  int insn_code_number;
	  rtx old;

	  insn_code_number = recog_memoized (insn);

	  if (insn_code_number < 0)
	    continue;

	  /* Insert the RTX for RPTB at the top of the loop
	     and a label at the end of the loop.  */
	  if (insn_code_number == CODE_FOR_rptb_end)
	    c4x_rptb_insert(insn);

	  /* We need to split the insn here.  Otherwise the calls to
	     force_const_mem will not work for load_immed_address.  */
	  old = insn;

	  /* Don't split the insn if it has been deleted.  */
	  if (! INSN_DELETED_P (old))
	    insn = try_split (PATTERN(old), old, 1);

	  /* When not optimizing, the old insn will be still left around
	     with only the 'deleted' bit set.  Transform it into a note
	     to avoid confusion of subsequent processing.  */
	  if (INSN_DELETED_P (old))
	    {
	      PUT_CODE (old, NOTE);
	      NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (old) = 0;
	    }
	}
    }
}
2509 static int
2510 c4x_a_register (op)
2511 rtx op;
2513 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2517 static int
2518 c4x_x_register (op)
2519 rtx op;
2521 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2525 static int
2526 c4x_immed_int_constant (op)
2527 rtx op;
2529 if (GET_CODE (op) != CONST_INT)
2530 return 0;
2532 return GET_MODE (op) == VOIDmode
2533 || GET_MODE_CLASS (op) == MODE_INT
2534 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
/* Return non-zero if OP is a CONST_DOUBLE in a floating point mode
   that can be an immediate on the C[34]x (QFmode or HFmode).  */

static int
c4x_immed_float_constant (op)
     rtx op;
{
  if (GET_CODE (op) != CONST_DOUBLE)
    return 0;

  /* Do not check if the CONST_DOUBLE is in memory.  If there is a MEM
     present this only means that a MEM rtx has been generated.  It does
     not mean the rtx is really in memory.  */

  return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
}
/* If the 32-bit constant OP can be materialized by loading a 16-bit
   signed immediate and shifting it left, return the shift count;
   otherwise return -1.  */

int
c4x_shiftable_constant (op)
     rtx op;
{
  int i;
  int mask;
  int val = INTVAL (op);

  /* Find the lowest set bit (candidate shift count); if none is found
     in the low 16 bits, I ends up as 16 and the test below decides.  */
  for (i = 0; i < 16; i++)
    {
      if (val & (1 << i))
	break;
    }
  mask = ((0xffff >> i) << 16) | 0xffff;
  /* For a negative value, shift in copies of the sign bit so that the
     shifted value is still a 16-bit signed constant.
     NOTE(review): `val & (1 << 31)` and `val >> i` on a negative VAL
     assume a 32-bit int with arithmetic right shift on the host —
     confirm for cross hosts.  */
  if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
		      : (val >> i) & mask))
    return i;
  return -1;
}
2575 c4x_H_constant (op)
2576 rtx op;
2578 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2583 c4x_I_constant (op)
2584 rtx op;
2586 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2591 c4x_J_constant (op)
2592 rtx op;
2594 if (TARGET_C3X)
2595 return 0;
2596 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2600 static int
2601 c4x_K_constant (op)
2602 rtx op;
2604 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2605 return 0;
2606 return IS_INT5_CONST (INTVAL (op));
2611 c4x_L_constant (op)
2612 rtx op;
2614 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2618 static int
2619 c4x_N_constant (op)
2620 rtx op;
2622 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2626 static int
2627 c4x_O_constant (op)
2628 rtx op;
2630 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
/* The constraints do not have to check the register class,
   except when needed to discriminate between the constraints.
   The operand has been checked by the predicates to be valid.  */

/* ARx + 9-bit signed const or IRn
   *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
   We don't include the pre/post inc/dec forms here since
   they are handled by the <> constraints.  */

int
c4x_Q_constraint (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);

  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	/* Base plus index register form.  */
	if (REG_P (op1))
	  return 1;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable.  */
	if (mode == HImode || mode == HFmode)
	  return IS_DISP8_OFF_CONST (INTVAL (op1));

	return IS_DISP8_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
/* ARx + 5-bit unsigned const
   *ARx, *+ARx(n) for n < 32.  */

int
c4x_R_constraint (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);

  /* This addressing form only exists on the C4x.  */
  if (TARGET_C3X)
    return 0;
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable; the +1 accounts for
	   the displacement of the second word.  */
	if (mode == HImode || mode == HFmode)
	  return IS_UINT5_CONST (INTVAL (op1) + 1);

	return IS_UINT5_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
2731 static int
2732 c4x_R_indirect (op)
2733 rtx op;
2735 enum machine_mode mode = GET_MODE (op);
2737 if (TARGET_C3X || GET_CODE (op) != MEM)
2738 return 0;
2740 op = XEXP (op, 0);
2741 switch (GET_CODE (op))
2743 case REG:
2744 return IS_ADDR_OR_PSEUDO_REG (op);
2746 case PLUS:
2748 rtx op0 = XEXP (op, 0);
2749 rtx op1 = XEXP (op, 1);
2751 /* HImode and HFmode must be offsettable. */
2752 if (mode == HImode || mode == HFmode)
2753 return IS_ADDR_OR_PSEUDO_REG (op0)
2754 && GET_CODE (op1) == CONST_INT
2755 && IS_UINT5_CONST (INTVAL (op1) + 1);
2757 return REG_P (op0)
2758 && IS_ADDR_OR_PSEUDO_REG (op0)
2759 && GET_CODE (op1) == CONST_INT
2760 && IS_UINT5_CONST (INTVAL (op1));
2762 break;
2764 default:
2765 break;
2767 return 0;
/* ARx + 1-bit unsigned const or IRn
   *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
   We don't include the pre/post inc/dec forms here since
   they are handled by the <> constraints.  */

int
c4x_S_constraint (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* The modify expression must add or subtract from the base
	   register itself.  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	return REG_P (op0) && REG_P (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated.  */
      }
      break;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (!REG_P (op0))
	  return 0;

	/* Base plus index register form.  */
	if (REG_P (op1))
	  return 1;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable.  */
	if (mode == HImode || mode == HFmode)
	  return IS_DISP1_OFF_CONST (INTVAL (op1));

	return IS_DISP1_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
/* Like the S constraint, but additionally requires the registers
   involved to be address/index registers (or pseudos), and also
   accepts the auto inc/dec forms.  */

static int
c4x_S_indirect (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
    case POST_DEC:
      /* Decrement forms only for single-word modes.  */
      if (mode != QImode && mode != QFmode)
	return 0;
      /* Fall through.  */
    case PRE_INC:
    case POST_INC:
      op = XEXP (op, 0);
      /* Fall through to validate the base register.  */
    case REG:
      return IS_ADDR_OR_PSEUDO_REG (op);

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	/* The modify expression must add or subtract from the base
	   register itself.  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
	  && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated.  */
      }

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (REG_P (op0))
	  {
	    /* HImode and HFmode must be offsettable.  */
	    if (mode == HImode || mode == HFmode)
	      return IS_ADDR_OR_PSEUDO_REG (op0)
		&& GET_CODE (op1) == CONST_INT
		&& IS_DISP1_OFF_CONST (INTVAL (op1));

	    /* Base plus index register, in either order.  */
	    if (REG_P (op1))
	      return (IS_INDEX_OR_PSEUDO_REG (op1)
		      && IS_ADDR_OR_PSEUDO_REG (op0))
		|| (IS_ADDR_OR_PSEUDO_REG (op1)
		    && IS_INDEX_OR_PSEUDO_REG (op0));

	    return IS_ADDR_OR_PSEUDO_REG (op0)
	      && GET_CODE (op1) == CONST_INT
	      && IS_DISP1_CONST (INTVAL (op1));
	  }
      }
      break;

    default:
      break;
    }
  return 0;
}
2912 /* Direct memory operand. */
2915 c4x_T_constraint (op)
2916 rtx op;
2918 if (GET_CODE (op) != MEM)
2919 return 0;
2920 op = XEXP (op, 0);
2922 if (GET_CODE (op) != LO_SUM)
2924 /* Allow call operands. */
2925 return GET_CODE (op) == SYMBOL_REF
2926 && GET_MODE (op) == Pmode
2927 && SYMBOL_REF_FLAG (op);
2930 /* HImode and HFmode are not offsettable. */
2931 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2932 return 0;
2934 if ((GET_CODE (XEXP (op, 0)) == REG)
2935 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2936 return c4x_U_constraint (XEXP (op, 1));
2938 return 0;
2942 /* Symbolic operand. */
2945 c4x_U_constraint (op)
2946 rtx op;
2948 /* Don't allow direct addressing to an arbitrary constant. */
2949 return GET_CODE (op) == CONST
2950 || GET_CODE (op) == SYMBOL_REF
2951 || GET_CODE (op) == LABEL_REF;
2956 c4x_autoinc_operand (op, mode)
2957 rtx op;
2958 enum machine_mode mode ATTRIBUTE_UNUSED;
2960 if (GET_CODE (op) == MEM)
2962 enum rtx_code code = GET_CODE (XEXP (op, 0));
2964 if (code == PRE_INC
2965 || code == PRE_DEC
2966 || code == POST_INC
2967 || code == POST_DEC
2968 || code == PRE_MODIFY
2969 || code == POST_MODIFY
2971 return 1;
2973 return 0;
/* Match any operand.  Used where the md file needs an always-true
   predicate.  */

int
any_operand (op, mode)
     register rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
/* Nonzero if OP is a floating point value with value 0.0.  */

int
fp_zero_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  REAL_VALUE_TYPE r;

  if (GET_CODE (op) != CONST_DOUBLE)
    return 0;
  /* Compare against dconst0 via the host REAL_VALUE representation.  */
  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  return REAL_VALUES_EQUAL (r, dconst0);
}
/* Match a constant operand that is valid as an immediate for MODE:
   an immediate float for QFmode/HFmode, or a loadable immediate
   integer for QImode (and Pmode when it differs).  */

int
const_operand (op, mode)
     register rtx op;
     register enum machine_mode mode;
{
  switch (mode)
    {
    case QFmode:
    case HFmode:
      if (GET_CODE (op) != CONST_DOUBLE
	  || GET_MODE (op) != mode
	  || GET_MODE_CLASS (mode) != MODE_FLOAT)
	return 0;

      return c4x_immed_float_p (op);

#if Pmode != QImode
    case Pmode:
#endif
    case QImode:
      /* CONSTANT_P_RTX is the placeholder for __builtin_constant_p.  */
      if (GET_CODE (op) == CONSTANT_P_RTX)
	return 1;

      if (GET_CODE (op) != CONST_INT
	  || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
	  || GET_MODE_CLASS (mode) != MODE_INT)
	return 0;

      /* Either a 16-bit signed constant or a "high" constant with
	 only the upper 16 bits set.  */
      return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));

    case HImode:
      return 0;

    default:
      return 0;
    }
}
3044 stik_const_operand (op, mode)
3045 rtx op;
3046 enum machine_mode mode ATTRIBUTE_UNUSED;
3048 return c4x_K_constant (op);
3053 not_const_operand (op, mode)
3054 rtx op;
3055 enum machine_mode mode ATTRIBUTE_UNUSED;
3057 return c4x_N_constant (op);
3062 reg_operand (op, mode)
3063 rtx op;
3064 enum machine_mode mode;
3066 if (GET_CODE (op) == SUBREG
3067 && GET_MODE (op) == QFmode)
3068 return 0;
3069 return register_operand (op, mode);
/* Match a SUBREG that reinterprets an integer register in a float
   mode, which can be generated for a union of an int and a long
   double.  NOTE(review): the original comment describes
   (subreg:HF (reg:HI)) but the code only tests for a QFmode SUBREG
   of a QImode or HImode register — confirm which is intended.  */

int
mixed_subreg_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == SUBREG
      && (GET_MODE (op) == QFmode)
      && (GET_MODE (SUBREG_REG (op)) == QImode
	  || GET_MODE (SUBREG_REG (op)) == HImode))
    return 1;
  return 0;
}
3090 reg_imm_operand (op, mode)
3091 rtx op;
3092 enum machine_mode mode ATTRIBUTE_UNUSED;
3094 if (REG_P (op) || CONSTANT_P (op))
3095 return 1;
3096 return 0;
/* Match an operand whose address has no side effects: a register, a
   constant, or a MEM with a simple register, register+offset,
   DP-relative, or symbolic address.  */

int
not_modify_reg (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (REG_P (op) || CONSTANT_P (op))
    return 1;
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
	  return 1;
      }
      /* NOTE(review): no break here, so a PLUS whose second operand
	 is neither a register nor a CONST_INT falls through into the
	 LO_SUM test below — confirm this fallthrough is intended.  */

    case LO_SUM:
      {
	rtx op0 = XEXP (op, 0);

	/* DP-relative addressing has no side effects.  */
	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  return 1;
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    default:
      break;
    }
  return 0;
}
3149 not_rc_reg (op, mode)
3150 rtx op;
3151 enum machine_mode mode ATTRIBUTE_UNUSED;
3153 if (REG_P (op) && REGNO (op) == RC_REGNO)
3154 return 0;
3155 return 1;
3159 /* Extended precision register R0-R1. */
3162 r0r1_reg_operand (op, mode)
3163 rtx op;
3164 enum machine_mode mode;
3166 if (! reg_operand (op, mode))
3167 return 0;
3168 if (GET_CODE (op) == SUBREG)
3169 op = SUBREG_REG (op);
3170 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3174 /* Extended precision register R2-R3. */
3177 r2r3_reg_operand (op, mode)
3178 rtx op;
3179 enum machine_mode mode;
3181 if (! reg_operand (op, mode))
3182 return 0;
3183 if (GET_CODE (op) == SUBREG)
3184 op = SUBREG_REG (op);
3185 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3189 /* Low extended precision register R0-R7. */
3192 ext_low_reg_operand (op, mode)
3193 rtx op;
3194 enum machine_mode mode;
3196 if (! reg_operand (op, mode))
3197 return 0;
3198 if (GET_CODE (op) == SUBREG)
3199 op = SUBREG_REG (op);
3200 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3204 /* Extended precision register. */
3207 ext_reg_operand (op, mode)
3208 rtx op;
3209 enum machine_mode mode;
3211 if (! reg_operand (op, mode))
3212 return 0;
3213 if (GET_CODE (op) == SUBREG)
3214 op = SUBREG_REG (op);
3215 if (! REG_P (op))
3216 return 0;
3217 return IS_EXT_OR_PSEUDO_REG (op);
3221 /* Standard precision register. */
3224 std_reg_operand (op, mode)
3225 rtx op;
3226 enum machine_mode mode;
3228 if (! reg_operand (op, mode))
3229 return 0;
3230 if (GET_CODE (op) == SUBREG)
3231 op = SUBREG_REG (op);
3232 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3235 /* Standard precision or normal register. */
3238 std_or_reg_operand (op, mode)
3239 rtx op;
3240 enum machine_mode mode;
3242 if (reload_in_progress)
3243 return std_reg_operand (op, mode);
3244 return reg_operand (op, mode);
3247 /* Address register. */
3250 addr_reg_operand (op, mode)
3251 rtx op;
3252 enum machine_mode mode;
3254 if (! reg_operand (op, mode))
3255 return 0;
3256 return c4x_a_register (op);
3260 /* Index register. */
3263 index_reg_operand (op, mode)
3264 rtx op;
3265 enum machine_mode mode;
3267 if (! reg_operand (op, mode))
3268 return 0;
3269 if (GET_CODE (op) == SUBREG)
3270 op = SUBREG_REG (op);
3271 return c4x_x_register (op);
3275 /* DP register. */
3278 dp_reg_operand (op, mode)
3279 rtx op;
3280 enum machine_mode mode ATTRIBUTE_UNUSED;
3282 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3286 /* SP register. */
3289 sp_reg_operand (op, mode)
3290 rtx op;
3291 enum machine_mode mode ATTRIBUTE_UNUSED;
3293 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3297 /* ST register. */
3300 st_reg_operand (op, mode)
3301 register rtx op;
3302 enum machine_mode mode ATTRIBUTE_UNUSED;
3304 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3308 /* RC register. */
3311 rc_reg_operand (op, mode)
3312 register rtx op;
3313 enum machine_mode mode ATTRIBUTE_UNUSED;
3315 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3320 call_address_operand (op, mode)
3321 rtx op;
3322 enum machine_mode mode ATTRIBUTE_UNUSED;
3324 return (REG_P (op) || symbolic_address_operand (op, mode));
3328 /* Symbolic address operand. */
3331 symbolic_address_operand (op, mode)
3332 register rtx op;
3333 enum machine_mode mode ATTRIBUTE_UNUSED;
3335 switch (GET_CODE (op))
3337 case CONST:
3338 case SYMBOL_REF:
3339 case LABEL_REF:
3340 return 1;
3341 default:
3342 return 0;
3347 /* Check dst operand of a move instruction. */
3350 dst_operand (op, mode)
3351 rtx op;
3352 enum machine_mode mode;
3354 if (GET_CODE (op) == SUBREG
3355 && mixed_subreg_operand (op, mode))
3356 return 0;
3358 if (REG_P (op))
3359 return reg_operand (op, mode);
3361 return nonimmediate_operand (op, mode);
/* Check src operand of two operand arithmetic instructions:
   a register, a loadable immediate, or a suitable memory operand.  */

int
src_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == SUBREG
      && mixed_subreg_operand (op, mode))
    return 0;

  if (REG_P (op))
    return reg_operand (op, mode);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Integer immediates must fit in 16 signed bits.  */
  if (GET_CODE (op) == CONST_INT)
    return (mode == QImode || mode == Pmode || mode == HImode)
      && c4x_I_constant (op);

  /* We don't like CONST_DOUBLE integers.  */
  if (GET_CODE (op) == CONST_DOUBLE)
    return c4x_H_constant (op);

  /* Disallow symbolic addresses.  Only the predicate
     symbolic_address_operand will match these.  */
  if (GET_CODE (op) == SYMBOL_REF
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == CONST)
    return 0;

  /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
     access to symbolic addresses.  These operands will get forced
     into a register and the movqi expander will generate a
     HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero.  */
  if (GET_CODE (op) == MEM
      && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	   || GET_CODE (XEXP (op, 0)) == LABEL_REF
	   || GET_CODE (XEXP (op, 0)) == CONST)))
    return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;

  return general_operand (op, mode);
}
3412 src_hi_operand (op, mode)
3413 rtx op;
3414 enum machine_mode mode;
3416 if (c4x_O_constant (op))
3417 return 1;
3418 return src_operand (op, mode);
3422 /* Check src operand of two operand logical instructions. */
3425 lsrc_operand (op, mode)
3426 rtx op;
3427 enum machine_mode mode;
3429 if (mode == VOIDmode)
3430 mode = GET_MODE (op);
3432 if (mode != QImode && mode != Pmode)
3433 fatal_insn ("Mode not QImode", op);
3435 if (GET_CODE (op) == CONST_INT)
3436 return c4x_L_constant (op) || c4x_J_constant (op);
3438 return src_operand (op, mode);
3442 /* Check src operand of two operand tricky instructions. */
3445 tsrc_operand (op, mode)
3446 rtx op;
3447 enum machine_mode mode;
3449 if (mode == VOIDmode)
3450 mode = GET_MODE (op);
3452 if (mode != QImode && mode != Pmode)
3453 fatal_insn ("Mode not QImode", op);
3455 if (GET_CODE (op) == CONST_INT)
3456 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3458 return src_operand (op, mode);
3463 reg_or_const_operand (op, mode)
3464 rtx op;
3465 enum machine_mode mode;
3467 return reg_operand (op, mode) || const_operand (op, mode);
3471 /* Check for indirect operands allowable in parallel instruction. */
3474 par_ind_operand (op, mode)
3475 rtx op;
3476 enum machine_mode mode;
3478 if (mode != VOIDmode && mode != GET_MODE (op))
3479 return 0;
3481 return c4x_S_indirect (op);
3485 /* Check for operands allowable in parallel instruction. */
3488 parallel_operand (op, mode)
3489 rtx op;
3490 enum machine_mode mode;
3492 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
/* Decompose the address of MEM rtx OP into a BASE register number, an
   INDEX register number, a constant displacement DISP, and a flag
   INCDEC that is non-zero when the address has an auto inc/dec or
   modify side effect.  Aborts on addresses outside the S forms.  */

static void
c4x_S_address_parse (op, base, incdec, index, disp)
     rtx op;
     int *base;
     int *incdec;
     int *index;
     int *disp;
{
  *base = 0;
  *incdec = 0;
  *index = 0;
  *disp = 0;

  if (GET_CODE (op) != MEM)
    fatal_insn ("Invalid indirect memory address", op);

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = -1;
      return;

    case POST_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case PRE_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 1;
      return;

    case POST_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case POST_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 0;		/* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;
      return;

    case PRE_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 1;		/* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;

      return;

    case REG:
      *base = REGNO (op);
      return;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* Base plus index register, or base plus 1-bit displacement;
	   the registers may appear in either order.  */
	if (c4x_a_register (op0))
	  {
	    if (c4x_x_register (op1))
	      {
		*base = REGNO (op0);
		*index = REGNO (op1);
		return;
	      }
	    else if ((GET_CODE (op1) == CONST_INT
		      && IS_DISP1_CONST (INTVAL (op1))))
	      {
		*base = REGNO (op0);
		*disp = INTVAL (op1);
		return;
	      }
	  }
	else if (c4x_x_register (op0) && c4x_a_register (op1))
	  {
	    *base = REGNO (op1);
	    *index = REGNO (op0);
	    return;
	  }
      }
      /* Fallthrough.  */

    default:
      fatal_insn ("Invalid indirect (S) memory address", op);
    }
}
3605 c4x_address_conflict (op0, op1, store0, store1)
3606 rtx op0;
3607 rtx op1;
3608 int store0;
3609 int store1;
3611 int base0;
3612 int base1;
3613 int incdec0;
3614 int incdec1;
3615 int index0;
3616 int index1;
3617 int disp0;
3618 int disp1;
3620 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3621 return 1;
3623 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3624 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3626 if (store0 && store1)
3628 /* If we have two stores in parallel to the same address, then
3629 the C4x only executes one of the stores. This is unlikely to
3630 cause problems except when writing to a hardware device such
3631 as a FIFO since the second write will be lost. The user
3632 should flag the hardware location as being volatile so that
3633 we don't do this optimisation. While it is unlikely that we
3634 have an aliased address if both locations are not marked
3635 volatile, it is probably safer to flag a potential conflict
3636 if either location is volatile. */
3637 if (! flag_argument_noalias)
3639 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3640 return 1;
3644 /* If have a parallel load and a store to the same address, the load
3645 is performed first, so there is no conflict. Similarly, there is
3646 no conflict if have parallel loads from the same address. */
3648 /* Cannot use auto increment or auto decrement twice for same
3649 base register. */
3650 if (base0 == base1 && incdec0 && incdec0)
3651 return 1;
3653 /* It might be too confusing for GCC if we have use a base register
3654 with a side effect and a memory reference using the same register
3655 in parallel. */
3656 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3657 return 1;
3659 /* We can not optimize the case where op1 and op2 refer to the same
3660 address. */
3661 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3662 return 1;
3664 /* No conflict. */
3665 return 0;
3669 /* Check for while loop inside a decrement and branch loop. */
3672 c4x_label_conflict (insn, jump, db)
3673 rtx insn;
3674 rtx jump;
3675 rtx db;
3677 while (insn)
3679 if (GET_CODE (insn) == CODE_LABEL)
3681 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3682 return 1;
3683 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3684 return 0;
3686 insn = PREV_INSN (insn);
3688 return 1;
/* Validate combination of operands for parallel load/store instructions.  */

int
valid_parallel_load_store (operands, mode)
     rtx *operands;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op3 = operands[3];

  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);
  if (GET_CODE (op3) == SUBREG)
    op3 = SUBREG_REG (op3);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Thus of the 4 operands, only 2
     should be REGs and the other 2 should be MEMs.  */

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op2 or op3, since
     this combination will require reloading.  */
  if (GET_CODE (op0) == REG
      && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
	  || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
    return 0;

  /* LDI||LDI: distinct destination registers and non-conflicting
     source addresses.  */
  if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
    return (REGNO (op0) != REGNO (op2))
      && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op1, op3, 0, 0);

  /* STI||STI.  */
  if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op0, op2, 1, 1);

  /* LDI||STI.  */
  if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op1, op2, 0, 1);

  /* STI||LDI.  */
  if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op0, op3, 1, 0);

  return 0;
}
3751 valid_parallel_operands_4 (operands, mode)
3752 rtx *operands;
3753 enum machine_mode mode ATTRIBUTE_UNUSED;
3755 rtx op0 = operands[0];
3756 rtx op2 = operands[2];
3758 if (GET_CODE (op0) == SUBREG)
3759 op0 = SUBREG_REG (op0);
3760 if (GET_CODE (op2) == SUBREG)
3761 op2 = SUBREG_REG (op2);
3763 /* This test prevents the multipack pass from using this pattern if
3764 op0 is used as an index or base register in op2, since this combination
3765 will require reloading. */
3766 if (GET_CODE (op0) == REG
3767 && GET_CODE (op2) == MEM
3768 && reg_mentioned_p (op0, XEXP (op2, 0)))
3769 return 0;
3771 return 1;
/* Validate the operand combination for 5-operand parallel insns.  */

int
valid_parallel_operands_5 (operands, mode)
     rtx *operands;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int regs = 0;
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op3 = operands[3];

  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Operands 1 and 2 may be commutative
     but only one of them can be a register.  */
  if (GET_CODE (op1) == REG)
    regs++;
  if (GET_CODE (op2) == REG)
    regs++;

  if (regs != 1)
    return 0;

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op3, since this combination
     will require reloading.  */
  if (GET_CODE (op0) == REG
      && GET_CODE (op3) == MEM
      && reg_mentioned_p (op0, XEXP (op3, 0)))
    return 0;

  return 1;
}
/* Validate the operand combination for 6-operand parallel insns.  */

int
valid_parallel_operands_6 (operands, mode)
     rtx *operands;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int regs = 0;
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op4 = operands[4];
  rtx op5 = operands[5];

  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);
  if (GET_CODE (op4) == SUBREG)
    op4 = SUBREG_REG (op4);
  if (GET_CODE (op5) == SUBREG)
    op5 = SUBREG_REG (op5);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Thus of the 4 input operands, only 2
     should be REGs and the other 2 should be MEMs.  */

  if (GET_CODE (op1) == REG)
    regs++;
  if (GET_CODE (op2) == REG)
    regs++;
  if (GET_CODE (op4) == REG)
    regs++;
  if (GET_CODE (op5) == REG)
    regs++;

  /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
     Perhaps we should count the MEMs as well?  */
  if (regs != 2)
    return 0;

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op4 or op5, since
     this combination will require reloading.  */
  if (GET_CODE (op0) == REG
      && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
	  || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
    return 0;

  return 1;
}
/* Validate combination of src operands.  Note that the operands have
   been screened by the src_operand predicate.  We just have to check
   that the combination of operands is valid.  If FORCE is set, ensure
   that the destination regno is valid if we have a 2 operand insn.  */

static int
c4x_valid_operands (code, operands, mode, force)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int force;
{
  rtx op1;
  rtx op2;
  enum rtx_code code1;
  enum rtx_code code2;

  /* COMPARE has only two operands; other codes have the destination
     in operands[0] and the sources in operands[1] and operands[2].  */
  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
    }
  else
    {
      op1 = operands[1];
      op2 = operands[2];
    }

  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);

  if (code1 == REG && code2 == REG)
    return 1;

  /* Two memory operands must both satisfy the same indirect
     constraint (both S, or both R).  */
  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
	return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
	{
	case CONST_INT:
	  /* NOTE(review): OP1 is a REG here so c4x_R_indirect (op1)
	     can never hold — this arm looks dead; confirm intent.  */
	  if (c4x_J_constant (op2) && c4x_R_indirect (op1))
	    return 1;
	  break;

	case CONST_DOUBLE:
	  if (! c4x_H_constant (op2))
	    return 0;
	  break;

	  /* Any valid memory operand screened by src_operand is OK.  */
	case MEM:
	  break;

	  /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	     into a stack slot memory address comprising a PLUS and a
	     constant.  */
	case ADDRESSOF:
	  break;

	default:
	  fatal_insn ("c4x_valid_operands: Internal error", op2);
	  break;
	}

      /* Check that we have a valid destination register for a two operand
	 instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
    }

  /* We assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));

  switch (code1)
    {
    case CONST_INT:
      if (c4x_J_constant (op1) && c4x_R_indirect (op2))
	return 1;
      break;

    case CONST_DOUBLE:
      if (! c4x_H_constant (op1))
	return 0;
      break;

      /* Any valid memory operand screened by src_operand is OK.  */
    case MEM:
#if 0
      if (code2 != REG)
	return 0;
#endif
      break;

      /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	 into a stack slot memory address comprising a PLUS and a
	 constant.  */
    case ADDRESSOF:
      break;

    default:
      abort ();
      break;
    }

  /* Check that we have a valid destination register for a two operand
     instruction.  */
  return ! force || REGNO (op1) == REGNO (operands[0]);
}
3995 int valid_operands (code, operands, mode)
3996 enum rtx_code code;
3997 rtx *operands;
3998 enum machine_mode mode;
4001 /* If we are not optimizing then we have to let anything go and let
4002 reload fix things up. instantiate_decl in function.c can produce
4003 invalid insns by changing the offset of a memory operand from a
4004 valid one into an invalid one, when the second operand is also a
4005 memory operand. The alternative is not to allow two memory
4006 operands for an insn when not optimizing. The problem only rarely
4007 occurs, for example with the C-torture program DFcmp.c. */
4009 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
/* Massage OPERANDS (for operation CODE in mode MODE) during RTL
   generation so that they form a combination the insn patterns can
   accept, forcing values into registers where necessary.  Always
   returns 1.  May rewrite any element of OPERANDS in place.  */

int
legitimize_operands (code, operands, mode)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode;
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
	 they can get hoisted out of loops.  This will tie up an extra
	 register but can save an extra cycle.  Only do this if loop
	 optimisation enabled.  (We cannot pull this trick for add and
	 sub instructions since the flow pass won't find
	 autoincrements etc.)  This allows us to generate compare
	 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
	 of LDI *AR0++, R0; CMPI 42, R0.

	 Note that expand_binops will try to load an expensive constant
	 into a register if it is used within a loop.  Unfortunately,
	 the cost mechanism doesn't allow us to look at the other
	 operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
	  && TARGET_HOIST
	  && optimize > 0
	  && GET_CODE (operands[1]) == CONST_INT
	  && preserve_subexpressions_p ()
	  && rtx_cost (operands[1], code) > 1)
	operands[1] = force_reg (mode, operands[1]);

      if (! reload_in_progress
	  && ! c4x_valid_operands (code, operands, mode, 0))
	operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && preserve_subexpressions_p ()
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register, but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
	 register, preventing reload from having to do this at a
	 later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
	{
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = copy_rtx (operands[0]);
	}
      else
	{
	  /* Just in case...  */
	  if (! c4x_valid_operands (code, operands, mode, 0))
	    operands[2] = force_reg (mode, operands[2]);
	}
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));

  return 1;
}
4102 /* The following predicates are used for instruction scheduling. */
4105 group1_reg_operand (op, mode)
4106 rtx op;
4107 enum machine_mode mode;
4109 if (mode != VOIDmode && mode != GET_MODE (op))
4110 return 0;
4111 if (GET_CODE (op) == SUBREG)
4112 op = SUBREG_REG (op);
4113 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
/* Predicate: OP is a memory reference whose address uses a register
   that may be a group 1 register (directly, or as either term of a
   PLUS).  Before reload has completed any base register is accepted.  */

int
group1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS)
	{
	  rtx op0 = XEXP (op, 0);
	  rtx op1 = XEXP (op, 1);

	  /* Either term of the sum may supply the group 1 register.  */
	  if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
	      || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
	    return 1;
	}
      else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
	return 1;
    }

  return 0;
}
4145 /* Return true if any one of the address registers. */
4148 arx_reg_operand (op, mode)
4149 rtx op;
4150 enum machine_mode mode;
4152 if (mode != VOIDmode && mode != GET_MODE (op))
4153 return 0;
4154 if (GET_CODE (op) == SUBREG)
4155 op = SUBREG_REG (op);
4156 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4160 static int
4161 c4x_arn_reg_operand (op, mode, regno)
4162 rtx op;
4163 enum machine_mode mode;
4164 unsigned int regno;
4166 if (mode != VOIDmode && mode != GET_MODE (op))
4167 return 0;
4168 if (GET_CODE (op) == SUBREG)
4169 op = SUBREG_REG (op);
4170 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
/* Helper predicate: OP is a memory reference whose address involves
   hard register REGNO — as a plain base register, an autoincremented
   base, a PRE/POST_MODIFY base or index, or either term of a PLUS.
   Before reload has completed any register in those positions is
   accepted.  */

static int
c4x_arn_mem_operand (op, mode, regno)
     rtx op;
     enum machine_mode mode;
     unsigned int regno;
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
	{
	case PRE_DEC:
	case POST_DEC:
	case PRE_INC:
	case POST_INC:
	  op = XEXP (op, 0);
	  /* Fall through to test the autoincremented register.  */

	case REG:
	  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

	case PRE_MODIFY:
	case POST_MODIFY:
	  /* Check the base register...  */
	  if (REG_P (XEXP (op, 0)) && (! reload_completed
				       || (REGNO (XEXP (op, 0)) == regno)))
	    return 1;
	  /* ...and the index register of the modification.  */
	  if (REG_P (XEXP (XEXP (op, 1), 1))
	      && (! reload_completed
		  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
	    return 1;
	  break;

	case PLUS:
	  {
	    rtx op0 = XEXP (op, 0);
	    rtx op1 = XEXP (op, 1);

	    /* Either term of the sum may be the register sought.  */
	    if ((REG_P (op0) && (! reload_completed
				 || (REGNO (op0) == regno)))
		|| (REG_P (op1) && (! reload_completed
				    || (REGNO (op1) == regno))))
	      return 1;
	  }
	  break;

	default:
	  break;
	}
    }
  return 0;
}
/* Per-register scheduling predicates: each REG variant accepts OP only
   if it is (possibly through a SUBREG) the named address or index
   register, and each MEM variant only if OP is a memory reference
   whose address involves that register.  Any register qualifies
   before reload has completed; see c4x_arn_reg_operand and
   c4x_arn_mem_operand for the details.  */

int
ar0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}


int
ar0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}


int
ar1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}


int
ar1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}


int
ar2_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}


int
ar2_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}


int
ar3_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}


int
ar3_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}


int
ar4_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}


int
ar4_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}


int
ar5_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}


int
ar5_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}


int
ar6_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}


int
ar6_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}


int
ar7_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}


int
ar7_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}


int
ir0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}


int
ir0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}


int
ir1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}


int
ir1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
/* This is similar to operand_subword but allows autoincrement
   addressing.  Return the I'th QImode/QFmode subword of OP, which
   must have mode MODE (HImode or HFmode); aborts via fatal_insn on
   operands it cannot split safely.  VALIDATE_ADDRESS is passed on to
   operand_subword for the ordinary cases.  */

rtx
c4x_operand_subword (op, i, validate_address, mode)
     rtx op;
     int i;
     int validate_address;
     enum machine_mode mode;
{
  if (mode != HImode && mode != HFmode)
    fatal_insn ("c4x_operand_subword: invalid mode", op);

  if (mode == HFmode && REG_P (op))
    fatal_insn ("c4x_operand_subword: invalid operand", op);

  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));
      /* NOTE: this inner `mode' (the mode of the address) deliberately
	 shadows the parameter for the rest of this scope.  */
      enum machine_mode mode = GET_MODE (XEXP (op, 0));
      enum machine_mode submode;

      submode = mode;
      if (mode == HImode)
	submode = QImode;
      else if (mode == HFmode)
	submode = QFmode;

      switch (code)
	{
	case POST_INC:
	case PRE_INC:
	  /* The increment supplies the step to the next subword, so
	     reuse the same autoincremented address in the sub-mode.  */
	  return gen_rtx_MEM (submode, XEXP (op, 0));

	case POST_DEC:
	case PRE_DEC:
	case PRE_MODIFY:
	case POST_MODIFY:
	  /* We could handle these with some difficulty.
	     e.g., *p-- => *(p-=2); *(p+1).  */
	  fatal_insn ("c4x_operand_subword: invalid autoincrement", op);

	case SYMBOL_REF:
	case LABEL_REF:
	case CONST:
	case CONST_INT:
	  fatal_insn ("c4x_operand_subword: invalid address", op);

	  /* Even though offsettable_address_p considers (MEM
	     (LO_SUM)) to be offsettable, it is not safe if the
	     address is at the end of the data page since we also have
	     to fix up the associated high PART.  In this case where
	     we are trying to split a HImode or HFmode memory
	     reference, we would have to emit another insn to reload a
	     new HIGH value.  It's easier to disable LO_SUM memory references
	     in HImode or HFmode and we probably get better code.  */
	case LO_SUM:
	  fatal_insn ("c4x_operand_subword: address not offsettable", op);

	default:
	  break;
	}
    }

  return operand_subword (op, i, validate_address, mode);
}
4476 /* Handle machine specific pragmas for compatibility with existing
4477 compilers for the C3x/C4x.
4479 pragma attribute
4480 ----------------------------------------------------------
4481 CODE_SECTION(symbol,"section") section("section")
4482 DATA_SECTION(symbol,"section") section("section")
4483 FUNC_CANNOT_INLINE(function)
4484 FUNC_EXT_CALLED(function)
4485 FUNC_IS_PURE(function) const
4486 FUNC_IS_SYSTEM(function)
4487 FUNC_NEVER_RETURNS(function) noreturn
4488 FUNC_NO_GLOBAL_ASG(function)
4489 FUNC_NO_IND_ASG(function)
4490 INTERRUPT(function) interrupt
/* Parse a C4x pragma, of the form ( function [, "section"] ) \n.
   FUNC is loaded with the IDENTIFIER_NODE of the function, SECT with
   the STRING_CST node of the string.  If SECT is null, then this
   pragma doesn't take a section string.  Returns 0 for a good pragma,
   -1 for a malformed pragma.  */
/* Warn and bail out of the enclosing parse function.  */
#define BAD(msgid, arg) do { warning (msgid, arg); return -1; } while (0)

/* Lexer callback used to read pragma tokens; set by c4x_init_pragma.  */
static int (*c_lex_func) (tree *);
/* Record GET_TOKEN as the lexer callback that c4x_parse_pragma will
   use to read pragma tokens.  */

void
c4x_init_pragma (get_token)
     int (*get_token) PARAMS ((tree *));
{
  c_lex_func = get_token;
}
/* Parse the argument list of a TI-style pragma NAME — see the comment
   above BAD for the grammar.  On success store the function
   identifier in *FUNC (and the section STRING_CST in *SECT when SECT
   is non-null) and return 0; on a malformed pragma warn and return
   -1.  Extra tokens after the closing paren draw a warning only.  */

static int
c4x_parse_pragma (name, func, sect)
     const char *name;
     tree *func;
     tree *sect;
{
  tree f, s, x;

  if (c_lex_func (&x) != CPP_OPEN_PAREN)
    BAD ("missing '(' after '#pragma %s' - ignored", name);

  if (c_lex_func (&f) != CPP_NAME)
    BAD ("missing function name in '#pragma %s' - ignored", name);

  if (sect)
    {
      if (c_lex_func (&x) != CPP_COMMA)
	BAD ("malformed '#pragma %s' - ignored", name);
      if (c_lex_func (&s) != CPP_STRING)
	BAD ("missing section name in '#pragma %s' - ignored", name);
      *sect = s;
    }

  if (c_lex_func (&x) != CPP_CLOSE_PAREN)
    BAD ("missing ')' for '#pragma %s' - ignored", name);

  if (c_lex_func (&x) != CPP_EOF)
    warning ("junk at end of '#pragma %s'", name);

  *func = f;
  return 0;
}
4544 void
4545 c4x_pr_CODE_SECTION (pfile)
4546 cpp_reader *pfile ATTRIBUTE_UNUSED;
4548 tree func, sect;
4550 if (c4x_parse_pragma ("CODE_SECTION", &func, &sect))
4551 return;
4552 code_tree = chainon (code_tree,
4553 build_tree_list (func,
4554 build_tree_list (NULL_TREE, sect)));
4557 void
4558 c4x_pr_DATA_SECTION (pfile)
4559 cpp_reader *pfile ATTRIBUTE_UNUSED;
4561 tree func, sect;
4563 if (c4x_parse_pragma ("DATA_SECTION", &func, &sect))
4564 return;
4565 data_tree = chainon (data_tree,
4566 build_tree_list (func,
4567 build_tree_list (NULL_TREE, sect)));
4570 void
4571 c4x_pr_FUNC_IS_PURE (pfile)
4572 cpp_reader *pfile ATTRIBUTE_UNUSED;
4574 tree func;
4576 if (c4x_parse_pragma ("FUNC_IS_PURE", &func, 0))
4577 return;
4578 pure_tree = chainon (pure_tree, build_tree_list (func, NULL_TREE));
4581 void
4582 c4x_pr_FUNC_NEVER_RETURNS (pfile)
4583 cpp_reader *pfile ATTRIBUTE_UNUSED;
4585 tree func;
4587 if (c4x_parse_pragma ("FUNC_NEVER_RETURNS", &func, 0))
4588 return;
4589 noreturn_tree = chainon (noreturn_tree, build_tree_list (func, NULL_TREE));
4592 void
4593 c4x_pr_INTERRUPT (pfile)
4594 cpp_reader *pfile ATTRIBUTE_UNUSED;
4596 tree func;
4598 if (c4x_parse_pragma ("INTERRUPT", &func, 0))
4599 return;
4600 interrupt_tree = chainon (interrupt_tree, build_tree_list (func, NULL_TREE));
/* Used for FUNC_CANNOT_INLINE, FUNC_EXT_CALLED, FUNC_IS_SYSTEM,
   FUNC_NO_GLOBAL_ASG, and FUNC_NO_IND_ASG.  These TI pragmas have no
   GCC counterpart, so the body is intentionally empty — the pragma is
   accepted and discarded.  */
void
c4x_pr_ignored (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
}
/* Singly-linked list node used to record symbol names for the
   assembler-output bookkeeping below.  */
struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Symbols declared global in this translation unit.  */
static struct name_list *global_head;
/* Symbols referenced but not on the global list; these get a .ref
   directive at the end of the file.  */
static struct name_list *extern_head;
4621 /* Add NAME to list of global symbols and remove from external list if
4622 present on external list. */
4624 void
4625 c4x_global_label (name)
4626 const char *name;
4628 struct name_list *p, *last;
4630 /* Do not insert duplicate names, so linearly search through list of
4631 existing names. */
4632 p = global_head;
4633 while (p)
4635 if (strcmp (p->name, name) == 0)
4636 return;
4637 p = p->next;
4639 p = (struct name_list *) permalloc (sizeof *p);
4640 p->next = global_head;
4641 p->name = name;
4642 global_head = p;
4644 /* Remove this name from ref list if present. */
4645 last = NULL;
4646 p = extern_head;
4647 while (p)
4649 if (strcmp (p->name, name) == 0)
4651 if (last)
4652 last->next = p->next;
4653 else
4654 extern_head = p->next;
4655 break;
4657 last = p;
4658 p = p->next;
4663 /* Add NAME to list of external symbols. */
4665 void
4666 c4x_external_ref (name)
4667 const char *name;
4669 struct name_list *p;
4671 /* Do not insert duplicate names. */
4672 p = extern_head;
4673 while (p)
4675 if (strcmp (p->name, name) == 0)
4676 return;
4677 p = p->next;
4680 /* Do not insert ref if global found. */
4681 p = global_head;
4682 while (p)
4684 if (strcmp (p->name, name) == 0)
4685 return;
4686 p = p->next;
4688 p = (struct name_list *) permalloc (sizeof *p);
4689 p->next = extern_head;
4690 p->name = name;
4691 extern_head = p;
4695 void
4696 c4x_file_end (fp)
4697 FILE *fp;
4699 struct name_list *p;
4701 /* Output all external names that are not global. */
4702 p = extern_head;
4703 while (p)
4705 fprintf (fp, "\t.ref\t");
4706 assemble_name (fp, p->name);
4707 fprintf (fp, "\n");
4708 p = p->next;
4710 fprintf (fp, "\t.end\n");
/* If DECL's name appears in LIST (a tree list keyed by identifier, as
   built by the pragma handlers above), prepend attribute ATTRIB with
   the recorded value onto *ATTRIBUTES.  The pointer comparison of
   IDENTIFIER_POINTERs works because identifiers are shared
   (interned), so equal names have equal pointers.  */

static void
c4x_check_attribute (attrib, list, decl, attributes)
     const char *attrib;
     tree list, decl, *attributes;
{
  while (list != NULL_TREE
	 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
	 != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list)
    *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
			     *attributes);
}
4729 static void
4730 c4x_insert_attributes (decl, attributes)
4731 tree decl, *attributes;
4733 switch (TREE_CODE (decl))
4735 case FUNCTION_DECL:
4736 c4x_check_attribute ("section", code_tree, decl, attributes);
4737 c4x_check_attribute ("const", pure_tree, decl, attributes);
4738 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4739 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4740 break;
4742 case VAR_DECL:
4743 c4x_check_attribute ("section", data_tree, decl, attributes);
4744 break;
4746 default:
4747 break;
4752 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine
4753 specific attribute for TYPE. The attributes in ATTRIBUTES have
4754 previously been assigned to TYPE. */
4756 static int
4757 c4x_valid_type_attribute_p (type, attributes, identifier, args)
4758 tree type;
4759 tree attributes ATTRIBUTE_UNUSED;
4760 tree identifier;
4761 tree args ATTRIBUTE_UNUSED;
4763 if (TREE_CODE (type) != FUNCTION_TYPE)
4764 return 0;
4766 if (is_attribute_p ("interrupt", identifier))
4767 return 1;
4769 if (is_attribute_p ("assembler", identifier))
4770 return 1;
4772 if (is_attribute_p ("leaf_pretend", identifier))
4773 return 1;
4775 return 0;
/* !!! FIXME to emit RPTS correctly.  */

/* Return nonzero if the repeat block headed by INSN (an RPTB insn
   whose count operand is OP) can be emitted as a single-instruction
   RPTS loop: the block must contain exactly one real insn followed by
   the rptb_end insn, and either -mrpts is enabled or OP is a constant
   count within the -mrpts-cycles limit.  */

int
c4x_rptb_rpts_p (insn, op)
     rtx insn, op;
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
	 and the top label... They were probably destined to
	 be moved out of the loop.  For now, let's leave them
	 where they are and print a warning.  We should
	 probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
	fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
		   insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  if (! INSN_P (insn))
    return 0;

  /* The second real insn must close the block for RPTS to apply.  */
  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  if (TARGET_RPTS)
    return 1;

  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
/* Check if register r11 is used as the destination of an insn.
   X may be an insn or any rtx; the whole expression tree is walked
   recursively.  Returns nonzero if r11 appears as a SET destination
   (or anywhere within a sub-expression once the SET_DEST has been
   taken).  */

static int
c4x_r11_set_p(x)
     rtx x;
{
  rtx set;
  int i, j;
  const char *fmt;

  if (x == 0)
    return 0;

  /* For a delay-slot SEQUENCE, only the final insn matters.  */
  if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
    x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);

  /* For a simple single-set insn, examine just the destination.  */
  if (INSN_P (x) && (set = single_set (x)))
    x = SET_DEST (set);

  if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
    return 1;

  /* Recurse over all rtx operands and vectors of X.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (c4x_r11_set_p (XEXP (x, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (c4x_r11_set_p (XVECEXP (x, i, j)))
	    return 1;
    }
  return 0;
}
4863 /* The c4x sometimes has a problem when the insn before the laj insn
4864 sets the r11 register. Check for this situation. */
4867 c4x_check_laj_p (insn)
4868 rtx insn;
4870 insn = prev_nonnote_insn (insn);
4872 /* If this is the start of the function no nop is needed. */
4873 if (insn == 0)
4874 return 0;
4876 /* If the previous insn is a code label we have to insert a nop. This
4877 could be a jump or table jump. We can find the normal jumps by
4878 scanning the function but this will not find table jumps. */
4879 if (GET_CODE (insn) == CODE_LABEL)
4880 return 1;
4882 /* If the previous insn sets register r11 we have to insert a nop. */
4883 if (c4x_r11_set_p (insn))
4884 return 1;
4886 /* No nop needed. */
4887 return 0;
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.
   A set of an address register followed by a use occurs a 2 cycle
   stall (reduced to a single cycle on the c40 using LDA), while
   a read of an address register followed by a use occurs a single cycle.  */
#define	SET_USE_COST	3
#define	SETLDA_USE_COST	2
#define	READ_USE_COST	2

int
c4x_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
	 cycles later.  */
      if (TARGET_C3X)
	{
	  if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;
	}
      else
	{
	  /* This could be significantly optimized. We should look
	     to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
	     insn uses ar0-ar7.  We then test if the same register
	     is used.  The tricky bit is that some operands will
	     use several registers...  */
	  if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  /* The index registers have no read-then-use stall recorded,
	     only set and set-via-LDA costs.  */
	  if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

	  if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	}

      if (max)
	cost = max;

      /* For other data dependencies, the default cost specified in the
	 md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
/* Register the C3x/C4x machine-specific builtin functions.  ENDLINK
   terminates the argument-type lists.  fast_ftoi/ansi_ftoi are always
   available; fast_imult only on the C3x, and the IEEE conversion and
   reciprocal builtins only on the C4x.  */

void
c4x_init_builtins (endlink)
     tree endlink;
{
  builtin_function ("fast_ftoi",
		    build_function_type
		    (integer_type_node,
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX, BUILT_IN_MD, NULL);
  builtin_function ("ansi_ftoi",
		    build_function_type
		    (integer_type_node,
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL);
  if (TARGET_C3X)
    builtin_function ("fast_imult",
		      build_function_type
		      (integer_type_node,
		       tree_cons (NULL_TREE, integer_type_node,
				  tree_cons (NULL_TREE,
					     integer_type_node, endlink))),
		      C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL);
  else
    {
      builtin_function ("toieee",
			build_function_type
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL);
      builtin_function ("frieee",
			build_function_type
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL);
      builtin_function ("fast_invf",
			build_function_type
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL);
    }
}
/* Expand a call EXP to one of the machine-specific builtins registered
   by c4x_init_builtins, emitting the corresponding insn.  TARGET is a
   suggested result register (a fresh one is generated if it is absent
   or unsuitable).  Returns the rtx holding the result, or NULL_RTX if
   the builtin is not available on the current target variant.  */

rtx
c4x_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      /* fast_imult exists only on the C3x; fall out to NULL_RTX
	 otherwise.  */
      if (! TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      r0 = protect_from_queue (r0, 0);
      r1 = protect_from_queue (r1, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      /* The IEEE conversion builtins exist only on the C4x.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      /* frieee needs its operand in memory, so spill a register
	 argument to the stack first.  */
      if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
	put_var_into_stack (arg0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (register_operand (r0, QFmode))
	{
	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
	  emit_move_insn (r1, r0);
	  r0 = r1;
	}
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  return NULL_RTX;
}