* alpha.h: NULL_PTR -> NULL.
[official-gcc.git] / gcc / config / c4x / c4x.c
blob9486fdce7febe6b649de42114556e64fc355407d
1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
26 #include "config.h"
27 #include "system.h"
28 #include "toplev.h"
29 #include "rtl.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "tree.h"
39 #include "function.h"
40 #include "expr.h"
41 #include "flags.h"
42 #include "loop.h"
43 #include "recog.h"
44 #include "c-tree.h"
45 #include "ggc.h"
46 #include "cpplib.h"
47 #include "c-lex.h"
48 #include "c-pragma.h"
49 #include "c4x-protos.h"
/* RTX handles for library helper routines that the md expanders emit
   calls to in place of instructions the hardware lacks: HImode
   multiply and the QF/HF <-> HImode conversions.  Initialized
   elsewhere (presumably at init time) and registered as GC roots in
   c4x_add_gc_roots below.  */
rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

/* Non-zero while the current function is known to be a leaf;
   set from c4x_leaf_function_p () when emitting an interrupt
   function prologue (see c4x_expand_prologue).  */
static int c4x_leaf_function;

/* Register names to use in floating-point (QF/HF) contexts;
   FLOAT_REGISTER_NAMES comes from the target header.  */
static const char *float_reg_names[] = FLOAT_REGISTER_NAMES;
66 /* Array of the smallest class containing reg number REGNO, indexed by
67 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
68 registers are available and set the class to NO_REGS for registers
69 that the target switches say are unavailable. */
71 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
73 /* Reg Modes Saved. */
74 R0R1_REGS, /* R0 QI, QF, HF No. */
75 R0R1_REGS, /* R1 QI, QF, HF No. */
76 R2R3_REGS, /* R2 QI, QF, HF No. */
77 R2R3_REGS, /* R3 QI, QF, HF No. */
78 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
79 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
80 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
81 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
82 ADDR_REGS, /* AR0 QI No. */
83 ADDR_REGS, /* AR1 QI No. */
84 ADDR_REGS, /* AR2 QI No. */
85 ADDR_REGS, /* AR3 QI QI. */
86 ADDR_REGS, /* AR4 QI QI. */
87 ADDR_REGS, /* AR5 QI QI. */
88 ADDR_REGS, /* AR6 QI QI. */
89 ADDR_REGS, /* AR7 QI QI. */
90 DP_REG, /* DP QI No. */
91 INDEX_REGS, /* IR0 QI No. */
92 INDEX_REGS, /* IR1 QI No. */
93 BK_REG, /* BK QI QI. */
94 SP_REG, /* SP QI No. */
95 ST_REG, /* ST CC No. */
96 NO_REGS, /* DIE/IE No. */
97 NO_REGS, /* IIE/IF No. */
98 NO_REGS, /* IIF/IOF No. */
99 INT_REGS, /* RS QI No. */
100 INT_REGS, /* RE QI No. */
101 RC_REG, /* RC QI No. */
102 EXT_REGS, /* R8 QI, QF, HF QI. */
103 EXT_REGS, /* R9 QI, QF, HF No. */
104 EXT_REGS, /* R10 QI, QF, HF No. */
105 EXT_REGS, /* R11 QI, QF, HF No. */
108 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
110 /* Reg Modes Saved. */
111 HFmode, /* R0 QI, QF, HF No. */
112 HFmode, /* R1 QI, QF, HF No. */
113 HFmode, /* R2 QI, QF, HF No. */
114 HFmode, /* R3 QI, QF, HF No. */
115 QFmode, /* R4 QI, QF, HF QI. */
116 QFmode, /* R5 QI, QF, HF QI. */
117 QImode, /* R6 QI, QF, HF QF. */
118 QImode, /* R7 QI, QF, HF QF. */
119 QImode, /* AR0 QI No. */
120 QImode, /* AR1 QI No. */
121 QImode, /* AR2 QI No. */
122 QImode, /* AR3 QI QI. */
123 QImode, /* AR4 QI QI. */
124 QImode, /* AR5 QI QI. */
125 QImode, /* AR6 QI QI. */
126 QImode, /* AR7 QI QI. */
127 VOIDmode, /* DP QI No. */
128 QImode, /* IR0 QI No. */
129 QImode, /* IR1 QI No. */
130 QImode, /* BK QI QI. */
131 VOIDmode, /* SP QI No. */
132 VOIDmode, /* ST CC No. */
133 VOIDmode, /* DIE/IE No. */
134 VOIDmode, /* IIE/IF No. */
135 VOIDmode, /* IIF/IOF No. */
136 QImode, /* RS QI No. */
137 QImode, /* RE QI No. */
138 VOIDmode, /* RC QI No. */
139 QFmode, /* R8 QI, QF, HF QI. */
140 HFmode, /* R9 QI, QF, HF No. */
141 HFmode, /* R10 QI, QF, HF No. */
142 HFmode, /* R11 QI, QF, HF No. */
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  Registered as GC roots in
   c4x_add_gc_roots.  */

struct rtx_def *c4x_compare_op0 = NULL_RTX;
struct rtx_def *c4x_compare_op1 = NULL_RTX;

/* Command-line option state; the strings are parsed into the ints by
   c4x_override_options.  */
const char *c4x_rpts_cycles_string;
int c4x_rpts_cycles = 0;        /* Max. cycles for RPTS.  */
const char *c4x_cpu_version_string;
int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  Trees recording the function names mentioned in
   the corresponding target pragmas; also GC roots.  */

static tree code_tree = NULL_TREE;
static tree data_tree = NULL_TREE;
static tree pure_tree = NULL_TREE;
static tree noreturn_tree = NULL_TREE;
static tree interrupt_tree = NULL_TREE;
/* Forward declarations for the static helpers defined later in this
   file (predicates for constants/operands, pragma parsing, and RPTB
   loop analysis).  */
static void c4x_add_gc_roots PARAMS ((void));
static int c4x_isr_reg_used_p PARAMS ((unsigned int));
static int c4x_leaf_function_p PARAMS ((void));
static int c4x_assembler_function_p PARAMS ((void));
static int c4x_immed_float_p PARAMS ((rtx));
static int c4x_a_register PARAMS ((rtx));
static int c4x_x_register PARAMS ((rtx));
static int c4x_immed_int_constant PARAMS ((rtx));
static int c4x_immed_float_constant PARAMS ((rtx));
static int c4x_K_constant PARAMS ((rtx));
static int c4x_N_constant PARAMS ((rtx));
static int c4x_O_constant PARAMS ((rtx));
static int c4x_R_indirect PARAMS ((rtx));
static int c4x_S_indirect PARAMS ((rtx));
static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
                                       enum machine_mode, int));
static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
static int c4x_parse_pragma PARAMS ((const char *, tree *, tree *));
static int c4x_r11_set_p PARAMS ((rtx));
static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
191 /* Called to register all of our global variables with the garbage
192 collector. */
194 static void
195 c4x_add_gc_roots ()
197 ggc_add_rtx_root (&c4x_compare_op0, 1);
198 ggc_add_rtx_root (&c4x_compare_op1, 1);
199 ggc_add_tree_root (&code_tree, 1);
200 ggc_add_tree_root (&data_tree, 1);
201 ggc_add_tree_root (&pure_tree, 1);
202 ggc_add_tree_root (&noreturn_tree, 1);
203 ggc_add_tree_root (&interrupt_tree, 1);
204 ggc_add_rtx_root (&smulhi3_libfunc, 1);
205 ggc_add_rtx_root (&umulhi3_libfunc, 1);
206 ggc_add_rtx_root (&fix_truncqfhi2_libfunc, 1);
207 ggc_add_rtx_root (&fixuns_truncqfhi2_libfunc, 1);
208 ggc_add_rtx_root (&fix_trunchfhi2_libfunc, 1);
209 ggc_add_rtx_root (&fixuns_trunchfhi2_libfunc, 1);
210 ggc_add_rtx_root (&floathiqf2_libfunc, 1);
211 ggc_add_rtx_root (&floatunshiqf2_libfunc, 1);
212 ggc_add_rtx_root (&floathihf2_libfunc, 1);
213 ggc_add_rtx_root (&floatunshihf2_libfunc, 1);
217 /* Override command line options.
218 Called once after all options have been parsed.
219 Mostly we process the processor
220 type and sometimes adjust other TARGET_ options. */
222 void
223 c4x_override_options ()
225 if (c4x_rpts_cycles_string)
226 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
227 else
228 c4x_rpts_cycles = 0;
230 if (TARGET_C30)
231 c4x_cpu_version = 30;
232 else if (TARGET_C31)
233 c4x_cpu_version = 31;
234 else if (TARGET_C32)
235 c4x_cpu_version = 32;
236 else if (TARGET_C33)
237 c4x_cpu_version = 33;
238 else if (TARGET_C40)
239 c4x_cpu_version = 40;
240 else if (TARGET_C44)
241 c4x_cpu_version = 44;
242 else
243 c4x_cpu_version = 40;
245 /* -mcpu=xx overrides -m40 etc. */
246 if (c4x_cpu_version_string)
248 const char *p = c4x_cpu_version_string;
250 /* Also allow -mcpu=c30 etc. */
251 if (*p == 'c' || *p == 'C')
252 p++;
253 c4x_cpu_version = atoi (p);
256 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
257 C40_FLAG | C44_FLAG);
259 switch (c4x_cpu_version)
261 case 30: target_flags |= C30_FLAG; break;
262 case 31: target_flags |= C31_FLAG; break;
263 case 32: target_flags |= C32_FLAG; break;
264 case 33: target_flags |= C33_FLAG; break;
265 case 40: target_flags |= C40_FLAG; break;
266 case 44: target_flags |= C44_FLAG; break;
267 default:
268 warning ("Unknown CPU version %d, using 40.\n", c4x_cpu_version);
269 c4x_cpu_version = 40;
270 target_flags |= C40_FLAG;
273 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
274 target_flags |= C3X_FLAG;
275 else
276 target_flags &= ~C3X_FLAG;
278 /* Convert foo / 8.0 into foo * 0.125, etc. */
279 set_fast_math_flags();
281 /* We should phase out the following at some stage.
282 This provides compatibility with the old -mno-aliases option. */
283 if (! TARGET_ALIASES && ! flag_argument_noalias)
284 flag_argument_noalias = 1;
286 /* Register global variables with the garbage collector. */
287 c4x_add_gc_roots ();
291 /* This is called before c4x_override_options. */
293 void
294 c4x_optimization_options (level, size)
295 int level ATTRIBUTE_UNUSED;
296 int size ATTRIBUTE_UNUSED;
298 /* Scheduling before register allocation can screw up global
299 register allocation, especially for functions that use MPY||ADD
300 instructions. The benefit we gain we get by scheduling before
301 register allocation is probably marginal anyhow. */
302 flag_schedule_insns = 0;
306 /* Write an ASCII string. */
308 #define C4X_ASCII_LIMIT 40
310 void
311 c4x_output_ascii (stream, ptr, len)
312 FILE *stream;
313 const char *ptr;
314 int len;
316 char sbuf[C4X_ASCII_LIMIT + 1];
317 int s, l, special, first = 1, onlys;
319 if (len)
320 fprintf (stream, "\t.byte\t");
322 for (s = l = 0; len > 0; --len, ++ptr)
324 onlys = 0;
326 /* Escape " and \ with a \". */
327 special = *ptr == '\"' || *ptr == '\\';
329 /* If printable - add to buff. */
330 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
332 if (special)
333 sbuf[s++] = '\\';
334 sbuf[s++] = *ptr;
335 if (s < C4X_ASCII_LIMIT - 1)
336 continue;
337 onlys = 1;
339 if (s)
341 if (first)
342 first = 0;
343 else
345 fputc (',', stream);
346 l++;
349 sbuf[s] = 0;
350 fprintf (stream, "\"%s\"", sbuf);
351 l += s + 2;
352 if (TARGET_TI && l >= 80 && len > 1)
354 fprintf (stream, "\n\t.byte\t");
355 first = 1;
356 l = 0;
359 s = 0;
361 if (onlys)
362 continue;
364 if (first)
365 first = 0;
366 else
368 fputc (',', stream);
369 l++;
372 fprintf (stream, "%d", *ptr);
373 l += 3;
374 if (TARGET_TI && l >= 80 && len > 1)
376 fprintf (stream, "\n\t.byte\t");
377 first = 1;
378 l = 0;
381 if (s)
383 if (! first)
384 fputc (',', stream);
386 sbuf[s] = 0;
387 fprintf (stream, "\"%s\"", sbuf);
388 s = 0;
390 fputc ('\n', stream);
395 c4x_hard_regno_mode_ok (regno, mode)
396 unsigned int regno;
397 enum machine_mode mode;
399 switch (mode)
401 #if Pmode != QImode
402 case Pmode: /* Pointer (24/32 bits). */
403 #endif
404 case QImode: /* Integer (32 bits). */
405 return IS_INT_REGNO (regno);
407 case QFmode: /* Float, Double (32 bits). */
408 case HFmode: /* Long Double (40 bits). */
409 return IS_EXT_REGNO (regno);
411 case CCmode: /* Condition Codes. */
412 case CC_NOOVmode: /* Condition Codes. */
413 return IS_ST_REGNO (regno);
415 case HImode: /* Long Long (64 bits). */
416 /* We need two registers to store long longs. Note that
417 it is much easier to constrain the first register
418 to start on an even boundary. */
419 return IS_INT_REGNO (regno)
420 && IS_INT_REGNO (regno + 1)
421 && (regno & 1) == 0;
423 default:
424 return 0; /* We don't support these modes. */
427 return 0;
/* Return non-zero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (regno1, regno2)
     unsigned int regno1;
     unsigned int regno2;
{
  /* Call-saved registers cannot be renamed between the QI-saved and
     QF-saved groups, since they are saved/restored in different
     modes.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* Renaming between an extended (40 bit) register and a standard
     (32 bit) register is not allowed either way, because condition
     codes are only set for extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}
452 /* The TI C3x C compiler register argument runtime model uses 6 registers,
453 AR2, R2, R3, RC, RS, RE.
455 The first two floating point arguments (float, double, long double)
456 that are found scanning from left to right are assigned to R2 and R3.
458 The remaining integer (char, short, int, long) or pointer arguments
459 are assigned to the remaining registers in the order AR2, R2, R3,
460 RC, RS, RE when scanning left to right, except for the last named
461 argument prior to an ellipsis denoting variable number of
462 arguments. We don't have to worry about the latter condition since
463 function.c treats the last named argument as anonymous (unnamed).
465 All arguments that cannot be passed in registers are pushed onto
466 the stack in reverse order (right to left). GCC handles that for us.
468 c4x_init_cumulative_args() is called at the start, so we can parse
469 the args to see how many floating point arguments and how many
470 integer (or pointer) arguments there are. c4x_function_arg() is
471 then called (sometimes repeatedly) for each argument (parsed left
472 to right) to obtain the register to pass the argument in, or zero
473 if the argument is to be passed on the stack. Once the compiler is
474 happy, c4x_function_arg_advance() is called.
476 Don't use R0 to pass arguments in, we use 0 to indicate a stack
477 argument. */
479 static int c4x_int_reglist[3][6] =
481 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
482 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
483 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
486 static int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
489 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
490 function whose data type is FNTYPE.
491 For a library call, FNTYPE is 0. */
493 void
494 c4x_init_cumulative_args (cum, fntype, libname)
495 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
496 tree fntype; /* Tree ptr for function decl. */
497 rtx libname; /* SYMBOL_REF of library name or 0. */
499 tree param, next_param;
501 cum->floats = cum->ints = 0;
502 cum->init = 0;
503 cum->var = 0;
504 cum->args = 0;
506 if (TARGET_DEBUG)
508 fprintf (stderr, "\nc4x_init_cumulative_args (");
509 if (fntype)
511 tree ret_type = TREE_TYPE (fntype);
513 fprintf (stderr, "fntype code = %s, ret code = %s",
514 tree_code_name[(int) TREE_CODE (fntype)],
515 tree_code_name[(int) TREE_CODE (ret_type)]);
517 else
518 fprintf (stderr, "no fntype");
520 if (libname)
521 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
524 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
526 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
527 param; param = next_param)
529 tree type;
531 next_param = TREE_CHAIN (param);
533 type = TREE_VALUE (param);
534 if (type && type != void_type_node)
536 enum machine_mode mode;
538 /* If the last arg doesn't have void type then we have
539 variable arguments. */
540 if (! next_param)
541 cum->var = 1;
543 if ((mode = TYPE_MODE (type)))
545 if (! MUST_PASS_IN_STACK (mode, type))
547 /* Look for float, double, or long double argument. */
548 if (mode == QFmode || mode == HFmode)
549 cum->floats++;
550 /* Look for integer, enumeral, boolean, char, or pointer
551 argument. */
552 else if (mode == QImode || mode == Pmode)
553 cum->ints++;
556 cum->args++;
560 if (TARGET_DEBUG)
561 fprintf (stderr, "%s%s, args = %d)\n",
562 cum->prototype ? ", prototype" : "",
563 cum->var ? ", variable args" : "",
564 cum->args);
568 /* Update the data in CUM to advance over an argument
569 of mode MODE and data type TYPE.
570 (TYPE is null for libcalls where that information may not be available.) */
572 void
573 c4x_function_arg_advance (cum, mode, type, named)
574 CUMULATIVE_ARGS *cum; /* Current arg information. */
575 enum machine_mode mode; /* Current arg mode. */
576 tree type; /* Type of the arg or 0 if lib support. */
577 int named; /* Whether or not the argument was named. */
579 if (TARGET_DEBUG)
580 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
581 GET_MODE_NAME (mode), named);
582 if (! TARGET_MEMPARM
583 && named
584 && type
585 && ! MUST_PASS_IN_STACK (mode, type))
587 /* Look for float, double, or long double argument. */
588 if (mode == QFmode || mode == HFmode)
589 cum->floats++;
590 /* Look for integer, enumeral, boolean, char, or pointer argument. */
591 else if (mode == QImode || mode == Pmode)
592 cum->ints++;
594 else if (! TARGET_MEMPARM && ! type)
596 /* Handle libcall arguments. */
597 if (mode == QFmode || mode == HFmode)
598 cum->floats++;
599 else if (mode == QImode || mode == Pmode)
600 cum->ints++;
602 return;
606 /* Define where to put the arguments to a function. Value is zero to
607 push the argument on the stack, or a hard register in which to
608 store the argument.
610 MODE is the argument's machine mode.
611 TYPE is the data type of the argument (as a tree).
612 This is null for libcalls where that information may
613 not be available.
614 CUM is a variable of type CUMULATIVE_ARGS which gives info about
615 the preceding args and about the function being called.
616 NAMED is nonzero if this argument is a named parameter
617 (otherwise it is an extra parameter matching an ellipsis). */
619 struct rtx_def *
620 c4x_function_arg (cum, mode, type, named)
621 CUMULATIVE_ARGS *cum; /* Current arg information. */
622 enum machine_mode mode; /* Current arg mode. */
623 tree type; /* Type of the arg or 0 if lib support. */
624 int named; /* != 0 for normal args, == 0 for ... args. */
626 int reg = 0; /* Default to passing argument on stack. */
628 if (! cum->init)
630 /* We can handle at most 2 floats in R2, R3. */
631 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
633 /* We can handle at most 6 integers minus number of floats passed
634 in registers. */
635 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
636 6 - cum->maxfloats : cum->ints;
638 /* If there is no prototype, assume all the arguments are integers. */
639 if (! cum->prototype)
640 cum->maxints = 6;
642 cum->ints = cum->floats = 0;
643 cum->init = 1;
646 /* This marks the last argument. We don't need to pass this through
647 to the call insn. */
648 if (type == void_type_node)
649 return 0;
651 if (! TARGET_MEMPARM
652 && named
653 && type
654 && ! MUST_PASS_IN_STACK (mode, type))
656 /* Look for float, double, or long double argument. */
657 if (mode == QFmode || mode == HFmode)
659 if (cum->floats < cum->maxfloats)
660 reg = c4x_fp_reglist[cum->floats];
662 /* Look for integer, enumeral, boolean, char, or pointer argument. */
663 else if (mode == QImode || mode == Pmode)
665 if (cum->ints < cum->maxints)
666 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
669 else if (! TARGET_MEMPARM && ! type)
671 /* We could use a different argument calling model for libcalls,
672 since we're only calling functions in libgcc. Thus we could
673 pass arguments for long longs in registers rather than on the
674 stack. In the meantime, use the odd TI format. We make the
675 assumption that we won't have more than two floating point
676 args, six integer args, and that all the arguments are of the
677 same mode. */
678 if (mode == QFmode || mode == HFmode)
679 reg = c4x_fp_reglist[cum->floats];
680 else if (mode == QImode || mode == Pmode)
681 reg = c4x_int_reglist[0][cum->ints];
684 if (TARGET_DEBUG)
686 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
687 GET_MODE_NAME (mode), named);
688 if (reg)
689 fprintf (stderr, ", reg=%s", reg_names[reg]);
690 else
691 fprintf (stderr, ", stack");
692 fprintf (stderr, ")\n");
694 if (reg)
695 return gen_rtx_REG (mode, reg);
696 else
697 return NULL_RTX;
701 void
702 c4x_va_start (stdarg_p, valist, nextarg)
703 int stdarg_p;
704 tree valist;
705 rtx nextarg;
707 nextarg = plus_constant (nextarg, stdarg_p ? 0 : UNITS_PER_WORD * 2);
709 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
713 /* C[34]x arguments grow in weird ways (downwards) that the standard
714 varargs stuff can't handle.. */
716 c4x_va_arg (valist, type)
717 tree valist, type;
719 tree t;
721 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
722 build_int_2 (int_size_in_bytes (type), 0));
723 TREE_SIDE_EFFECTS (t) = 1;
725 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
729 static int
730 c4x_isr_reg_used_p (regno)
731 unsigned int regno;
733 /* Don't save/restore FP or ST, we handle them separately. */
734 if (regno == FRAME_POINTER_REGNUM
735 || IS_ST_REGNO (regno))
736 return 0;
738 /* We could be a little smarter abut saving/restoring DP.
739 We'll only save if for the big memory model or if
740 we're paranoid. ;-) */
741 if (IS_DP_REGNO (regno))
742 return ! TARGET_SMALL || TARGET_PARANOID;
744 /* Only save/restore regs in leaf function that are used. */
745 if (c4x_leaf_function)
746 return regs_ever_live[regno] && fixed_regs[regno] == 0;
748 /* Only save/restore regs that are used by the ISR and regs
749 that are likely to be used by functions the ISR calls
750 if they are not fixed. */
751 return IS_EXT_REGNO (regno)
752 || ((regs_ever_live[regno] || call_used_regs[regno])
753 && fixed_regs[regno] == 0);
757 static int
758 c4x_leaf_function_p ()
760 /* A leaf function makes no calls, so we only need
761 to save/restore the registers we actually use.
762 For the global variable leaf_function to be set, we need
763 to define LEAF_REGISTERS and all that it entails.
764 Let's check ourselves... */
766 if (lookup_attribute ("leaf_pretend",
767 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
768 return 1;
770 /* Use the leaf_pretend attribute at your own risk. This is a hack
771 to speed up ISRs that call a function infrequently where the
772 overhead of saving and restoring the additional registers is not
773 warranted. You must save and restore the additional registers
774 required by the called function. Caveat emptor. Here's enough
775 rope... */
777 if (leaf_function_p ())
778 return 1;
780 return 0;
784 static int
785 c4x_assembler_function_p ()
787 tree type;
789 type = TREE_TYPE (current_function_decl);
790 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
791 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
796 c4x_interrupt_function_p ()
798 if (lookup_attribute ("interrupt",
799 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
800 return 1;
802 /* Look for TI style c_intnn. */
803 return current_function_name[0] == 'c'
804 && current_function_name[1] == '_'
805 && current_function_name[2] == 'i'
806 && current_function_name[3] == 'n'
807 && current_function_name[4] == 't'
808 && ISDIGIT (current_function_name[5])
809 && ISDIGIT (current_function_name[6]);
812 void
813 c4x_expand_prologue ()
815 unsigned int regno;
816 int size = get_frame_size ();
817 rtx insn;
819 /* In functions where ar3 is not used but frame pointers are still
820 specified, frame pointers are not adjusted (if >= -O2) and this
821 is used so it won't needlessly push the frame pointer. */
822 int dont_push_ar3;
824 /* For __assembler__ function don't build a prologue. */
825 if (c4x_assembler_function_p ())
827 return;
830 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
831 if (profile_block_flag == 2)
833 FUNCTION_BLOCK_PROFILER_EXIT
835 #endif
837 /* For __interrupt__ function build specific prologue. */
838 if (c4x_interrupt_function_p ())
840 c4x_leaf_function = c4x_leaf_function_p ();
842 insn = emit_insn (gen_push_st ());
843 RTX_FRAME_RELATED_P (insn) = 1;
844 if (size)
846 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
847 RTX_FRAME_RELATED_P (insn) = 1;
848 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
849 gen_rtx_REG (QImode, SP_REGNO)));
850 RTX_FRAME_RELATED_P (insn) = 1;
851 /* We require that an ISR uses fewer than 32768 words of
852 local variables, otherwise we have to go to lots of
853 effort to save a register, load it with the desired size,
854 adjust the stack pointer, and then restore the modified
855 register. Frankly, I think it is a poor ISR that
856 requires more than 32767 words of local temporary
857 storage! */
858 if (size > 32767)
859 error ("ISR %s requires %d words of local vars, max is 32767.",
860 current_function_name, size);
862 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
863 gen_rtx_REG (QImode, SP_REGNO),
864 GEN_INT (size)));
865 RTX_FRAME_RELATED_P (insn) = 1;
867 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
869 if (c4x_isr_reg_used_p (regno))
871 if (regno == DP_REGNO)
873 insn = emit_insn (gen_push_dp ());
874 RTX_FRAME_RELATED_P (insn) = 1;
876 else
878 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
879 RTX_FRAME_RELATED_P (insn) = 1;
880 if (IS_EXT_REGNO (regno))
882 insn = emit_insn (gen_pushqf
883 (gen_rtx_REG (QFmode, regno)));
884 RTX_FRAME_RELATED_P (insn) = 1;
889 /* We need to clear the repeat mode flag if the ISR is
890 going to use a RPTB instruction or uses the RC, RS, or RE
891 registers. */
892 if (regs_ever_live[RC_REGNO]
893 || regs_ever_live[RS_REGNO]
894 || regs_ever_live[RE_REGNO])
896 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
897 RTX_FRAME_RELATED_P (insn) = 1;
900 /* Reload DP reg if we are paranoid about some turkey
901 violating small memory model rules. */
902 if (TARGET_SMALL && TARGET_PARANOID)
904 insn = emit_insn (gen_set_ldp_prologue
905 (gen_rtx_REG (QImode, DP_REGNO),
906 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
907 RTX_FRAME_RELATED_P (insn) = 1;
910 else
912 if (frame_pointer_needed)
914 if ((size != 0)
915 || (current_function_args_size != 0)
916 || (optimize < 2))
918 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
919 RTX_FRAME_RELATED_P (insn) = 1;
920 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
921 gen_rtx_REG (QImode, SP_REGNO)));
922 RTX_FRAME_RELATED_P (insn) = 1;
923 dont_push_ar3 = 1;
925 else
927 /* Since ar3 is not used, we don't need to push it. */
928 dont_push_ar3 = 1;
931 else
933 /* If we use ar3, we need to push it. */
934 dont_push_ar3 = 0;
935 if ((size != 0) || (current_function_args_size != 0))
937 /* If we are omitting the frame pointer, we still have
938 to make space for it so the offsets are correct
939 unless we don't use anything on the stack at all. */
940 size += 1;
944 if (size > 32767)
946 /* Local vars are too big, it will take multiple operations
947 to increment SP. */
948 if (TARGET_C3X)
950 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
951 GEN_INT(size >> 16)));
952 RTX_FRAME_RELATED_P (insn) = 1;
953 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
954 gen_rtx_REG (QImode, R1_REGNO),
955 GEN_INT(-16)));
956 RTX_FRAME_RELATED_P (insn) = 1;
958 else
960 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
961 GEN_INT(size & ~0xffff)));
962 RTX_FRAME_RELATED_P (insn) = 1;
964 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
965 gen_rtx_REG (QImode, R1_REGNO),
966 GEN_INT(size & 0xffff)));
967 RTX_FRAME_RELATED_P (insn) = 1;
968 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
969 gen_rtx_REG (QImode, SP_REGNO),
970 gen_rtx_REG (QImode, R1_REGNO)));
971 RTX_FRAME_RELATED_P (insn) = 1;
973 else if (size != 0)
975 /* Local vars take up less than 32767 words, so we can directly
976 add the number. */
977 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
978 gen_rtx_REG (QImode, SP_REGNO),
979 GEN_INT (size)));
980 RTX_FRAME_RELATED_P (insn) = 1;
983 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
985 if (regs_ever_live[regno] && ! call_used_regs[regno])
987 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
989 if (TARGET_PRESERVE_FLOAT)
991 insn = emit_insn (gen_pushqi
992 (gen_rtx_REG (QImode, regno)));
993 RTX_FRAME_RELATED_P (insn) = 1;
995 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
996 RTX_FRAME_RELATED_P (insn) = 1;
998 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
1000 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1001 RTX_FRAME_RELATED_P (insn) = 1;
1009 void
1010 c4x_expand_epilogue()
1012 int regno;
1013 int jump = 0;
1014 int dont_pop_ar3;
1015 rtx insn;
1016 int size = get_frame_size ();
1018 /* For __assembler__ function build no epilogue. */
1019 if (c4x_assembler_function_p ())
1021 insn = emit_jump_insn (gen_return_from_epilogue ());
1022 RTX_FRAME_RELATED_P (insn) = 1;
1023 return;
1026 /* For __interrupt__ function build specific epilogue. */
1027 if (c4x_interrupt_function_p ())
1029 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1031 if (! c4x_isr_reg_used_p (regno))
1032 continue;
1033 if (regno == DP_REGNO)
1035 insn = emit_insn (gen_pop_dp ());
1036 RTX_FRAME_RELATED_P (insn) = 1;
1038 else
1040 /* We have to use unspec because the compiler will delete insns
1041 that are not call-saved. */
1042 if (IS_EXT_REGNO (regno))
1044 insn = emit_insn (gen_popqf_unspec
1045 (gen_rtx_REG (QFmode, regno)));
1046 RTX_FRAME_RELATED_P (insn) = 1;
1048 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1049 RTX_FRAME_RELATED_P (insn) = 1;
1052 if (size)
1054 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1055 gen_rtx_REG (QImode, SP_REGNO),
1056 GEN_INT(size)));
1057 RTX_FRAME_RELATED_P (insn) = 1;
1058 insn = emit_insn (gen_popqi
1059 (gen_rtx_REG (QImode, AR3_REGNO)));
1060 RTX_FRAME_RELATED_P (insn) = 1;
1062 insn = emit_insn (gen_pop_st ());
1063 RTX_FRAME_RELATED_P (insn) = 1;
1064 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1065 RTX_FRAME_RELATED_P (insn) = 1;
1067 else
1069 if (frame_pointer_needed)
1071 if ((size != 0)
1072 || (current_function_args_size != 0)
1073 || (optimize < 2))
1075 insn = emit_insn
1076 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1077 gen_rtx_MEM (QImode,
1078 gen_rtx_PLUS
1079 (QImode, gen_rtx_REG (QImode,
1080 AR3_REGNO),
1081 GEN_INT(-1)))));
1082 RTX_FRAME_RELATED_P (insn) = 1;
1084 /* We already have the return value and the fp,
1085 so we need to add those to the stack. */
1086 size += 2;
1087 jump = 1;
1088 dont_pop_ar3 = 1;
1090 else
1092 /* Since ar3 is not used for anything, we don't need to
1093 pop it. */
1094 dont_pop_ar3 = 1;
1097 else
1099 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1100 if (size || current_function_args_size)
1102 /* If we are ommitting the frame pointer, we still have
1103 to make space for it so the offsets are correct
1104 unless we don't use anything on the stack at all. */
1105 size += 1;
1109 /* Now restore the saved registers, putting in the delayed branch
1110 where required. */
1111 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1113 if (regs_ever_live[regno] && ! call_used_regs[regno])
1115 if (regno == AR3_REGNO && dont_pop_ar3)
1116 continue;
1118 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1120 insn = emit_insn (gen_popqf_unspec
1121 (gen_rtx_REG (QFmode, regno)));
1122 RTX_FRAME_RELATED_P (insn) = 1;
1123 if (TARGET_PRESERVE_FLOAT)
1125 insn = emit_insn (gen_popqi_unspec
1126 (gen_rtx_REG (QImode, regno)));
1127 RTX_FRAME_RELATED_P (insn) = 1;
1130 else
1132 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1133 RTX_FRAME_RELATED_P (insn) = 1;
1138 if (frame_pointer_needed)
1140 if ((size != 0)
1141 || (current_function_args_size != 0)
1142 || (optimize < 2))
1144 /* Restore the old FP. */
1145 insn = emit_insn
1146 (gen_movqi
1147 (gen_rtx_REG (QImode, AR3_REGNO),
1148 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1150 RTX_FRAME_RELATED_P (insn) = 1;
1154 if (size > 32767)
1156 /* Local vars are too big, it will take multiple operations
1157 to decrement SP. */
1158 if (TARGET_C3X)
1160 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1161 GEN_INT(size >> 16)));
1162 RTX_FRAME_RELATED_P (insn) = 1;
1163 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1164 gen_rtx_REG (QImode, R3_REGNO),
1165 GEN_INT(-16)));
1166 RTX_FRAME_RELATED_P (insn) = 1;
1168 else
1170 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1171 GEN_INT(size & ~0xffff)));
1172 RTX_FRAME_RELATED_P (insn) = 1;
1174 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1175 gen_rtx_REG (QImode, R3_REGNO),
1176 GEN_INT(size & 0xffff)));
1177 RTX_FRAME_RELATED_P (insn) = 1;
1178 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1179 gen_rtx_REG (QImode, SP_REGNO),
1180 gen_rtx_REG (QImode, R3_REGNO)));
1181 RTX_FRAME_RELATED_P (insn) = 1;
1183 else if (size != 0)
1185 /* Local vars take up less than 32768 words, so we can directly
1186 subtract the number. */
1187 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1188 gen_rtx_REG (QImode, SP_REGNO),
1189 GEN_INT(size)));
1190 RTX_FRAME_RELATED_P (insn) = 1;
1193 if (jump)
1195 insn = emit_jump_insn (gen_return_indirect_internal
1196 (gen_rtx_REG (QImode, R2_REGNO)));
1197 RTX_FRAME_RELATED_P (insn) = 1;
1199 else
1201 insn = emit_jump_insn (gen_return_from_epilogue ());
1202 RTX_FRAME_RELATED_P (insn) = 1;
/* Return nonzero when the current function needs no epilogue code at
   all: only after reload, and only for an ordinary (non-interrupt,
   non-assembler) function with no frame, no stack arguments, no
   alloca, full optimization, and no call-saved registers live other
   than AR3.
   NOTE(review): the `int' return-type line sits just above this chunk
   of the file.  */
1209 c4x_null_epilogue_p ()
1211 int regno;
1213 if (reload_completed
1214 && ! c4x_assembler_function_p ()
1215 && ! c4x_interrupt_function_p ()
1216 && ! current_function_calls_alloca
1217 && ! current_function_args_size
1218 && ! (profile_block_flag == 2)
1219 && ! (optimize < 2)
1220 && ! get_frame_size ())
/* Any live call-saved register other than AR3 would have to be
   restored, so a null epilogue is impossible.  */
1222 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1223 if (regs_ever_live[regno] && ! call_used_regs[regno]
1224 && (regno != AR3_REGNO))
1225 return 0;
1226 return 1;
1228 return 0;
/* Expand a move of OPERANDS[1] into OPERANDS[0] in mode MODE, emitting
   any auxiliary insns (DP-register loads, constant-pool references,
   big-constant load patterns) that the plain movM pattern cannot
   express.  Return 1 when the whole move has been emitted here, or 0
   when the caller should still emit the normal pattern with the
   possibly rewritten operands.
   NOTE(review): the `int' return-type line sits just above this chunk
   of the file.  */
1233 c4x_emit_move_sequence (operands, mode)
1234 rtx *operands;
1235 enum machine_mode mode;
1237 rtx op0 = operands[0];
1238 rtx op1 = operands[1];
/* Memory-to-memory moves need the source in a register first, except
   for a small "stik" constant stored directly (but not pushed).  */
1240 if (! reload_in_progress
1241 && ! REG_P (op0)
1242 && ! REG_P (op1)
1243 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1244 op1 = force_reg (mode, op1);
1246 if (GET_CODE (op1) == LO_SUM
1247 && GET_MODE (op1) == Pmode
1248 && dp_reg_operand (XEXP (op1, 0), mode))
1250 /* expand_increment will sometimes create a LO_SUM immediate
1251 address. */
1252 op1 = XEXP (op1, 1);
1254 else if (symbolic_address_operand (op1, mode))
1256 if (TARGET_LOAD_ADDRESS)
1258 /* Alias analysis seems to do a better job if we force
1259 constant addresses to memory after reload. */
1260 emit_insn (gen_load_immed_address (op0, op1));
1261 return 1;
1263 else
1265 /* Stick symbol or label address into the constant pool. */
1266 op1 = force_const_mem (Pmode, op1);
1269 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1271 /* We could be a lot smarter about loading some of these
1272 constants... */
1273 op1 = force_const_mem (mode, op1);
1276 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1277 and emit associated (HIGH (SYMREF)) if large memory model.
1278 c4x_legitimize_address could be used to do this,
1279 perhaps by calling validize_address. */
1280 if (TARGET_EXPOSE_LDP
1281 && ! (reload_in_progress || reload_completed)
1282 && GET_CODE (op1) == MEM
1283 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1285 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1286 if (! TARGET_SMALL)
1287 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1288 op1 = change_address (op1, mode,
1289 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
/* Same rewrite for a symbolic destination address.  */
1292 if (TARGET_EXPOSE_LDP
1293 && ! (reload_in_progress || reload_completed)
1294 && GET_CODE (op0) == MEM
1295 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1297 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1298 if (! TARGET_SMALL)
1299 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1300 op0 = change_address (op0, mode,
1301 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1304 if (GET_CODE (op0) == SUBREG
1305 && mixed_subreg_operand (op0, mode))
1307 /* We should only generate these mixed mode patterns
1308 during RTL generation. If we need do it later on
1309 then we'll have to emit patterns that won't clobber CC. */
1310 if (reload_in_progress || reload_completed)
1311 abort ();
1312 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1313 op0 = SUBREG_REG (op0);
1314 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1316 op0 = copy_rtx (op0);
1317 PUT_MODE (op0, QImode);
1319 else
1320 abort ();
1322 if (mode == QFmode)
1323 emit_insn (gen_storeqf_int_clobber (op0, op1));
1324 else
1325 abort ();
1326 return 1;
1329 if (GET_CODE (op1) == SUBREG
1330 && mixed_subreg_operand (op1, mode))
1332 /* We should only generate these mixed mode patterns
1333 during RTL generation. If we need do it later on
1334 then we'll have to emit patterns that won't clobber CC. */
1335 if (reload_in_progress || reload_completed)
1336 abort ();
1337 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1338 op1 = SUBREG_REG (op1);
1339 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1341 op1 = copy_rtx (op1);
1342 PUT_MODE (op1, QImode);
1344 else
1345 abort ();
1347 if (mode == QFmode)
1348 emit_insn (gen_loadqf_int_clobber (op0, op1));
1349 else
1350 abort ();
1351 return 1;
/* Integer constants too wide for a single immediate field go through
   dedicated big-constant patterns.  */
1354 if (mode == QImode
1355 && reg_operand (op0, mode)
1356 && const_int_operand (op1, mode)
1357 && ! IS_INT16_CONST (INTVAL (op1))
1358 && ! IS_HIGH_CONST (INTVAL (op1)))
1360 emit_insn (gen_loadqi_big_constant (op0, op1));
1361 return 1;
1364 if (mode == HImode
1365 && reg_operand (op0, mode)
1366 && const_int_operand (op1, mode))
1368 emit_insn (gen_loadhi_big_constant (op0, op1));
1369 return 1;
1372 /* Adjust operands in case we have modified them. */
1373 operands[0] = op0;
1374 operands[1] = op1;
1376 /* Emit normal pattern. */
1377 return 0;
1381 void
1382 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1383 rtx libcall;
1384 enum rtx_code code;
1385 enum machine_mode dmode;
1386 enum machine_mode smode;
1387 int noperands;
1388 rtx *operands;
1390 rtx ret;
1391 rtx insns;
1392 rtx equiv;
1394 start_sequence ();
1395 switch (noperands)
1397 case 2:
1398 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1399 operands[1], smode);
1400 equiv = gen_rtx (code, dmode, operands[1]);
1401 break;
1403 case 3:
1404 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1405 operands[1], smode, operands[2], smode);
1406 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1407 break;
1409 default:
1410 abort ();
1413 insns = get_insns ();
1414 end_sequence ();
1415 emit_libcall_block (insns, operands[0], ret, equiv);
1419 void
1420 c4x_emit_libcall3 (libcall, code, mode, operands)
1421 rtx libcall;
1422 enum rtx_code code;
1423 enum machine_mode mode;
1424 rtx *operands;
1426 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
1430 void
1431 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1432 rtx libcall;
1433 enum rtx_code code;
1434 enum machine_mode mode;
1435 rtx *operands;
1437 rtx ret;
1438 rtx insns;
1439 rtx equiv;
1441 start_sequence ();
1442 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1443 operands[1], mode, operands[2], mode);
1444 equiv = gen_rtx_TRUNCATE (mode,
1445 gen_rtx_LSHIFTRT (HImode,
1446 gen_rtx_MULT (HImode,
1447 gen_rtx (code, HImode, operands[1]),
1448 gen_rtx (code, HImode, operands[2])),
1449 GEN_INT (32)));
1450 insns = get_insns ();
1451 end_sequence ();
1452 emit_libcall_block (insns, operands[0], ret, equiv);
1456 /* Set the SYMBOL_REF_FLAG for a function decl. However, we do not
1457 yet use this info. */
1458 void
1459 c4x_encode_section_info (decl)
1460 tree decl;
/* The disabled arm keyed off the decl's type being FUNCTION_TYPE; the
   live arm instead flags the SYMBOL_REF of every FUNCTION_DECL.  */
1462 #if 0
1463 if (TREE_CODE (TREE_TYPE (decl)) == FUNCTION_TYPE)
1464 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1465 #else
1466 if (TREE_CODE (decl) == FUNCTION_DECL)
1467 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
1468 #endif
/* GO_IF_LEGITIMATE_ADDRESS worker: return nonzero if ADDR is a valid
   memory address for MODE.  The address is decomposed into an optional
   base register, index register, and displacement, each validated
   below.  With STRICT nonzero, hard register numbers must satisfy
   REGNO_OK_FOR_BASE_P / REGNO_OK_FOR_INDEX_P; otherwise pseudos are
   also accepted.
   NOTE(review): the `int' return-type line sits just above this chunk
   of the file.  */
1473 c4x_check_legit_addr (mode, addr, strict)
1474 enum machine_mode mode;
1475 rtx addr;
1476 int strict;
1478 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1479 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1480 rtx disp = NULL_RTX; /* Displacement. */
1481 enum rtx_code code;
1483 code = GET_CODE (addr);
1484 switch (code)
1486 /* Register indirect with auto increment/decrement. We don't
1487 allow SP here---push_operand should recognise an operand
1488 being pushed on the stack. */
1490 case PRE_DEC:
1491 case PRE_INC:
1492 case POST_DEC:
1493 if (mode != QImode && mode != QFmode)
1494 return 0;
/* Fall through: share the base-register check with POST_INC.  */
1496 case POST_INC:
1497 base = XEXP (addr, 0);
1498 if (! REG_P (base))
1499 return 0;
1500 break;
1502 case PRE_MODIFY:
1503 case POST_MODIFY:
1505 rtx op0 = XEXP (addr, 0);
1506 rtx op1 = XEXP (addr, 1);
1508 if (mode != QImode && mode != QFmode)
1509 return 0;
1511 if (! REG_P (op0)
1512 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1513 return 0;
1514 base = XEXP (op1, 0);
1515 if (base != op0)
1516 return 0;
1517 if (REG_P (XEXP (op1, 1)))
1518 indx = XEXP (op1, 1);
1519 else
1520 disp = XEXP (op1, 1);
1522 break;
1524 /* Register indirect. */
1525 case REG:
1526 base = addr;
1527 break;
1529 /* Register indirect with displacement or index. */
1530 case PLUS:
1532 rtx op0 = XEXP (addr, 0);
1533 rtx op1 = XEXP (addr, 1);
1534 enum rtx_code code0 = GET_CODE (op0);
1536 switch (code0)
1538 case REG:
1539 if (REG_P (op1))
1541 base = op0; /* Base + index. */
1542 indx = op1;
/* If the operands came in index+base order, swap them around.  */
1543 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1545 base = op1;
1546 indx = op0;
1549 else
1551 base = op0; /* Base + displacement. */
1552 disp = op1;
1554 break;
1556 default:
1557 return 0;
1560 break;
1562 /* Direct addressing with DP register. */
1563 case LO_SUM:
1565 rtx op0 = XEXP (addr, 0);
1566 rtx op1 = XEXP (addr, 1);
1568 /* HImode and HFmode direct memory references aren't truly
1569 offsettable (consider case at end of data page). We
1570 probably get better code by loading a pointer and using an
1571 indirect memory reference. */
1572 if (mode == HImode || mode == HFmode)
1573 return 0;
1575 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1576 return 0;
1578 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1579 return 1;
1581 if (GET_CODE (op1) == CONST)
1582 return 1;
1583 return 0;
1585 break;
1587 /* Direct addressing with some work for the assembler... */
1588 case CONST:
1589 /* Direct addressing. */
1590 case LABEL_REF:
1591 case SYMBOL_REF:
1592 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1593 return 1;
1594 /* These need to be converted to a LO_SUM (...).
1595 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1596 return 0;
1598 /* Do not allow direct memory access to absolute addresses.
1599 This is more pain than it's worth, especially for the
1600 small memory model where we can't guarantee that
1601 this address is within the data page---we don't want
1602 to modify the DP register in the small memory model,
1603 even temporarily, since an interrupt can sneak in.... */
1604 case CONST_INT:
1605 return 0;
1607 /* Indirect indirect addressing. */
1608 case MEM:
1609 return 0;
1611 case CONST_DOUBLE:
1612 fatal_insn ("Using CONST_DOUBLE for address", addr);
1614 default:
1615 return 0;
1618 /* Validate the base register. */
1619 if (base)
1621 /* Check that the address is offsettable for HImode and HFmode. */
1622 if (indx && (mode == HImode || mode == HFmode))
1623 return 0;
1625 /* Handle DP based stuff. */
1626 if (REGNO (base) == DP_REGNO)
1627 return 1;
1628 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1629 return 0;
1630 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1631 return 0;
1634 /* Now validate the index register. */
1635 if (indx)
1637 if (GET_CODE (indx) != REG)
1638 return 0;
1639 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1640 return 0;
1641 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1642 return 0;
1645 /* Validate displacement. */
1646 if (disp)
1648 if (GET_CODE (disp) != CONST_INT)
1649 return 0;
1650 if (mode == HImode || mode == HFmode)
1652 /* The offset displacement must be legitimate. */
1653 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1654 return 0;
1656 else
1658 if (! IS_DISP8_CONST (INTVAL (disp)))
1659 return 0;
1661 /* Can't add an index with a disp. */
1662 if (indx)
1663 return 0;
1665 return 1;
/* LEGITIMIZE_ADDRESS worker: rewrite ORIG (when it is a SYMBOL_REF or
   LABEL_REF) into a legitimate address for MODE, or return NULL_RTX to
   leave ORIG unchanged.
   NOTE(review): the `rtx' return-type line sits just above this chunk;
   also, both parameters are marked ATTRIBUTE_UNUSED yet are read
   below — the markers look stale; confirm against the master tree.  */
1670 c4x_legitimize_address (orig, mode)
1671 rtx orig ATTRIBUTE_UNUSED;
1672 enum machine_mode mode ATTRIBUTE_UNUSED;
1674 if (GET_CODE (orig) == SYMBOL_REF
1675 || GET_CODE (orig) == LABEL_REF)
1677 if (mode == HImode || mode == HFmode)
1679 /* We need to force the address into
1680 a register so that it is offsettable. */
1681 rtx addr_reg = gen_reg_rtx (Pmode);
1682 emit_move_insn (addr_reg, orig);
1683 return addr_reg;
1685 else
1687 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
/* Large memory model: load the data page pointer explicitly.  */
1689 if (! TARGET_SMALL)
1690 emit_insn (gen_set_ldp (dp_reg, orig));
1692 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1696 return NULL_RTX;
1700 /* Provide the costs of an addressing mode that contains ADDR.
1701 If ADDR is not a valid address, its cost is irrelevant.
1702 This is used in cse and loop optimisation to determine
1703 if it is worthwhile storing a common address into a register.
1704 Unfortunately, the C4x address cost depends on other operands. */
1706 int
1707 c4x_address_cost (addr)
1708 rtx addr;
1710 switch (GET_CODE (addr))
1712 case REG:
1713 return 1;
1715 case POST_INC:
1716 case POST_DEC:
1717 case PRE_INC:
1718 case PRE_DEC:
1719 return 1;
1721 /* These shouldn't be directly generated. */
1722 case SYMBOL_REF:
1723 case LABEL_REF:
1724 case CONST:
1725 return 10;
1727 case LO_SUM:
1729 rtx op1 = XEXP (addr, 1);
1731 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1732 return TARGET_SMALL ? 3 : 4;
1734 if (GET_CODE (op1) == CONST)
1736 rtx offset = const0_rtx;
/* Split a (CONST (PLUS sym offset)) into symbol and offset.  */
1738 op1 = eliminate_constant_term (op1, &offset);
1740 /* ??? These costs need rethinking... */
1741 if (GET_CODE (op1) == LABEL_REF)
1742 return 3;
1744 if (GET_CODE (op1) != SYMBOL_REF)
1745 return 4;
1747 if (INTVAL (offset) == 0)
1748 return 3;
1750 return 4;
/* A LO_SUM whose second operand is not symbolic is malformed.  */
1752 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1754 break;
1756 case PLUS:
1758 register rtx op0 = XEXP (addr, 0);
1759 register rtx op1 = XEXP (addr, 1);
1761 if (GET_CODE (op0) != REG)
1762 break;
1764 switch (GET_CODE (op1))
1766 default:
1767 break;
1769 case REG:
1770 /* This cost for REG+REG must be greater than the cost
1771 for REG if we want autoincrement addressing modes. */
1772 return 2;
1774 case CONST_INT:
1775 /* The following tries to improve GIV combination
1776 in strength reduce but appears not to help. */
1777 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1778 return 1;
1780 if (IS_DISP1_CONST (INTVAL (op1)))
1781 return 1;
1783 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
1784 return 2;
1786 return 3;
1789 default:
1790 break;
/* Fallback cost for anything not handled above.  */
1793 return 4;
/* Emit a compare of X against Y and return the ST condition-code
   register in the mode chosen by SELECT_CC_MODE.  Returns NULL_RTX for
   signed relational codes under CC_NOOVmode, which cannot be tested
   without a valid overflow flag.
   NOTE(review): the `rtx' return-type line sits just above this chunk
   of the file.  */
1798 c4x_gen_compare_reg (code, x, y)
1799 enum rtx_code code;
1800 rtx x, y;
1802 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1803 rtx cc_reg;
1805 if (mode == CC_NOOVmode
1806 && (code == LE || code == GE || code == LT || code == GT))
1807 return NULL_RTX;
1809 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1810 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1811 gen_rtx_COMPARE (mode, x, y)));
1812 return cc_reg;
1815 char *
1816 c4x_output_cbranch (form, seq)
1817 const char *form;
1818 rtx seq;
1820 int delayed = 0;
1821 int annultrue = 0;
1822 int annulfalse = 0;
1823 rtx delay;
1824 char *cp;
1825 static char str[100];
1827 if (final_sequence)
1829 delay = XVECEXP (final_sequence, 0, 1);
1830 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1831 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1832 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1834 strcpy (str, form);
1835 cp = &str [strlen (str)];
1836 if (delayed)
1838 *cp++ = '%';
1839 *cp++ = '#';
1841 if (annultrue)
1843 *cp++ = 'a';
1844 *cp++ = 't';
1846 if (annulfalse)
1848 *cp++ = 'a';
1849 *cp++ = 'f';
1851 *cp++ = '\t';
1852 *cp++ = '%';
1853 *cp++ = 'l';
1854 *cp++ = '1';
1855 *cp = 0;
1856 return str;
/* Print operand OP to FILE for the assembler, honouring the %<LETTER>
   modifiers handled below ('#', 'A', 'H', 'I', 'L', 'N', 'K', 'M',
   'O', 'C', 'U'); with no letter (or after a letter that only adjusts
   CODE), the operand is printed according to its rtx code.  */
1859 void
1860 c4x_print_operand (file, op, letter)
1861 FILE *file; /* File to write to. */
1862 rtx op; /* Operand to print. */
1863 int letter; /* %<letter> or 0. */
1865 rtx op1;
1866 enum rtx_code code;
1868 switch (letter)
1870 case '#': /* Delayed. */
1871 if (final_sequence)
1872 asm_fprintf (file, "d");
1873 return;
1876 code = GET_CODE (op);
1877 switch (letter)
1879 case 'A': /* Direct address. */
1880 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1881 asm_fprintf (file, "@");
1882 break;
1884 case 'H': /* Sethi. */
1885 output_addr_const (file, op);
1886 return;
1888 case 'I': /* Reversed condition. */
1889 code = reverse_condition (code);
1890 break;
1892 case 'L': /* Log 2 of constant. */
1893 if (code != CONST_INT)
1894 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1895 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1896 return;
1898 case 'N': /* Ones complement of small constant. */
1899 if (code != CONST_INT)
1900 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1901 fprintf (file, "%d", ~INTVAL (op));
1902 return;
1904 case 'K': /* Generate ldp(k) if direct address. */
1905 if (! TARGET_SMALL
1906 && code == MEM
1907 && GET_CODE (XEXP (op, 0)) == LO_SUM
1908 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1909 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1911 op1 = XEXP (XEXP (op, 0), 1);
1912 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1914 asm_fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1915 output_address (XEXP (adj_offsettable_operand (op, 1), 0));
1916 asm_fprintf (file, "\n");
1919 return;
1921 case 'M': /* Generate ldp(k) if direct address. */
1922 if (! TARGET_SMALL /* Only used in asm statements. */
1923 && code == MEM
1924 && (GET_CODE (XEXP (op, 0)) == CONST
1925 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1927 asm_fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1928 output_address (XEXP (op, 0));
1929 asm_fprintf (file, "\n\t");
1931 return;
1933 case 'O': /* Offset address. */
1934 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1935 break;
1936 else if (code == MEM)
1937 output_address (XEXP (adj_offsettable_operand (op, 1), 0));
1938 else if (code == REG)
1939 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1940 else
1941 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1942 return;
1944 case 'C': /* Call. */
1945 break;
1947 case 'U': /* Call/callu. */
1948 if (code != SYMBOL_REF)
1949 asm_fprintf (file, "u");
1950 return;
1952 default:
1953 break;
/* No (more) letter handling: print by rtx code.  */
1956 switch (code)
1958 case REG:
1959 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1960 && ! TARGET_TI)
1961 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1962 else
1963 fprintf (file, "%s", reg_names[REGNO (op)]);
1964 break;
1966 case MEM:
1967 output_address (XEXP (op, 0));
1968 break;
1970 case CONST_DOUBLE:
1972 char str[30];
1973 REAL_VALUE_TYPE r;
1975 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
1976 REAL_VALUE_TO_DECIMAL (r, "%20f", str);
1977 fprintf (file, "%s", str);
1979 break;
/* NOTE(review): INTVAL is a HOST_WIDE_INT; "%d" assumes it fits in
   int — confirm against HOST_WIDE_INT width on the build host.  */
1981 case CONST_INT:
1982 fprintf (file, "%d", INTVAL (op));
1983 break;
1985 case NE:
1986 asm_fprintf (file, "ne");
1987 break;
1989 case EQ:
1990 asm_fprintf (file, "eq");
1991 break;
1993 case GE:
1994 asm_fprintf (file, "ge");
1995 break;
1997 case GT:
1998 asm_fprintf (file, "gt");
1999 break;
2001 case LE:
2002 asm_fprintf (file, "le");
2003 break;
2005 case LT:
2006 asm_fprintf (file, "lt");
2007 break;
2009 case GEU:
2010 asm_fprintf (file, "hs");
2011 break;
2013 case GTU:
2014 asm_fprintf (file, "hi");
2015 break;
2017 case LEU:
2018 asm_fprintf (file, "ls");
2019 break;
2021 case LTU:
2022 asm_fprintf (file, "lo");
2023 break;
2025 case SYMBOL_REF:
2026 output_addr_const (file, op);
2027 break;
2029 case CONST:
2030 output_addr_const (file, XEXP (op, 0));
2031 break;
2033 case CODE_LABEL:
2034 break;
2036 default:
2037 fatal_insn ("c4x_print_operand: Bad operand case", op);
2038 break;
/* Print the memory address ADDR to FILE in C4x assembler syntax:
   register indirect ("*ARn"), auto increment/decrement ("*ARn++",
   "*--ARn", ...), pre/post modify with register or constant offset,
   base plus index/displacement, and "@symbol" direct addressing.  */
2043 void
2044 c4x_print_operand_address (file, addr)
2045 FILE *file;
2046 rtx addr;
2048 switch (GET_CODE (addr))
2050 case REG:
2051 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2052 break;
2054 case PRE_DEC:
2055 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2056 break;
2058 case POST_INC:
2059 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2060 break;
2062 case POST_MODIFY:
2064 rtx op0 = XEXP (XEXP (addr, 1), 0);
2065 rtx op1 = XEXP (XEXP (addr, 1), 1);
2067 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2068 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2069 reg_names[REGNO (op1)]);
2070 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2071 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2072 INTVAL (op1));
2073 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2074 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2075 -INTVAL (op1));
2076 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2077 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2078 reg_names[REGNO (op1)]);
2079 else
2080 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2082 break;
2084 case PRE_MODIFY:
2086 rtx op0 = XEXP (XEXP (addr, 1), 0);
2087 rtx op1 = XEXP (XEXP (addr, 1), 1);
2089 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2090 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2091 reg_names[REGNO (op1)]);
2092 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2093 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2094 INTVAL (op1));
2095 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2096 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2097 -INTVAL (op1));
2098 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2099 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2100 reg_names[REGNO (op1)]);
2101 else
2102 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2104 break;
2106 case PRE_INC:
2107 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2108 break;
2110 case POST_DEC:
2111 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2112 break;
2114 case PLUS: /* Indirect with displacement. */
2116 rtx op0 = XEXP (addr, 0);
2117 rtx op1 = XEXP (addr, 1);
2119 if (REG_P (op0))
2121 if (REG_P (op1))
2123 if (IS_INDEX_REG (op0))
2125 fprintf (file, "*+%s(%s)",
2126 reg_names[REGNO (op1)],
2127 reg_names[REGNO (op0)]); /* Index + base. */
2129 else
2131 fprintf (file, "*+%s(%s)",
2132 reg_names[REGNO (op0)],
2133 reg_names[REGNO (op1)]); /* Base + index. */
2136 else if (INTVAL (op1) < 0)
2138 fprintf (file, "*-%s(%d)",
2139 reg_names[REGNO (op0)],
2140 -INTVAL (op1)); /* Base - displacement. */
2142 else
2144 fprintf (file, "*+%s(%d)",
2145 reg_names[REGNO (op0)],
2146 INTVAL (op1)); /* Base + displacement. */
2149 else
2150 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2152 break;
2154 case LO_SUM:
2156 rtx op0 = XEXP (addr, 0);
2157 rtx op1 = XEXP (addr, 1);
/* DP-relative direct address: print just the symbolic part.  */
2159 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2160 c4x_print_operand_address (file, op1)ᅠ;
2161 else
2162 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2164 break;
2166 case CONST:
2167 case SYMBOL_REF:
2168 case LABEL_REF:
2169 fprintf (file, "@");
2170 output_addr_const (file, addr);
2171 break;
2173 /* We shouldn't access CONST_INT addresses. */
2174 case CONST_INT:
2176 default:
2177 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2178 break;
2183 /* Return nonzero if the floating point operand will fit
2184 in the immediate field. */
2186 static int
2187 c4x_immed_float_p (op)
2188 rtx op;
2190 long convval[2];
2191 int exponent;
2192 REAL_VALUE_TYPE r;
2194 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2195 if (GET_MODE (op) == HFmode)
2196 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2197 else
2199 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2200 convval[1] = 0;
2203 /* Sign extend exponent. */
2204 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2205 if (exponent == -128)
2206 return 1; /* 0.0 */
2207 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2208 return 0; /* Precision doesn't fit. */
2209 return (exponent <= 7) /* Positive exp. */
2210 && (exponent >= -7); /* Negative exp. */
2214 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
2215 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2217 None of the last four instructions from the bottom of the block can
2218 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2219 BcondAT or RETIcondD.
2221 This routine scans the four previous insns for a jump insn, and if
2222 one is found, returns 1 so that we bung in a nop instruction.
2223 This simple minded strategy will add a nop, when it may not
2224 be required. Say when there is a JUMP_INSN near the end of the
2225 block that doesn't get converted into a delayed branch.
2227 Note that we cannot have a call insn, since we don't generate
2228 repeat loops with calls in them (although I suppose we could, but
2229 there's no benefit.)
2231 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
/* NOTE(review): the `int' return-type line sits just above this chunk
   of the file.  */
2234 c4x_rptb_nop_p (insn)
2235 rtx insn;
2237 rtx start_label;
2238 int i;
2240 /* Extract the start label from the jump pattern (rptb_end). */
2241 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2243 /* If there is a label at the end of the loop we must insert
2244 a NOP. */
2245 do {
2246 insn = previous_insn (insn);
2247 } while (GET_CODE (insn) == NOTE
2248 || GET_CODE (insn) == USE
2249 || GET_CODE (insn) == CLOBBER);
2250 if (GET_CODE (insn) == CODE_LABEL)
2251 return 1;
2253 for (i = 0; i < 4; i++)
2255 /* Search back for prev non-note and non-label insn. */
2256 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2257 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2259 if (insn == start_label)
2260 return i == 0;
2262 insn = previous_insn (insn);
2265 /* If we have a jump instruction we should insert a NOP. If we
2266 hit repeat block top we should only insert a NOP if the loop
2267 is empty. */
2268 if (GET_CODE (insn) == JUMP_INSN)
2269 return 1;
2270 insn = previous_insn (insn);
2272 return 0;
2276 /* The C4x looping instruction needs to be emitted at the top of the
2277 loop. Emitting the true RTL for a looping instruction at the top of
2278 the loop can cause problems with flow analysis. So instead, a dummy
2279 doloop insn is emitted at the end of the loop. This routine checks
2280 for the presence of this doloop insn and then searches back to the
2281 top of the loop, where it inserts the true looping insn (provided
2282 there are no instructions in the loop which would cause problems).
2283 Any additional labels can be emitted at this point. In addition, if
2284 the desired loop count register was not allocated, this routine does
2285 nothing.
2287 Before we can create a repeat block looping instruction we have to
2288 verify that there are no jumps outside the loop and no jumps outside
2289 the loop go into this loop. This can happen in the basic blocks reorder
2290 pass. The C4x cpu can not handle this. */
2292 static int
2293 c4x_label_ref_used_p (x, code_label)
2294 rtx x, code_label;
2296 enum rtx_code code;
2297 int i, j;
2298 const char *fmt;
2300 if (x == 0)
2301 return 0;
2303 code = GET_CODE (x);
2304 if (code == LABEL_REF)
2305 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2307 fmt = GET_RTX_FORMAT (code);
2308 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2310 if (fmt[i] == 'e')
2312 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2313 return 1;
2315 else if (fmt[i] == 'E')
2316 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2317 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2318 return 1;
2320 return 0;
/* Return nonzero if the region between START_LABEL and the rptb_end
   jump INSN is a closed loop body: no jump from outside targets a
   label inside it, and no jump inside targets a label outside it.
   Only then is a RPTB/RPTS instruction safe.  */
2324 static int
2325 c4x_rptb_valid_p (insn, start_label)
2326 rtx insn, start_label;
2328 rtx end = insn;
2329 rtx start;
2330 rtx tmp;
2332 /* Find the start label. */
2333 for (; insn; insn = PREV_INSN (insn))
2334 if (insn == start_label)
2335 break;
2337 /* Not found, then we can not use a rptb or rpts. The label was
2338 probably moved by the basic block reorder pass. */
2339 if (! insn)
2340 return 0;
2342 start = insn;
2343 /* If any jump jumps inside this block then we must fail. */
2344 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2346 if (GET_CODE (insn) == CODE_LABEL)
2348 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2349 if (GET_CODE (tmp) == JUMP_INSN
2350 && c4x_label_ref_used_p (tmp, insn))
2351 return 0;
/* Same scan for labels located after the loop body.  */
2354 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2356 if (GET_CODE (insn) == CODE_LABEL)
2358 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2359 if (GET_CODE (tmp) == JUMP_INSN
2360 && c4x_label_ref_used_p (tmp, insn))
2361 return 0;
2364 /* If any jump jumps outside this block then we must fail. */
2365 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2367 if (GET_CODE (insn) == CODE_LABEL)
2369 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2370 if (GET_CODE (tmp) == JUMP_INSN
2371 && c4x_label_ref_used_p (tmp, insn))
2372 return 0;
2373 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2374 if (GET_CODE (tmp) == JUMP_INSN
2375 && c4x_label_ref_used_p (tmp, insn))
2376 return 0;
2380 /* All checks OK. */
2381 return 1;
/* Insert the real RPTB/RPTS looping insn at the top of the loop whose
   terminating rptb_end jump is INSN.  If the loop counter did not end
   up in RC, or the block has jumps in or out (c4x_rptb_valid_p), fall
   back to an explicit decrement/compare/branch sequence instead.  */
2385 void
2386 c4x_rptb_insert (insn)
2387 rtx insn;
2389 rtx end_label;
2390 rtx start_label;
2391 rtx new_start_label;
2392 rtx count_reg;
2394 /* If the count register has not been allocated to RC, say if
2395 there is a movstr pattern in the loop, then do not insert a
2396 RPTB instruction. Instead we emit a decrement and branch
2397 at the end of the loop. */
2398 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2399 if (REGNO (count_reg) != RC_REGNO)
2400 return;
2402 /* Extract the start label from the jump pattern (rptb_end). */
2403 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2405 if (! c4x_rptb_valid_p (insn, start_label))
2407 /* We can not use the rptb insn. Replace it so reorg can use
2408 the delay slots of the jump insn. */
2409 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2410 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2411 emit_insn_before (gen_bge (start_label), insn);
2412 LABEL_NUSES (start_label)++;
2413 delete_insn (insn);
2414 return;
2417 end_label = gen_label_rtx ();
2418 LABEL_NUSES (end_label)++;
2419 emit_label_after (end_label, insn);
2421 new_start_label = gen_label_rtx ();
2422 LABEL_NUSES (new_start_label)++;
/* Walk back to the old start label, retargeting any jump to it so the
   loop now begins at NEW_START_LABEL.  */
2424 for (; insn; insn = PREV_INSN (insn))
2426 if (insn == start_label)
2427 break;
2428 if (GET_CODE (insn) == JUMP_INSN &&
2429 JUMP_LABEL (insn) == start_label)
2430 redirect_jump (insn, new_start_label, 0);
2432 if (! insn)
2433 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2435 emit_label_after (new_start_label, insn);
2437 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2438 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2439 else
2440 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2441 if (LABEL_NUSES (start_label) == 0)
2442 delete_insn (start_label);
2446 /* This function is a C4x special called immediately before delayed
2447 branch scheduling. We fix up RPTB style loops that didn't get RC
2448 allocated as the loop counter. */
2450 void
2451 c4x_process_after_reload (first)
2452 rtx first;
2454 rtx insn;
2456 for (insn = first; insn; insn = NEXT_INSN (insn))
2458 /* Look for insn. */
2459 if (INSN_P (insn))
2461 int insn_code_number;
2462 rtx old;
2464 insn_code_number = recog_memoized (insn);
2466 if (insn_code_number < 0)
2467 continue;
2469 /* Insert the RTX for RPTB at the top of the loop
2470 and a label at the end of the loop. */
2471 if (insn_code_number == CODE_FOR_rptb_end)
2472 c4x_rptb_insert(insn);
2474 /* We need to split the insn here. Otherwise the calls to
2475 force_const_mem will not work for load_immed_address. */
2476 old = insn;
2478 /* Don't split the insn if it has been deleted. */
2479 if (! INSN_DELETED_P (old))
2480 insn = try_split (PATTERN(old), old, 1);
2482 /* When not optimizing, the old insn will be still left around
2483 with only the 'deleted' bit set. Transform it into a note
2484 to avoid confusion of subsequent processing. */
2485 if (INSN_DELETED_P (old))
2487 PUT_CODE (old, NOTE);
2488 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2489 NOTE_SOURCE_FILE (old) = 0;
2496 static int
2497 c4x_a_register (op)
2498 rtx op;
2500 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2504 static int
2505 c4x_x_register (op)
2506 rtx op;
2508 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2512 static int
2513 c4x_immed_int_constant (op)
2514 rtx op;
2516 if (GET_CODE (op) != CONST_INT)
2517 return 0;
2519 return GET_MODE (op) == VOIDmode
2520 || GET_MODE_CLASS (op) == MODE_INT
2521 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
2525 static int
2526 c4x_immed_float_constant (op)
2527 rtx op;
2529 if (GET_CODE (op) != CONST_DOUBLE)
2530 return 0;
2532 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2533 present this only means that a MEM rtx has been generated. It does
2534 not mean the rtx is really in memory. */
2536 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2541 c4x_shiftable_constant (op)
2542 rtx op;
2544 int i;
2545 int mask;
2546 int val = INTVAL (op);
2548 for (i = 0; i < 16; i++)
2550 if (val & (1 << i))
2551 break;
2553 mask = ((0xffff >> i) << 16) | 0xffff;
2554 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2555 : (val >> i) & mask))
2556 return i;
2557 return -1;
2562 c4x_H_constant (op)
2563 rtx op;
2565 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2570 c4x_I_constant (op)
2571 rtx op;
2573 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2578 c4x_J_constant (op)
2579 rtx op;
2581 if (TARGET_C3X)
2582 return 0;
2583 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2587 static int
2588 c4x_K_constant (op)
2589 rtx op;
2591 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2592 return 0;
2593 return IS_INT5_CONST (INTVAL (op));
2598 c4x_L_constant (op)
2599 rtx op;
2601 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2605 static int
2606 c4x_N_constant (op)
2607 rtx op;
2609 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2613 static int
2614 c4x_O_constant (op)
2615 rtx op;
2617 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2621 /* The constraints do not have to check the register class,
2622 except when needed to discriminate between the constraints.
2623 The operand has been checked by the predicates to be valid. */
2625 /* ARx + 9-bit signed const or IRn
2626 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2627 We don't include the pre/post inc/dec forms here since
2628 they are handled by the <> constraints. */
2631 c4x_Q_constraint (op)
2632 rtx op;
2634 enum machine_mode mode = GET_MODE (op);
2636 if (GET_CODE (op) != MEM)
2637 return 0;
2638 op = XEXP (op, 0);
2639 switch (GET_CODE (op))
2641 case REG:
2642 return 1;
2644 case PLUS:
2646 rtx op0 = XEXP (op, 0);
2647 rtx op1 = XEXP (op, 1);
2649 if (! REG_P (op0))
2650 return 0;
2652 if (REG_P (op1))
2653 return 1;
2655 if (GET_CODE (op1) != CONST_INT)
2656 return 0;
2658 /* HImode and HFmode must be offsettable. */
2659 if (mode == HImode || mode == HFmode)
2660 return IS_DISP8_OFF_CONST (INTVAL (op1));
2662 return IS_DISP8_CONST (INTVAL (op1));
2664 break;
2666 default:
2667 break;
2669 return 0;
2673 /* ARx + 5-bit unsigned const
2674 *ARx, *+ARx(n) for n < 32. */
2677 c4x_R_constraint (op)
2678 rtx op;
2680 enum machine_mode mode = GET_MODE (op);
2682 if (TARGET_C3X)
2683 return 0;
2684 if (GET_CODE (op) != MEM)
2685 return 0;
2686 op = XEXP (op, 0);
2687 switch (GET_CODE (op))
2689 case REG:
2690 return 1;
2692 case PLUS:
2694 rtx op0 = XEXP (op, 0);
2695 rtx op1 = XEXP (op, 1);
2697 if (! REG_P (op0))
2698 return 0;
2700 if (GET_CODE (op1) != CONST_INT)
2701 return 0;
2703 /* HImode and HFmode must be offsettable. */
2704 if (mode == HImode || mode == HFmode)
2705 return IS_UINT5_CONST (INTVAL (op1) + 1);
2707 return IS_UINT5_CONST (INTVAL (op1));
2709 break;
2711 default:
2712 break;
2714 return 0;
2718 static int
2719 c4x_R_indirect (op)
2720 rtx op;
2722 enum machine_mode mode = GET_MODE (op);
2724 if (TARGET_C3X || GET_CODE (op) != MEM)
2725 return 0;
2727 op = XEXP (op, 0);
2728 switch (GET_CODE (op))
2730 case REG:
2731 return IS_ADDR_OR_PSEUDO_REG (op);
2733 case PLUS:
2735 rtx op0 = XEXP (op, 0);
2736 rtx op1 = XEXP (op, 1);
2738 /* HImode and HFmode must be offsettable. */
2739 if (mode == HImode || mode == HFmode)
2740 return IS_ADDR_OR_PSEUDO_REG (op0)
2741 && GET_CODE (op1) == CONST_INT
2742 && IS_UINT5_CONST (INTVAL (op1) + 1);
2744 return REG_P (op0)
2745 && IS_ADDR_OR_PSEUDO_REG (op0)
2746 && GET_CODE (op1) == CONST_INT
2747 && IS_UINT5_CONST (INTVAL (op1));
2749 break;
2751 default:
2752 break;
2754 return 0;
2758 /* ARx + 1-bit unsigned const or IRn
2759 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2760 We don't include the pre/post inc/dec forms here since
2761 they are handled by the <> constraints. */
2764 c4x_S_constraint (op)
2765 rtx op;
2767 enum machine_mode mode = GET_MODE (op);
2768 if (GET_CODE (op) != MEM)
2769 return 0;
2770 op = XEXP (op, 0);
2771 switch (GET_CODE (op))
2773 case REG:
2774 return 1;
2776 case PRE_MODIFY:
2777 case POST_MODIFY:
2779 rtx op0 = XEXP (op, 0);
2780 rtx op1 = XEXP (op, 1);
2782 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2783 || (op0 != XEXP (op1, 0)))
2784 return 0;
2786 op0 = XEXP (op1, 0);
2787 op1 = XEXP (op1, 1);
2788 return REG_P (op0) && REG_P (op1);
2789 /* Pre or post_modify with a displacement of 0 or 1
2790 should not be generated. */
2792 break;
2794 case PLUS:
2796 rtx op0 = XEXP (op, 0);
2797 rtx op1 = XEXP (op, 1);
2799 if (!REG_P (op0))
2800 return 0;
2802 if (REG_P (op1))
2803 return 1;
2805 if (GET_CODE (op1) != CONST_INT)
2806 return 0;
2808 /* HImode and HFmode must be offsettable. */
2809 if (mode == HImode || mode == HFmode)
2810 return IS_DISP1_OFF_CONST (INTVAL (op1));
2812 return IS_DISP1_CONST (INTVAL (op1));
2814 break;
2816 default:
2817 break;
2819 return 0;
2823 static int
2824 c4x_S_indirect (op)
2825 rtx op;
2827 enum machine_mode mode = GET_MODE (op);
2828 if (GET_CODE (op) != MEM)
2829 return 0;
2831 op = XEXP (op, 0);
2832 switch (GET_CODE (op))
2834 case PRE_DEC:
2835 case POST_DEC:
2836 if (mode != QImode && mode != QFmode)
2837 return 0;
2838 case PRE_INC:
2839 case POST_INC:
2840 op = XEXP (op, 0);
2842 case REG:
2843 return IS_ADDR_OR_PSEUDO_REG (op);
2845 case PRE_MODIFY:
2846 case POST_MODIFY:
2848 rtx op0 = XEXP (op, 0);
2849 rtx op1 = XEXP (op, 1);
2851 if (mode != QImode && mode != QFmode)
2852 return 0;
2854 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2855 || (op0 != XEXP (op1, 0)))
2856 return 0;
2858 op0 = XEXP (op1, 0);
2859 op1 = XEXP (op1, 1);
2860 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2861 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2862 /* Pre or post_modify with a displacement of 0 or 1
2863 should not be generated. */
2866 case PLUS:
2868 rtx op0 = XEXP (op, 0);
2869 rtx op1 = XEXP (op, 1);
2871 if (REG_P (op0))
2873 /* HImode and HFmode must be offsettable. */
2874 if (mode == HImode || mode == HFmode)
2875 return IS_ADDR_OR_PSEUDO_REG (op0)
2876 && GET_CODE (op1) == CONST_INT
2877 && IS_DISP1_OFF_CONST (INTVAL (op1));
2879 if (REG_P (op1))
2880 return (IS_INDEX_OR_PSEUDO_REG (op1)
2881 && IS_ADDR_OR_PSEUDO_REG (op0))
2882 || (IS_ADDR_OR_PSEUDO_REG (op1)
2883 && IS_INDEX_OR_PSEUDO_REG (op0));
2885 return IS_ADDR_OR_PSEUDO_REG (op0)
2886 && GET_CODE (op1) == CONST_INT
2887 && IS_DISP1_CONST (INTVAL (op1));
2890 break;
2892 default:
2893 break;
2895 return 0;
2899 /* Direct memory operand. */
2902 c4x_T_constraint (op)
2903 rtx op;
2905 if (GET_CODE (op) != MEM)
2906 return 0;
2907 op = XEXP (op, 0);
2909 if (GET_CODE (op) != LO_SUM)
2911 /* Allow call operands. */
2912 return GET_CODE (op) == SYMBOL_REF
2913 && GET_MODE (op) == Pmode
2914 && SYMBOL_REF_FLAG (op);
2917 /* HImode and HFmode are not offsettable. */
2918 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2919 return 0;
2921 if ((GET_CODE (XEXP (op, 0)) == REG)
2922 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2923 return c4x_U_constraint (XEXP (op, 1));
2925 return 0;
2929 /* Symbolic operand. */
2932 c4x_U_constraint (op)
2933 rtx op;
2935 /* Don't allow direct addressing to an arbitrary constant. */
2936 return GET_CODE (op) == CONST
2937 || GET_CODE (op) == SYMBOL_REF
2938 || GET_CODE (op) == LABEL_REF;
2943 c4x_autoinc_operand (op, mode)
2944 rtx op;
2945 enum machine_mode mode ATTRIBUTE_UNUSED;
2947 if (GET_CODE (op) == MEM)
2949 enum rtx_code code = GET_CODE (XEXP (op, 0));
2951 if (code == PRE_INC
2952 || code == PRE_DEC
2953 || code == POST_INC
2954 || code == POST_DEC
2955 || code == PRE_MODIFY
2956 || code == POST_MODIFY
2958 return 1;
2960 return 0;
2964 /* Match any operand. */
2967 any_operand (op, mode)
2968 register rtx op ATTRIBUTE_UNUSED;
2969 enum machine_mode mode ATTRIBUTE_UNUSED;
2971 return 1;
2975 /* Nonzero if OP is a floating point value with value 0.0. */
2978 fp_zero_operand (op, mode)
2979 rtx op;
2980 enum machine_mode mode ATTRIBUTE_UNUSED;
2982 REAL_VALUE_TYPE r;
2984 if (GET_CODE (op) != CONST_DOUBLE)
2985 return 0;
2986 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2987 return REAL_VALUES_EQUAL (r, dconst0);
2992 const_operand (op, mode)
2993 register rtx op;
2994 register enum machine_mode mode;
2996 switch (mode)
2998 case QFmode:
2999 case HFmode:
3000 if (GET_CODE (op) != CONST_DOUBLE
3001 || GET_MODE (op) != mode
3002 || GET_MODE_CLASS (mode) != MODE_FLOAT)
3003 return 0;
3005 return c4x_immed_float_p (op);
3007 #if Pmode != QImode
3008 case Pmode:
3009 #endif
3010 case QImode:
3011 if (GET_CODE (op) == CONSTANT_P_RTX)
3012 return 1;
3014 if (GET_CODE (op) != CONST_INT
3015 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3016 || GET_MODE_CLASS (mode) != MODE_INT)
3017 return 0;
3019 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
3021 case HImode:
3022 return 0;
3024 default:
3025 return 0;
3031 stik_const_operand (op, mode)
3032 rtx op;
3033 enum machine_mode mode ATTRIBUTE_UNUSED;
3035 return c4x_K_constant (op);
3040 not_const_operand (op, mode)
3041 rtx op;
3042 enum machine_mode mode ATTRIBUTE_UNUSED;
3044 return c4x_N_constant (op);
3049 reg_operand (op, mode)
3050 rtx op;
3051 enum machine_mode mode;
3053 if (GET_CODE (op) == SUBREG
3054 && GET_MODE (op) == QFmode)
3055 return 0;
3056 return register_operand (op, mode);
3061 mixed_subreg_operand (op, mode)
3062 rtx op;
3063 enum machine_mode mode ATTRIBUTE_UNUSED;
3065 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3066 int and a long double. */
3067 if (GET_CODE (op) == SUBREG
3068 && (GET_MODE (op) == QFmode)
3069 && (GET_MODE (SUBREG_REG (op)) == QImode
3070 || GET_MODE (SUBREG_REG (op)) == HImode))
3071 return 1;
3072 return 0;
3077 reg_imm_operand (op, mode)
3078 rtx op;
3079 enum machine_mode mode ATTRIBUTE_UNUSED;
3081 if (REG_P (op) || CONSTANT_P (op))
3082 return 1;
3083 return 0;
3088 not_modify_reg (op, mode)
3089 rtx op;
3090 enum machine_mode mode ATTRIBUTE_UNUSED;
3092 if (REG_P (op) || CONSTANT_P (op))
3093 return 1;
3094 if (GET_CODE (op) != MEM)
3095 return 0;
3096 op = XEXP (op, 0);
3097 switch (GET_CODE (op))
3099 case REG:
3100 return 1;
3102 case PLUS:
3104 rtx op0 = XEXP (op, 0);
3105 rtx op1 = XEXP (op, 1);
3107 if (! REG_P (op0))
3108 return 0;
3110 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3111 return 1;
3114 case LO_SUM:
3116 rtx op0 = XEXP (op, 0);
3118 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3119 return 1;
3121 break;
3123 case CONST:
3124 case SYMBOL_REF:
3125 case LABEL_REF:
3126 return 1;
3128 default:
3129 break;
3131 return 0;
3136 not_rc_reg (op, mode)
3137 rtx op;
3138 enum machine_mode mode ATTRIBUTE_UNUSED;
3140 if (REG_P (op) && REGNO (op) == RC_REGNO)
3141 return 0;
3142 return 1;
3146 /* Extended precision register R0-R1. */
3149 r0r1_reg_operand (op, mode)
3150 rtx op;
3151 enum machine_mode mode;
3153 if (! reg_operand (op, mode))
3154 return 0;
3155 if (GET_CODE (op) == SUBREG)
3156 op = SUBREG_REG (op);
3157 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3161 /* Extended precision register R2-R3. */
3164 r2r3_reg_operand (op, mode)
3165 rtx op;
3166 enum machine_mode mode;
3168 if (! reg_operand (op, mode))
3169 return 0;
3170 if (GET_CODE (op) == SUBREG)
3171 op = SUBREG_REG (op);
3172 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3176 /* Low extended precision register R0-R7. */
3179 ext_low_reg_operand (op, mode)
3180 rtx op;
3181 enum machine_mode mode;
3183 if (! reg_operand (op, mode))
3184 return 0;
3185 if (GET_CODE (op) == SUBREG)
3186 op = SUBREG_REG (op);
3187 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3191 /* Extended precision register. */
3194 ext_reg_operand (op, mode)
3195 rtx op;
3196 enum machine_mode mode;
3198 if (! reg_operand (op, mode))
3199 return 0;
3200 if (GET_CODE (op) == SUBREG)
3201 op = SUBREG_REG (op);
3202 if (! REG_P (op))
3203 return 0;
3204 return IS_EXT_OR_PSEUDO_REG (op);
3208 /* Standard precision register. */
3211 std_reg_operand (op, mode)
3212 rtx op;
3213 enum machine_mode mode;
3215 if (! reg_operand (op, mode))
3216 return 0;
3217 if (GET_CODE (op) == SUBREG)
3218 op = SUBREG_REG (op);
3219 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3222 /* Standard precision or normal register. */
3225 std_or_reg_operand (op, mode)
3226 rtx op;
3227 enum machine_mode mode;
3229 if (reload_in_progress)
3230 return std_reg_operand (op, mode);
3231 return reg_operand (op, mode);
3234 /* Address register. */
3237 addr_reg_operand (op, mode)
3238 rtx op;
3239 enum machine_mode mode;
3241 if (! reg_operand (op, mode))
3242 return 0;
3243 return c4x_a_register (op);
3247 /* Index register. */
3250 index_reg_operand (op, mode)
3251 rtx op;
3252 enum machine_mode mode;
3254 if (! reg_operand (op, mode))
3255 return 0;
3256 if (GET_CODE (op) == SUBREG)
3257 op = SUBREG_REG (op);
3258 return c4x_x_register (op);
3262 /* DP register. */
3265 dp_reg_operand (op, mode)
3266 rtx op;
3267 enum machine_mode mode ATTRIBUTE_UNUSED;
3269 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3273 /* SP register. */
3276 sp_reg_operand (op, mode)
3277 rtx op;
3278 enum machine_mode mode ATTRIBUTE_UNUSED;
3280 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3284 /* ST register. */
3287 st_reg_operand (op, mode)
3288 register rtx op;
3289 enum machine_mode mode ATTRIBUTE_UNUSED;
3291 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3295 /* RC register. */
3298 rc_reg_operand (op, mode)
3299 register rtx op;
3300 enum machine_mode mode ATTRIBUTE_UNUSED;
3302 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3307 call_address_operand (op, mode)
3308 rtx op;
3309 enum machine_mode mode ATTRIBUTE_UNUSED;
3311 return (REG_P (op) || symbolic_address_operand (op, mode));
3315 /* Symbolic address operand. */
3318 symbolic_address_operand (op, mode)
3319 register rtx op;
3320 enum machine_mode mode ATTRIBUTE_UNUSED;
3322 switch (GET_CODE (op))
3324 case CONST:
3325 case SYMBOL_REF:
3326 case LABEL_REF:
3327 return 1;
3328 default:
3329 return 0;
3334 /* Check dst operand of a move instruction. */
3337 dst_operand (op, mode)
3338 rtx op;
3339 enum machine_mode mode;
3341 if (GET_CODE (op) == SUBREG
3342 && mixed_subreg_operand (op, mode))
3343 return 0;
3345 if (REG_P (op))
3346 return reg_operand (op, mode);
3348 return nonimmediate_operand (op, mode);
3352 /* Check src operand of two operand arithmetic instructions. */
3355 src_operand (op, mode)
3356 rtx op;
3357 enum machine_mode mode;
3359 if (GET_CODE (op) == SUBREG
3360 && mixed_subreg_operand (op, mode))
3361 return 0;
3363 if (REG_P (op))
3364 return reg_operand (op, mode);
3366 if (mode == VOIDmode)
3367 mode = GET_MODE (op);
3369 if (GET_CODE (op) == CONST_INT)
3370 return (mode == QImode || mode == Pmode || mode == HImode)
3371 && c4x_I_constant (op);
3373 /* We don't like CONST_DOUBLE integers. */
3374 if (GET_CODE (op) == CONST_DOUBLE)
3375 return c4x_H_constant (op);
3377 /* Disallow symbolic addresses. Only the predicate
3378 symbolic_address_operand will match these. */
3379 if (GET_CODE (op) == SYMBOL_REF
3380 || GET_CODE (op) == LABEL_REF
3381 || GET_CODE (op) == CONST)
3382 return 0;
3384 /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
3385 access to symbolic addresses. These operands will get forced
3386 into a register and the movqi expander will generate a
3387 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
3388 if (GET_CODE (op) == MEM
3389 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3390 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3391 || GET_CODE (XEXP (op, 0)) == CONST)))
3392 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3394 return general_operand (op, mode);
3399 src_hi_operand (op, mode)
3400 rtx op;
3401 enum machine_mode mode;
3403 if (c4x_O_constant (op))
3404 return 1;
3405 return src_operand (op, mode);
3409 /* Check src operand of two operand logical instructions. */
3412 lsrc_operand (op, mode)
3413 rtx op;
3414 enum machine_mode mode;
3416 if (mode == VOIDmode)
3417 mode = GET_MODE (op);
3419 if (mode != QImode && mode != Pmode)
3420 fatal_insn ("Mode not QImode", op);
3422 if (GET_CODE (op) == CONST_INT)
3423 return c4x_L_constant (op) || c4x_J_constant (op);
3425 return src_operand (op, mode);
3429 /* Check src operand of two operand tricky instructions. */
3432 tsrc_operand (op, mode)
3433 rtx op;
3434 enum machine_mode mode;
3436 if (mode == VOIDmode)
3437 mode = GET_MODE (op);
3439 if (mode != QImode && mode != Pmode)
3440 fatal_insn ("Mode not QImode", op);
3442 if (GET_CODE (op) == CONST_INT)
3443 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3445 return src_operand (op, mode);
3450 reg_or_const_operand (op, mode)
3451 rtx op;
3452 enum machine_mode mode;
3454 return reg_operand (op, mode) || const_operand (op, mode);
3458 /* Check for indirect operands allowable in parallel instruction. */
3461 par_ind_operand (op, mode)
3462 rtx op;
3463 enum machine_mode mode;
3465 if (mode != VOIDmode && mode != GET_MODE (op))
3466 return 0;
3468 return c4x_S_indirect (op);
3472 /* Check for operands allowable in parallel instruction. */
3475 parallel_operand (op, mode)
3476 rtx op;
3477 enum machine_mode mode;
3479 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3483 static void
3484 c4x_S_address_parse (op, base, incdec, index, disp)
3485 rtx op;
3486 int *base;
3487 int *incdec;
3488 int *index;
3489 int *disp;
3491 *base = 0;
3492 *incdec = 0;
3493 *index = 0;
3494 *disp = 0;
3496 if (GET_CODE (op) != MEM)
3497 fatal_insn ("Invalid indirect memory address", op);
3499 op = XEXP (op, 0);
3500 switch (GET_CODE (op))
3502 case PRE_DEC:
3503 *base = REGNO (XEXP (op, 0));
3504 *incdec = 1;
3505 *disp = -1;
3506 return;
3508 case POST_DEC:
3509 *base = REGNO (XEXP (op, 0));
3510 *incdec = 1;
3511 *disp = 0;
3512 return;
3514 case PRE_INC:
3515 *base = REGNO (XEXP (op, 0));
3516 *incdec = 1;
3517 *disp = 1;
3518 return;
3520 case POST_INC:
3521 *base = REGNO (XEXP (op, 0));
3522 *incdec = 1;
3523 *disp = 0;
3524 return;
3526 case POST_MODIFY:
3527 *base = REGNO (XEXP (op, 0));
3528 if (REG_P (XEXP (XEXP (op, 1), 1)))
3530 *index = REGNO (XEXP (XEXP (op, 1), 1));
3531 *disp = 0; /* ??? */
3533 else
3534 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3535 *incdec = 1;
3536 return;
3538 case PRE_MODIFY:
3539 *base = REGNO (XEXP (op, 0));
3540 if (REG_P (XEXP (XEXP (op, 1), 1)))
3542 *index = REGNO (XEXP (XEXP (op, 1), 1));
3543 *disp = 1; /* ??? */
3545 else
3546 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3547 *incdec = 1;
3549 return;
3551 case REG:
3552 *base = REGNO (op);
3553 return;
3555 case PLUS:
3557 rtx op0 = XEXP (op, 0);
3558 rtx op1 = XEXP (op, 1);
3560 if (c4x_a_register (op0))
3562 if (c4x_x_register (op1))
3564 *base = REGNO (op0);
3565 *index = REGNO (op1);
3566 return;
3568 else if ((GET_CODE (op1) == CONST_INT
3569 && IS_DISP1_CONST (INTVAL (op1))))
3571 *base = REGNO (op0);
3572 *disp = INTVAL (op1);
3573 return;
3576 else if (c4x_x_register (op0) && c4x_a_register (op1))
3578 *base = REGNO (op1);
3579 *index = REGNO (op0);
3580 return;
3583 /* Fallthrough. */
3585 default:
3586 fatal_insn ("Invalid indirect (S) memory address", op);
3592 c4x_address_conflict (op0, op1, store0, store1)
3593 rtx op0;
3594 rtx op1;
3595 int store0;
3596 int store1;
3598 int base0;
3599 int base1;
3600 int incdec0;
3601 int incdec1;
3602 int index0;
3603 int index1;
3604 int disp0;
3605 int disp1;
3607 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3608 return 1;
3610 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3611 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3613 if (store0 && store1)
3615 /* If we have two stores in parallel to the same address, then
3616 the C4x only executes one of the stores. This is unlikely to
3617 cause problems except when writing to a hardware device such
3618 as a FIFO since the second write will be lost. The user
3619 should flag the hardware location as being volatile so that
3620 we don't do this optimisation. While it is unlikely that we
3621 have an aliased address if both locations are not marked
3622 volatile, it is probably safer to flag a potential conflict
3623 if either location is volatile. */
3624 if (! flag_argument_noalias)
3626 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3627 return 1;
3631 /* If have a parallel load and a store to the same address, the load
3632 is performed first, so there is no conflict. Similarly, there is
3633 no conflict if have parallel loads from the same address. */
3635 /* Cannot use auto increment or auto decrement twice for same
3636 base register. */
3637 if (base0 == base1 && incdec0 && incdec0)
3638 return 1;
3640 /* It might be too confusing for GCC if we have use a base register
3641 with a side effect and a memory reference using the same register
3642 in parallel. */
3643 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3644 return 1;
3646 /* We can not optimize the case where op1 and op2 refer to the same
3647 address. */
3648 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3649 return 1;
3651 /* No conflict. */
3652 return 0;
3656 /* Check for while loop inside a decrement and branch loop. */
3659 c4x_label_conflict (insn, jump, db)
3660 rtx insn;
3661 rtx jump;
3662 rtx db;
3664 while (insn)
3666 if (GET_CODE (insn) == CODE_LABEL)
3668 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3669 return 1;
3670 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3671 return 0;
3673 insn = PREV_INSN (insn);
3675 return 1;
3679 /* Validate combination of operands for parallel load/store instructions. */
3682 valid_parallel_load_store (operands, mode)
3683 rtx *operands;
3684 enum machine_mode mode ATTRIBUTE_UNUSED;
3686 rtx op0 = operands[0];
3687 rtx op1 = operands[1];
3688 rtx op2 = operands[2];
3689 rtx op3 = operands[3];
3691 if (GET_CODE (op0) == SUBREG)
3692 op0 = SUBREG_REG (op0);
3693 if (GET_CODE (op1) == SUBREG)
3694 op1 = SUBREG_REG (op1);
3695 if (GET_CODE (op2) == SUBREG)
3696 op2 = SUBREG_REG (op2);
3697 if (GET_CODE (op3) == SUBREG)
3698 op3 = SUBREG_REG (op3);
3700 /* The patterns should only allow ext_low_reg_operand() or
3701 par_ind_operand() operands. Thus of the 4 operands, only 2
3702 should be REGs and the other 2 should be MEMs. */
3704 /* This test prevents the multipack pass from using this pattern if
3705 op0 is used as an index or base register in op2 or op3, since
3706 this combination will require reloading. */
3707 if (GET_CODE (op0) == REG
3708 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3709 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3710 return 0;
3712 /* LDI||LDI. */
3713 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3714 return (REGNO (op0) != REGNO (op2))
3715 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3716 && ! c4x_address_conflict (op1, op3, 0, 0);
3718 /* STI||STI. */
3719 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3720 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3721 && ! c4x_address_conflict (op0, op2, 1, 1);
3723 /* LDI||STI. */
3724 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3725 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3726 && ! c4x_address_conflict (op1, op2, 0, 1);
3728 /* STI||LDI. */
3729 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3730 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3731 && ! c4x_address_conflict (op0, op3, 1, 0);
3733 return 0;
3738 valid_parallel_operands_4 (operands, mode)
3739 rtx *operands;
3740 enum machine_mode mode ATTRIBUTE_UNUSED;
3742 rtx op0 = operands[0];
3743 rtx op2 = operands[2];
3745 if (GET_CODE (op0) == SUBREG)
3746 op0 = SUBREG_REG (op0);
3747 if (GET_CODE (op2) == SUBREG)
3748 op2 = SUBREG_REG (op2);
3750 /* This test prevents the multipack pass from using this pattern if
3751 op0 is used as an index or base register in op2, since this combination
3752 will require reloading. */
3753 if (GET_CODE (op0) == REG
3754 && GET_CODE (op2) == MEM
3755 && reg_mentioned_p (op0, XEXP (op2, 0)))
3756 return 0;
3758 return 1;
3763 valid_parallel_operands_5 (operands, mode)
3764 rtx *operands;
3765 enum machine_mode mode ATTRIBUTE_UNUSED;
3767 int regs = 0;
3768 rtx op0 = operands[0];
3769 rtx op1 = operands[1];
3770 rtx op2 = operands[2];
3771 rtx op3 = operands[3];
3773 if (GET_CODE (op0) == SUBREG)
3774 op0 = SUBREG_REG (op0);
3775 if (GET_CODE (op1) == SUBREG)
3776 op1 = SUBREG_REG (op1);
3777 if (GET_CODE (op2) == SUBREG)
3778 op2 = SUBREG_REG (op2);
3780 /* The patterns should only allow ext_low_reg_operand() or
3781 par_ind_operand() operands. Operands 1 and 2 may be commutative
3782 but only one of them can be a register. */
3783 if (GET_CODE (op1) == REG)
3784 regs++;
3785 if (GET_CODE (op2) == REG)
3786 regs++;
3788 if (regs != 1)
3789 return 0;
3791 /* This test prevents the multipack pass from using this pattern if
3792 op0 is used as an index or base register in op3, since this combination
3793 will require reloading. */
3794 if (GET_CODE (op0) == REG
3795 && GET_CODE (op3) == MEM
3796 && reg_mentioned_p (op0, XEXP (op3, 0)))
3797 return 0;
3799 return 1;
3804 valid_parallel_operands_6 (operands, mode)
3805 rtx *operands;
3806 enum machine_mode mode ATTRIBUTE_UNUSED;
3808 int regs = 0;
3809 rtx op0 = operands[0];
3810 rtx op1 = operands[1];
3811 rtx op2 = operands[2];
3812 rtx op4 = operands[4];
3813 rtx op5 = operands[5];
3815 if (GET_CODE (op1) == SUBREG)
3816 op1 = SUBREG_REG (op1);
3817 if (GET_CODE (op2) == SUBREG)
3818 op2 = SUBREG_REG (op2);
3819 if (GET_CODE (op4) == SUBREG)
3820 op4 = SUBREG_REG (op4);
3821 if (GET_CODE (op5) == SUBREG)
3822 op5 = SUBREG_REG (op5);
3824 /* The patterns should only allow ext_low_reg_operand() or
3825 par_ind_operand() operands. Thus of the 4 input operands, only 2
3826 should be REGs and the other 2 should be MEMs. */
3828 if (GET_CODE (op1) == REG)
3829 regs++;
3830 if (GET_CODE (op2) == REG)
3831 regs++;
3832 if (GET_CODE (op4) == REG)
3833 regs++;
3834 if (GET_CODE (op5) == REG)
3835 regs++;
3837 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3838 Perhaps we should count the MEMs as well? */
3839 if (regs != 2)
3840 return 0;
3842 /* This test prevents the multipack pass from using this pattern if
3843 op0 is used as an index or base register in op4 or op5, since
3844 this combination will require reloading. */
3845 if (GET_CODE (op0) == REG
3846 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3847 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3848 return 0;
3850 return 1;
3854 /* Validate combination of src operands. Note that the operands have
3855 been screened by the src_operand predicate. We just have to check
3856 that the combination of operands is valid. If FORCE is set, ensure
3857 that the destination regno is valid if we have a 2 operand insn. */
3859 static int
3860 c4x_valid_operands (code, operands, mode, force)
3861 enum rtx_code code;
3862 rtx *operands;
3863 enum machine_mode mode ATTRIBUTE_UNUSED;
3864 int force;
3866 rtx op1;
3867 rtx op2;
3868 enum rtx_code code1;
3869 enum rtx_code code2;
3871 if (code == COMPARE)
3873 op1 = operands[0];
3874 op2 = operands[1];
3876 else
3878 op1 = operands[1];
3879 op2 = operands[2];
3882 if (GET_CODE (op1) == SUBREG)
3883 op1 = SUBREG_REG (op1);
3884 if (GET_CODE (op2) == SUBREG)
3885 op2 = SUBREG_REG (op2);
3887 code1 = GET_CODE (op1);
3888 code2 = GET_CODE (op2);
3890 if (code1 == REG && code2 == REG)
3891 return 1;
3893 if (code1 == MEM && code2 == MEM)
3895 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3896 return 1;
3897 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3900 if (code1 == code2)
3901 return 0;
3903 if (code1 == REG)
3905 switch (code2)
3907 case CONST_INT:
3908 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3909 return 1;
3910 break;
3912 case CONST_DOUBLE:
3913 if (! c4x_H_constant (op2))
3914 return 0;
3915 break;
3917 /* Any valid memory operand screened by src_operand is OK. */
3918 case MEM:
3920 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3921 into a stack slot memory address comprising a PLUS and a
3922 constant. */
3923 case ADDRESSOF:
3924 break;
3926 default:
3927 fatal_insn ("c4x_valid_operands: Internal error", op2);
3928 break;
3931 /* Check that we have a valid destination register for a two operand
3932 instruction. */
3933 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
3936 /* We assume MINUS is commutative since the subtract patterns
3937 also support the reverse subtract instructions. Since op1
3938 is not a register, and op2 is a register, op1 can only
3939 be a restricted memory operand for a shift instruction. */
3940 if (code == ASHIFTRT || code == LSHIFTRT
3941 || code == ASHIFT || code == COMPARE)
3942 return code2 == REG
3943 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3945 switch (code1)
3947 case CONST_INT:
3948 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3949 return 1;
3950 break;
3952 case CONST_DOUBLE:
3953 if (! c4x_H_constant (op1))
3954 return 0;
3955 break;
3957 /* Any valid memory operand screened by src_operand is OK. */
3958 case MEM:
3959 #if 0
3960 if (code2 != REG)
3961 return 0;
3962 #endif
3963 break;
3965 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3966 into a stack slot memory address comprising a PLUS and a
3967 constant. */
3968 case ADDRESSOF:
3969 break;
3971 default:
3972 abort ();
3973 break;
3976 /* Check that we have a valid destination register for a two operand
3977 instruction. */
3978 return ! force || REGNO (op1) == REGNO (operands[0]);
3982 int valid_operands (code, operands, mode)
3983 enum rtx_code code;
3984 rtx *operands;
3985 enum machine_mode mode;
3988 /* If we are not optimizing then we have to let anything go and let
3989 reload fix things up. instantiate_decl in function.c can produce
3990 invalid insns by changing the offset of a memory operand from a
3991 valid one into an invalid one, when the second operand is also a
3992 memory operand. The alternative is not to allow two memory
3993 operands for an insn when not optimizing. The problem only rarely
3994 occurs, for example with the C-torture program DFcmp.c. */
3996 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
/* Massage OPERANDS for operation CODE in MODE so that they form a
   valid combination for the machine description, forcing operands
   into registers where necessary.  Always returns 1.  */

int
legitimize_operands (code, operands, mode)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode;
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
	 they can get hoisted out of loops.  This will tie up an extra
	 register but can save an extra cycle.  Only do this if loop
	 optimisation enabled.  (We cannot pull this trick for add and
	 sub instructions since the flow pass won't find
	 autoincrements etc.)  This allows us to generate compare
	 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
	 of LDI *AR0++, R0; CMPI 42, R0.

	 Note that expand_binops will try to load an expensive constant
	 into a register if it is used within a loop.  Unfortunately,
	 the cost mechanism doesn't allow us to look at the other
	 operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
	  && TARGET_HOIST
	  && optimize > 0
	  && GET_CODE (operands[1]) == CONST_INT
	  && preserve_subexpressions_p ()
	  && rtx_cost (operands[1], code) > 1)
	operands[1] = force_reg (mode, operands[1]);

      if (! reload_in_progress
          && ! c4x_valid_operands (code, operands, mode, 0))
	operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && preserve_subexpressions_p ()
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register, but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
	 register, preventing reload from having to do this at a
	 later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
	{
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = copy_rtx (operands[0]);
	}
      else
	{
	  /* Just in case...  */
	  if (! c4x_valid_operands (code, operands, mode, 0))
	    operands[2] = force_reg (mode, operands[2]);
	}
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));

  return 1;
}
4089 /* The following predicates are used for instruction scheduling. */
4092 group1_reg_operand (op, mode)
4093 rtx op;
4094 enum machine_mode mode;
4096 if (mode != VOIDmode && mode != GET_MODE (op))
4097 return 0;
4098 if (GET_CODE (op) == SUBREG)
4099 op = SUBREG_REG (op);
4100 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4105 group1_mem_operand (op, mode)
4106 rtx op;
4107 enum machine_mode mode;
4109 if (mode != VOIDmode && mode != GET_MODE (op))
4110 return 0;
4112 if (GET_CODE (op) == MEM)
4114 op = XEXP (op, 0);
4115 if (GET_CODE (op) == PLUS)
4117 rtx op0 = XEXP (op, 0);
4118 rtx op1 = XEXP (op, 1);
4120 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4121 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4122 return 1;
4124 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4125 return 1;
4128 return 0;
4132 /* Return true if any one of the address registers. */
4135 arx_reg_operand (op, mode)
4136 rtx op;
4137 enum machine_mode mode;
4139 if (mode != VOIDmode && mode != GET_MODE (op))
4140 return 0;
4141 if (GET_CODE (op) == SUBREG)
4142 op = SUBREG_REG (op);
4143 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4147 static int
4148 c4x_arn_reg_operand (op, mode, regno)
4149 rtx op;
4150 enum machine_mode mode;
4151 unsigned int regno;
4153 if (mode != VOIDmode && mode != GET_MODE (op))
4154 return 0;
4155 if (GET_CODE (op) == SUBREG)
4156 op = SUBREG_REG (op);
4157 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
/* Nonzero if OP is a memory reference whose address uses hard
   register REGNO, either directly, through autoincrement /
   autodecrement, through PRE/POST_MODIFY, or as one term of a PLUS.
   Before reload, any register in the relevant position is accepted.
   Common helper for the arN/irN memory predicates below.  */

static int
c4x_arn_mem_operand (op, mode, regno)
     rtx op;
     enum machine_mode mode;
     unsigned int regno;
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
	{
	case PRE_DEC:
	case POST_DEC:
	case PRE_INC:
	case POST_INC:
	  op = XEXP (op, 0);
	  /* Fall through to check the incremented register.  */

	case REG:
	  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

	case PRE_MODIFY:
	case POST_MODIFY:
	  /* Check the register being modified...  */
	  if (REG_P (XEXP (op, 0)) && (! reload_completed
				       || (REGNO (XEXP (op, 0)) == regno)))
	    return 1;
	  /* ...and the register appearing in the modification.  */
	  if (REG_P (XEXP (XEXP (op, 1), 1))
	      && (! reload_completed
		  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
	    return 1;
	  break;

	case PLUS:
	  {
	    rtx op0 = XEXP (op, 0);
	    rtx op1 = XEXP (op, 1);

	    if ((REG_P (op0) && (! reload_completed
				 || (REGNO (op0) == regno)))
		|| (REG_P (op1) && (! reload_completed
				    || (REGNO (op1) == regno))))
	      return 1;
	  }
	  break;

	default:
	  break;
	}
    }
  return 0;
}
/* Nonzero if OP is a register operand that is (or may become) AR0.  */

int
ar0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses AR0.  */

int
ar0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}
/* Nonzero if OP is a register operand that is (or may become) AR1.  */

int
ar1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses AR1.  */

int
ar1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}
/* Nonzero if OP is a register operand that is (or may become) AR2.  */

int
ar2_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses AR2.  */

int
ar2_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}
/* Nonzero if OP is a register operand that is (or may become) AR3.  */

int
ar3_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses AR3.  */

int
ar3_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}
/* Nonzero if OP is a register operand that is (or may become) AR4.  */

int
ar4_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses AR4.  */

int
ar4_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}
/* Nonzero if OP is a register operand that is (or may become) AR5.  */

int
ar5_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses AR5.  */

int
ar5_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}
/* Nonzero if OP is a register operand that is (or may become) AR6.  */

int
ar6_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses AR6.  */

int
ar6_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}
/* Nonzero if OP is a register operand that is (or may become) AR7.  */

int
ar7_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses AR7.  */

int
ar7_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}
/* Nonzero if OP is a register operand that is (or may become) IR0.  */

int
ir0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses IR0.  */

int
ir0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}
/* Nonzero if OP is a register operand that is (or may become) IR1.  */

int
ir1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}
/* Nonzero if OP is a memory operand whose address uses IR1.  */

int
ir1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
/* This is similar to operand_subword but allows autoincrement
   addressing.  */

/* Return word I of operand OP, which must be HImode or HFmode.  For
   simple autoincrement addresses a submode MEM reusing the same
   address is returned; anything else falls back to
   operand_subword.  */

rtx
c4x_operand_subword (op, i, validate_address, mode)
     rtx op;
     int i;
     int validate_address;
     enum machine_mode mode;
{
  if (mode != HImode && mode != HFmode)
    fatal_insn ("c4x_operand_subword: invalid mode", op);

  /* An HFmode register cannot be split into word-sized registers.  */
  if (mode == HFmode && REG_P (op))
    fatal_insn ("c4x_operand_subword: invalid operand", op);

  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));
      /* NOTE(review): this inner `mode' shadows the parameter and
	 holds the mode of the *address*, not of the operand, so
	 submode below is derived from the address mode.  Looks
	 suspicious for HFmode operands (QFmode would be expected) --
	 confirm intent before changing.  */
      enum machine_mode mode = GET_MODE (XEXP (op, 0));
      enum machine_mode submode;

      submode = mode;
      if (mode == HImode)
	submode = QImode;
      else if (mode == HFmode)
	submode = QFmode;

      switch (code)
	{
	case POST_INC:
	case PRE_INC:
	  return gen_rtx_MEM (submode, XEXP (op, 0));

	case POST_DEC:
	case PRE_DEC:
	case PRE_MODIFY:
	case POST_MODIFY:
	  /* We could handle these with some difficulty.
	     e.g., *p-- => *(p-=2); *(p+1).  */
	  fatal_insn ("c4x_operand_subword: invalid autoincrement", op);

	case SYMBOL_REF:
	case LABEL_REF:
	case CONST:
	case CONST_INT:
	  fatal_insn ("c4x_operand_subword: invalid address", op);

	  /* Even though offsettable_address_p considers (MEM
	     (LO_SUM)) to be offsettable, it is not safe if the
	     address is at the end of the data page since we also have
	     to fix up the associated high PART.  In this case where
	     we are trying to split a HImode or HFmode memory
	     reference, we would have to emit another insn to reload a
	     new HIGH value.  It's easier to disable LO_SUM memory references
	     in HImode or HFmode and we probably get better code.  */
	case LO_SUM:
	  fatal_insn ("c4x_operand_subword: address not offsettable", op);

	default:
	  break;
	}
    }

  return operand_subword (op, i, validate_address, mode);
}
4463 /* Handle machine specific pragmas for compatibility with existing
4464 compilers for the C3x/C4x.
4466 pragma attribute
4467 ----------------------------------------------------------
4468 CODE_SECTION(symbol,"section") section("section")
4469 DATA_SECTION(symbol,"section") section("section")
4470 FUNC_CANNOT_INLINE(function)
4471 FUNC_EXT_CALLED(function)
4472 FUNC_IS_PURE(function) const
4473 FUNC_IS_SYSTEM(function)
4474 FUNC_NEVER_RETURNS(function) noreturn
4475 FUNC_NO_GLOBAL_ASG(function)
4476 FUNC_NO_IND_ASG(function)
4477 INTERRUPT(function) interrupt
4481 /* Parse a C4x pragma, of the form ( function [, "section"] ) \n.
4482 FUNC is loaded with the IDENTIFIER_NODE of the function, SECT with
4483 the STRING_CST node of the string. If SECT is null, then this
4484 pragma doesn't take a section string. Returns 0 for a good pragma,
4485 -1 for a malformed pragma. */
4486 #define BAD(msgid, arg) do { warning (msgid, arg); return -1; } while (0)
4488 static int (*c_lex_func) (tree *);
4490 void
4491 c4x_init_pragma (get_token)
4492 int (*get_token) PARAMS ((tree *));
4494 c_lex_func = get_token;
/* Parse `( function [, "section"] )' following a C4x pragma keyword
   NAME.  On success store the function IDENTIFIER_NODE through FUNC
   (and, when SECT is non-null, the section STRING_CST through SECT)
   and return 0.  On a malformed pragma, warn and return -1 (via the
   BAD macro).  */

static int
c4x_parse_pragma (name, func, sect)
     const char *name;
     tree *func;
     tree *sect;
{
  tree f, s, x;

  if (c_lex_func (&x) != CPP_OPEN_PAREN)
    BAD ("missing '(' after '#pragma %s' - ignored", name);

  if (c_lex_func (&f) != CPP_NAME)
    BAD ("missing function name in '#pragma %s' - ignored", name);

  if (sect)
    {
      if (c_lex_func (&x) != CPP_COMMA)
	BAD ("malformed '#pragma %s' - ignored", name);
      if (c_lex_func (&s) != CPP_STRING)
	BAD ("missing section name in '#pragma %s' - ignored", name);
      *sect = s;
    }

  if (c_lex_func (&x) != CPP_CLOSE_PAREN)
    BAD ("missing ')' for '#pragma %s' - ignored", name);

  /* Trailing tokens are harmless; warn but accept the pragma.  */
  if (c_lex_func (&x) != CPP_EOF)
    warning ("junk at end of '#pragma %s'", name);

  *func = f;
  return 0;
}
/* Handle #pragma CODE_SECTION (function, "section"): record the pair
   in code_tree so the function later receives a section attribute.  */

void
c4x_pr_CODE_SECTION (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
  tree func, sect;

  if (c4x_parse_pragma ("CODE_SECTION", &func, &sect))
    return;
  code_tree = chainon (code_tree,
		       build_tree_list (func,
					build_tree_list (NULL_TREE, sect)));
}
/* Handle #pragma DATA_SECTION (symbol, "section"): record the pair
   in data_tree so the variable later receives a section attribute.  */

void
c4x_pr_DATA_SECTION (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
  tree func, sect;

  if (c4x_parse_pragma ("DATA_SECTION", &func, &sect))
    return;
  data_tree = chainon (data_tree,
		       build_tree_list (func,
					build_tree_list (NULL_TREE, sect)));
}
/* Handle #pragma FUNC_IS_PURE (function): record the function in
   pure_tree so it later receives a `const' attribute.  */

void
c4x_pr_FUNC_IS_PURE (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
  tree func;

  if (c4x_parse_pragma ("FUNC_IS_PURE", &func, 0))
    return;
  pure_tree = chainon (pure_tree, build_tree_list (func, NULL_TREE));
}
/* Handle #pragma FUNC_NEVER_RETURNS (function): record the function
   in noreturn_tree so it later receives a `noreturn' attribute.  */

void
c4x_pr_FUNC_NEVER_RETURNS (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
  tree func;

  if (c4x_parse_pragma ("FUNC_NEVER_RETURNS", &func, 0))
    return;
  noreturn_tree = chainon (noreturn_tree, build_tree_list (func, NULL_TREE));
}
/* Handle #pragma INTERRUPT (function): record the function in
   interrupt_tree so it later receives an `interrupt' attribute.  */

void
c4x_pr_INTERRUPT (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
  tree func;

  if (c4x_parse_pragma ("INTERRUPT", &func, 0))
    return;
  interrupt_tree = chainon (interrupt_tree, build_tree_list (func, NULL_TREE));
}
/* Used for FUNC_CANNOT_INLINE, FUNC_EXT_CALLED, FUNC_IS_SYSTEM,
   FUNC_NO_GLOBAL_ASG, and FUNC_NO_IND_ASG.  These TI compiler pragmas
   are accepted for compatibility but have no effect.  */
void
c4x_pr_ignored (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
}
/* Singly-linked list node used to track symbol names for assembler
   output.  */

struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Symbols defined global in this translation unit.  */
static struct name_list *global_head;

/* Symbols referenced externally but not (yet) defined global.  */
static struct name_list *extern_head;
/* Add NAME to list of global symbols and remove from external list if
   present on external list.  */

void
c4x_global_label (name)
     const char *name;
{
  struct name_list *p, *last;

  /* Do not insert duplicate names, so linearly search through list of
     existing names.  */
  p = global_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
	return;
      p = p->next;
    }
  /* Prepend the new name; permalloc'd storage lives for the whole
     compilation.  */
  p = (struct name_list *) permalloc (sizeof *p);
  p->next = global_head;
  p->name = name;
  global_head = p;

  /* Remove this name from ref list if present.  */
  last = NULL;
  p = extern_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
	{
	  /* Unlink P, keeping the predecessor (or list head) valid.  */
	  if (last)
	    last->next = p->next;
	  else
	    extern_head = p->next;
	  break;
	}
      last = p;
      p = p->next;
    }
}
4650 /* Add NAME to list of external symbols. */
4652 void
4653 c4x_external_ref (name)
4654 const char *name;
4656 struct name_list *p;
4658 /* Do not insert duplicate names. */
4659 p = extern_head;
4660 while (p)
4662 if (strcmp (p->name, name) == 0)
4663 return;
4664 p = p->next;
4667 /* Do not insert ref if global found. */
4668 p = global_head;
4669 while (p)
4671 if (strcmp (p->name, name) == 0)
4672 return;
4673 p = p->next;
4675 p = (struct name_list *) permalloc (sizeof *p);
4676 p->next = extern_head;
4677 p->name = name;
4678 extern_head = p;
4682 void
4683 c4x_file_end (fp)
4684 FILE *fp;
4686 struct name_list *p;
4688 /* Output all external names that are not global. */
4689 p = extern_head;
4690 while (p)
4692 fprintf (fp, "\t.ref\t");
4693 assemble_name (fp, p->name);
4694 fprintf (fp, "\n");
4695 p = p->next;
4697 fprintf (fp, "\t.end\n");
/* If DECL's name appears in LIST (a pragma-recorded tree list), chain
   a new attribute named ATTRIB with the recorded value onto
   *ATTRIBUTES.  */

static void
c4x_check_attribute (attrib, list, decl, attributes)
     const char *attrib;
     tree list, decl, *attributes;
{
  /* IDENTIFIER_POINTERs are interned, so comparing the pointers is
     sufficient for name equality here.  */
  while (list != NULL_TREE
	 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
	    != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list)
    *attributes = chainon (*attributes,
			   build_tree_list (get_identifier (attrib),
					    TREE_VALUE (list)));
}
/* Apply any attributes recorded by the TI-compatibility pragmas above
   to DECL, chaining them onto *ATTRIBUTES.  Functions may pick up
   section/const/noreturn/interrupt attributes; variables only a
   section attribute.  */

void
c4x_set_default_attributes(decl, attributes)
     tree decl, *attributes;
{
  switch (TREE_CODE (decl))
    {
    case FUNCTION_DECL:
      c4x_check_attribute ("section", code_tree, decl, attributes);
      c4x_check_attribute ("const", pure_tree, decl, attributes);
      c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
      c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
      break;

    case VAR_DECL:
      c4x_check_attribute ("section", data_tree, decl, attributes);
      break;

    default:
      break;
    }
}
4740 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine
4741 specific attribute for TYPE. The attributes in ATTRIBUTES have
4742 previously been assigned to TYPE. */
4745 c4x_valid_type_attribute_p (type, attributes, identifier, args)
4746 tree type;
4747 tree attributes ATTRIBUTE_UNUSED;
4748 tree identifier;
4749 tree args ATTRIBUTE_UNUSED;
4751 if (TREE_CODE (type) != FUNCTION_TYPE)
4752 return 0;
4754 if (is_attribute_p ("interrupt", identifier))
4755 return 1;
4757 if (is_attribute_p ("assembler", identifier))
4758 return 1;
4760 if (is_attribute_p ("leaf_pretend", identifier))
4761 return 1;
4763 return 0;
/* !!! FIXME to emit RPTS correctly.  */

/* Return nonzero if the repeat block started by INSN (whose body must
   consist of exactly one insn followed by the rptb_end insn) may be
   emitted as an RPTS instruction with repeat count OP.  */

int
c4x_rptb_rpts_p (insn, op)
     rtx insn, op;
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
	 and the top label... They were probably destined to
	 be moved out of the loop.  For now, let's leave them
	 where they are and print a warning.  We should
	 probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
	fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
		   insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  if (! INSN_P (insn))
    return 0;

  /* The second insn must be the rptb_end marker: the loop body is a
     single instruction.  */
  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  if (TARGET_RPTS)
    return 1;

  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
/* Check if register r11 is used as the destination of an insn.
   Recurses over the whole rtx X, so r11 destinations buried inside
   PARALLELs etc. are also found.  */

static int
c4x_r11_set_p(x)
     rtx x;
{
  rtx set;
  int i, j;
  const char *fmt;

  if (x == 0)
    return 0;

  /* For a delayed-branch SEQUENCE, only the last insn in the sequence
     matters.  */
  if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
    x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);

  /* For a single-set insn, restrict the search to its destination.  */
  if (INSN_P (x) && (set = single_set (x)))
    x = SET_DEST (set);

  if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
    return 1;

  /* Otherwise walk every sub-expression of X.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (c4x_r11_set_p (XEXP (x, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (c4x_r11_set_p (XVECEXP (x, i, j)))
	    return 1;
    }
  return 0;
}
4851 /* The c4x sometimes has a problem when the insn before the laj insn
4852 sets the r11 register. Check for this situation. */
4855 c4x_check_laj_p (insn)
4856 rtx insn;
4858 insn = prev_nonnote_insn (insn);
4860 /* If this is the start of the function no nop is needed. */
4861 if (insn == 0)
4862 return 0;
4864 /* If the previous insn is a code label we have to insert a nop. This
4865 could be a jump or table jump. We can find the normal jumps by
4866 scanning the function but this will not find table jumps. */
4867 if (GET_CODE (insn) == CODE_LABEL)
4868 return 1;
4870 /* If the previous insn sets register r11 we have to insert a nop. */
4871 if (c4x_r11_set_p (insn))
4872 return 1;
4874 /* No nop needed. */
4875 return 0;
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost. 
   A set of an address register followed by a use occurs a 2 cycle
   stall (reduced to a single cycle on the c40 using LDA), while
   a read of an address register followed by a use occurs a single cycle.  */

#define	SET_USE_COST	3
#define	SETLDA_USE_COST	2
#define	READ_USE_COST	2

int
c4x_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
	 cycles later.  MAX accumulates the worst-case stall over every
	 set/use attribute pair that matches.  */
      if (TARGET_C3X)
	{
	  if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;
	}
      else
	{
	  /* This could be significantly optimized. We should look
	     to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
	     insn uses ar0-ar7.  We then test if the same register
	     is used.  The tricky bit is that some operands will
	     use several registers...  */
	  if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

	  if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	}

      if (max)
	cost = max;

      /* For other data dependencies, the default cost specified in the
	 md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
/* Register the C4x-specific builtin functions.  ENDLINK terminates
   the argument-type lists.  fast_imult exists only on the C3x; the
   IEEE-conversion and fast-reciprocal builtins only on the C4x.  */

void
c4x_init_builtins (endlink)
     tree endlink;
{
  builtin_function ("fast_ftoi",
		    build_function_type 
		    (integer_type_node,
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX, BUILT_IN_MD, NULL);
  builtin_function ("ansi_ftoi",
		    build_function_type 
		    (integer_type_node, 
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL);
  if (TARGET_C3X)
    builtin_function ("fast_imult",
		      build_function_type
		      (integer_type_node, 
		       tree_cons (NULL_TREE, integer_type_node,
				  tree_cons (NULL_TREE,
					     integer_type_node, endlink))),
		      C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL);
  else
    {
      builtin_function ("toieee",
			build_function_type 
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL);
      builtin_function ("frieee",
			build_function_type
			(double_type_node, 
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL);
      builtin_function ("fast_invf",
			build_function_type 
			(double_type_node, 
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL);
    }
}
/* Expand a call to a C4x builtin.  EXP is the CALL_EXPR, TARGET a
   suggested rtx for the result (replaced with a fresh register when
   it is absent or not a register).  Returns the rtx holding the
   result, or NULL_RTX when the builtin is not available on the
   current (sub)target.  */

rtx
c4x_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      /* fast_ftoi: float-to-int without ANSI rounding semantics.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      /* ansi_ftoi: ANSI-conforming float-to-int truncation.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      /* fast_imult: 24-bit integer multiply, C3x only.  */
      if (! TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      r0 = protect_from_queue (r0, 0);
      r1 = protect_from_queue (r1, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      /* toieee: convert native float to IEEE format, C4x only.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      /* frieee: convert IEEE float to native format, C4x only.  The
	 operand is spilled to a stack slot when it is in a register.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
	put_var_into_stack (arg0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (register_operand (r0, QFmode))
	{
	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
	  emit_move_insn (r1, r0);
	  r0 = r1;
	}
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      /* fast_invf: fast reciprocal approximation, C4x only.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  return NULL_RTX;
}