* target.h (asm_out.file_start, file_start_app_off,
[official-gcc.git] / gcc / config / c4x / c4x.c
blob5d097cc0cda272ac7c680a0a291f5d840970d184
1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
15 GCC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "real.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 #include "conditions.h"
39 #include "output.h"
40 #include "function.h"
41 #include "expr.h"
42 #include "optabs.h"
43 #include "libfuncs.h"
44 #include "flags.h"
45 #include "loop.h"
46 #include "recog.h"
47 #include "ggc.h"
48 #include "cpplib.h"
49 #include "toplev.h"
50 #include "tm_p.h"
51 #include "target.h"
52 #include "target-def.h"
54 rtx smulhi3_libfunc;
55 rtx umulhi3_libfunc;
56 rtx fix_truncqfhi2_libfunc;
57 rtx fixuns_truncqfhi2_libfunc;
58 rtx fix_trunchfhi2_libfunc;
59 rtx fixuns_trunchfhi2_libfunc;
60 rtx floathiqf2_libfunc;
61 rtx floatunshiqf2_libfunc;
62 rtx floathihf2_libfunc;
63 rtx floatunshihf2_libfunc;
65 static int c4x_leaf_function;
67 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
69 /* Array of the smallest class containing reg number REGNO, indexed by
70 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
71 registers are available and set the class to NO_REGS for registers
72 that the target switches say are unavailable. */
74 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
76 /* Reg Modes Saved. */
77 R0R1_REGS, /* R0 QI, QF, HF No. */
78 R0R1_REGS, /* R1 QI, QF, HF No. */
79 R2R3_REGS, /* R2 QI, QF, HF No. */
80 R2R3_REGS, /* R3 QI, QF, HF No. */
81 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
83 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
84 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
85 ADDR_REGS, /* AR0 QI No. */
86 ADDR_REGS, /* AR1 QI No. */
87 ADDR_REGS, /* AR2 QI No. */
88 ADDR_REGS, /* AR3 QI QI. */
89 ADDR_REGS, /* AR4 QI QI. */
90 ADDR_REGS, /* AR5 QI QI. */
91 ADDR_REGS, /* AR6 QI QI. */
92 ADDR_REGS, /* AR7 QI QI. */
93 DP_REG, /* DP QI No. */
94 INDEX_REGS, /* IR0 QI No. */
95 INDEX_REGS, /* IR1 QI No. */
96 BK_REG, /* BK QI QI. */
97 SP_REG, /* SP QI No. */
98 ST_REG, /* ST CC No. */
99 NO_REGS, /* DIE/IE No. */
100 NO_REGS, /* IIE/IF No. */
101 NO_REGS, /* IIF/IOF No. */
102 INT_REGS, /* RS QI No. */
103 INT_REGS, /* RE QI No. */
104 RC_REG, /* RC QI No. */
105 EXT_REGS, /* R8 QI, QF, HF QI. */
106 EXT_REGS, /* R9 QI, QF, HF No. */
107 EXT_REGS, /* R10 QI, QF, HF No. */
108 EXT_REGS, /* R11 QI, QF, HF No. */
111 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
113 /* Reg Modes Saved. */
114 HFmode, /* R0 QI, QF, HF No. */
115 HFmode, /* R1 QI, QF, HF No. */
116 HFmode, /* R2 QI, QF, HF No. */
117 HFmode, /* R3 QI, QF, HF No. */
118 QFmode, /* R4 QI, QF, HF QI. */
119 QFmode, /* R5 QI, QF, HF QI. */
120 QImode, /* R6 QI, QF, HF QF. */
121 QImode, /* R7 QI, QF, HF QF. */
122 QImode, /* AR0 QI No. */
123 QImode, /* AR1 QI No. */
124 QImode, /* AR2 QI No. */
125 QImode, /* AR3 QI QI. */
126 QImode, /* AR4 QI QI. */
127 QImode, /* AR5 QI QI. */
128 QImode, /* AR6 QI QI. */
129 QImode, /* AR7 QI QI. */
130 VOIDmode, /* DP QI No. */
131 QImode, /* IR0 QI No. */
132 QImode, /* IR1 QI No. */
133 QImode, /* BK QI QI. */
134 VOIDmode, /* SP QI No. */
135 VOIDmode, /* ST CC No. */
136 VOIDmode, /* DIE/IE No. */
137 VOIDmode, /* IIE/IF No. */
138 VOIDmode, /* IIF/IOF No. */
139 QImode, /* RS QI No. */
140 QImode, /* RE QI No. */
141 VOIDmode, /* RC QI No. */
142 QFmode, /* R8 QI, QF, HF QI. */
143 HFmode, /* R9 QI, QF, HF No. */
144 HFmode, /* R10 QI, QF, HF No. */
145 HFmode, /* R11 QI, QF, HF No. */
149 /* Test and compare insns in c4x.md store the information needed to
150 generate branch and scc insns here. */
152 rtx c4x_compare_op0;
153 rtx c4x_compare_op1;
155 const char *c4x_rpts_cycles_string;
156 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
157 const char *c4x_cpu_version_string;
158 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
160 /* Pragma definitions. */
162 tree code_tree = NULL_TREE;
163 tree data_tree = NULL_TREE;
164 tree pure_tree = NULL_TREE;
165 tree noreturn_tree = NULL_TREE;
166 tree interrupt_tree = NULL_TREE;
167 tree naked_tree = NULL_TREE;
169 /* Forward declarations */
170 static int c4x_isr_reg_used_p PARAMS ((unsigned int));
171 static int c4x_leaf_function_p PARAMS ((void));
172 static int c4x_naked_function_p PARAMS ((void));
173 static int c4x_immed_float_p PARAMS ((rtx));
174 static int c4x_a_register PARAMS ((rtx));
175 static int c4x_x_register PARAMS ((rtx));
176 static int c4x_immed_int_constant PARAMS ((rtx));
177 static int c4x_immed_float_constant PARAMS ((rtx));
178 static int c4x_K_constant PARAMS ((rtx));
179 static int c4x_N_constant PARAMS ((rtx));
180 static int c4x_O_constant PARAMS ((rtx));
181 static int c4x_R_indirect PARAMS ((rtx));
182 static int c4x_S_indirect PARAMS ((rtx));
183 static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
184 static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
185 enum machine_mode, int));
186 static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
187 static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
188 static void c4x_file_start PARAMS ((void));
189 static void c4x_file_end PARAMS ((void));
190 static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
191 static int c4x_r11_set_p PARAMS ((rtx));
192 static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
193 static void c4x_reorg PARAMS ((void));
194 static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
195 static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
196 const struct attribute_spec c4x_attribute_table[];
197 static void c4x_insert_attributes PARAMS ((tree, tree *));
198 static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
199 static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
200 static void c4x_globalize_label PARAMS ((FILE *, const char *));
201 static bool c4x_rtx_costs PARAMS ((rtx, int, int, int *));
202 static int c4x_address_cost PARAMS ((rtx));
204 /* Initialize the GCC target structure. */
205 #undef TARGET_ASM_BYTE_OP
206 #define TARGET_ASM_BYTE_OP "\t.word\t"
207 #undef TARGET_ASM_ALIGNED_HI_OP
208 #define TARGET_ASM_ALIGNED_HI_OP NULL
209 #undef TARGET_ASM_ALIGNED_SI_OP
210 #define TARGET_ASM_ALIGNED_SI_OP NULL
211 #undef TARGET_ASM_FILE_START
212 #define TARGET_ASM_FILE_START c4x_file_start
213 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
214 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
215 #undef TARGET_ASM_FILE_END
216 #define TARGET_ASM_FILE_END c4x_file_end
218 #undef TARGET_ATTRIBUTE_TABLE
219 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
221 #undef TARGET_INSERT_ATTRIBUTES
222 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
224 #undef TARGET_INIT_BUILTINS
225 #define TARGET_INIT_BUILTINS c4x_init_builtins
227 #undef TARGET_EXPAND_BUILTIN
228 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
230 #undef TARGET_SCHED_ADJUST_COST
231 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
233 #undef TARGET_ASM_GLOBALIZE_LABEL
234 #define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
236 #undef TARGET_RTX_COSTS
237 #define TARGET_RTX_COSTS c4x_rtx_costs
238 #undef TARGET_ADDRESS_COST
239 #define TARGET_ADDRESS_COST c4x_address_cost
241 #undef TARGET_MACHINE_DEPENDENT_REORG
242 #define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg
244 struct gcc_target targetm = TARGET_INITIALIZER;
246 /* Override command line options.
247 Called once after all options have been parsed.
248 Mostly we process the processor
249 type and sometimes adjust other TARGET_ options. */
251 void
252 c4x_override_options ()
254 if (c4x_rpts_cycles_string)
255 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
256 else
257 c4x_rpts_cycles = 0;
259 if (TARGET_C30)
260 c4x_cpu_version = 30;
261 else if (TARGET_C31)
262 c4x_cpu_version = 31;
263 else if (TARGET_C32)
264 c4x_cpu_version = 32;
265 else if (TARGET_C33)
266 c4x_cpu_version = 33;
267 else if (TARGET_C40)
268 c4x_cpu_version = 40;
269 else if (TARGET_C44)
270 c4x_cpu_version = 44;
271 else
272 c4x_cpu_version = 40;
274 /* -mcpu=xx overrides -m40 etc. */
275 if (c4x_cpu_version_string)
277 const char *p = c4x_cpu_version_string;
279 /* Also allow -mcpu=c30 etc. */
280 if (*p == 'c' || *p == 'C')
281 p++;
282 c4x_cpu_version = atoi (p);
285 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
286 C40_FLAG | C44_FLAG);
288 switch (c4x_cpu_version)
290 case 30: target_flags |= C30_FLAG; break;
291 case 31: target_flags |= C31_FLAG; break;
292 case 32: target_flags |= C32_FLAG; break;
293 case 33: target_flags |= C33_FLAG; break;
294 case 40: target_flags |= C40_FLAG; break;
295 case 44: target_flags |= C44_FLAG; break;
296 default:
297 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
298 c4x_cpu_version = 40;
299 target_flags |= C40_FLAG;
302 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
303 target_flags |= C3X_FLAG;
304 else
305 target_flags &= ~C3X_FLAG;
307 /* Convert foo / 8.0 into foo * 0.125, etc. */
308 set_fast_math_flags (1);
310 /* We should phase out the following at some stage.
311 This provides compatibility with the old -mno-aliases option. */
312 if (! TARGET_ALIASES && ! flag_argument_noalias)
313 flag_argument_noalias = 1;
315 /* We're C4X floating point, not IEEE floating point. */
316 memset (real_format_for_mode, 0, sizeof real_format_for_mode);
317 real_format_for_mode[QFmode - QFmode] = &c4x_single_format;
318 real_format_for_mode[HFmode - QFmode] = &c4x_extended_format;
322 /* This is called before c4x_override_options. */
324 void
325 c4x_optimization_options (level, size)
326 int level ATTRIBUTE_UNUSED;
327 int size ATTRIBUTE_UNUSED;
329 /* Scheduling before register allocation can screw up global
330 register allocation, especially for functions that use MPY||ADD
331 instructions. The benefit we gain we get by scheduling before
332 register allocation is probably marginal anyhow. */
333 flag_schedule_insns = 0;
337 /* Write an ASCII string. */
339 #define C4X_ASCII_LIMIT 40
341 void
342 c4x_output_ascii (stream, ptr, len)
343 FILE *stream;
344 const char *ptr;
345 int len;
347 char sbuf[C4X_ASCII_LIMIT + 1];
348 int s, l, special, first = 1, onlys;
350 if (len)
351 fprintf (stream, "\t.byte\t");
353 for (s = l = 0; len > 0; --len, ++ptr)
355 onlys = 0;
357 /* Escape " and \ with a \". */
358 special = *ptr == '\"' || *ptr == '\\';
360 /* If printable - add to buff. */
361 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
363 if (special)
364 sbuf[s++] = '\\';
365 sbuf[s++] = *ptr;
366 if (s < C4X_ASCII_LIMIT - 1)
367 continue;
368 onlys = 1;
370 if (s)
372 if (first)
373 first = 0;
374 else
376 fputc (',', stream);
377 l++;
380 sbuf[s] = 0;
381 fprintf (stream, "\"%s\"", sbuf);
382 l += s + 2;
383 if (TARGET_TI && l >= 80 && len > 1)
385 fprintf (stream, "\n\t.byte\t");
386 first = 1;
387 l = 0;
390 s = 0;
392 if (onlys)
393 continue;
395 if (first)
396 first = 0;
397 else
399 fputc (',', stream);
400 l++;
403 fprintf (stream, "%d", *ptr);
404 l += 3;
405 if (TARGET_TI && l >= 80 && len > 1)
407 fprintf (stream, "\n\t.byte\t");
408 first = 1;
409 l = 0;
412 if (s)
414 if (! first)
415 fputc (',', stream);
417 sbuf[s] = 0;
418 fprintf (stream, "\"%s\"", sbuf);
419 s = 0;
421 fputc ('\n', stream);
426 c4x_hard_regno_mode_ok (regno, mode)
427 unsigned int regno;
428 enum machine_mode mode;
430 switch (mode)
432 #if Pmode != QImode
433 case Pmode: /* Pointer (24/32 bits). */
434 #endif
435 case QImode: /* Integer (32 bits). */
436 return IS_INT_REGNO (regno);
438 case QFmode: /* Float, Double (32 bits). */
439 case HFmode: /* Long Double (40 bits). */
440 return IS_EXT_REGNO (regno);
442 case CCmode: /* Condition Codes. */
443 case CC_NOOVmode: /* Condition Codes. */
444 return IS_ST_REGNO (regno);
446 case HImode: /* Long Long (64 bits). */
447 /* We need two registers to store long longs. Note that
448 it is much easier to constrain the first register
449 to start on an even boundary. */
450 return IS_INT_REGNO (regno)
451 && IS_INT_REGNO (regno + 1)
452 && (regno & 1) == 0;
454 default:
455 return 0; /* We don't support these modes. */
458 return 0;
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (regno1, regno2)
     unsigned int regno1;
     unsigned int regno2;
{
  /* We can not copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}
483 /* The TI C3x C compiler register argument runtime model uses 6 registers,
484 AR2, R2, R3, RC, RS, RE.
486 The first two floating point arguments (float, double, long double)
487 that are found scanning from left to right are assigned to R2 and R3.
489 The remaining integer (char, short, int, long) or pointer arguments
490 are assigned to the remaining registers in the order AR2, R2, R3,
491 RC, RS, RE when scanning left to right, except for the last named
492 argument prior to an ellipsis denoting variable number of
493 arguments. We don't have to worry about the latter condition since
494 function.c treats the last named argument as anonymous (unnamed).
496 All arguments that cannot be passed in registers are pushed onto
497 the stack in reverse order (right to left). GCC handles that for us.
499 c4x_init_cumulative_args() is called at the start, so we can parse
500 the args to see how many floating point arguments and how many
501 integer (or pointer) arguments there are. c4x_function_arg() is
502 then called (sometimes repeatedly) for each argument (parsed left
503 to right) to obtain the register to pass the argument in, or zero
504 if the argument is to be passed on the stack. Once the compiler is
505 happy, c4x_function_arg_advance() is called.
507 Don't use R0 to pass arguments in, we use 0 to indicate a stack
508 argument. */
510 static const int c4x_int_reglist[3][6] =
512 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
513 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
514 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
517 static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
520 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
521 function whose data type is FNTYPE.
522 For a library call, FNTYPE is 0. */
524 void
525 c4x_init_cumulative_args (cum, fntype, libname)
526 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
527 tree fntype; /* Tree ptr for function decl. */
528 rtx libname; /* SYMBOL_REF of library name or 0. */
530 tree param, next_param;
532 cum->floats = cum->ints = 0;
533 cum->init = 0;
534 cum->var = 0;
535 cum->args = 0;
537 if (TARGET_DEBUG)
539 fprintf (stderr, "\nc4x_init_cumulative_args (");
540 if (fntype)
542 tree ret_type = TREE_TYPE (fntype);
544 fprintf (stderr, "fntype code = %s, ret code = %s",
545 tree_code_name[(int) TREE_CODE (fntype)],
546 tree_code_name[(int) TREE_CODE (ret_type)]);
548 else
549 fprintf (stderr, "no fntype");
551 if (libname)
552 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
555 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
557 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
558 param; param = next_param)
560 tree type;
562 next_param = TREE_CHAIN (param);
564 type = TREE_VALUE (param);
565 if (type && type != void_type_node)
567 enum machine_mode mode;
569 /* If the last arg doesn't have void type then we have
570 variable arguments. */
571 if (! next_param)
572 cum->var = 1;
574 if ((mode = TYPE_MODE (type)))
576 if (! MUST_PASS_IN_STACK (mode, type))
578 /* Look for float, double, or long double argument. */
579 if (mode == QFmode || mode == HFmode)
580 cum->floats++;
581 /* Look for integer, enumeral, boolean, char, or pointer
582 argument. */
583 else if (mode == QImode || mode == Pmode)
584 cum->ints++;
587 cum->args++;
591 if (TARGET_DEBUG)
592 fprintf (stderr, "%s%s, args = %d)\n",
593 cum->prototype ? ", prototype" : "",
594 cum->var ? ", variable args" : "",
595 cum->args);
599 /* Update the data in CUM to advance over an argument
600 of mode MODE and data type TYPE.
601 (TYPE is null for libcalls where that information may not be available.) */
603 void
604 c4x_function_arg_advance (cum, mode, type, named)
605 CUMULATIVE_ARGS *cum; /* Current arg information. */
606 enum machine_mode mode; /* Current arg mode. */
607 tree type; /* Type of the arg or 0 if lib support. */
608 int named; /* Whether or not the argument was named. */
610 if (TARGET_DEBUG)
611 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
612 GET_MODE_NAME (mode), named);
613 if (! TARGET_MEMPARM
614 && named
615 && type
616 && ! MUST_PASS_IN_STACK (mode, type))
618 /* Look for float, double, or long double argument. */
619 if (mode == QFmode || mode == HFmode)
620 cum->floats++;
621 /* Look for integer, enumeral, boolean, char, or pointer argument. */
622 else if (mode == QImode || mode == Pmode)
623 cum->ints++;
625 else if (! TARGET_MEMPARM && ! type)
627 /* Handle libcall arguments. */
628 if (mode == QFmode || mode == HFmode)
629 cum->floats++;
630 else if (mode == QImode || mode == Pmode)
631 cum->ints++;
633 return;
637 /* Define where to put the arguments to a function. Value is zero to
638 push the argument on the stack, or a hard register in which to
639 store the argument.
641 MODE is the argument's machine mode.
642 TYPE is the data type of the argument (as a tree).
643 This is null for libcalls where that information may
644 not be available.
645 CUM is a variable of type CUMULATIVE_ARGS which gives info about
646 the preceding args and about the function being called.
647 NAMED is nonzero if this argument is a named parameter
648 (otherwise it is an extra parameter matching an ellipsis). */
650 struct rtx_def *
651 c4x_function_arg (cum, mode, type, named)
652 CUMULATIVE_ARGS *cum; /* Current arg information. */
653 enum machine_mode mode; /* Current arg mode. */
654 tree type; /* Type of the arg or 0 if lib support. */
655 int named; /* != 0 for normal args, == 0 for ... args. */
657 int reg = 0; /* Default to passing argument on stack. */
659 if (! cum->init)
661 /* We can handle at most 2 floats in R2, R3. */
662 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
664 /* We can handle at most 6 integers minus number of floats passed
665 in registers. */
666 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
667 6 - cum->maxfloats : cum->ints;
669 /* If there is no prototype, assume all the arguments are integers. */
670 if (! cum->prototype)
671 cum->maxints = 6;
673 cum->ints = cum->floats = 0;
674 cum->init = 1;
677 /* This marks the last argument. We don't need to pass this through
678 to the call insn. */
679 if (type == void_type_node)
680 return 0;
682 if (! TARGET_MEMPARM
683 && named
684 && type
685 && ! MUST_PASS_IN_STACK (mode, type))
687 /* Look for float, double, or long double argument. */
688 if (mode == QFmode || mode == HFmode)
690 if (cum->floats < cum->maxfloats)
691 reg = c4x_fp_reglist[cum->floats];
693 /* Look for integer, enumeral, boolean, char, or pointer argument. */
694 else if (mode == QImode || mode == Pmode)
696 if (cum->ints < cum->maxints)
697 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
700 else if (! TARGET_MEMPARM && ! type)
702 /* We could use a different argument calling model for libcalls,
703 since we're only calling functions in libgcc. Thus we could
704 pass arguments for long longs in registers rather than on the
705 stack. In the meantime, use the odd TI format. We make the
706 assumption that we won't have more than two floating point
707 args, six integer args, and that all the arguments are of the
708 same mode. */
709 if (mode == QFmode || mode == HFmode)
710 reg = c4x_fp_reglist[cum->floats];
711 else if (mode == QImode || mode == Pmode)
712 reg = c4x_int_reglist[0][cum->ints];
715 if (TARGET_DEBUG)
717 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
718 GET_MODE_NAME (mode), named);
719 if (reg)
720 fprintf (stderr, ", reg=%s", reg_names[reg]);
721 else
722 fprintf (stderr, ", stack");
723 fprintf (stderr, ")\n");
725 if (reg)
726 return gen_rtx_REG (mode, reg);
727 else
728 return NULL_RTX;
731 /* C[34]x arguments grow in weird ways (downwards) that the standard
732 varargs stuff can't handle.. */
734 c4x_va_arg (valist, type)
735 tree valist, type;
737 tree t;
739 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
740 build_int_2 (int_size_in_bytes (type), 0));
741 TREE_SIDE_EFFECTS (t) = 1;
743 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
747 static int
748 c4x_isr_reg_used_p (regno)
749 unsigned int regno;
751 /* Don't save/restore FP or ST, we handle them separately. */
752 if (regno == FRAME_POINTER_REGNUM
753 || IS_ST_REGNO (regno))
754 return 0;
756 /* We could be a little smarter abut saving/restoring DP.
757 We'll only save if for the big memory model or if
758 we're paranoid. ;-) */
759 if (IS_DP_REGNO (regno))
760 return ! TARGET_SMALL || TARGET_PARANOID;
762 /* Only save/restore regs in leaf function that are used. */
763 if (c4x_leaf_function)
764 return regs_ever_live[regno] && fixed_regs[regno] == 0;
766 /* Only save/restore regs that are used by the ISR and regs
767 that are likely to be used by functions the ISR calls
768 if they are not fixed. */
769 return IS_EXT_REGNO (regno)
770 || ((regs_ever_live[regno] || call_used_regs[regno])
771 && fixed_regs[regno] == 0);
775 static int
776 c4x_leaf_function_p ()
778 /* A leaf function makes no calls, so we only need
779 to save/restore the registers we actually use.
780 For the global variable leaf_function to be set, we need
781 to define LEAF_REGISTERS and all that it entails.
782 Let's check ourselves... */
784 if (lookup_attribute ("leaf_pretend",
785 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
786 return 1;
788 /* Use the leaf_pretend attribute at your own risk. This is a hack
789 to speed up ISRs that call a function infrequently where the
790 overhead of saving and restoring the additional registers is not
791 warranted. You must save and restore the additional registers
792 required by the called function. Caveat emptor. Here's enough
793 rope... */
795 if (leaf_function_p ())
796 return 1;
798 return 0;
802 static int
803 c4x_naked_function_p ()
805 tree type;
807 type = TREE_TYPE (current_function_decl);
808 return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
813 c4x_interrupt_function_p ()
815 if (lookup_attribute ("interrupt",
816 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
817 return 1;
819 /* Look for TI style c_intnn. */
820 return current_function_name[0] == 'c'
821 && current_function_name[1] == '_'
822 && current_function_name[2] == 'i'
823 && current_function_name[3] == 'n'
824 && current_function_name[4] == 't'
825 && ISDIGIT (current_function_name[5])
826 && ISDIGIT (current_function_name[6]);
829 void
830 c4x_expand_prologue ()
832 unsigned int regno;
833 int size = get_frame_size ();
834 rtx insn;
836 /* In functions where ar3 is not used but frame pointers are still
837 specified, frame pointers are not adjusted (if >= -O2) and this
838 is used so it won't needlessly push the frame pointer. */
839 int dont_push_ar3;
841 /* For __naked__ function don't build a prologue. */
842 if (c4x_naked_function_p ())
844 return;
847 /* For __interrupt__ function build specific prologue. */
848 if (c4x_interrupt_function_p ())
850 c4x_leaf_function = c4x_leaf_function_p ();
852 insn = emit_insn (gen_push_st ());
853 RTX_FRAME_RELATED_P (insn) = 1;
854 if (size)
856 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
857 RTX_FRAME_RELATED_P (insn) = 1;
858 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
859 gen_rtx_REG (QImode, SP_REGNO)));
860 RTX_FRAME_RELATED_P (insn) = 1;
861 /* We require that an ISR uses fewer than 32768 words of
862 local variables, otherwise we have to go to lots of
863 effort to save a register, load it with the desired size,
864 adjust the stack pointer, and then restore the modified
865 register. Frankly, I think it is a poor ISR that
866 requires more than 32767 words of local temporary
867 storage! */
868 if (size > 32767)
869 error ("ISR %s requires %d words of local vars, max is 32767",
870 current_function_name, size);
872 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
873 gen_rtx_REG (QImode, SP_REGNO),
874 GEN_INT (size)));
875 RTX_FRAME_RELATED_P (insn) = 1;
877 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
879 if (c4x_isr_reg_used_p (regno))
881 if (regno == DP_REGNO)
883 insn = emit_insn (gen_push_dp ());
884 RTX_FRAME_RELATED_P (insn) = 1;
886 else
888 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
889 RTX_FRAME_RELATED_P (insn) = 1;
890 if (IS_EXT_REGNO (regno))
892 insn = emit_insn (gen_pushqf
893 (gen_rtx_REG (QFmode, regno)));
894 RTX_FRAME_RELATED_P (insn) = 1;
899 /* We need to clear the repeat mode flag if the ISR is
900 going to use a RPTB instruction or uses the RC, RS, or RE
901 registers. */
902 if (regs_ever_live[RC_REGNO]
903 || regs_ever_live[RS_REGNO]
904 || regs_ever_live[RE_REGNO])
906 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
907 RTX_FRAME_RELATED_P (insn) = 1;
910 /* Reload DP reg if we are paranoid about some turkey
911 violating small memory model rules. */
912 if (TARGET_SMALL && TARGET_PARANOID)
914 insn = emit_insn (gen_set_ldp_prologue
915 (gen_rtx_REG (QImode, DP_REGNO),
916 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
917 RTX_FRAME_RELATED_P (insn) = 1;
920 else
922 if (frame_pointer_needed)
924 if ((size != 0)
925 || (current_function_args_size != 0)
926 || (optimize < 2))
928 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
929 RTX_FRAME_RELATED_P (insn) = 1;
930 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
931 gen_rtx_REG (QImode, SP_REGNO)));
932 RTX_FRAME_RELATED_P (insn) = 1;
933 dont_push_ar3 = 1;
935 else
937 /* Since ar3 is not used, we don't need to push it. */
938 dont_push_ar3 = 1;
941 else
943 /* If we use ar3, we need to push it. */
944 dont_push_ar3 = 0;
945 if ((size != 0) || (current_function_args_size != 0))
947 /* If we are omitting the frame pointer, we still have
948 to make space for it so the offsets are correct
949 unless we don't use anything on the stack at all. */
950 size += 1;
954 if (size > 32767)
956 /* Local vars are too big, it will take multiple operations
957 to increment SP. */
958 if (TARGET_C3X)
960 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
961 GEN_INT(size >> 16)));
962 RTX_FRAME_RELATED_P (insn) = 1;
963 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
964 gen_rtx_REG (QImode, R1_REGNO),
965 GEN_INT(-16)));
966 RTX_FRAME_RELATED_P (insn) = 1;
968 else
970 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
971 GEN_INT(size & ~0xffff)));
972 RTX_FRAME_RELATED_P (insn) = 1;
974 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
975 gen_rtx_REG (QImode, R1_REGNO),
976 GEN_INT(size & 0xffff)));
977 RTX_FRAME_RELATED_P (insn) = 1;
978 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
979 gen_rtx_REG (QImode, SP_REGNO),
980 gen_rtx_REG (QImode, R1_REGNO)));
981 RTX_FRAME_RELATED_P (insn) = 1;
983 else if (size != 0)
985 /* Local vars take up less than 32767 words, so we can directly
986 add the number. */
987 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
988 gen_rtx_REG (QImode, SP_REGNO),
989 GEN_INT (size)));
990 RTX_FRAME_RELATED_P (insn) = 1;
993 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
995 if (regs_ever_live[regno] && ! call_used_regs[regno])
997 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
999 if (TARGET_PRESERVE_FLOAT)
1001 insn = emit_insn (gen_pushqi
1002 (gen_rtx_REG (QImode, regno)));
1003 RTX_FRAME_RELATED_P (insn) = 1;
1005 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
1006 RTX_FRAME_RELATED_P (insn) = 1;
1008 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
1010 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1011 RTX_FRAME_RELATED_P (insn) = 1;
1019 void
1020 c4x_expand_epilogue()
1022 int regno;
1023 int jump = 0;
1024 int dont_pop_ar3;
1025 rtx insn;
1026 int size = get_frame_size ();
1028 /* For __naked__ function build no epilogue. */
1029 if (c4x_naked_function_p ())
1031 insn = emit_jump_insn (gen_return_from_epilogue ());
1032 RTX_FRAME_RELATED_P (insn) = 1;
1033 return;
1036 /* For __interrupt__ function build specific epilogue. */
1037 if (c4x_interrupt_function_p ())
1039 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1041 if (! c4x_isr_reg_used_p (regno))
1042 continue;
1043 if (regno == DP_REGNO)
1045 insn = emit_insn (gen_pop_dp ());
1046 RTX_FRAME_RELATED_P (insn) = 1;
1048 else
1050 /* We have to use unspec because the compiler will delete insns
1051 that are not call-saved. */
1052 if (IS_EXT_REGNO (regno))
1054 insn = emit_insn (gen_popqf_unspec
1055 (gen_rtx_REG (QFmode, regno)));
1056 RTX_FRAME_RELATED_P (insn) = 1;
1058 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1059 RTX_FRAME_RELATED_P (insn) = 1;
1062 if (size)
1064 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1065 gen_rtx_REG (QImode, SP_REGNO),
1066 GEN_INT(size)));
1067 RTX_FRAME_RELATED_P (insn) = 1;
1068 insn = emit_insn (gen_popqi
1069 (gen_rtx_REG (QImode, AR3_REGNO)));
1070 RTX_FRAME_RELATED_P (insn) = 1;
1072 insn = emit_insn (gen_pop_st ());
1073 RTX_FRAME_RELATED_P (insn) = 1;
1074 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1075 RTX_FRAME_RELATED_P (insn) = 1;
1077 else
1079 if (frame_pointer_needed)
1081 if ((size != 0)
1082 || (current_function_args_size != 0)
1083 || (optimize < 2))
1085 insn = emit_insn
1086 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1087 gen_rtx_MEM (QImode,
1088 gen_rtx_PLUS
1089 (QImode, gen_rtx_REG (QImode,
1090 AR3_REGNO),
1091 GEN_INT(-1)))));
1092 RTX_FRAME_RELATED_P (insn) = 1;
1094 /* We already have the return value and the fp,
1095 so we need to add those to the stack. */
1096 size += 2;
1097 jump = 1;
1098 dont_pop_ar3 = 1;
1100 else
1102 /* Since ar3 is not used for anything, we don't need to
1103 pop it. */
1104 dont_pop_ar3 = 1;
1107 else
1109 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1110 if (size || current_function_args_size)
1112 /* If we are ommitting the frame pointer, we still have
1113 to make space for it so the offsets are correct
1114 unless we don't use anything on the stack at all. */
1115 size += 1;
1119 /* Now restore the saved registers, putting in the delayed branch
1120 where required. */
1121 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1123 if (regs_ever_live[regno] && ! call_used_regs[regno])
1125 if (regno == AR3_REGNO && dont_pop_ar3)
1126 continue;
1128 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1130 insn = emit_insn (gen_popqf_unspec
1131 (gen_rtx_REG (QFmode, regno)));
1132 RTX_FRAME_RELATED_P (insn) = 1;
1133 if (TARGET_PRESERVE_FLOAT)
1135 insn = emit_insn (gen_popqi_unspec
1136 (gen_rtx_REG (QImode, regno)));
1137 RTX_FRAME_RELATED_P (insn) = 1;
1140 else
1142 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1143 RTX_FRAME_RELATED_P (insn) = 1;
1148 if (frame_pointer_needed)
1150 if ((size != 0)
1151 || (current_function_args_size != 0)
1152 || (optimize < 2))
1154 /* Restore the old FP. */
1155 insn = emit_insn
1156 (gen_movqi
1157 (gen_rtx_REG (QImode, AR3_REGNO),
1158 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1160 RTX_FRAME_RELATED_P (insn) = 1;
1164 if (size > 32767)
1166 /* Local vars are too big, it will take multiple operations
1167 to decrement SP. */
1168 if (TARGET_C3X)
1170 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1171 GEN_INT(size >> 16)));
1172 RTX_FRAME_RELATED_P (insn) = 1;
1173 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1174 gen_rtx_REG (QImode, R3_REGNO),
1175 GEN_INT(-16)));
1176 RTX_FRAME_RELATED_P (insn) = 1;
1178 else
1180 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1181 GEN_INT(size & ~0xffff)));
1182 RTX_FRAME_RELATED_P (insn) = 1;
1184 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1185 gen_rtx_REG (QImode, R3_REGNO),
1186 GEN_INT(size & 0xffff)));
1187 RTX_FRAME_RELATED_P (insn) = 1;
1188 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1189 gen_rtx_REG (QImode, SP_REGNO),
1190 gen_rtx_REG (QImode, R3_REGNO)));
1191 RTX_FRAME_RELATED_P (insn) = 1;
1193 else if (size != 0)
1195 /* Local vars take up less than 32768 words, so we can directly
1196 subtract the number. */
1197 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1198 gen_rtx_REG (QImode, SP_REGNO),
1199 GEN_INT(size)));
1200 RTX_FRAME_RELATED_P (insn) = 1;
1203 if (jump)
1205 insn = emit_jump_insn (gen_return_indirect_internal
1206 (gen_rtx_REG (QImode, R2_REGNO)));
1207 RTX_FRAME_RELATED_P (insn) = 1;
1209 else
1211 insn = emit_jump_insn (gen_return_from_epilogue ());
1212 RTX_FRAME_RELATED_P (insn) = 1;
1219 c4x_null_epilogue_p ()
1221 int regno;
1223 if (reload_completed
1224 && ! c4x_naked_function_p ()
1225 && ! c4x_interrupt_function_p ()
1226 && ! current_function_calls_alloca
1227 && ! current_function_args_size
1228 && ! (optimize < 2)
1229 && ! get_frame_size ())
1231 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1232 if (regs_ever_live[regno] && ! call_used_regs[regno]
1233 && (regno != AR3_REGNO))
1234 return 1;
1235 return 0;
1237 return 1;
/* Fix up a move between OPERANDS[0] and OPERANDS[1] of mode MODE so
   that the machine can handle it: force illegitimate constants and
   symbolic addresses into registers or the constant pool, expose the
   DP register for direct addressing when requested, and expand mixed
   int/float subreg moves and large constant loads with special
   patterns.  Returns 1 when the whole move has been emitted here;
   returns 0 (with operands[] possibly rewritten) when the caller
   should emit the normal move pattern.  */
1242 c4x_emit_move_sequence (operands, mode)
1243     rtx *operands;
1244     enum machine_mode mode;
1246     rtx op0 = operands[0];
1247     rtx op1 = operands[1];
/* A memory-to-memory move needs the source in a register first,
   unless it is a small (stik) constant being stored (not pushed).  */
1249     if (! reload_in_progress
1250 && ! REG_P (op0)
1251 && ! REG_P (op1)
1252 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1253 op1 = force_reg (mode, op1);
1255     if (GET_CODE (op1) == LO_SUM
1256 && GET_MODE (op1) == Pmode
1257 && dp_reg_operand (XEXP (op1, 0), mode))
1259 /* expand_increment will sometimes create a LO_SUM immediate
1260 address. */
1261 op1 = XEXP (op1, 1);
1263     else if (symbolic_address_operand (op1, mode))
1265 if (TARGET_LOAD_ADDRESS)
1267 /* Alias analysis seems to do a better job if we force
1268 constant addresses to memory after reload. */
1269 emit_insn (gen_load_immed_address (op0, op1));
1270 return 1;
1272 else
1274 /* Stick symbol or label address into the constant pool. */
1275 op1 = force_const_mem (Pmode, op1);
1278     else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1280 /* We could be a lot smarter about loading some of these
1281 constants... */
1282 op1 = force_const_mem (mode, op1);
1285     /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1286 and emit associated (HIGH (SYMREF)) if large memory model.
1287 c4x_legitimize_address could be used to do this,
1288 perhaps by calling validize_address. */
1289     if (TARGET_EXPOSE_LDP
1290 && ! (reload_in_progress || reload_completed)
1291 && GET_CODE (op1) == MEM
1292 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1294 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1295 if (! TARGET_SMALL)
1296 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1297 op1 = change_address (op1, mode,
1298 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
/* Same transformation for a symbolic destination address.  */
1301     if (TARGET_EXPOSE_LDP
1302 && ! (reload_in_progress || reload_completed)
1303 && GET_CODE (op0) == MEM
1304 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1306 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1307 if (! TARGET_SMALL)
1308 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1309 op0 = change_address (op0, mode,
1310 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
/* Store of an integer-mode value into a float-reg subreg: use the
   storeqf_int_clobber pattern (clobbers CC, so RTL-gen only).  */
1313     if (GET_CODE (op0) == SUBREG
1314 && mixed_subreg_operand (op0, mode))
1316 /* We should only generate these mixed mode patterns
1317 during RTL generation. If we need do it later on
1318 then we'll have to emit patterns that won't clobber CC. */
1319 if (reload_in_progress || reload_completed)
1320 abort ();
1321 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1322 op0 = SUBREG_REG (op0);
1323 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1325 op0 = copy_rtx (op0);
1326 PUT_MODE (op0, QImode);
1328 else
1329 abort ();
1331 if (mode == QFmode)
1332 emit_insn (gen_storeqf_int_clobber (op0, op1));
1333 else
1334 abort ();
1335 return 1;
/* Mirror case: load from a float-reg subreg into an integer reg.  */
1338     if (GET_CODE (op1) == SUBREG
1339 && mixed_subreg_operand (op1, mode))
1341 /* We should only generate these mixed mode patterns
1342 during RTL generation. If we need do it later on
1343 then we'll have to emit patterns that won't clobber CC. */
1344 if (reload_in_progress || reload_completed)
1345 abort ();
1346 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1347 op1 = SUBREG_REG (op1);
1348 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1350 op1 = copy_rtx (op1);
1351 PUT_MODE (op1, QImode);
1353 else
1354 abort ();
1356 if (mode == QFmode)
1357 emit_insn (gen_loadqf_int_clobber (op0, op1));
1358 else
1359 abort ();
1360 return 1;
/* Integer constants that fit neither the 16-bit immediate field nor
   the "high" form need a multi-insn big-constant load.  */
1363     if (mode == QImode
1364 && reg_operand (op0, mode)
1365 && const_int_operand (op1, mode)
1366 && ! IS_INT16_CONST (INTVAL (op1))
1367 && ! IS_HIGH_CONST (INTVAL (op1)))
1369 emit_insn (gen_loadqi_big_constant (op0, op1));
1370 return 1;
1373     if (mode == HImode
1374 && reg_operand (op0, mode)
1375 && const_int_operand (op1, mode))
1377 emit_insn (gen_loadhi_big_constant (op0, op1));
1378 return 1;
1381     /* Adjust operands in case we have modified them. */
1382     operands[0] = op0;
1383     operands[1] = op1;
1385     /* Emit normal pattern. */
1386     return 0;
/* Emit a call to LIBCALL for a unary (NOPERANDS == 2) or binary
   (NOPERANDS == 3) operation.  The result of mode DMODE goes into
   OPERANDS[0]; the sources of mode SMODE come from OPERANDS[1..].
   The whole call sequence is wrapped in a libcall block with an
   RTX of code CODE as the REG_EQUAL equivalent, so optimizers can
   treat it as the plain operation.  */
1390 void
1391 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1392     rtx libcall;
1393     enum rtx_code code;
1394     enum machine_mode dmode;
1395     enum machine_mode smode;
1396     int noperands;
1397     rtx *operands;
1399     rtx ret;
1400     rtx insns;
1401     rtx equiv;
1403     start_sequence ();
1404     switch (noperands)
1406     case 2:
1407 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1408 operands[1], smode);
1409 equiv = gen_rtx (code, dmode, operands[1]);
1410 break;
1412     case 3:
1413 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1414 operands[1], smode, operands[2], smode);
1415 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1416 break;
1418     default:
1419 abort ();
/* Collect the emitted insns and attach the equivalent-value note.  */
1422     insns = get_insns ();
1423     end_sequence ();
1424     emit_libcall_block (insns, operands[0], ret, equiv);
1428 void
1429 c4x_emit_libcall3 (libcall, code, mode, operands)
1430 rtx libcall;
1431 enum rtx_code code;
1432 enum machine_mode mode;
1433 rtx *operands;
1435 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
/* Emit a widening-multiply-high libcall: OPERANDS[0] gets the high
   part of OPERANDS[1] * OPERANDS[2] via LIBCALL.  CODE is the
   extension code (SIGN_EXTEND or ZERO_EXTEND) applied to the sources
   in the REG_EQUAL equivalent, which describes the operation as
   truncate ((ext (op1) * ext (op2)) >> 32).  */
1439 void
1440 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1441     rtx libcall;
1442     enum rtx_code code;
1443     enum machine_mode mode;
1444     rtx *operands;
1446     rtx ret;
1447     rtx insns;
1448     rtx equiv;
1450     start_sequence ();
1451     ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1452 operands[1], mode, operands[2], mode);
/* Build the equivalent RTX: the 32-bit high half of the HImode
   (64-bit on the C4x) product of the extended operands.  */
1453     equiv = gen_rtx_TRUNCATE (mode,
1454 gen_rtx_LSHIFTRT (HImode,
1455 gen_rtx_MULT (HImode,
1456 gen_rtx (code, HImode, operands[1]),
1457 gen_rtx (code, HImode, operands[2])),
1458 GEN_INT (32)));
1459     insns = get_insns ();
1460     end_sequence ();
1461     emit_libcall_block (insns, operands[0], ret, equiv);
/* Return nonzero if ADDR is a legitimate memory address for mode
   MODE.  STRICT nonzero means only hard registers acceptable as
   base/index are allowed (after reload); otherwise pseudos are
   allowed too.  The address is decomposed into BASE, INDX and DISP
   components which are validated at the end.  */
1466 c4x_check_legit_addr (mode, addr, strict)
1467     enum machine_mode mode;
1468     rtx addr;
1469     int strict;
1471     rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1472     rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1473     rtx disp = NULL_RTX; /* Displacement. */
1474     enum rtx_code code;
1476     code = GET_CODE (addr);
1477     switch (code)
1479     /* Register indirect with auto increment/decrement. We don't
1480 allow SP here---push_operand should recognize an operand
1481 being pushed on the stack. */
1483     case PRE_DEC:
1484     case PRE_INC:
1485     case POST_DEC:
1486 if (mode != QImode && mode != QFmode)
1487 return 0;
/* Fall through: POST_INC shares the base-register check but is
   allowed for all modes.  */
1489     case POST_INC:
1490 base = XEXP (addr, 0);
1491 if (! REG_P (base))
1492 return 0;
1493 break;
1495     case PRE_MODIFY:
1496     case POST_MODIFY:
1498 rtx op0 = XEXP (addr, 0);
1499 rtx op1 = XEXP (addr, 1);
1501 if (mode != QImode && mode != QFmode)
1502 return 0;
1504 if (! REG_P (op0)
1505 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1506 return 0;
/* The modified register must be the one being dereferenced.  */
1507 base = XEXP (op1, 0);
1508 if (base != op0)
1509 return 0;
1510 if (REG_P (XEXP (op1, 1)))
1511 indx = XEXP (op1, 1);
1512 else
1513 disp = XEXP (op1, 1);
1515 break;
1517     /* Register indirect. */
1518     case REG:
1519 base = addr;
1520 break;
1522     /* Register indirect with displacement or index. */
1523     case PLUS:
1525 rtx op0 = XEXP (addr, 0);
1526 rtx op1 = XEXP (addr, 1);
1527 enum rtx_code code0 = GET_CODE (op0);
1529 switch (code0)
1531 case REG:
1532 if (REG_P (op1))
1534 base = op0; /* Base + index. */
1535 indx = op1;
/* Swap so the address register is the base and the index
   register is the index.  */
1536 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1538 base = op1;
1539 indx = op0;
1542 else
1544 base = op0; /* Base + displacement. */
1545 disp = op1;
1547 break;
1549 default:
1550 return 0;
1553 break;
1555     /* Direct addressing with DP register. */
1556     case LO_SUM:
1558 rtx op0 = XEXP (addr, 0);
1559 rtx op1 = XEXP (addr, 1);
1561 /* HImode and HFmode direct memory references aren't truly
1562 offsettable (consider case at end of data page). We
1563 probably get better code by loading a pointer and using an
1564 indirect memory reference. */
1565 if (mode == HImode || mode == HFmode)
1566 return 0;
1568 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1569 return 0;
1571 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1572 return 1;
1574 if (GET_CODE (op1) == CONST)
1575 return 1;
1576 return 0;
1578 break;
1580     /* Direct addressing with some work for the assembler... */
1581     case CONST:
1582     /* Direct addressing. */
1583     case LABEL_REF:
1584     case SYMBOL_REF:
1585 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1586 return 1;
1587 /* These need to be converted to a LO_SUM (...).
1588 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1589 return 0;
1591     /* Do not allow direct memory access to absolute addresses.
1592 This is more pain than it's worth, especially for the
1593 small memory model where we can't guarantee that
1594 this address is within the data page---we don't want
1595 to modify the DP register in the small memory model,
1596 even temporarily, since an interrupt can sneak in.... */
1597     case CONST_INT:
1598 return 0;
1600     /* Indirect indirect addressing. */
1601     case MEM:
1602 return 0;
1604     case CONST_DOUBLE:
1605 fatal_insn ("using CONST_DOUBLE for address", addr);
1607     default:
1608 return 0;
1611     /* Validate the base register. */
1612     if (base)
1614 /* Check that the address is offsettable for HImode and HFmode. */
1615 if (indx && (mode == HImode || mode == HFmode))
1616 return 0;
1618 /* Handle DP based stuff. */
1619 if (REGNO (base) == DP_REGNO)
1620 return 1;
1621 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1622 return 0;
1623 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1624 return 0;
1627     /* Now validate the index register. */
1628     if (indx)
1630 if (GET_CODE (indx) != REG)
1631 return 0;
1632 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1633 return 0;
1634 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1635 return 0;
1638     /* Validate displacement. */
1639     if (disp)
1641 if (GET_CODE (disp) != CONST_INT)
1642 return 0;
1643 if (mode == HImode || mode == HFmode)
1645 /* The offset displacement must be legitimate. */
1646 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1647 return 0;
1649 else
1651 if (! IS_DISP8_CONST (INTVAL (disp)))
1652 return 0;
1654 /* Can't add an index with a disp. */
1655 if (indx)
1656 return 0;
1658     return 1;
/* Try to convert address ORIG (used in mode MODE) into a legitimate
   form.  Symbol and label references are either forced into a
   register (for HImode/HFmode, which must be offsettable) or turned
   into a DP-relative LO_SUM, emitting a set_ldp for the large memory
   model.  Returns NULL_RTX when no transformation applies.
   NOTE(review): both parameters are marked ATTRIBUTE_UNUSED yet are
   clearly used — the markers look stale; confirm before removing.  */
1663 c4x_legitimize_address (orig, mode)
1664     rtx orig ATTRIBUTE_UNUSED;
1665     enum machine_mode mode ATTRIBUTE_UNUSED;
1667     if (GET_CODE (orig) == SYMBOL_REF
1668 || GET_CODE (orig) == LABEL_REF)
1670 if (mode == HImode || mode == HFmode)
1672 /* We need to force the address into
1673 a register so that it is offsettable. */
1674 rtx addr_reg = gen_reg_rtx (Pmode);
1675 emit_move_insn (addr_reg, orig);
1676 return addr_reg;
1678 else
1680 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1682 if (! TARGET_SMALL)
1683 emit_insn (gen_set_ldp (dp_reg, orig));
1685 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1689     return NULL_RTX;
1693 /* Provide the costs of an addressing mode that contains ADDR.
1694 If ADDR is not a valid address, its cost is irrelevant.
1695 This is used in cse and loop optimisation to determine
1696 if it is worthwhile storing a common address into a register.
1697 Unfortunately, the C4x address cost depends on other operands. */
1699 static int
1700 c4x_address_cost (addr)
1701     rtx addr;
1703     switch (GET_CODE (addr))
1705     case REG:
1706 return 1;
1708     case POST_INC:
1709     case POST_DEC:
1710     case PRE_INC:
1711     case PRE_DEC:
1712 return 1;
1714     /* These shouldn't be directly generated. */
1715     case SYMBOL_REF:
1716     case LABEL_REF:
1717     case CONST:
1718 return 10;
1720     case LO_SUM:
1722 rtx op1 = XEXP (addr, 1);
1724 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1725 return TARGET_SMALL ? 3 : 4;
1727 if (GET_CODE (op1) == CONST)
1729 rtx offset = const0_rtx;
1731 op1 = eliminate_constant_term (op1, &offset);
1733 /* ??? These costs need rethinking... */
1734 if (GET_CODE (op1) == LABEL_REF)
1735 return 3;
1737 if (GET_CODE (op1) != SYMBOL_REF)
1738 return 4;
1740 if (INTVAL (offset) == 0)
1741 return 3;
1743 return 4;
/* Anything else inside a LO_SUM is malformed.  */
1745 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1747 break;
1749     case PLUS:
1751 register rtx op0 = XEXP (addr, 0);
1752 register rtx op1 = XEXP (addr, 1);
1754 if (GET_CODE (op0) != REG)
1755 break;
1757 switch (GET_CODE (op1))
1759 default:
1760 break;
1762 case REG:
1763 /* This cost for REG+REG must be greater than the cost
1764 for REG if we want autoincrement addressing modes. */
1765 return 2;
1767 case CONST_INT:
1768 /* The following tries to improve GIV combination
1769 in strength reduce but appears not to help. */
1770 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1771 return 1;
1773 if (IS_DISP1_CONST (INTVAL (op1)))
1774 return 1;
1776 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
1777 return 2;
1779 return 3;
1782     default:
1783 break;
/* Fallback cost for addresses not matched above.  */
1786     return 4;
1791 c4x_gen_compare_reg (code, x, y)
1792 enum rtx_code code;
1793 rtx x, y;
1795 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1796 rtx cc_reg;
1798 if (mode == CC_NOOVmode
1799 && (code == LE || code == GE || code == LT || code == GT))
1800 return NULL_RTX;
1802 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1803 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1804 gen_rtx_COMPARE (mode, x, y)));
1805 return cc_reg;
/* Build the assembler template for a conditional branch.  FORM is
   the base mnemonic; SEQ is the branch insn.  Appends the delayed
   ("%#"), annul-true ("at") or annul-false ("af") suffixes as
   dictated by the delay slot state, followed by the "\t%l1" label
   operand.  Returns a pointer to a static buffer, so the result
   must be consumed before the next call.  */
1808 char *
1809 c4x_output_cbranch (form, seq)
1810     const char *form;
1811     rtx seq;
1813     int delayed = 0;
1814     int annultrue = 0;
1815     int annulfalse = 0;
1816     rtx delay;
1817     char *cp;
1818     static char str[100];
1820     if (final_sequence)
1822 delay = XVECEXP (final_sequence, 0, 1);
1823 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1824 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1825 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1827     strcpy (str, form);
1828     cp = &str [strlen (str)];
1829     if (delayed)
1831 *cp++ = '%';
1832 *cp++ = '#';
1834     if (annultrue)
1836 *cp++ = 'a';
1837 *cp++ = 't';
1839     if (annulfalse)
1841 *cp++ = 'a';
1842 *cp++ = 'f';
1844     *cp++ = '\t';
1845     *cp++ = '%';
1846     *cp++ = 'l';
1847     *cp++ = '1';
1848     *cp = 0;
1849     return str;
/* Print operand OP to FILE for the assembler, modified by LETTER
   (the %<letter> code in the output template, or 0 for no code).
   The first switch handles codes that do not examine OP ('#');
   the second handles codes that modify how OP is printed; the
   final switch prints OP itself by rtx code.  */
1852 void
1853 c4x_print_operand (file, op, letter)
1854     FILE *file; /* File to write to. */
1855     rtx op; /* Operand to print. */
1856     int letter; /* %<letter> or 0. */
1858     rtx op1;
1859     enum rtx_code code;
1861     switch (letter)
1863     case '#': /* Delayed. */
1864 if (final_sequence)
1865 fprintf (file, "d");
1866 return;
1869     code = GET_CODE (op);
1870     switch (letter)
1872     case 'A': /* Direct address. */
1873 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1874 fprintf (file, "@");
1875 break;
1877     case 'H': /* Sethi. */
1878 output_addr_const (file, op);
1879 return;
1881     case 'I': /* Reversed condition. */
1882 code = reverse_condition (code);
1883 break;
1885     case 'L': /* Log 2 of constant. */
1886 if (code != CONST_INT)
1887 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1888 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1889 return;
1891     case 'N': /* Ones complement of small constant. */
1892 if (code != CONST_INT)
1893 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1894 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
1895 return;
1897     case 'K': /* Generate ldp(k) if direct address. */
/* Large memory model: emit an ldp/ldpk to load the data page
   pointer before a DP-relative (LO_SUM) memory reference.  */
1898 if (! TARGET_SMALL
1899 && code == MEM
1900 && GET_CODE (XEXP (op, 0)) == LO_SUM
1901 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1902 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO
1904 op1 = XEXP (XEXP (op, 0), 1);
1905 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1907 fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1908 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1909 fprintf (file, "\n");
1912 return;
1914     case 'M': /* Generate ldp(k) if direct address. */
1915 if (! TARGET_SMALL /* Only used in asm statements. */
1916 && code == MEM
1917 && (GET_CODE (XEXP (op, 0)) == CONST
1918 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1920 fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1921 output_address (XEXP (op, 0));
1922 fprintf (file, "\n\t");
1924 return;
1926     case 'O': /* Offset address. */
/* Print the address one word past OP, or the next register for a
   register pair; used for the second word of HImode/HFmode.  */
1927 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1928 break;
1929 else if (code == MEM)
1930 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1931 else if (code == REG)
1932 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1933 else
1934 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1935 return;
1937     case 'C': /* Call. */
1938 break;
1940     case 'U': /* Call/callu. */
1941 if (code != SYMBOL_REF)
1942 fprintf (file, "u");
1943 return;
1945     default:
1946 break;
/* Print the operand itself according to its rtx code; condition
   codes print the C4x condition mnemonic.  */
1949     switch (code)
1951     case REG:
1952 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1953 && ! TARGET_TI)
1954 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1955 else
1956 fprintf (file, "%s", reg_names[REGNO (op)]);
1957 break;
1959     case MEM:
1960 output_address (XEXP (op, 0));
1961 break;
1963     case CONST_DOUBLE:
1965 char str[64];
1967 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
1968 sizeof (str), 0, 1);
1969 fprintf (file, "%s", str);
1971 break;
1973     case CONST_INT:
1974 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
1975 break;
1977     case NE:
1978 fprintf (file, "ne");
1979 break;
1981     case EQ:
1982 fprintf (file, "eq");
1983 break;
1985     case GE:
1986 fprintf (file, "ge");
1987 break;
1989     case GT:
1990 fprintf (file, "gt");
1991 break;
1993     case LE:
1994 fprintf (file, "le");
1995 break;
1997     case LT:
1998 fprintf (file, "lt");
1999 break;
2001     case GEU:
2002 fprintf (file, "hs");
2003 break;
2005     case GTU:
2006 fprintf (file, "hi");
2007 break;
2009     case LEU:
2010 fprintf (file, "ls");
2011 break;
2013     case LTU:
2014 fprintf (file, "lo");
2015 break;
2017     case SYMBOL_REF:
2018 output_addr_const (file, op);
2019 break;
2021     case CONST:
2022 output_addr_const (file, XEXP (op, 0));
2023 break;
2025     case CODE_LABEL:
2026 break;
2028     default:
2029 fatal_insn ("c4x_print_operand: Bad operand case", op);
2030 break;
/* Print memory address ADDR to FILE in C4x assembler syntax:
   "*reg" for register indirect, "*++reg"/"*reg++" etc. for the
   auto-modify modes, "*+reg(disp)" for base+displacement, and
   "@symbol" for direct (DP-relative) addresses.  */
2035 void
2036 c4x_print_operand_address (file, addr)
2037     FILE *file;
2038     rtx addr;
2040     switch (GET_CODE (addr))
2042     case REG:
2043 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2044 break;
2046     case PRE_DEC:
2047 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2048 break;
2050     case POST_INC:
2051 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2052 break;
2054     case POST_MODIFY:
/* op0 is the base register, op1 the register or constant step;
   the PLUS/MINUS code selects the ++/-- form.  */
2056 rtx op0 = XEXP (XEXP (addr, 1), 0);
2057 rtx op1 = XEXP (XEXP (addr, 1), 1);
2059 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2060 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2061 reg_names[REGNO (op1)]);
2062 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2063 fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
2064 reg_names[REGNO (op0)], INTVAL (op1));
2065 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2066 fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
2067 reg_names[REGNO (op0)], -INTVAL (op1));
2068 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2069 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2070 reg_names[REGNO (op1)]);
2071 else
2072 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2074 break;
2076     case PRE_MODIFY:
2078 rtx op0 = XEXP (XEXP (addr, 1), 0);
2079 rtx op1 = XEXP (XEXP (addr, 1), 1);
2081 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2082 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2083 reg_names[REGNO (op1)]);
2084 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2085 fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
2086 reg_names[REGNO (op0)], INTVAL (op1));
2087 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2088 fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
2089 reg_names[REGNO (op0)], -INTVAL (op1));
2090 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2091 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2092 reg_names[REGNO (op1)]);
2093 else
2094 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2096 break;
2098     case PRE_INC:
2099 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2100 break;
2102     case POST_DEC:
2103 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2104 break;
2106     case PLUS: /* Indirect with displacement. */
2108 rtx op0 = XEXP (addr, 0);
2109 rtx op1 = XEXP (addr, 1);
2111 if (REG_P (op0))
2113 if (REG_P (op1))
/* For base+index the base always goes inside the parens.  */
2115 if (IS_INDEX_REG (op0))
2117 fprintf (file, "*+%s(%s)",
2118 reg_names[REGNO (op1)],
2119 reg_names[REGNO (op0)]); /* Index + base. */
2121 else
2123 fprintf (file, "*+%s(%s)",
2124 reg_names[REGNO (op0)],
2125 reg_names[REGNO (op1)]); /* Base + index. */
2128 else if (INTVAL (op1) < 0)
2130 fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
2131 reg_names[REGNO (op0)],
2132 -INTVAL (op1)); /* Base - displacement. */
2134 else
2136 fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
2137 reg_names[REGNO (op0)],
2138 INTVAL (op1)); /* Base + displacement. */
2141 else
2142 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2144 break;
2146     case LO_SUM:
2148 rtx op0 = XEXP (addr, 0);
2149 rtx op1 = XEXP (addr, 1);
/* DP-relative address: print just the symbolic part.  */
2151 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2152 c4x_print_operand_address (file, op1);
2153 else
2154 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2156 break;
2158     case CONST:
2159     case SYMBOL_REF:
2160     case LABEL_REF:
2161 fprintf (file, "@");
2162 output_addr_const (file, addr);
2163 break;
2165     /* We shouldn't access CONST_INT addresses. */
2166     case CONST_INT:
2168     default:
2169 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2170 break;
2175 /* Return nonzero if the floating point operand will fit
2176 in the immediate field. */
2178 static int
2179 c4x_immed_float_p (op)
2180     rtx op;
2182     long convval[2];
2183     int exponent;
2184     REAL_VALUE_TYPE r;
/* Convert OP to the target's binary float layout; HFmode uses two
   words, single precision only the first.  */
2186     REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2187     if (GET_MODE (op) == HFmode)
2188 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2189     else
2191 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2192 convval[1] = 0;
2195     /* Sign extend exponent. */
2196     exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2197     if (exponent == -128)
2198 return 1; /* 0.0 */
/* The immediate field has only a truncated mantissa: any bits in
   the low 12 bits (or the second word) would be lost.  */
2199     if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2200 return 0; /* Precision doesn't fit. */
/* The immediate exponent field is 4 bits wide.  */
2201     return (exponent <= 7) /* Positive exp. */
2202 && (exponent >= -7); /* Negative exp. */
2206 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2207 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2209 None of the last four instructions from the bottom of the block can
2210 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2211 BcondAT or RETIcondD.
2213 This routine scans the four previous insns for a jump insn, and if
2214 one is found, returns 1 so that we bung in a nop instruction.
2215 This simple minded strategy will add a nop, when it may not
2216 be required. Say when there is a JUMP_INSN near the end of the
2217 block that doesn't get converted into a delayed branch.
2219 Note that we cannot have a call insn, since we don't generate
2220 repeat loops with calls in them (although I suppose we could, but
2221 there's no benefit.)
2223 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2226 c4x_rptb_nop_p (insn)
2227     rtx insn;
2229     rtx start_label;
2230     int i;
2232     /* Extract the start label from the jump pattern (rptb_end). */
2233     start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2235     /* If there is a label at the end of the loop we must insert
2236 a NOP. */
2237     do {
2238 insn = previous_insn (insn);
2239     } while (GET_CODE (insn) == NOTE
2240 || GET_CODE (insn) == USE
2241 || GET_CODE (insn) == CLOBBER);
2242     if (GET_CODE (insn) == CODE_LABEL)
2243 return 1;
/* Walk back over the last four real insns looking for a jump.  */
2245     for (i = 0; i < 4; i++)
2247 /* Search back for prev non-note and non-label insn. */
2248 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2249 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
/* Reaching the start label means the loop body has fewer than
   four insns; only an empty loop (i == 0) needs the nop.  */
2251 if (insn == start_label)
2252 return i == 0;
2254 insn = previous_insn (insn);
2257 /* If we have a jump instruction we should insert a NOP. If we
2258 hit repeat block top we should only insert a NOP if the loop
2259 is empty. */
2260 if (GET_CODE (insn) == JUMP_INSN)
2261 return 1;
2262 insn = previous_insn (insn);
2264     return 0;
2268 /* The C4x looping instruction needs to be emitted at the top of the
2269 loop. Emitting the true RTL for a looping instruction at the top of
2270 the loop can cause problems with flow analysis. So instead, a dummy
2271 doloop insn is emitted at the end of the loop. This routine checks
2272 for the presence of this doloop insn and then searches back to the
2273 top of the loop, where it inserts the true looping insn (provided
2274 there are no instructions in the loop which would cause problems).
2275 Any additional labels can be emitted at this point. In addition, if
2276 the desired loop count register was not allocated, this routine does
2277 nothing.
2279 Before we can create a repeat block looping instruction we have to
2280 verify that there are no jumps outside the loop and no jumps outside
2281 the loop go into this loop. This can happen in the basic blocks reorder
2282 pass. The C4x cpu can not handle this. */
2284 static int
2285 c4x_label_ref_used_p (x, code_label)
2286 rtx x, code_label;
2288 enum rtx_code code;
2289 int i, j;
2290 const char *fmt;
2292 if (x == 0)
2293 return 0;
2295 code = GET_CODE (x);
2296 if (code == LABEL_REF)
2297 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2299 fmt = GET_RTX_FORMAT (code);
2300 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2302 if (fmt[i] == 'e')
2304 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2305 return 1;
2307 else if (fmt[i] == 'E')
2308 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2309 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2310 return 1;
2312 return 0;
/* Return nonzero if the loop ending at INSN (the rptb_end jump) and
   starting at START_LABEL may be turned into a repeat block: the
   start label must still precede INSN, no jump from outside the
   block may target a label inside it, and no jump inside the block
   may target a label outside it.  */
2316 static int
2317 c4x_rptb_valid_p (insn, start_label)
2318     rtx insn, start_label;
2320     rtx end = insn;
2321     rtx start;
2322     rtx tmp;
2324     /* Find the start label. */
2325     for (; insn; insn = PREV_INSN (insn))
2326 if (insn == start_label)
2327 break;
2329     /* Note found then we can not use a rptb or rpts. The label was
2330 probably moved by the basic block reorder pass. */
2331     if (! insn)
2332 return 0;
2334     start = insn;
2335     /* If any jump jumps inside this block then we must fail. */
/* Labels before the block: no jump within [start, end) may target
   them... (checked here because such a jump leaves the block).  */
2336     for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2338 if (GET_CODE (insn) == CODE_LABEL)
2340 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2341 if (GET_CODE (tmp) == JUMP_INSN
2342 && c4x_label_ref_used_p (tmp, insn))
2343 return 0;
/* ...and likewise labels after the block.  */
2346     for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2348 if (GET_CODE (insn) == CODE_LABEL)
2350 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2351 if (GET_CODE (tmp) == JUMP_INSN
2352 && c4x_label_ref_used_p (tmp, insn))
2353 return 0;
2356     /* If any jump jumps outside this block then we must fail. */
/* Labels inside the block must not be targeted by any jump outside
   it (before start or after end).  */
2357     for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2359 if (GET_CODE (insn) == CODE_LABEL)
2361 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2362 if (GET_CODE (tmp) == JUMP_INSN
2363 && c4x_label_ref_used_p (tmp, insn))
2364 return 0;
2365 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2366 if (GET_CODE (tmp) == JUMP_INSN
2367 && c4x_label_ref_used_p (tmp, insn))
2368 return 0;
2372     /* All checks OK. */
2373     return 1;
/* Given INSN, the dummy rptb_end jump emitted at the bottom of a
   doloop, insert the real repeat-block (rptb_top/rpts_top) insn at
   the top of the loop.  If the count register was not allocated to
   RC, or the block is not valid for a repeat block, fall back to an
   explicit decrement-compare-branch sequence instead.  */
2377 void
2378 c4x_rptb_insert (insn)
2379     rtx insn;
2381     rtx end_label;
2382     rtx start_label;
2383     rtx new_start_label;
2384     rtx count_reg;
2386     /* If the count register has not been allocated to RC, say if
2387 there is a movstr pattern in the loop, then do not insert a
2388 RPTB instruction. Instead we emit a decrement and branch
2389 at the end of the loop. */
2390     count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2391     if (REGNO (count_reg) != RC_REGNO)
2392 return;
2394     /* Extract the start label from the jump pattern (rptb_end). */
2395     start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2397     if (! c4x_rptb_valid_p (insn, start_label))
2399 /* We can not use the rptb insn. Replace it so reorg can use
2400 the delay slots of the jump insn. */
2401 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2402 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2403 emit_insn_before (gen_bge (start_label), insn);
2404 LABEL_NUSES (start_label)++;
2405 delete_insn (insn);
2406 return;
/* Place a fresh end label after the rptb_end insn; the repeat block
   insn will reference it.  */
2409     end_label = gen_label_rtx ();
2410     LABEL_NUSES (end_label)++;
2411     emit_label_after (end_label, insn);
2413     new_start_label = gen_label_rtx ();
2414     LABEL_NUSES (new_start_label)++;
/* Walk back to the start label, retargeting any jump to it onto the
   new start label on the way.  */
2416     for (; insn; insn = PREV_INSN (insn))
2418 if (insn == start_label)
2419 break;
2420 if (GET_CODE (insn) == JUMP_INSN &&
2421 JUMP_LABEL (insn) == start_label)
2422 redirect_jump (insn, new_start_label, 0);
2424     if (! insn)
2425 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2427     emit_label_after (new_start_label, insn);
/* Use the single-insn repeat (rpts) when profitable and allowed.  */
2429     if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2430 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn)
2431     else
2432 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2433     if (LABEL_NUSES (start_label) == 0)
2434 delete_insn (start_label);
/* We need to use direct addressing for large constants and addresses
   that cannot fit within an instruction.  We must check for these
   after the final jump optimisation pass, since this may
   introduce a local_move insn for a SYMBOL_REF.  This pass
   must come before delayed branch slot filling since it can generate
   additional instructions.

   This function also fixes up RPTB style loops that didn't get RC
   allocated as the loop counter.  */
2448 static void
2449 c4x_reorg ()
2451 rtx insn;
2453 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2455 /* Look for insn. */
2456 if (INSN_P (insn))
2458 int insn_code_number;
2459 rtx old;
2461 insn_code_number = recog_memoized (insn);
2463 if (insn_code_number < 0)
2464 continue;
2466 /* Insert the RTX for RPTB at the top of the loop
2467 and a label at the end of the loop. */
2468 if (insn_code_number == CODE_FOR_rptb_end)
2469 c4x_rptb_insert(insn);
2471 /* We need to split the insn here. Otherwise the calls to
2472 force_const_mem will not work for load_immed_address. */
2473 old = insn;
2475 /* Don't split the insn if it has been deleted. */
2476 if (! INSN_DELETED_P (old))
2477 insn = try_split (PATTERN(old), old, 1);
2479 /* When not optimizing, the old insn will be still left around
2480 with only the 'deleted' bit set. Transform it into a note
2481 to avoid confusion of subsequent processing. */
2482 if (INSN_DELETED_P (old))
2484 PUT_CODE (old, NOTE);
2485 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2486 NOTE_SOURCE_FILE (old) = 0;
2493 static int
2494 c4x_a_register (op)
2495 rtx op;
2497 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2501 static int
2502 c4x_x_register (op)
2503 rtx op;
2505 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2509 static int
2510 c4x_immed_int_constant (op)
2511 rtx op;
2513 if (GET_CODE (op) != CONST_INT)
2514 return 0;
2516 return GET_MODE (op) == VOIDmode
2517 || GET_MODE_CLASS (op) == MODE_INT
2518 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
2522 static int
2523 c4x_immed_float_constant (op)
2524 rtx op;
2526 if (GET_CODE (op) != CONST_DOUBLE)
2527 return 0;
2529 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2530 present this only means that a MEM rtx has been generated. It does
2531 not mean the rtx is really in memory. */
2533 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2538 c4x_shiftable_constant (op)
2539 rtx op;
2541 int i;
2542 int mask;
2543 int val = INTVAL (op);
2545 for (i = 0; i < 16; i++)
2547 if (val & (1 << i))
2548 break;
2550 mask = ((0xffff >> i) << 16) | 0xffff;
2551 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2552 : (val >> i) & mask))
2553 return i;
2554 return -1;
2559 c4x_H_constant (op)
2560 rtx op;
2562 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2567 c4x_I_constant (op)
2568 rtx op;
2570 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2575 c4x_J_constant (op)
2576 rtx op;
2578 if (TARGET_C3X)
2579 return 0;
2580 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2584 static int
2585 c4x_K_constant (op)
2586 rtx op;
2588 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2589 return 0;
2590 return IS_INT5_CONST (INTVAL (op));
2595 c4x_L_constant (op)
2596 rtx op;
2598 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2602 static int
2603 c4x_N_constant (op)
2604 rtx op;
2606 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2610 static int
2611 c4x_O_constant (op)
2612 rtx op;
2614 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2618 /* The constraints do not have to check the register class,
2619 except when needed to discriminate between the constraints.
2620 The operand has been checked by the predicates to be valid. */
2622 /* ARx + 9-bit signed const or IRn
2623 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2624 We don't include the pre/post inc/dec forms here since
2625 they are handled by the <> constraints. */
2628 c4x_Q_constraint (op)
2629 rtx op;
2631 enum machine_mode mode = GET_MODE (op);
2633 if (GET_CODE (op) != MEM)
2634 return 0;
2635 op = XEXP (op, 0);
2636 switch (GET_CODE (op))
2638 case REG:
2639 return 1;
2641 case PLUS:
2643 rtx op0 = XEXP (op, 0);
2644 rtx op1 = XEXP (op, 1);
2646 if (! REG_P (op0))
2647 return 0;
2649 if (REG_P (op1))
2650 return 1;
2652 if (GET_CODE (op1) != CONST_INT)
2653 return 0;
2655 /* HImode and HFmode must be offsettable. */
2656 if (mode == HImode || mode == HFmode)
2657 return IS_DISP8_OFF_CONST (INTVAL (op1));
2659 return IS_DISP8_CONST (INTVAL (op1));
2661 break;
2663 default:
2664 break;
2666 return 0;
2670 /* ARx + 5-bit unsigned const
2671 *ARx, *+ARx(n) for n < 32. */
2674 c4x_R_constraint (op)
2675 rtx op;
2677 enum machine_mode mode = GET_MODE (op);
2679 if (TARGET_C3X)
2680 return 0;
2681 if (GET_CODE (op) != MEM)
2682 return 0;
2683 op = XEXP (op, 0);
2684 switch (GET_CODE (op))
2686 case REG:
2687 return 1;
2689 case PLUS:
2691 rtx op0 = XEXP (op, 0);
2692 rtx op1 = XEXP (op, 1);
2694 if (! REG_P (op0))
2695 return 0;
2697 if (GET_CODE (op1) != CONST_INT)
2698 return 0;
2700 /* HImode and HFmode must be offsettable. */
2701 if (mode == HImode || mode == HFmode)
2702 return IS_UINT5_CONST (INTVAL (op1) + 1);
2704 return IS_UINT5_CONST (INTVAL (op1));
2706 break;
2708 default:
2709 break;
2711 return 0;
2715 static int
2716 c4x_R_indirect (op)
2717 rtx op;
2719 enum machine_mode mode = GET_MODE (op);
2721 if (TARGET_C3X || GET_CODE (op) != MEM)
2722 return 0;
2724 op = XEXP (op, 0);
2725 switch (GET_CODE (op))
2727 case REG:
2728 return IS_ADDR_OR_PSEUDO_REG (op);
2730 case PLUS:
2732 rtx op0 = XEXP (op, 0);
2733 rtx op1 = XEXP (op, 1);
2735 /* HImode and HFmode must be offsettable. */
2736 if (mode == HImode || mode == HFmode)
2737 return IS_ADDR_OR_PSEUDO_REG (op0)
2738 && GET_CODE (op1) == CONST_INT
2739 && IS_UINT5_CONST (INTVAL (op1) + 1);
2741 return REG_P (op0)
2742 && IS_ADDR_OR_PSEUDO_REG (op0)
2743 && GET_CODE (op1) == CONST_INT
2744 && IS_UINT5_CONST (INTVAL (op1));
2746 break;
2748 default:
2749 break;
2751 return 0;
2755 /* ARx + 1-bit unsigned const or IRn
2756 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2757 We don't include the pre/post inc/dec forms here since
2758 they are handled by the <> constraints. */
2761 c4x_S_constraint (op)
2762 rtx op;
2764 enum machine_mode mode = GET_MODE (op);
2765 if (GET_CODE (op) != MEM)
2766 return 0;
2767 op = XEXP (op, 0);
2768 switch (GET_CODE (op))
2770 case REG:
2771 return 1;
2773 case PRE_MODIFY:
2774 case POST_MODIFY:
2776 rtx op0 = XEXP (op, 0);
2777 rtx op1 = XEXP (op, 1);
2779 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2780 || (op0 != XEXP (op1, 0)))
2781 return 0;
2783 op0 = XEXP (op1, 0);
2784 op1 = XEXP (op1, 1);
2785 return REG_P (op0) && REG_P (op1);
2786 /* Pre or post_modify with a displacement of 0 or 1
2787 should not be generated. */
2789 break;
2791 case PLUS:
2793 rtx op0 = XEXP (op, 0);
2794 rtx op1 = XEXP (op, 1);
2796 if (!REG_P (op0))
2797 return 0;
2799 if (REG_P (op1))
2800 return 1;
2802 if (GET_CODE (op1) != CONST_INT)
2803 return 0;
2805 /* HImode and HFmode must be offsettable. */
2806 if (mode == HImode || mode == HFmode)
2807 return IS_DISP1_OFF_CONST (INTVAL (op1));
2809 return IS_DISP1_CONST (INTVAL (op1));
2811 break;
2813 default:
2814 break;
2816 return 0;
2820 static int
2821 c4x_S_indirect (op)
2822 rtx op;
2824 enum machine_mode mode = GET_MODE (op);
2825 if (GET_CODE (op) != MEM)
2826 return 0;
2828 op = XEXP (op, 0);
2829 switch (GET_CODE (op))
2831 case PRE_DEC:
2832 case POST_DEC:
2833 if (mode != QImode && mode != QFmode)
2834 return 0;
2835 case PRE_INC:
2836 case POST_INC:
2837 op = XEXP (op, 0);
2839 case REG:
2840 return IS_ADDR_OR_PSEUDO_REG (op);
2842 case PRE_MODIFY:
2843 case POST_MODIFY:
2845 rtx op0 = XEXP (op, 0);
2846 rtx op1 = XEXP (op, 1);
2848 if (mode != QImode && mode != QFmode)
2849 return 0;
2851 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2852 || (op0 != XEXP (op1, 0)))
2853 return 0;
2855 op0 = XEXP (op1, 0);
2856 op1 = XEXP (op1, 1);
2857 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2858 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2859 /* Pre or post_modify with a displacement of 0 or 1
2860 should not be generated. */
2863 case PLUS:
2865 rtx op0 = XEXP (op, 0);
2866 rtx op1 = XEXP (op, 1);
2868 if (REG_P (op0))
2870 /* HImode and HFmode must be offsettable. */
2871 if (mode == HImode || mode == HFmode)
2872 return IS_ADDR_OR_PSEUDO_REG (op0)
2873 && GET_CODE (op1) == CONST_INT
2874 && IS_DISP1_OFF_CONST (INTVAL (op1));
2876 if (REG_P (op1))
2877 return (IS_INDEX_OR_PSEUDO_REG (op1)
2878 && IS_ADDR_OR_PSEUDO_REG (op0))
2879 || (IS_ADDR_OR_PSEUDO_REG (op1)
2880 && IS_INDEX_OR_PSEUDO_REG (op0));
2882 return IS_ADDR_OR_PSEUDO_REG (op0)
2883 && GET_CODE (op1) == CONST_INT
2884 && IS_DISP1_CONST (INTVAL (op1));
2887 break;
2889 default:
2890 break;
2892 return 0;
2896 /* Direct memory operand. */
2899 c4x_T_constraint (op)
2900 rtx op;
2902 if (GET_CODE (op) != MEM)
2903 return 0;
2904 op = XEXP (op, 0);
2906 if (GET_CODE (op) != LO_SUM)
2908 /* Allow call operands. */
2909 return GET_CODE (op) == SYMBOL_REF
2910 && GET_MODE (op) == Pmode
2911 && SYMBOL_REF_FUNCTION_P (op);
2914 /* HImode and HFmode are not offsettable. */
2915 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2916 return 0;
2918 if ((GET_CODE (XEXP (op, 0)) == REG)
2919 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2920 return c4x_U_constraint (XEXP (op, 1));
2922 return 0;
2926 /* Symbolic operand. */
2929 c4x_U_constraint (op)
2930 rtx op;
2932 /* Don't allow direct addressing to an arbitrary constant. */
2933 return GET_CODE (op) == CONST
2934 || GET_CODE (op) == SYMBOL_REF
2935 || GET_CODE (op) == LABEL_REF;
2940 c4x_autoinc_operand (op, mode)
2941 rtx op;
2942 enum machine_mode mode ATTRIBUTE_UNUSED;
2944 if (GET_CODE (op) == MEM)
2946 enum rtx_code code = GET_CODE (XEXP (op, 0));
2948 if (code == PRE_INC
2949 || code == PRE_DEC
2950 || code == POST_INC
2951 || code == POST_DEC
2952 || code == PRE_MODIFY
2953 || code == POST_MODIFY
2955 return 1;
2957 return 0;
2961 /* Match any operand. */
2964 any_operand (op, mode)
2965 register rtx op ATTRIBUTE_UNUSED;
2966 enum machine_mode mode ATTRIBUTE_UNUSED;
2968 return 1;
2972 /* Nonzero if OP is a floating point value with value 0.0. */
2975 fp_zero_operand (op, mode)
2976 rtx op;
2977 enum machine_mode mode ATTRIBUTE_UNUSED;
2979 REAL_VALUE_TYPE r;
2981 if (GET_CODE (op) != CONST_DOUBLE)
2982 return 0;
2983 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2984 return REAL_VALUES_EQUAL (r, dconst0);
2989 const_operand (op, mode)
2990 register rtx op;
2991 register enum machine_mode mode;
2993 switch (mode)
2995 case QFmode:
2996 case HFmode:
2997 if (GET_CODE (op) != CONST_DOUBLE
2998 || GET_MODE (op) != mode
2999 || GET_MODE_CLASS (mode) != MODE_FLOAT)
3000 return 0;
3002 return c4x_immed_float_p (op);
3004 #if Pmode != QImode
3005 case Pmode:
3006 #endif
3007 case QImode:
3008 if (GET_CODE (op) == CONSTANT_P_RTX)
3009 return 1;
3011 if (GET_CODE (op) != CONST_INT
3012 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3013 || GET_MODE_CLASS (mode) != MODE_INT)
3014 return 0;
3016 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
3018 case HImode:
3019 return 0;
3021 default:
3022 return 0;
3028 stik_const_operand (op, mode)
3029 rtx op;
3030 enum machine_mode mode ATTRIBUTE_UNUSED;
3032 return c4x_K_constant (op);
3037 not_const_operand (op, mode)
3038 rtx op;
3039 enum machine_mode mode ATTRIBUTE_UNUSED;
3041 return c4x_N_constant (op);
3046 reg_operand (op, mode)
3047 rtx op;
3048 enum machine_mode mode;
3050 if (GET_CODE (op) == SUBREG
3051 && GET_MODE (op) == QFmode)
3052 return 0;
3053 return register_operand (op, mode);
3058 mixed_subreg_operand (op, mode)
3059 rtx op;
3060 enum machine_mode mode ATTRIBUTE_UNUSED;
3062 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3063 int and a long double. */
3064 if (GET_CODE (op) == SUBREG
3065 && (GET_MODE (op) == QFmode)
3066 && (GET_MODE (SUBREG_REG (op)) == QImode
3067 || GET_MODE (SUBREG_REG (op)) == HImode))
3068 return 1;
3069 return 0;
3074 reg_imm_operand (op, mode)
3075 rtx op;
3076 enum machine_mode mode ATTRIBUTE_UNUSED;
3078 if (REG_P (op) || CONSTANT_P (op))
3079 return 1;
3080 return 0;
3085 not_modify_reg (op, mode)
3086 rtx op;
3087 enum machine_mode mode ATTRIBUTE_UNUSED;
3089 if (REG_P (op) || CONSTANT_P (op))
3090 return 1;
3091 if (GET_CODE (op) != MEM)
3092 return 0;
3093 op = XEXP (op, 0);
3094 switch (GET_CODE (op))
3096 case REG:
3097 return 1;
3099 case PLUS:
3101 rtx op0 = XEXP (op, 0);
3102 rtx op1 = XEXP (op, 1);
3104 if (! REG_P (op0))
3105 return 0;
3107 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3108 return 1;
3111 case LO_SUM:
3113 rtx op0 = XEXP (op, 0);
3115 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3116 return 1;
3118 break;
3120 case CONST:
3121 case SYMBOL_REF:
3122 case LABEL_REF:
3123 return 1;
3125 default:
3126 break;
3128 return 0;
3133 not_rc_reg (op, mode)
3134 rtx op;
3135 enum machine_mode mode ATTRIBUTE_UNUSED;
3137 if (REG_P (op) && REGNO (op) == RC_REGNO)
3138 return 0;
3139 return 1;
3143 /* Extended precision register R0-R1. */
3146 r0r1_reg_operand (op, mode)
3147 rtx op;
3148 enum machine_mode mode;
3150 if (! reg_operand (op, mode))
3151 return 0;
3152 if (GET_CODE (op) == SUBREG)
3153 op = SUBREG_REG (op);
3154 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3158 /* Extended precision register R2-R3. */
3161 r2r3_reg_operand (op, mode)
3162 rtx op;
3163 enum machine_mode mode;
3165 if (! reg_operand (op, mode))
3166 return 0;
3167 if (GET_CODE (op) == SUBREG)
3168 op = SUBREG_REG (op);
3169 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3173 /* Low extended precision register R0-R7. */
3176 ext_low_reg_operand (op, mode)
3177 rtx op;
3178 enum machine_mode mode;
3180 if (! reg_operand (op, mode))
3181 return 0;
3182 if (GET_CODE (op) == SUBREG)
3183 op = SUBREG_REG (op);
3184 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3188 /* Extended precision register. */
3191 ext_reg_operand (op, mode)
3192 rtx op;
3193 enum machine_mode mode;
3195 if (! reg_operand (op, mode))
3196 return 0;
3197 if (GET_CODE (op) == SUBREG)
3198 op = SUBREG_REG (op);
3199 if (! REG_P (op))
3200 return 0;
3201 return IS_EXT_OR_PSEUDO_REG (op);
3205 /* Standard precision register. */
3208 std_reg_operand (op, mode)
3209 rtx op;
3210 enum machine_mode mode;
3212 if (! reg_operand (op, mode))
3213 return 0;
3214 if (GET_CODE (op) == SUBREG)
3215 op = SUBREG_REG (op);
3216 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3219 /* Standard precision or normal register. */
3222 std_or_reg_operand (op, mode)
3223 rtx op;
3224 enum machine_mode mode;
3226 if (reload_in_progress)
3227 return std_reg_operand (op, mode);
3228 return reg_operand (op, mode);
3231 /* Address register. */
3234 addr_reg_operand (op, mode)
3235 rtx op;
3236 enum machine_mode mode;
3238 if (! reg_operand (op, mode))
3239 return 0;
3240 return c4x_a_register (op);
3244 /* Index register. */
3247 index_reg_operand (op, mode)
3248 rtx op;
3249 enum machine_mode mode;
3251 if (! reg_operand (op, mode))
3252 return 0;
3253 if (GET_CODE (op) == SUBREG)
3254 op = SUBREG_REG (op);
3255 return c4x_x_register (op);
3259 /* DP register. */
3262 dp_reg_operand (op, mode)
3263 rtx op;
3264 enum machine_mode mode ATTRIBUTE_UNUSED;
3266 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3270 /* SP register. */
3273 sp_reg_operand (op, mode)
3274 rtx op;
3275 enum machine_mode mode ATTRIBUTE_UNUSED;
3277 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3281 /* ST register. */
3284 st_reg_operand (op, mode)
3285 register rtx op;
3286 enum machine_mode mode ATTRIBUTE_UNUSED;
3288 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3292 /* RC register. */
3295 rc_reg_operand (op, mode)
3296 register rtx op;
3297 enum machine_mode mode ATTRIBUTE_UNUSED;
3299 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3304 call_address_operand (op, mode)
3305 rtx op;
3306 enum machine_mode mode ATTRIBUTE_UNUSED;
3308 return (REG_P (op) || symbolic_address_operand (op, mode));
3312 /* Symbolic address operand. */
3315 symbolic_address_operand (op, mode)
3316 register rtx op;
3317 enum machine_mode mode ATTRIBUTE_UNUSED;
3319 switch (GET_CODE (op))
3321 case CONST:
3322 case SYMBOL_REF:
3323 case LABEL_REF:
3324 return 1;
3325 default:
3326 return 0;
3331 /* Check dst operand of a move instruction. */
3334 dst_operand (op, mode)
3335 rtx op;
3336 enum machine_mode mode;
3338 if (GET_CODE (op) == SUBREG
3339 && mixed_subreg_operand (op, mode))
3340 return 0;
3342 if (REG_P (op))
3343 return reg_operand (op, mode);
3345 return nonimmediate_operand (op, mode);
3349 /* Check src operand of two operand arithmetic instructions. */
3352 src_operand (op, mode)
3353 rtx op;
3354 enum machine_mode mode;
3356 if (GET_CODE (op) == SUBREG
3357 && mixed_subreg_operand (op, mode))
3358 return 0;
3360 if (REG_P (op))
3361 return reg_operand (op, mode);
3363 if (mode == VOIDmode)
3364 mode = GET_MODE (op);
3366 if (GET_CODE (op) == CONST_INT)
3367 return (mode == QImode || mode == Pmode || mode == HImode)
3368 && c4x_I_constant (op);
3370 /* We don't like CONST_DOUBLE integers. */
3371 if (GET_CODE (op) == CONST_DOUBLE)
3372 return c4x_H_constant (op);
3374 /* Disallow symbolic addresses. Only the predicate
3375 symbolic_address_operand will match these. */
3376 if (GET_CODE (op) == SYMBOL_REF
3377 || GET_CODE (op) == LABEL_REF
3378 || GET_CODE (op) == CONST)
3379 return 0;
3381 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3382 access to symbolic addresses. These operands will get forced
3383 into a register and the movqi expander will generate a
3384 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3385 if (GET_CODE (op) == MEM
3386 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3387 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3388 || GET_CODE (XEXP (op, 0)) == CONST)))
3389 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3391 return general_operand (op, mode);
3396 src_hi_operand (op, mode)
3397 rtx op;
3398 enum machine_mode mode;
3400 if (c4x_O_constant (op))
3401 return 1;
3402 return src_operand (op, mode);
3406 /* Check src operand of two operand logical instructions. */
3409 lsrc_operand (op, mode)
3410 rtx op;
3411 enum machine_mode mode;
3413 if (mode == VOIDmode)
3414 mode = GET_MODE (op);
3416 if (mode != QImode && mode != Pmode)
3417 fatal_insn ("mode not QImode", op);
3419 if (GET_CODE (op) == CONST_INT)
3420 return c4x_L_constant (op) || c4x_J_constant (op);
3422 return src_operand (op, mode);
3426 /* Check src operand of two operand tricky instructions. */
3429 tsrc_operand (op, mode)
3430 rtx op;
3431 enum machine_mode mode;
3433 if (mode == VOIDmode)
3434 mode = GET_MODE (op);
3436 if (mode != QImode && mode != Pmode)
3437 fatal_insn ("mode not QImode", op);
3439 if (GET_CODE (op) == CONST_INT)
3440 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3442 return src_operand (op, mode);
3446 /* Check src operand of two operand non immedidate instructions. */
3449 nonimmediate_src_operand (op, mode)
3450 rtx op;
3451 enum machine_mode mode;
3453 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3454 return 0;
3456 return src_operand (op, mode);
3460 /* Check logical src operand of two operand non immedidate instructions. */
3463 nonimmediate_lsrc_operand (op, mode)
3464 rtx op;
3465 enum machine_mode mode;
3467 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3468 return 0;
3470 return lsrc_operand (op, mode);
3475 reg_or_const_operand (op, mode)
3476 rtx op;
3477 enum machine_mode mode;
3479 return reg_operand (op, mode) || const_operand (op, mode);
3483 /* Check for indirect operands allowable in parallel instruction. */
3486 par_ind_operand (op, mode)
3487 rtx op;
3488 enum machine_mode mode;
3490 if (mode != VOIDmode && mode != GET_MODE (op))
3491 return 0;
3493 return c4x_S_indirect (op);
3497 /* Check for operands allowable in parallel instruction. */
3500 parallel_operand (op, mode)
3501 rtx op;
3502 enum machine_mode mode;
3504 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3508 static void
3509 c4x_S_address_parse (op, base, incdec, index, disp)
3510 rtx op;
3511 int *base;
3512 int *incdec;
3513 int *index;
3514 int *disp;
3516 *base = 0;
3517 *incdec = 0;
3518 *index = 0;
3519 *disp = 0;
3521 if (GET_CODE (op) != MEM)
3522 fatal_insn ("invalid indirect memory address", op);
3524 op = XEXP (op, 0);
3525 switch (GET_CODE (op))
3527 case PRE_DEC:
3528 *base = REGNO (XEXP (op, 0));
3529 *incdec = 1;
3530 *disp = -1;
3531 return;
3533 case POST_DEC:
3534 *base = REGNO (XEXP (op, 0));
3535 *incdec = 1;
3536 *disp = 0;
3537 return;
3539 case PRE_INC:
3540 *base = REGNO (XEXP (op, 0));
3541 *incdec = 1;
3542 *disp = 1;
3543 return;
3545 case POST_INC:
3546 *base = REGNO (XEXP (op, 0));
3547 *incdec = 1;
3548 *disp = 0;
3549 return;
3551 case POST_MODIFY:
3552 *base = REGNO (XEXP (op, 0));
3553 if (REG_P (XEXP (XEXP (op, 1), 1)))
3555 *index = REGNO (XEXP (XEXP (op, 1), 1));
3556 *disp = 0; /* ??? */
3558 else
3559 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3560 *incdec = 1;
3561 return;
3563 case PRE_MODIFY:
3564 *base = REGNO (XEXP (op, 0));
3565 if (REG_P (XEXP (XEXP (op, 1), 1)))
3567 *index = REGNO (XEXP (XEXP (op, 1), 1));
3568 *disp = 1; /* ??? */
3570 else
3571 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3572 *incdec = 1;
3574 return;
3576 case REG:
3577 *base = REGNO (op);
3578 return;
3580 case PLUS:
3582 rtx op0 = XEXP (op, 0);
3583 rtx op1 = XEXP (op, 1);
3585 if (c4x_a_register (op0))
3587 if (c4x_x_register (op1))
3589 *base = REGNO (op0);
3590 *index = REGNO (op1);
3591 return;
3593 else if ((GET_CODE (op1) == CONST_INT
3594 && IS_DISP1_CONST (INTVAL (op1))))
3596 *base = REGNO (op0);
3597 *disp = INTVAL (op1);
3598 return;
3601 else if (c4x_x_register (op0) && c4x_a_register (op1))
3603 *base = REGNO (op1);
3604 *index = REGNO (op0);
3605 return;
3608 /* Fallthrough. */
3610 default:
3611 fatal_insn ("invalid indirect (S) memory address", op);
3617 c4x_address_conflict (op0, op1, store0, store1)
3618 rtx op0;
3619 rtx op1;
3620 int store0;
3621 int store1;
3623 int base0;
3624 int base1;
3625 int incdec0;
3626 int incdec1;
3627 int index0;
3628 int index1;
3629 int disp0;
3630 int disp1;
3632 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3633 return 1;
3635 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3636 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3638 if (store0 && store1)
3640 /* If we have two stores in parallel to the same address, then
3641 the C4x only executes one of the stores. This is unlikely to
3642 cause problems except when writing to a hardware device such
3643 as a FIFO since the second write will be lost. The user
3644 should flag the hardware location as being volatile so that
3645 we don't do this optimisation. While it is unlikely that we
3646 have an aliased address if both locations are not marked
3647 volatile, it is probably safer to flag a potential conflict
3648 if either location is volatile. */
3649 if (! flag_argument_noalias)
3651 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3652 return 1;
3656 /* If have a parallel load and a store to the same address, the load
3657 is performed first, so there is no conflict. Similarly, there is
3658 no conflict if have parallel loads from the same address. */
3660 /* Cannot use auto increment or auto decrement twice for same
3661 base register. */
3662 if (base0 == base1 && incdec0 && incdec0)
3663 return 1;
3665 /* It might be too confusing for GCC if we have use a base register
3666 with a side effect and a memory reference using the same register
3667 in parallel. */
3668 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3669 return 1;
3671 /* We can not optimize the case where op1 and op2 refer to the same
3672 address. */
3673 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3674 return 1;
3676 /* No conflict. */
3677 return 0;
3681 /* Check for while loop inside a decrement and branch loop. */
3684 c4x_label_conflict (insn, jump, db)
3685 rtx insn;
3686 rtx jump;
3687 rtx db;
3689 while (insn)
3691 if (GET_CODE (insn) == CODE_LABEL)
3693 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3694 return 1;
3695 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3696 return 0;
3698 insn = PREV_INSN (insn);
3700 return 1;
3704 /* Validate combination of operands for parallel load/store instructions. */
3707 valid_parallel_load_store (operands, mode)
3708 rtx *operands;
3709 enum machine_mode mode ATTRIBUTE_UNUSED;
3711 rtx op0 = operands[0];
3712 rtx op1 = operands[1];
3713 rtx op2 = operands[2];
3714 rtx op3 = operands[3];
3716 if (GET_CODE (op0) == SUBREG)
3717 op0 = SUBREG_REG (op0);
3718 if (GET_CODE (op1) == SUBREG)
3719 op1 = SUBREG_REG (op1);
3720 if (GET_CODE (op2) == SUBREG)
3721 op2 = SUBREG_REG (op2);
3722 if (GET_CODE (op3) == SUBREG)
3723 op3 = SUBREG_REG (op3);
3725 /* The patterns should only allow ext_low_reg_operand() or
3726 par_ind_operand() operands. Thus of the 4 operands, only 2
3727 should be REGs and the other 2 should be MEMs. */
3729 /* This test prevents the multipack pass from using this pattern if
3730 op0 is used as an index or base register in op2 or op3, since
3731 this combination will require reloading. */
3732 if (GET_CODE (op0) == REG
3733 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3734 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3735 return 0;
3737 /* LDI||LDI. */
3738 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3739 return (REGNO (op0) != REGNO (op2))
3740 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3741 && ! c4x_address_conflict (op1, op3, 0, 0);
3743 /* STI||STI. */
3744 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3745 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3746 && ! c4x_address_conflict (op0, op2, 1, 1);
3748 /* LDI||STI. */
3749 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3750 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3751 && ! c4x_address_conflict (op1, op2, 0, 1);
3753 /* STI||LDI. */
3754 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3755 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3756 && ! c4x_address_conflict (op0, op3, 1, 0);
3758 return 0;
3763 valid_parallel_operands_4 (operands, mode)
3764 rtx *operands;
3765 enum machine_mode mode ATTRIBUTE_UNUSED;
3767 rtx op0 = operands[0];
3768 rtx op2 = operands[2];
3770 if (GET_CODE (op0) == SUBREG)
3771 op0 = SUBREG_REG (op0);
3772 if (GET_CODE (op2) == SUBREG)
3773 op2 = SUBREG_REG (op2);
3775 /* This test prevents the multipack pass from using this pattern if
3776 op0 is used as an index or base register in op2, since this combination
3777 will require reloading. */
3778 if (GET_CODE (op0) == REG
3779 && GET_CODE (op2) == MEM
3780 && reg_mentioned_p (op0, XEXP (op2, 0)))
3781 return 0;
3783 return 1;
3788 valid_parallel_operands_5 (operands, mode)
3789 rtx *operands;
3790 enum machine_mode mode ATTRIBUTE_UNUSED;
3792 int regs = 0;
3793 rtx op0 = operands[0];
3794 rtx op1 = operands[1];
3795 rtx op2 = operands[2];
3796 rtx op3 = operands[3];
3798 if (GET_CODE (op0) == SUBREG)
3799 op0 = SUBREG_REG (op0);
3800 if (GET_CODE (op1) == SUBREG)
3801 op1 = SUBREG_REG (op1);
3802 if (GET_CODE (op2) == SUBREG)
3803 op2 = SUBREG_REG (op2);
3805 /* The patterns should only allow ext_low_reg_operand() or
3806 par_ind_operand() operands. Operands 1 and 2 may be commutative
3807 but only one of them can be a register. */
3808 if (GET_CODE (op1) == REG)
3809 regs++;
3810 if (GET_CODE (op2) == REG)
3811 regs++;
3813 if (regs != 1)
3814 return 0;
3816 /* This test prevents the multipack pass from using this pattern if
3817 op0 is used as an index or base register in op3, since this combination
3818 will require reloading. */
3819 if (GET_CODE (op0) == REG
3820 && GET_CODE (op3) == MEM
3821 && reg_mentioned_p (op0, XEXP (op3, 0)))
3822 return 0;
3824 return 1;
3829 valid_parallel_operands_6 (operands, mode)
3830 rtx *operands;
3831 enum machine_mode mode ATTRIBUTE_UNUSED;
3833 int regs = 0;
3834 rtx op0 = operands[0];
3835 rtx op1 = operands[1];
3836 rtx op2 = operands[2];
3837 rtx op4 = operands[4];
3838 rtx op5 = operands[5];
3840 if (GET_CODE (op1) == SUBREG)
3841 op1 = SUBREG_REG (op1);
3842 if (GET_CODE (op2) == SUBREG)
3843 op2 = SUBREG_REG (op2);
3844 if (GET_CODE (op4) == SUBREG)
3845 op4 = SUBREG_REG (op4);
3846 if (GET_CODE (op5) == SUBREG)
3847 op5 = SUBREG_REG (op5);
3849 /* The patterns should only allow ext_low_reg_operand() or
3850 par_ind_operand() operands. Thus of the 4 input operands, only 2
3851 should be REGs and the other 2 should be MEMs. */
3853 if (GET_CODE (op1) == REG)
3854 regs++;
3855 if (GET_CODE (op2) == REG)
3856 regs++;
3857 if (GET_CODE (op4) == REG)
3858 regs++;
3859 if (GET_CODE (op5) == REG)
3860 regs++;
3862 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3863 Perhaps we should count the MEMs as well? */
3864 if (regs != 2)
3865 return 0;
3867 /* This test prevents the multipack pass from using this pattern if
3868 op0 is used as an index or base register in op4 or op5, since
3869 this combination will require reloading. */
3870 if (GET_CODE (op0) == REG
3871 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3872 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3873 return 0;
3875 return 1;
3879 /* Validate combination of src operands. Note that the operands have
3880 been screened by the src_operand predicate. We just have to check
3881 that the combination of operands is valid. If FORCE is set, ensure
3882 that the destination regno is valid if we have a 2 operand insn. */
/* Return nonzero if the operand combination in OPERANDS is valid for
   rtx code CODE.  For a COMPARE the two sources are operands[0] and
   operands[1]; for everything else operands[0] is the destination and
   operands[1]/operands[2] are the sources.  If FORCE is nonzero, also
   require that the destination register match the first source so the
   two-operand machine form can be used.  */
static int
c4x_valid_operands (code, operands, mode, force)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int force;
{
  rtx op1;
  rtx op2;
  enum rtx_code code1;
  enum rtx_code code2;

  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
    }
  else
    {
      op1 = operands[1];
      op2 = operands[2];
    }

  /* Look through SUBREGs; only the underlying object matters here.  */
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);

  /* Register-register is always valid.  */
  if (code1 == REG && code2 == REG)
    return 1;

  /* Two memory operands are only valid if both use the restricted
     S indirect modes, or both use the restricted R indirect modes.  */
  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
	return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  /* Any other pair of identical codes (e.g. two constants) is invalid.  */
  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
	{
	case CONST_INT:
	  if (c4x_J_constant (op2) && c4x_R_indirect (op1))
	    return 1;
	  break;

	case CONST_DOUBLE:
	  if (! c4x_H_constant (op2))
	    return 0;
	  break;

	  /* Any valid memory operand screened by src_operand is OK.  */
	case MEM:

	  /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	     into a stack slot memory address comprising a PLUS and a
	     constant.  */
	case ADDRESSOF:
	  break;

	default:
	  fatal_insn ("c4x_valid_operands: Internal error", op2);
	  break;
	}

      /* Check that we have a valid destination register for a two operand
	 instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
    }

  /* We assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));

  switch (code1)
    {
    case CONST_INT:
      if (c4x_J_constant (op1) && c4x_R_indirect (op2))
	return 1;
      break;

    case CONST_DOUBLE:
      if (! c4x_H_constant (op1))
	return 0;
      break;

      /* Any valid memory operand screened by src_operand is OK.  */
    case MEM:
#if 0
      if (code2 != REG)
	return 0;
#endif
      break;

      /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	 into a stack slot memory address comprising a PLUS and a
	 constant.  */
    case ADDRESSOF:
      break;

    default:
      abort ();
      break;
    }

  /* Check that we have a valid destination register for a two operand
     instruction.  NOTE(review): on this path op1 is not a REG, so
     applying REGNO to it when FORCE is set looks dubious -- confirm
     whether this was meant to test op2.  */
  return ! force || REGNO (op1) == REGNO (operands[0]);
}
4007 int valid_operands (code, operands, mode)
4008 enum rtx_code code;
4009 rtx *operands;
4010 enum machine_mode mode;
4013 /* If we are not optimizing then we have to let anything go and let
4014 reload fix things up. instantiate_decl in function.c can produce
4015 invalid insns by changing the offset of a memory operand from a
4016 valid one into an invalid one, when the second operand is also a
4017 memory operand. The alternative is not to allow two memory
4018 operands for an insn when not optimizing. The problem only rarely
4019 occurs, for example with the C-torture program DFcmp.c. */
4021 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
/* Massage OPERANDS (for rtx code CODE, in MODE) during RTL generation
   so that they form a combination c4x_valid_operands accepts, forcing
   values into registers where necessary.  Always returns 1.  */
int
legitimize_operands (code, operands, mode)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode;
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
	 they can get hoisted out of loops.  This will tie up an extra
	 register but can save an extra cycle.  Only do this if loop
	 optimisation enabled.  (We cannot pull this trick for add and
	 sub instructions since the flow pass won't find
	 autoincrements etc.)  This allows us to generate compare
	 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
	 of LDI *AR0++, R0; CMPI 42, R0.

	 Note that expand_binops will try to load an expensive constant
	 into a register if it is used within a loop.  Unfortunately,
	 the cost mechanism doesn't allow us to look at the other
	 operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
	  && TARGET_HOIST
	  && optimize > 0
	  && GET_CODE (operands[1]) == CONST_INT
	  && preserve_subexpressions_p ()
	  && rtx_cost (operands[1], code) > 1)
	operands[1] = force_reg (mode, operands[1]);

      if (! reload_in_progress
	  && ! c4x_valid_operands (code, operands, mode, 0))
	operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && preserve_subexpressions_p ()
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register, but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
	 register, preventing reload from having doing to do this at a
	 later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
	{
	  /* Make the destination double as the first source so the
	     two-operand machine form can be used.  */
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = copy_rtx (operands[0]);
	}
      else
	{
	  /* Just in case...  */
	  if (! c4x_valid_operands (code, operands, mode, 0))
	    operands[2] = force_reg (mode, operands[2]);
	}
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));

  return 1;
}
4114 /* The following predicates are used for instruction scheduling. */
4117 group1_reg_operand (op, mode)
4118 rtx op;
4119 enum machine_mode mode;
4121 if (mode != VOIDmode && mode != GET_MODE (op))
4122 return 0;
4123 if (GET_CODE (op) == SUBREG)
4124 op = SUBREG_REG (op);
4125 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4130 group1_mem_operand (op, mode)
4131 rtx op;
4132 enum machine_mode mode;
4134 if (mode != VOIDmode && mode != GET_MODE (op))
4135 return 0;
4137 if (GET_CODE (op) == MEM)
4139 op = XEXP (op, 0);
4140 if (GET_CODE (op) == PLUS)
4142 rtx op0 = XEXP (op, 0);
4143 rtx op1 = XEXP (op, 1);
4145 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4146 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4147 return 1;
4149 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4150 return 1;
4153 return 0;
/* Return true if OP is any one of the address registers.  */
4160 arx_reg_operand (op, mode)
4161 rtx op;
4162 enum machine_mode mode;
4164 if (mode != VOIDmode && mode != GET_MODE (op))
4165 return 0;
4166 if (GET_CODE (op) == SUBREG)
4167 op = SUBREG_REG (op);
4168 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4172 static int
4173 c4x_arn_reg_operand (op, mode, regno)
4174 rtx op;
4175 enum machine_mode mode;
4176 unsigned int regno;
4178 if (mode != VOIDmode && mode != GET_MODE (op))
4179 return 0;
4180 if (GET_CODE (op) == SUBREG)
4181 op = SUBREG_REG (op);
4182 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
/* Helper for the arN/irN memory predicates: nonzero if OP is a memory
   reference whose address involves a register (the specific hard
   register REGNO after reload), including autoincrement and
   autodecrement forms.  */
static int
c4x_arn_mem_operand (op, mode, regno)
     rtx op;
     enum machine_mode mode;
     unsigned int regno;
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
	{
	case PRE_DEC:
	case POST_DEC:
	case PRE_INC:
	case POST_INC:
	  /* Strip the autoinc/autodec wrapper and fall through to the
	     plain register check.  */
	  op = XEXP (op, 0);

	  /* Fall through.  */
	case REG:
	  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

	case PRE_MODIFY:
	case POST_MODIFY:
	  /* Check both the modified register and the register (if any)
	     appearing in the modification expression.  */
	  if (REG_P (XEXP (op, 0)) && (! reload_completed
				       || (REGNO (XEXP (op, 0)) == regno)))
	    return 1;
	  if (REG_P (XEXP (XEXP (op, 1), 1))
	      && (! reload_completed
		  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
	    return 1;
	  break;

	case PLUS:
	  {
	    rtx op0 = XEXP (op, 0);
	    rtx op1 = XEXP (op, 1);

	    if ((REG_P (op0) && (! reload_completed
				 || (REGNO (op0) == regno)))
	        || (REG_P (op1) && (! reload_completed
				    || (REGNO (op1) == regno))))
	      return 1;
	  }
	  break;

	default:
	  break;
	}
    }
  return 0;
}
/* Return nonzero if OP is a register (AR0 after reload).  */
int
ar0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (AR0 after reload).  */
int
ar0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}
/* Return nonzero if OP is a register (AR1 after reload).  */
int
ar1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (AR1 after reload).  */
int
ar1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}
/* Return nonzero if OP is a register (AR2 after reload).  */
int
ar2_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (AR2 after reload).  */
int
ar2_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}
/* Return nonzero if OP is a register (AR3 after reload).  */
int
ar3_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (AR3 after reload).  */
int
ar3_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}
/* Return nonzero if OP is a register (AR4 after reload).  */
int
ar4_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (AR4 after reload).  */
int
ar4_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}
/* Return nonzero if OP is a register (AR5 after reload).  */
int
ar5_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (AR5 after reload).  */
int
ar5_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}
/* Return nonzero if OP is a register (AR6 after reload).  */
int
ar6_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (AR6 after reload).  */
int
ar6_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}
/* Return nonzero if OP is a register (AR7 after reload).  */
int
ar7_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (AR7 after reload).  */
int
ar7_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}
/* Return nonzero if OP is a register (IR0 after reload).  */
int
ir0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (IR0 after reload).  */
int
ir0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}
/* Return nonzero if OP is a register (IR1 after reload).  */
int
ir1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}
/* Return nonzero if OP is a memory reference using a register in its
   address (IR1 after reload).  */
int
ir1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
4421 /* This is similar to operand_subword but allows autoincrement
4422 addressing. */
/* Return subword I of OP, like operand_subword, but additionally allow
   autoincrement addressing for memory operands.  MODE must be HImode
   or HFmode; anything that cannot be split safely aborts via
   fatal_insn.  */
rtx
c4x_operand_subword (op, i, validate_address, mode)
     rtx op;
     int i;
     int validate_address;
     enum machine_mode mode;
{
  if (mode != HImode && mode != HFmode)
    fatal_insn ("c4x_operand_subword: invalid mode", op);

  if (mode == HFmode && REG_P (op))
    fatal_insn ("c4x_operand_subword: invalid operand", op);

  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));
      /* NOTE(review): this inner `mode' shadows the parameter and is
	 the mode of the *address*, not of OP, so the HImode/HFmode
	 tests below operate on the address mode -- confirm whether
	 GET_MODE (op) was intended.  */
      enum machine_mode mode = GET_MODE (XEXP (op, 0));
      enum machine_mode submode;

      submode = mode;
      if (mode == HImode)
	submode = QImode;
      else if (mode == HFmode)
	submode = QFmode;

      switch (code)
	{
	case POST_INC:
	case PRE_INC:
	  /* Both subwords can reuse the autoincremented address.  */
	  return gen_rtx_MEM (submode, XEXP (op, 0));

	case POST_DEC:
	case PRE_DEC:
	case PRE_MODIFY:
	case POST_MODIFY:
	  /* We could handle these with some difficulty.
	     e.g., *p-- => *(p-=2); *(p+1).  */
	  fatal_insn ("c4x_operand_subword: invalid autoincrement", op);

	case SYMBOL_REF:
	case LABEL_REF:
	case CONST:
	case CONST_INT:
	  fatal_insn ("c4x_operand_subword: invalid address", op);

	  /* Even though offsettable_address_p considers (MEM
	     (LO_SUM)) to be offsettable, it is not safe if the
	     address is at the end of the data page since we also have
	     to fix up the associated high part.  In this case where
	     we are trying to split a HImode or HFmode memory
	     reference, we would have to emit another insn to reload a
	     new HIGH value.  It's easier to disable LO_SUM memory references
	     in HImode or HFmode and we probably get better code.  */
	case LO_SUM:
	  fatal_insn ("c4x_operand_subword: address not offsettable", op);

	default:
	  break;
	}
    }

  /* Everything else is handled by the generic routine.  */
  return operand_subword (op, i, validate_address, mode);
}
/* Singly-linked list node used to record symbol names for the
   assembler output (.global / .ref bookkeeping).  */
struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Symbols made global so far.  */
static struct name_list *global_head;
/* Symbols referenced externally but not (yet) made global.  */
static struct name_list *extern_head;
4498 /* Add NAME to list of global symbols and remove from external list if
4499 present on external list. */
4501 void
4502 c4x_global_label (name)
4503 const char *name;
4505 struct name_list *p, *last;
4507 /* Do not insert duplicate names, so linearly search through list of
4508 existing names. */
4509 p = global_head;
4510 while (p)
4512 if (strcmp (p->name, name) == 0)
4513 return;
4514 p = p->next;
4516 p = (struct name_list *) xmalloc (sizeof *p);
4517 p->next = global_head;
4518 p->name = name;
4519 global_head = p;
4521 /* Remove this name from ref list if present. */
4522 last = NULL;
4523 p = extern_head;
4524 while (p)
4526 if (strcmp (p->name, name) == 0)
4528 if (last)
4529 last->next = p->next;
4530 else
4531 extern_head = p->next;
4532 break;
4534 last = p;
4535 p = p->next;
4540 /* Add NAME to list of external symbols. */
4542 void
4543 c4x_external_ref (name)
4544 const char *name;
4546 struct name_list *p;
4548 /* Do not insert duplicate names. */
4549 p = extern_head;
4550 while (p)
4552 if (strcmp (p->name, name) == 0)
4553 return;
4554 p = p->next;
4557 /* Do not insert ref if global found. */
4558 p = global_head;
4559 while (p)
4561 if (strcmp (p->name, name) == 0)
4562 return;
4563 p = p->next;
4565 p = (struct name_list *) xmalloc (sizeof *p);
4566 p->next = extern_head;
4567 p->name = name;
4568 extern_head = p;
4571 /* We need to have a data section we can identify so that we can set
4572 the DP register back to a data pointer in the small memory model.
4573 This is only required for ISRs if we are paranoid that someone
4574 may have quietly changed this register on the sly. */
/* We need to have a data section we can identify so that we can set
   the DP register back to a data pointer in the small memory model.
   This is only required for ISRs if we are paranoid that someone
   may have quietly changed this register on the sly.  */
static void
c4x_file_start ()
{
  /* Pick the CPU version number for the .version directive.  The
     tests are deliberately ordered so that the last matching TARGET_*
     flag wins.  */
  int dspversion = 0;
  if (TARGET_C30) dspversion = 30;
  if (TARGET_C31) dspversion = 31;
  if (TARGET_C32) dspversion = 32;
  if (TARGET_C33) dspversion = 33;
  if (TARGET_C40) dspversion = 40;
  if (TARGET_C44) dspversion = 44;

  default_file_start ();
  fprintf (asm_out_file, "\t.version\t%d\n", dspversion);
  /* Emit a label at the start of the data section so the DP register
     can be pointed at it.  */
  fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
}
4592 static void
4593 c4x_file_end ()
4595 struct name_list *p;
4597 /* Output all external names that are not global. */
4598 p = extern_head;
4599 while (p)
4601 fprintf (asm_out_file, "\t.ref\t");
4602 assemble_name (asm_out_file, p->name);
4603 fprintf (asm_out_file, "\n");
4604 p = p->next;
4606 fprintf (asm_out_file, "\t.end\n");
/* If DECL's name appears on LIST (a TREE_LIST of pragma-supplied
   names), prepend the machine attribute ATTRIB with the list entry's
   value to *ATTRIBUTES.  */
static void
c4x_check_attribute (attrib, list, decl, attributes)
     const char *attrib;
     tree list, decl, *attributes;
{
  /* Identifier nodes are shared, so comparing the IDENTIFIER_POINTER
     addresses is sufficient -- no string compare is needed.  */
  while (list != NULL_TREE
	 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
	 != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list)
    *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
			     *attributes);
}
4625 static void
4626 c4x_insert_attributes (decl, attributes)
4627 tree decl, *attributes;
4629 switch (TREE_CODE (decl))
4631 case FUNCTION_DECL:
4632 c4x_check_attribute ("section", code_tree, decl, attributes);
4633 c4x_check_attribute ("const", pure_tree, decl, attributes);
4634 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4635 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4636 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4637 break;
4639 case VAR_DECL:
4640 c4x_check_attribute ("section", data_tree, decl, attributes);
4641 break;
4643 default:
4644 break;
/* Table of valid machine attributes.  All three attributes apply to
   function types and share one handler.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
4658 /* Handle an attribute requiring a FUNCTION_TYPE;
4659 arguments as in struct attribute_spec.handler. */
4660 static tree
4661 c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4662 tree *node;
4663 tree name;
4664 tree args ATTRIBUTE_UNUSED;
4665 int flags ATTRIBUTE_UNUSED;
4666 bool *no_add_attrs;
4668 if (TREE_CODE (*node) != FUNCTION_TYPE)
4670 warning ("`%s' attribute only applies to functions",
4671 IDENTIFIER_POINTER (name));
4672 *no_add_attrs = true;
4675 return NULL_TREE;
4679 /* !!! FIXME to emit RPTS correctly. */
/* Return nonzero if the repeat block started by INSN (an rptb-style
   insn with loop count OP) may be emitted as a single-instruction
   RPTS: the block must consist of exactly one insn followed by the
   rptb_end pattern, and RPTS use must be enabled for this count.  */
int
c4x_rptb_rpts_p (insn, op)
     rtx insn, op;
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
	 and the top label... They were probably destined to
	 be moved out of the loop.  For now, let's leave them
	 where they are and print a warning.  We should
	 probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
	fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
		   insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  if (! INSN_P (insn))
    return 0;

  /* The second insn must be the end-of-repeat-block pattern, i.e. the
     loop body is a single instruction.  */
  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  if (TARGET_RPTS)
    return 1;

  /* Otherwise only use RPTS when the (constant) count is within the
     user-specified cycle threshold.  */
  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
4724 /* Check if register r11 is used as the destination of an insn. */
/* Return nonzero if register r11 is used as the destination of an
   insn, searching X (an insn or rtx) recursively.  */
static int
c4x_r11_set_p(x)
     rtx x;
{
  rtx set;
  int i, j;
  const char *fmt;

  if (x == 0)
    return 0;

  /* For a delayed-branch SEQUENCE, only the final insn matters.  */
  if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
    x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);

  /* For a single-set insn, only the destination can set r11.  */
  if (INSN_P (x) && (set = single_set (x)))
    x = SET_DEST (set);

  if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
    return 1;

  /* Recurse over all rtx ('e') and rtx-vector ('E') sub-expressions.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (c4x_r11_set_p (XEXP (x, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (c4x_r11_set_p (XVECEXP (x, i, j)))
	    return 1;
    }
  return 0;
}
4763 /* The c4x sometimes has a problem when the insn before the laj insn
4764 sets the r11 register. Check for this situation. */
4767 c4x_check_laj_p (insn)
4768 rtx insn;
4770 insn = prev_nonnote_insn (insn);
4772 /* If this is the start of the function no nop is needed. */
4773 if (insn == 0)
4774 return 0;
4776 /* If the previous insn is a code label we have to insert a nop. This
4777 could be a jump or table jump. We can find the normal jumps by
4778 scanning the function but this will not find table jumps. */
4779 if (GET_CODE (insn) == CODE_LABEL)
4780 return 1;
4782 /* If the previous insn sets register r11 we have to insert a nop. */
4783 if (c4x_r11_set_p (insn))
4784 return 1;
4786 /* No nop needed. */
4787 return 0;
4791 /* Adjust the cost of a scheduling dependency. Return the new cost of
4792 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4793 A set of an address register followed by a use occurs a 2 cycle
4794 stall (reduced to a single cycle on the c40 using LDA), while
4795 a read of an address register followed by a use occurs a single cycle. */
/* Latency charged when an address register is written (SET), written
   via LDA (c4x only), or read, and then used in an address shortly
   after.  */
#define SET_USE_COST 3
#define SETLDA_USE_COST 2
#define READ_USE_COST 2

/* Adjust the cost of the scheduling dependency LINK of INSN on
   DEP_INSN; COST is the current cost.  Returns the adjusted cost.  */
static int
c4x_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
	 cycles later.  */
      if (TARGET_C3X)
	{
	  if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;
	}
      else
	{
	  /* This could be significantly optimized. We should look
	     to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
	     insn uses ar0-ar7.  We then test if the same register
	     is used.  The tricky bit is that some operands will
	     use several registers...  */
	  if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  /* The index registers have no "read" attribute.  */
	  if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

	  if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	}

      if (max)
	cost = max;

      /* For other data dependencies, the default cost specified in the
	 md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
/* Register the machine-specific builtin functions: the fast/ANSI
   float-to-int conversions, and either fast_imult (C3x) or the IEEE
   conversion helpers and fast reciprocal (C4x).  */
void
c4x_init_builtins ()
{
  tree endlink = void_list_node;

  builtin_function ("fast_ftoi",
		    build_function_type
		    (integer_type_node,
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
  builtin_function ("ansi_ftoi",
		    build_function_type
		    (integer_type_node,
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
  if (TARGET_C3X)
    builtin_function ("fast_imult",
		      build_function_type
		      (integer_type_node,
		       tree_cons (NULL_TREE, integer_type_node,
				  tree_cons (NULL_TREE,
					     integer_type_node, endlink))),
		      C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
  else
    {
      builtin_function ("toieee",
			build_function_type
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
      builtin_function ("frieee",
			build_function_type
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
      builtin_function ("fast_invf",
			build_function_type
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
			C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
    }
}
/* Expand a call EXP to one of the machine-specific builtins registered
   by c4x_init_builtins, generating RTL into TARGET where possible.
   Returns the result rtx, or NULL_RTX if the builtin is not available
   on the selected CPU.  */
rtx
c4x_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      /* Only available on the C3x.  */
      if (! TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      r0 = protect_from_queue (r0, 0);
      r1 = protect_from_queue (r1, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      /* Only available on the C4x.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      /* Only available on the C4x.  frieee needs a memory operand,
	 so force a register argument into a stack slot.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
	put_var_into_stack (arg0, /*rescan=*/true);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (register_operand (r0, QFmode))
	{
	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
	  emit_move_insn (r1, r0);
	  r0 = r1;
	}
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      /* Only available on the C4x.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  /* A builtin not supported on this CPU falls through to here.  */
  return NULL_RTX;
}
/* Switch the assembler output to the named section NAME.  */
static void
c4x_asm_named_section (name, flags)
     const char *name;
     unsigned int flags ATTRIBUTE_UNUSED;
{
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}
/* Emit the directive making NAME global, and also record it on the
   backend's global-symbol list so no .ref is emitted for it later.  */
static void
c4x_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
/* Nonzero if rtx code C is a shift operation.  */
#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
/* Nonzero if rtx code C is a bitwise logical operation.  */
#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
5096 /* Compute a (partial) cost for rtx X. Return true if the complete
5097 cost has been computed, and false if subexpressions should be
5098 scanned. In either case, *TOTAL contains the cost result. */
5100 static bool
5101 c4x_rtx_costs (x, code, outer_code, total)
5102 rtx x;
5103 int code, outer_code;
5104 int *total;
5106 HOST_WIDE_INT val;
5108 switch (code)
5110 /* Some small integers are effectively free for the C40. We should
5111 also consider if we are using the small memory model. With
5112 the big memory model we require an extra insn for a constant
5113 loaded from memory. */
5115 case CONST_INT:
5116 val = INTVAL (x);
5117 if (c4x_J_constant (x))
5118 *total = 0;
5119 else if (! TARGET_C3X
5120 && outer_code == AND
5121 && (val == 255 || val == 65535))
5122 *total = 0;
5123 else if (! TARGET_C3X
5124 && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
5125 && (val == 16 || val == 24))
5126 *total = 0;
5127 else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
5128 *total = 3;
5129 else if (LOGICAL_CODE_P (outer_code)
5130 ? c4x_L_constant (x) : c4x_I_constant (x))
5131 *total = 2;
5132 else
5133 *total = 4;
5134 return true;
5136 case CONST:
5137 case LABEL_REF:
5138 case SYMBOL_REF:
5139 *total = 4;
5140 return true;
5142 case CONST_DOUBLE:
5143 if (c4x_H_constant (x))
5144 *total = 2;
5145 else if (GET_MODE (x) == QFmode)
5146 *total = 4;
5147 else
5148 *total = 8;
5149 return true;
5151 /* ??? Note that we return true, rather than false so that rtx_cost
5152 doesn't include the constant costs. Otherwise expand_mult will
5153 think that it is cheaper to synthesize a multiply rather than to
5154 use a multiply instruction. I think this is because the algorithm
5155 synth_mult doesn't take into account the loading of the operands,
5156 whereas the calculation of mult_cost does. */
5157 case PLUS:
5158 case MINUS:
5159 case AND:
5160 case IOR:
5161 case XOR:
5162 case ASHIFT:
5163 case ASHIFTRT:
5164 case LSHIFTRT:
5165 *total = COSTS_N_INSNS (1);
5166 return true;
5168 case MULT:
5169 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
5170 || TARGET_MPYI ? 1 : 14);
5171 return true;
5173 case DIV:
5174 case UDIV:
5175 case MOD:
5176 case UMOD:
5177 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
5178 ? 15 : 50);
5179 return true;
5181 default:
5182 return false;