1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
15 GCC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "real.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 #include "conditions.h"
39 #include "output.h"
40 #include "function.h"
41 #include "expr.h"
42 #include "optabs.h"
43 #include "libfuncs.h"
44 #include "flags.h"
45 #include "loop.h"
46 #include "recog.h"
47 #include "ggc.h"
48 #include "cpplib.h"
49 #include "toplev.h"
50 #include "tm_p.h"
51 #include "target.h"
52 #include "target-def.h"
53 #include "langhooks.h"
55 rtx smulhi3_libfunc;
56 rtx umulhi3_libfunc;
57 rtx fix_truncqfhi2_libfunc;
58 rtx fixuns_truncqfhi2_libfunc;
59 rtx fix_trunchfhi2_libfunc;
60 rtx fixuns_trunchfhi2_libfunc;
61 rtx floathiqf2_libfunc;
62 rtx floatunshiqf2_libfunc;
63 rtx floathihf2_libfunc;
64 rtx floatunshihf2_libfunc;
66 static int c4x_leaf_function;
68 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
70 /* Array of the smallest class containing reg number REGNO, indexed by
71 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
72 registers are available and set the class to NO_REGS for registers
73 that the target switches say are unavailable. */
75 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
77 /* Reg Modes Saved. */
78 R0R1_REGS, /* R0 QI, QF, HF No. */
79 R0R1_REGS, /* R1 QI, QF, HF No. */
80 R2R3_REGS, /* R2 QI, QF, HF No. */
81 R2R3_REGS, /* R3 QI, QF, HF No. */
82 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
83 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
84 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
85 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
86 ADDR_REGS, /* AR0 QI No. */
87 ADDR_REGS, /* AR1 QI No. */
88 ADDR_REGS, /* AR2 QI No. */
89 ADDR_REGS, /* AR3 QI QI. */
90 ADDR_REGS, /* AR4 QI QI. */
91 ADDR_REGS, /* AR5 QI QI. */
92 ADDR_REGS, /* AR6 QI QI. */
93 ADDR_REGS, /* AR7 QI QI. */
94 DP_REG, /* DP QI No. */
95 INDEX_REGS, /* IR0 QI No. */
96 INDEX_REGS, /* IR1 QI No. */
97 BK_REG, /* BK QI QI. */
98 SP_REG, /* SP QI No. */
99 ST_REG, /* ST CC No. */
100 NO_REGS, /* DIE/IE No. */
101 NO_REGS, /* IIE/IF No. */
102 NO_REGS, /* IIF/IOF No. */
103 INT_REGS, /* RS QI No. */
104 INT_REGS, /* RE QI No. */
105 RC_REG, /* RC QI No. */
106 EXT_REGS, /* R8 QI, QF, HF QI. */
107 EXT_REGS, /* R9 QI, QF, HF No. */
108 EXT_REGS, /* R10 QI, QF, HF No. */
109 EXT_REGS, /* R11 QI, QF, HF No. */
112 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
114 /* Reg Modes Saved. */
115 HFmode, /* R0 QI, QF, HF No. */
116 HFmode, /* R1 QI, QF, HF No. */
117 HFmode, /* R2 QI, QF, HF No. */
118 HFmode, /* R3 QI, QF, HF No. */
119 QFmode, /* R4 QI, QF, HF QI. */
120 QFmode, /* R5 QI, QF, HF QI. */
121 QImode, /* R6 QI, QF, HF QF. */
122 QImode, /* R7 QI, QF, HF QF. */
123 QImode, /* AR0 QI No. */
124 QImode, /* AR1 QI No. */
125 QImode, /* AR2 QI No. */
126 QImode, /* AR3 QI QI. */
127 QImode, /* AR4 QI QI. */
128 QImode, /* AR5 QI QI. */
129 QImode, /* AR6 QI QI. */
130 QImode, /* AR7 QI QI. */
131 VOIDmode, /* DP QI No. */
132 QImode, /* IR0 QI No. */
133 QImode, /* IR1 QI No. */
134 QImode, /* BK QI QI. */
135 VOIDmode, /* SP QI No. */
136 VOIDmode, /* ST CC No. */
137 VOIDmode, /* DIE/IE No. */
138 VOIDmode, /* IIE/IF No. */
139 VOIDmode, /* IIF/IOF No. */
140 QImode, /* RS QI No. */
141 QImode, /* RE QI No. */
142 VOIDmode, /* RC QI No. */
143 QFmode, /* R8 QI, QF, HF QI. */
144 HFmode, /* R9 QI, QF, HF No. */
145 HFmode, /* R10 QI, QF, HF No. */
146 HFmode, /* R11 QI, QF, HF No. */
150 /* Test and compare insns in c4x.md store the information needed to
151 generate branch and scc insns here. */
153 rtx c4x_compare_op0;
154 rtx c4x_compare_op1;
156 const char *c4x_rpts_cycles_string;
157 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
158 const char *c4x_cpu_version_string;
159 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
161 /* Pragma definitions. */
163 tree code_tree = NULL_TREE;
164 tree data_tree = NULL_TREE;
165 tree pure_tree = NULL_TREE;
166 tree noreturn_tree = NULL_TREE;
167 tree interrupt_tree = NULL_TREE;
168 tree naked_tree = NULL_TREE;
 170 /* Forward declarations.  */
171 static int c4x_isr_reg_used_p (unsigned int);
172 static int c4x_leaf_function_p (void);
173 static int c4x_naked_function_p (void);
174 static int c4x_immed_float_p (rtx);
175 static int c4x_a_register (rtx);
176 static int c4x_x_register (rtx);
177 static int c4x_immed_int_constant (rtx);
178 static int c4x_immed_float_constant (rtx);
179 static int c4x_K_constant (rtx);
180 static int c4x_N_constant (rtx);
181 static int c4x_O_constant (rtx);
182 static int c4x_R_indirect (rtx);
183 static int c4x_S_indirect (rtx);
 184 static void c4x_S_address_parse (rtx, int *, int *, int *, int *);
185 static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
186 static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
187 static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
188 static void c4x_file_start (void);
189 static void c4x_file_end (void);
190 static void c4x_check_attribute (const char *, tree, tree, tree *);
191 static int c4x_r11_set_p (rtx);
192 static int c4x_rptb_valid_p (rtx, rtx);
193 static void c4x_reorg (void);
194 static int c4x_label_ref_used_p (rtx, rtx);
195 static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
196 const struct attribute_spec c4x_attribute_table[];
197 static void c4x_insert_attributes (tree, tree *);
198 static void c4x_asm_named_section (const char *, unsigned int, tree);
199 static int c4x_adjust_cost (rtx, rtx, rtx, int);
200 static void c4x_globalize_label (FILE *, const char *);
201 static bool c4x_rtx_costs (rtx, int, int, int *);
202 static int c4x_address_cost (rtx);
203 static void c4x_init_libfuncs (void);
204 static void c4x_external_libcall (rtx);
205 static rtx c4x_struct_value_rtx (tree, int);
206 static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
208 /* Initialize the GCC target structure. */
209 #undef TARGET_ASM_BYTE_OP
210 #define TARGET_ASM_BYTE_OP "\t.word\t"
211 #undef TARGET_ASM_ALIGNED_HI_OP
212 #define TARGET_ASM_ALIGNED_HI_OP NULL
213 #undef TARGET_ASM_ALIGNED_SI_OP
214 #define TARGET_ASM_ALIGNED_SI_OP NULL
215 #undef TARGET_ASM_FILE_START
216 #define TARGET_ASM_FILE_START c4x_file_start
217 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
218 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
219 #undef TARGET_ASM_FILE_END
220 #define TARGET_ASM_FILE_END c4x_file_end
222 #undef TARGET_ASM_EXTERNAL_LIBCALL
223 #define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall
225 #undef TARGET_ATTRIBUTE_TABLE
226 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
228 #undef TARGET_INSERT_ATTRIBUTES
229 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
231 #undef TARGET_INIT_BUILTINS
232 #define TARGET_INIT_BUILTINS c4x_init_builtins
234 #undef TARGET_EXPAND_BUILTIN
235 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
237 #undef TARGET_SCHED_ADJUST_COST
238 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
240 #undef TARGET_ASM_GLOBALIZE_LABEL
241 #define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
243 #undef TARGET_RTX_COSTS
244 #define TARGET_RTX_COSTS c4x_rtx_costs
245 #undef TARGET_ADDRESS_COST
246 #define TARGET_ADDRESS_COST c4x_address_cost
248 #undef TARGET_MACHINE_DEPENDENT_REORG
249 #define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg
251 #undef TARGET_INIT_LIBFUNCS
252 #define TARGET_INIT_LIBFUNCS c4x_init_libfuncs
254 #undef TARGET_STRUCT_VALUE_RTX
255 #define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx
257 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
258 #define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr
260 struct gcc_target targetm = TARGET_INITIALIZER;
262 /* Override command line options.
263 Called once after all options have been parsed.
264 Mostly we process the processor
265 type and sometimes adjust other TARGET_ options. */
267 void
268 c4x_override_options (void)
270 if (c4x_rpts_cycles_string)
271 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
272 else
273 c4x_rpts_cycles = 0;
275 if (TARGET_C30)
276 c4x_cpu_version = 30;
277 else if (TARGET_C31)
278 c4x_cpu_version = 31;
279 else if (TARGET_C32)
280 c4x_cpu_version = 32;
281 else if (TARGET_C33)
282 c4x_cpu_version = 33;
283 else if (TARGET_C40)
284 c4x_cpu_version = 40;
285 else if (TARGET_C44)
286 c4x_cpu_version = 44;
287 else
288 c4x_cpu_version = 40;
290 /* -mcpu=xx overrides -m40 etc. */
291 if (c4x_cpu_version_string)
293 const char *p = c4x_cpu_version_string;
295 /* Also allow -mcpu=c30 etc. */
296 if (*p == 'c' || *p == 'C')
297 p++;
298 c4x_cpu_version = atoi (p);
301 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
302 C40_FLAG | C44_FLAG);
304 switch (c4x_cpu_version)
306 case 30: target_flags |= C30_FLAG; break;
307 case 31: target_flags |= C31_FLAG; break;
308 case 32: target_flags |= C32_FLAG; break;
309 case 33: target_flags |= C33_FLAG; break;
310 case 40: target_flags |= C40_FLAG; break;
311 case 44: target_flags |= C44_FLAG; break;
312 default:
313 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
314 c4x_cpu_version = 40;
315 target_flags |= C40_FLAG;
318 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
319 target_flags |= C3X_FLAG;
320 else
321 target_flags &= ~C3X_FLAG;
323 /* Convert foo / 8.0 into foo * 0.125, etc. */
324 set_fast_math_flags (1);
326 /* We should phase out the following at some stage.
327 This provides compatibility with the old -mno-aliases option. */
328 if (! TARGET_ALIASES && ! flag_argument_noalias)
329 flag_argument_noalias = 1;
333 /* This is called before c4x_override_options. */
335 void
336 c4x_optimization_options (int level ATTRIBUTE_UNUSED,
337 int size ATTRIBUTE_UNUSED)
339 /* Scheduling before register allocation can screw up global
340 register allocation, especially for functions that use MPY||ADD
 341 instructions. The benefit we gain by scheduling before
342 register allocation is probably marginal anyhow. */
343 flag_schedule_insns = 0;
347 /* Write an ASCII string. */
349 #define C4X_ASCII_LIMIT 40
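/* For illustration (a sketch, not output copied from a real build):
   given the three input bytes 'O', 'K' and '\n', the loop below
   buffers the printable characters and emits the newline as a
   decimal value, producing

       .byte   "OK",10

   Runs longer than C4X_ASCII_LIMIT are split into several quoted
   strings, and for TARGET_TI the output line is broken once it
   reaches column 80.  */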
351 void
352 c4x_output_ascii (FILE *stream, const char *ptr, int len)
354 char sbuf[C4X_ASCII_LIMIT + 1];
355 int s, l, special, first = 1, onlys;
357 if (len)
358 fprintf (stream, "\t.byte\t");
360 for (s = l = 0; len > 0; --len, ++ptr)
362 onlys = 0;
 364 /* Escape " and \ by prefixing them with a backslash.  */
365 special = *ptr == '\"' || *ptr == '\\';
 367 /* If printable, add to buffer.  */
368 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
370 if (special)
371 sbuf[s++] = '\\';
372 sbuf[s++] = *ptr;
373 if (s < C4X_ASCII_LIMIT - 1)
374 continue;
375 onlys = 1;
377 if (s)
379 if (first)
380 first = 0;
381 else
383 fputc (',', stream);
384 l++;
387 sbuf[s] = 0;
388 fprintf (stream, "\"%s\"", sbuf);
389 l += s + 2;
390 if (TARGET_TI && l >= 80 && len > 1)
392 fprintf (stream, "\n\t.byte\t");
393 first = 1;
394 l = 0;
397 s = 0;
399 if (onlys)
400 continue;
402 if (first)
403 first = 0;
404 else
406 fputc (',', stream);
407 l++;
410 fprintf (stream, "%d", *ptr);
411 l += 3;
412 if (TARGET_TI && l >= 80 && len > 1)
414 fprintf (stream, "\n\t.byte\t");
415 first = 1;
416 l = 0;
419 if (s)
421 if (! first)
422 fputc (',', stream);
424 sbuf[s] = 0;
425 fprintf (stream, "\"%s\"", sbuf);
426 s = 0;
428 fputc ('\n', stream);
 432 int
 433 c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
435 switch (mode)
437 #if Pmode != QImode
438 case Pmode: /* Pointer (24/32 bits). */
439 #endif
440 case QImode: /* Integer (32 bits). */
441 return IS_INT_REGNO (regno);
443 case QFmode: /* Float, Double (32 bits). */
444 case HFmode: /* Long Double (40 bits). */
445 return IS_EXT_REGNO (regno);
447 case CCmode: /* Condition Codes. */
448 case CC_NOOVmode: /* Condition Codes. */
449 return IS_ST_REGNO (regno);
451 case HImode: /* Long Long (64 bits). */
452 /* We need two registers to store long longs. Note that
453 it is much easier to constrain the first register
454 to start on an even boundary. */
455 return IS_INT_REGNO (regno)
456 && IS_INT_REGNO (regno + 1)
457 && (regno & 1) == 0;
459 default:
460 return 0; /* We don't support these modes. */
463 return 0;
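/* For example, under the HImode rule above a 64-bit value may live
   in the pair R0:R1 or R2:R3 (the first register is even) but not
   in R1:R2, since the pair must start on an even boundary.  */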
466 /* Return nonzero if REGNO1 can be renamed to REGNO2. */
 467 int
 468 c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
470 /* We cannot copy call saved registers from mode QI into QF or from
471 mode QF into QI. */
472 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
473 return 0;
474 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
475 return 0;
476 /* We cannot copy from an extended (40 bit) register to a standard
477 (32 bit) register because we only set the condition codes for
478 extended registers. */
479 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
480 return 0;
481 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
482 return 0;
483 return 1;
486 /* The TI C3x C compiler register argument runtime model uses 6 registers,
487 AR2, R2, R3, RC, RS, RE.
489 The first two floating point arguments (float, double, long double)
490 that are found scanning from left to right are assigned to R2 and R3.
492 The remaining integer (char, short, int, long) or pointer arguments
493 are assigned to the remaining registers in the order AR2, R2, R3,
494 RC, RS, RE when scanning left to right, except for the last named
 495 argument prior to an ellipsis denoting a variable number of
496 arguments. We don't have to worry about the latter condition since
497 function.c treats the last named argument as anonymous (unnamed).
499 All arguments that cannot be passed in registers are pushed onto
500 the stack in reverse order (right to left). GCC handles that for us.
502 c4x_init_cumulative_args() is called at the start, so we can parse
503 the args to see how many floating point arguments and how many
504 integer (or pointer) arguments there are. c4x_function_arg() is
505 then called (sometimes repeatedly) for each argument (parsed left
506 to right) to obtain the register to pass the argument in, or zero
507 if the argument is to be passed on the stack. Once the compiler is
508 happy, c4x_function_arg_advance() is called.
 510 Don't use R0 to pass arguments in, since we use 0 to indicate
 511 a stack argument.  */
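/* A worked example of the tables below, using a hypothetical
   prototype for illustration:

       void f (int a, float b, int c, double d, int e);

   B and D are the first two floating point arguments scanned left
   to right, so they are assigned R2 and R3 (c4x_fp_reglist).  With
   two floats taken, the integer arguments use row [2] of
   c4x_int_reglist: A goes in AR2, C in RC and E in RS.  */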
513 static const int c4x_int_reglist[3][6] =
515 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
516 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
517 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
520 static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
523 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
524 function whose data type is FNTYPE.
525 For a library call, FNTYPE is 0. */
527 void
528 c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
530 tree param, next_param;
532 cum->floats = cum->ints = 0;
533 cum->init = 0;
534 cum->var = 0;
535 cum->args = 0;
537 if (TARGET_DEBUG)
539 fprintf (stderr, "\nc4x_init_cumulative_args (");
540 if (fntype)
542 tree ret_type = TREE_TYPE (fntype);
544 fprintf (stderr, "fntype code = %s, ret code = %s",
545 tree_code_name[(int) TREE_CODE (fntype)],
546 tree_code_name[(int) TREE_CODE (ret_type)]);
548 else
549 fprintf (stderr, "no fntype");
551 if (libname)
552 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
555 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
557 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
558 param; param = next_param)
560 tree type;
562 next_param = TREE_CHAIN (param);
564 type = TREE_VALUE (param);
565 if (type && type != void_type_node)
567 enum machine_mode mode;
569 /* If the last arg doesn't have void type then we have
570 variable arguments. */
571 if (! next_param)
572 cum->var = 1;
574 if ((mode = TYPE_MODE (type)))
576 if (! targetm.calls.must_pass_in_stack (mode, type))
578 /* Look for float, double, or long double argument. */
579 if (mode == QFmode || mode == HFmode)
580 cum->floats++;
581 /* Look for integer, enumeral, boolean, char, or pointer
582 argument. */
583 else if (mode == QImode || mode == Pmode)
584 cum->ints++;
587 cum->args++;
591 if (TARGET_DEBUG)
592 fprintf (stderr, "%s%s, args = %d)\n",
593 cum->prototype ? ", prototype" : "",
594 cum->var ? ", variable args" : "",
595 cum->args);
599 /* Update the data in CUM to advance over an argument
600 of mode MODE and data type TYPE.
601 (TYPE is null for libcalls where that information may not be available.) */
603 void
604 c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
605 tree type, int named)
607 if (TARGET_DEBUG)
608 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
609 GET_MODE_NAME (mode), named);
610 if (! TARGET_MEMPARM
611 && named
612 && type
613 && ! targetm.calls.must_pass_in_stack (mode, type))
615 /* Look for float, double, or long double argument. */
616 if (mode == QFmode || mode == HFmode)
617 cum->floats++;
618 /* Look for integer, enumeral, boolean, char, or pointer argument. */
619 else if (mode == QImode || mode == Pmode)
620 cum->ints++;
622 else if (! TARGET_MEMPARM && ! type)
624 /* Handle libcall arguments. */
625 if (mode == QFmode || mode == HFmode)
626 cum->floats++;
627 else if (mode == QImode || mode == Pmode)
628 cum->ints++;
630 return;
634 /* Define where to put the arguments to a function. Value is zero to
635 push the argument on the stack, or a hard register in which to
636 store the argument.
638 MODE is the argument's machine mode.
639 TYPE is the data type of the argument (as a tree).
640 This is null for libcalls where that information may
641 not be available.
642 CUM is a variable of type CUMULATIVE_ARGS which gives info about
643 the preceding args and about the function being called.
644 NAMED is nonzero if this argument is a named parameter
645 (otherwise it is an extra parameter matching an ellipsis). */
647 struct rtx_def *
648 c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
649 tree type, int named)
651 int reg = 0; /* Default to passing argument on stack. */
653 if (! cum->init)
655 /* We can handle at most 2 floats in R2, R3. */
656 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
658 /* We can handle at most 6 integers minus number of floats passed
659 in registers. */
660 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
661 6 - cum->maxfloats : cum->ints;
663 /* If there is no prototype, assume all the arguments are integers. */
664 if (! cum->prototype)
665 cum->maxints = 6;
667 cum->ints = cum->floats = 0;
668 cum->init = 1;
671 /* This marks the last argument. We don't need to pass this through
672 to the call insn. */
673 if (type == void_type_node)
674 return 0;
676 if (! TARGET_MEMPARM
677 && named
678 && type
679 && ! targetm.calls.must_pass_in_stack (mode, type))
681 /* Look for float, double, or long double argument. */
682 if (mode == QFmode || mode == HFmode)
684 if (cum->floats < cum->maxfloats)
685 reg = c4x_fp_reglist[cum->floats];
687 /* Look for integer, enumeral, boolean, char, or pointer argument. */
688 else if (mode == QImode || mode == Pmode)
690 if (cum->ints < cum->maxints)
691 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
694 else if (! TARGET_MEMPARM && ! type)
696 /* We could use a different argument calling model for libcalls,
697 since we're only calling functions in libgcc. Thus we could
698 pass arguments for long longs in registers rather than on the
699 stack. In the meantime, use the odd TI format. We make the
700 assumption that we won't have more than two floating point
701 args, six integer args, and that all the arguments are of the
702 same mode. */
703 if (mode == QFmode || mode == HFmode)
704 reg = c4x_fp_reglist[cum->floats];
705 else if (mode == QImode || mode == Pmode)
706 reg = c4x_int_reglist[0][cum->ints];
709 if (TARGET_DEBUG)
711 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
712 GET_MODE_NAME (mode), named);
713 if (reg)
714 fprintf (stderr, ", reg=%s", reg_names[reg]);
715 else
716 fprintf (stderr, ", stack");
717 fprintf (stderr, ")\n");
719 if (reg)
720 return gen_rtx_REG (mode, reg);
721 else
722 return NULL_RTX;
725 /* C[34]x arguments grow in weird ways (downwards) that the standard
 726 varargs stuff can't handle.  */
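/* The tree built below is roughly equivalent to this sketch, where
   TYPE and N (the size of TYPE in words) are illustrative:

       valist -= N;
       result = *(TYPE *) valist;

   with one extra dereference up front when the argument is passed
   by reference.  */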
728 static tree
729 c4x_gimplify_va_arg_expr (tree valist, tree type,
730 tree *pre_p ATTRIBUTE_UNUSED,
731 tree *post_p ATTRIBUTE_UNUSED)
733 tree t;
734 bool indirect;
736 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
737 if (indirect)
738 type = build_pointer_type (type);
740 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
741 build_int_cst (NULL_TREE, int_size_in_bytes (type)));
742 t = fold_convert (build_pointer_type (type), t);
743 t = build_fold_indirect_ref (t);
745 if (indirect)
746 t = build_fold_indirect_ref (t);
748 return t;
752 static int
753 c4x_isr_reg_used_p (unsigned int regno)
 755 /* Don't save/restore FP or ST; we handle them separately.  */
756 if (regno == FRAME_POINTER_REGNUM
757 || IS_ST_REGNO (regno))
758 return 0;
 760 /* We could be a little smarter about saving/restoring DP.
 761 We'll only save it for the big memory model or if
 762 we're paranoid. ;-) */
763 if (IS_DP_REGNO (regno))
764 return ! TARGET_SMALL || TARGET_PARANOID;
766 /* Only save/restore regs in leaf function that are used. */
767 if (c4x_leaf_function)
768 return regs_ever_live[regno] && fixed_regs[regno] == 0;
770 /* Only save/restore regs that are used by the ISR and regs
771 that are likely to be used by functions the ISR calls
772 if they are not fixed. */
773 return IS_EXT_REGNO (regno)
774 || ((regs_ever_live[regno] || call_used_regs[regno])
775 && fixed_regs[regno] == 0);
779 static int
780 c4x_leaf_function_p (void)
782 /* A leaf function makes no calls, so we only need
783 to save/restore the registers we actually use.
784 For the global variable leaf_function to be set, we need
785 to define LEAF_REGISTERS and all that it entails.
786 Let's check ourselves.... */
788 if (lookup_attribute ("leaf_pretend",
789 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
790 return 1;
792 /* Use the leaf_pretend attribute at your own risk. This is a hack
793 to speed up ISRs that call a function infrequently where the
794 overhead of saving and restoring the additional registers is not
795 warranted. You must save and restore the additional registers
796 required by the called function. Caveat emptor. Here's enough
797 rope... */
799 if (leaf_function_p ())
800 return 1;
802 return 0;
806 static int
807 c4x_naked_function_p (void)
809 tree type;
811 type = TREE_TYPE (current_function_decl);
812 return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
 816 int
 817 c4x_interrupt_function_p (void)
819 const char *cfun_name;
820 if (lookup_attribute ("interrupt",
821 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
822 return 1;
824 /* Look for TI style c_intnn. */
825 cfun_name = current_function_name ();
826 return cfun_name[0] == 'c'
827 && cfun_name[1] == '_'
828 && cfun_name[2] == 'i'
829 && cfun_name[3] == 'n'
830 && cfun_name[4] == 't'
831 && ISDIGIT (cfun_name[5])
832 && ISDIGIT (cfun_name[6]);
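/* So, for example, a function named c_int01 is treated as an
   interrupt handler even without an explicit interrupt attribute.  */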
835 void
836 c4x_expand_prologue (void)
838 unsigned int regno;
839 int size = get_frame_size ();
840 rtx insn;
 842 /* In functions where ar3 is not used but frame pointers are still
 843 specified, frame pointers are not adjusted (at -O2 and above) and
 844 this flag is used so we won't needlessly push the frame pointer. */
845 int dont_push_ar3;
 847 /* For a __naked__ function, don't build a prologue.  */
848 if (c4x_naked_function_p ())
850 return;
 853 /* For an __interrupt__ function, build a specific prologue.  */
854 if (c4x_interrupt_function_p ())
856 c4x_leaf_function = c4x_leaf_function_p ();
858 insn = emit_insn (gen_push_st ());
859 RTX_FRAME_RELATED_P (insn) = 1;
860 if (size)
 862 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
863 RTX_FRAME_RELATED_P (insn) = 1;
864 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
865 gen_rtx_REG (QImode, SP_REGNO)));
866 RTX_FRAME_RELATED_P (insn) = 1;
867 /* We require that an ISR uses fewer than 32768 words of
868 local variables, otherwise we have to go to lots of
869 effort to save a register, load it with the desired size,
870 adjust the stack pointer, and then restore the modified
871 register. Frankly, I think it is a poor ISR that
872 requires more than 32767 words of local temporary
873 storage! */
874 if (size > 32767)
875 error ("ISR %s requires %d words of local vars, max is 32767",
876 current_function_name (), size);
878 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
879 gen_rtx_REG (QImode, SP_REGNO),
880 GEN_INT (size)));
881 RTX_FRAME_RELATED_P (insn) = 1;
883 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
885 if (c4x_isr_reg_used_p (regno))
887 if (regno == DP_REGNO)
889 insn = emit_insn (gen_push_dp ());
890 RTX_FRAME_RELATED_P (insn) = 1;
892 else
894 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
895 RTX_FRAME_RELATED_P (insn) = 1;
896 if (IS_EXT_REGNO (regno))
898 insn = emit_insn (gen_pushqf
899 (gen_rtx_REG (QFmode, regno)));
900 RTX_FRAME_RELATED_P (insn) = 1;
 905 /* We need to clear the repeat mode flag if the ISR is
 906 going to use an RPTB instruction or uses the RC, RS, or RE
 907 registers. */
908 if (regs_ever_live[RC_REGNO]
909 || regs_ever_live[RS_REGNO]
910 || regs_ever_live[RE_REGNO])
912 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
913 RTX_FRAME_RELATED_P (insn) = 1;
916 /* Reload DP reg if we are paranoid about some turkey
917 violating small memory model rules. */
918 if (TARGET_SMALL && TARGET_PARANOID)
920 insn = emit_insn (gen_set_ldp_prologue
921 (gen_rtx_REG (QImode, DP_REGNO),
922 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
923 RTX_FRAME_RELATED_P (insn) = 1;
926 else
928 if (frame_pointer_needed)
930 if ((size != 0)
931 || (current_function_args_size != 0)
932 || (optimize < 2))
 934 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
935 RTX_FRAME_RELATED_P (insn) = 1;
936 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
937 gen_rtx_REG (QImode, SP_REGNO)));
938 RTX_FRAME_RELATED_P (insn) = 1;
939 dont_push_ar3 = 1;
941 else
943 /* Since ar3 is not used, we don't need to push it. */
944 dont_push_ar3 = 1;
947 else
949 /* If we use ar3, we need to push it. */
950 dont_push_ar3 = 0;
951 if ((size != 0) || (current_function_args_size != 0))
953 /* If we are omitting the frame pointer, we still have
954 to make space for it so the offsets are correct
955 unless we don't use anything on the stack at all. */
956 size += 1;
960 if (size > 32767)
 962 /* Local vars are too big; it will take multiple operations
 963 to increment SP. */
964 if (TARGET_C3X)
966 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
967 GEN_INT(size >> 16)));
968 RTX_FRAME_RELATED_P (insn) = 1;
969 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
970 gen_rtx_REG (QImode, R1_REGNO),
971 GEN_INT(-16)));
972 RTX_FRAME_RELATED_P (insn) = 1;
974 else
976 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
977 GEN_INT(size & ~0xffff)));
978 RTX_FRAME_RELATED_P (insn) = 1;
980 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
981 gen_rtx_REG (QImode, R1_REGNO),
982 GEN_INT(size & 0xffff)));
983 RTX_FRAME_RELATED_P (insn) = 1;
984 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
985 gen_rtx_REG (QImode, SP_REGNO),
986 gen_rtx_REG (QImode, R1_REGNO)));
987 RTX_FRAME_RELATED_P (insn) = 1;
989 else if (size != 0)
 991 /* Local vars take up less than 32768 words, so we can directly
 992 add the number. */
993 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
994 gen_rtx_REG (QImode, SP_REGNO),
995 GEN_INT (size)));
996 RTX_FRAME_RELATED_P (insn) = 1;
999 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1001 if (regs_ever_live[regno] && ! call_used_regs[regno])
1003 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1005 if (TARGET_PRESERVE_FLOAT)
1007 insn = emit_insn (gen_pushqi
1008 (gen_rtx_REG (QImode, regno)));
1009 RTX_FRAME_RELATED_P (insn) = 1;
1011 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
1012 RTX_FRAME_RELATED_P (insn) = 1;
1014 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
 1016 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
1017 RTX_FRAME_RELATED_P (insn) = 1;
1025 void
 1026 c4x_expand_epilogue (void)
1028 int regno;
1029 int jump = 0;
1030 int dont_pop_ar3;
1031 rtx insn;
1032 int size = get_frame_size ();
 1034 /* For a __naked__ function, build no epilogue.  */
1035 if (c4x_naked_function_p ())
1037 insn = emit_jump_insn (gen_return_from_epilogue ());
1038 RTX_FRAME_RELATED_P (insn) = 1;
1039 return;
 1042 /* For an __interrupt__ function, build a specific epilogue.  */
1043 if (c4x_interrupt_function_p ())
1045 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1047 if (! c4x_isr_reg_used_p (regno))
1048 continue;
1049 if (regno == DP_REGNO)
1051 insn = emit_insn (gen_pop_dp ());
1052 RTX_FRAME_RELATED_P (insn) = 1;
1054 else
1056 /* We have to use unspec because the compiler will delete insns
1057 that are not call-saved. */
1058 if (IS_EXT_REGNO (regno))
1060 insn = emit_insn (gen_popqf_unspec
1061 (gen_rtx_REG (QFmode, regno)));
1062 RTX_FRAME_RELATED_P (insn) = 1;
1064 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1065 RTX_FRAME_RELATED_P (insn) = 1;
1068 if (size)
1070 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1071 gen_rtx_REG (QImode, SP_REGNO),
1072 GEN_INT(size)));
1073 RTX_FRAME_RELATED_P (insn) = 1;
1074 insn = emit_insn (gen_popqi
1075 (gen_rtx_REG (QImode, AR3_REGNO)));
1076 RTX_FRAME_RELATED_P (insn) = 1;
1078 insn = emit_insn (gen_pop_st ());
1079 RTX_FRAME_RELATED_P (insn) = 1;
1080 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1081 RTX_FRAME_RELATED_P (insn) = 1;
1083 else
1085 if (frame_pointer_needed)
1087 if ((size != 0)
1088 || (current_function_args_size != 0)
1089 || (optimize < 2))
1091 insn = emit_insn
1092 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1093 gen_rtx_MEM (QImode,
1094 gen_rtx_PLUS
1095 (QImode, gen_rtx_REG (QImode,
1096 AR3_REGNO),
1097 constm1_rtx))));
1098 RTX_FRAME_RELATED_P (insn) = 1;
 1100 /* We already have the return address and the fp,
 1101 so we need to add those to the stack. */
1102 size += 2;
1103 jump = 1;
1104 dont_pop_ar3 = 1;
1106 else
1108 /* Since ar3 is not used for anything, we don't need to
1109 pop it. */
1110 dont_pop_ar3 = 1;
1113 else
1115 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1116 if (size || current_function_args_size)
1118 /* If we are omitting the frame pointer, we still have
1119 to make space for it so the offsets are correct
1120 unless we don't use anything on the stack at all. */
1121 size += 1;
1125 /* Now restore the saved registers, putting in the delayed branch
1126 where required. */
1127 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1129 if (regs_ever_live[regno] && ! call_used_regs[regno])
1131 if (regno == AR3_REGNO && dont_pop_ar3)
1132 continue;
1134 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1136 insn = emit_insn (gen_popqf_unspec
1137 (gen_rtx_REG (QFmode, regno)));
1138 RTX_FRAME_RELATED_P (insn) = 1;
1139 if (TARGET_PRESERVE_FLOAT)
1141 insn = emit_insn (gen_popqi_unspec
1142 (gen_rtx_REG (QImode, regno)));
1143 RTX_FRAME_RELATED_P (insn) = 1;
1146 else
1148 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1149 RTX_FRAME_RELATED_P (insn) = 1;
1154 if (frame_pointer_needed)
1156 if ((size != 0)
1157 || (current_function_args_size != 0)
1158 || (optimize < 2))
1160 /* Restore the old FP. */
1161 insn = emit_insn
1162 (gen_movqi
1163 (gen_rtx_REG (QImode, AR3_REGNO),
1164 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1166 RTX_FRAME_RELATED_P (insn) = 1;
1170 if (size > 32767)
 1172 /* Local vars are too big; it will take multiple operations
 1173 to decrement SP. */
1174 if (TARGET_C3X)
1176 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1177 GEN_INT(size >> 16)));
1178 RTX_FRAME_RELATED_P (insn) = 1;
1179 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1180 gen_rtx_REG (QImode, R3_REGNO),
1181 GEN_INT(-16)));
1182 RTX_FRAME_RELATED_P (insn) = 1;
1184 else
1186 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1187 GEN_INT(size & ~0xffff)));
1188 RTX_FRAME_RELATED_P (insn) = 1;
1190 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1191 gen_rtx_REG (QImode, R3_REGNO),
1192 GEN_INT(size & 0xffff)));
1193 RTX_FRAME_RELATED_P (insn) = 1;
1194 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1195 gen_rtx_REG (QImode, SP_REGNO),
1196 gen_rtx_REG (QImode, R3_REGNO)));
1197 RTX_FRAME_RELATED_P (insn) = 1;
1199 else if (size != 0)
1201 /* Local vars take up less than 32768 words, so we can directly
1202 subtract the number. */
1203 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1204 gen_rtx_REG (QImode, SP_REGNO),
1205 GEN_INT(size)));
1206 RTX_FRAME_RELATED_P (insn) = 1;
1209 if (jump)
1211 insn = emit_jump_insn (gen_return_indirect_internal
1212 (gen_rtx_REG (QImode, R2_REGNO)));
1213 RTX_FRAME_RELATED_P (insn) = 1;
1215 else
1217 insn = emit_jump_insn (gen_return_from_epilogue ());
1218 RTX_FRAME_RELATED_P (insn) = 1;
 1224 int
 1225 c4x_null_epilogue_p (void)
1227 int regno;
1229 if (reload_completed
1230 && ! c4x_naked_function_p ()
1231 && ! c4x_interrupt_function_p ()
1232 && ! current_function_calls_alloca
1233 && ! current_function_args_size
1234 && ! (optimize < 2)
1235 && ! get_frame_size ())
1237 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1238 if (regs_ever_live[regno] && ! call_used_regs[regno]
1239 && (regno != AR3_REGNO))
1240 return 1;
1241 return 0;
1243 return 1;
 1247 int
 1248 c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
1250 rtx op0 = operands[0];
1251 rtx op1 = operands[1];
1253 if (! reload_in_progress
1254 && ! REG_P (op0)
1255 && ! REG_P (op1)
1256 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1257 op1 = force_reg (mode, op1);
1259 if (GET_CODE (op1) == LO_SUM
1260 && GET_MODE (op1) == Pmode
1261 && dp_reg_operand (XEXP (op1, 0), mode))
1263 /* expand_increment will sometimes create a LO_SUM immediate
1264 address. Undo this silliness. */
1265 op1 = XEXP (op1, 1);
1268 if (symbolic_address_operand (op1, mode))
1270 if (TARGET_LOAD_ADDRESS)
1272 /* Alias analysis seems to do a better job if we force
1273 constant addresses to memory after reload. */
1274 emit_insn (gen_load_immed_address (op0, op1));
1275 return 1;
1277 else
1279 /* Stick symbol or label address into the constant pool. */
1280 op1 = force_const_mem (Pmode, op1);
1283 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1285 /* We could be a lot smarter about loading some of these
1286 constants... */
1287 op1 = force_const_mem (mode, op1);
1290 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1291 and emit associated (HIGH (SYMREF)) if large memory model.
1292 c4x_legitimize_address could be used to do this,
1293 perhaps by calling validize_address. */
1294 if (TARGET_EXPOSE_LDP
1295 && ! (reload_in_progress || reload_completed)
1296 && GET_CODE (op1) == MEM
1297 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1299 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1300 if (! TARGET_SMALL)
1301 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1302 op1 = change_address (op1, mode,
1303 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
1306 if (TARGET_EXPOSE_LDP
1307 && ! (reload_in_progress || reload_completed)
1308 && GET_CODE (op0) == MEM
1309 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1311 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1312 if (! TARGET_SMALL)
1313 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1314 op0 = change_address (op0, mode,
1315 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1318 if (GET_CODE (op0) == SUBREG
1319 && mixed_subreg_operand (op0, mode))
1321 /* We should only generate these mixed mode patterns
 1322 during RTL generation. If we need to do it later on
1323 then we'll have to emit patterns that won't clobber CC. */
1324 if (reload_in_progress || reload_completed)
1325 abort ();
1326 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1327 op0 = SUBREG_REG (op0);
1328 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1330 op0 = copy_rtx (op0);
1331 PUT_MODE (op0, QImode);
1333 else
1334 abort ();
1336 if (mode == QFmode)
1337 emit_insn (gen_storeqf_int_clobber (op0, op1));
1338 else
1339 abort ();
1340 return 1;
1343 if (GET_CODE (op1) == SUBREG
1344 && mixed_subreg_operand (op1, mode))
1346 /* We should only generate these mixed mode patterns
 1347 during RTL generation. If we need to do it later on
1348 then we'll have to emit patterns that won't clobber CC. */
1349 if (reload_in_progress || reload_completed)
1350 abort ();
1351 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1352 op1 = SUBREG_REG (op1);
1353 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1355 op1 = copy_rtx (op1);
1356 PUT_MODE (op1, QImode);
1358 else
1359 abort ();
1361 if (mode == QFmode)
1362 emit_insn (gen_loadqf_int_clobber (op0, op1));
1363 else
1364 abort ();
1365 return 1;
1368 if (mode == QImode
1369 && reg_operand (op0, mode)
1370 && const_int_operand (op1, mode)
1371 && ! IS_INT16_CONST (INTVAL (op1))
1372 && ! IS_HIGH_CONST (INTVAL (op1)))
1374 emit_insn (gen_loadqi_big_constant (op0, op1));
1375 return 1;
1378 if (mode == HImode
1379 && reg_operand (op0, mode)
1380 && const_int_operand (op1, mode))
1382 emit_insn (gen_loadhi_big_constant (op0, op1));
1383 return 1;
1386 /* Adjust operands in case we have modified them. */
1387 operands[0] = op0;
1388 operands[1] = op1;
1390 /* Emit normal pattern. */
1391 return 0;
1395 void
1396 c4x_emit_libcall (rtx libcall, enum rtx_code code,
1397 enum machine_mode dmode, enum machine_mode smode,
1398 int noperands, rtx *operands)
1400 rtx ret;
1401 rtx insns;
1402 rtx equiv;
1404 start_sequence ();
1405 switch (noperands)
1407 case 2:
1408 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1409 operands[1], smode);
1410 equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
1411 break;
1413 case 3:
1414 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1415 operands[1], smode, operands[2], smode);
1416 equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
1417 break;
1419 default:
1420 abort ();
1423 insns = get_insns ();
1424 end_sequence ();
1425 emit_libcall_block (insns, operands[0], ret, equiv);
1429 void
1430 c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
1431 enum machine_mode mode, rtx *operands)
1433 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
1437 void
1438 c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
1439 enum machine_mode mode, rtx *operands)
1441 rtx ret;
1442 rtx insns;
1443 rtx equiv;
1445 start_sequence ();
1446 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1447 operands[1], mode, operands[2], mode);
1448 equiv = gen_rtx_TRUNCATE (mode,
1449 gen_rtx_LSHIFTRT (HImode,
1450 gen_rtx_MULT (HImode,
1451 gen_rtx_fmt_e (code, HImode, operands[1]),
1452 gen_rtx_fmt_e (code, HImode, operands[2])),
1453 GEN_INT (32)));
1454 insns = get_insns ();
1455 end_sequence ();
1456 emit_libcall_block (insns, operands[0], ret, equiv);
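/* The EQUIV expression above records that the libcall computes the
   high word of the widening product, roughly:

       op0 = (QImode) (((HImode) ext (op1) * (HImode) ext (op2)) >> 32)

   where ext is sign or zero extension according to CODE, and QImode
   and HImode are 32 and 64 bits on this target.  */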
 1460 int
 1461 c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
1463 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1464 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1465 rtx disp = NULL_RTX; /* Displacement. */
1466 enum rtx_code code;
1468 code = GET_CODE (addr);
1469 switch (code)
1471 /* Register indirect with auto increment/decrement. We don't
1472 allow SP here---push_operand should recognize an operand
1473 being pushed on the stack. */
1475 case PRE_DEC:
1476 case PRE_INC:
1477 case POST_DEC:
1478 if (mode != QImode && mode != QFmode)
1479 return 0;
1481 case POST_INC:
1482 base = XEXP (addr, 0);
1483 if (! REG_P (base))
1484 return 0;
1485 break;
1487 case PRE_MODIFY:
1488 case POST_MODIFY:
1490 rtx op0 = XEXP (addr, 0);
1491 rtx op1 = XEXP (addr, 1);
1493 if (mode != QImode && mode != QFmode)
1494 return 0;
1496 if (! REG_P (op0)
1497 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1498 return 0;
1499 base = XEXP (op1, 0);
1500 if (! REG_P (base))
1501 return 0;
1502 if (REGNO (base) != REGNO (op0))
1503 return 0;
1504 if (REG_P (XEXP (op1, 1)))
1505 indx = XEXP (op1, 1);
1506 else
1507 disp = XEXP (op1, 1);
1509 break;
1511 /* Register indirect. */
1512 case REG:
1513 base = addr;
1514 break;
1516 /* Register indirect with displacement or index. */
1517 case PLUS:
1519 rtx op0 = XEXP (addr, 0);
1520 rtx op1 = XEXP (addr, 1);
1521 enum rtx_code code0 = GET_CODE (op0);
1523 switch (code0)
1525 case REG:
1526 if (REG_P (op1))
1528 base = op0; /* Base + index. */
1529 indx = op1;
1530 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1532 base = op1;
1533 indx = op0;
1536 else
1538 base = op0; /* Base + displacement. */
1539 disp = op1;
1541 break;
1543 default:
1544 return 0;
1547 break;
1549 /* Direct addressing with DP register. */
1550 case LO_SUM:
1552 rtx op0 = XEXP (addr, 0);
1553 rtx op1 = XEXP (addr, 1);
1555 /* HImode and HFmode direct memory references aren't truly
1556 offsettable (consider case at end of data page). We
1557 probably get better code by loading a pointer and using an
1558 indirect memory reference. */
1559 if (mode == HImode || mode == HFmode)
1560 return 0;
1562 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1563 return 0;
1565 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1566 return 1;
1568 if (GET_CODE (op1) == CONST)
1569 return 1;
1570 return 0;
1572 break;
1574 /* Direct addressing with some work for the assembler... */
1575 case CONST:
1576 /* Direct addressing. */
1577 case LABEL_REF:
1578 case SYMBOL_REF:
1579 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1580 return 1;
1581 /* These need to be converted to a LO_SUM (...).
1582 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1583 return 0;
1585 /* Do not allow direct memory access to absolute addresses.
1586 This is more pain than it's worth, especially for the
1587 small memory model where we can't guarantee that
1588 this address is within the data page---we don't want
1589 to modify the DP register in the small memory model,
1590 even temporarily, since an interrupt can sneak in.... */
1591 case CONST_INT:
1592 return 0;
1594 /* Indirect indirect addressing. */
1595 case MEM:
1596 return 0;
1598 case CONST_DOUBLE:
1599 fatal_insn ("using CONST_DOUBLE for address", addr);
1601 default:
1602 return 0;
1605 /* Validate the base register. */
1606 if (base)
1608 /* Check that the address is offsettable for HImode and HFmode. */
1609 if (indx && (mode == HImode || mode == HFmode))
1610 return 0;
1612 /* Handle DP based stuff. */
1613 if (REGNO (base) == DP_REGNO)
1614 return 1;
1615 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1616 return 0;
1617 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1618 return 0;
1621 /* Now validate the index register. */
1622 if (indx)
1624 if (GET_CODE (indx) != REG)
1625 return 0;
1626 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1627 return 0;
1628 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1629 return 0;
1632 /* Validate displacement. */
1633 if (disp)
1635 if (GET_CODE (disp) != CONST_INT)
1636 return 0;
1637 if (mode == HImode || mode == HFmode)
1639 /* The offset displacement must be legitimate. */
1640 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1641 return 0;
1643 else
1645 if (! IS_DISP8_CONST (INTVAL (disp)))
1646 return 0;
1648 /* Can't add an index with a disp. */
1649 if (indx)
1650 return 0;
1652 return 1;
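/* Examples of the rules above (illustrative, in TI assembler
   notation): register indirect *ar0, base plus displacement
   *+ar0(5) and base plus index *+ar0(ir0) are all accepted, while
   absolute CONST_INT addresses and memory-indirect addresses are
   rejected.  */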
 1656 rtx
 1657 c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
1658 enum machine_mode mode ATTRIBUTE_UNUSED)
1660 if (GET_CODE (orig) == SYMBOL_REF
1661 || GET_CODE (orig) == LABEL_REF)
1663 if (mode == HImode || mode == HFmode)
1665 /* We need to force the address into
1666 a register so that it is offsettable. */
1667 rtx addr_reg = gen_reg_rtx (Pmode);
1668 emit_move_insn (addr_reg, orig);
1669 return addr_reg;
1671 else
1673 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1675 if (! TARGET_SMALL)
1676 emit_insn (gen_set_ldp (dp_reg, orig));
1678 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1682 return NULL_RTX;
1686 /* Provide the costs of an addressing mode that contains ADDR.
1687 If ADDR is not a valid address, its cost is irrelevant.
1688 This is used in cse and loop optimization to determine
1689 if it is worthwhile storing a common address into a register.
1690 Unfortunately, the C4x address cost depends on other operands. */
1692 static int
1693 c4x_address_cost (rtx addr)
1695 switch (GET_CODE (addr))
1697 case REG:
1698 return 1;
1700 case POST_INC:
1701 case POST_DEC:
1702 case PRE_INC:
1703 case PRE_DEC:
1704 return 1;
1706 /* These shouldn't be directly generated. */
1707 case SYMBOL_REF:
1708 case LABEL_REF:
1709 case CONST:
1710 return 10;
1712 case LO_SUM:
1714 rtx op1 = XEXP (addr, 1);
1716 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1717 return TARGET_SMALL ? 3 : 4;
1719 if (GET_CODE (op1) == CONST)
1721 rtx offset = const0_rtx;
1723 op1 = eliminate_constant_term (op1, &offset);
1725 /* ??? These costs need rethinking... */
1726 if (GET_CODE (op1) == LABEL_REF)
1727 return 3;
1729 if (GET_CODE (op1) != SYMBOL_REF)
1730 return 4;
1732 if (INTVAL (offset) == 0)
1733 return 3;
1735 return 4;
1737 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1739 break;
1741 case PLUS:
1743 register rtx op0 = XEXP (addr, 0);
1744 register rtx op1 = XEXP (addr, 1);
1746 if (GET_CODE (op0) != REG)
1747 break;
1749 switch (GET_CODE (op1))
1751 default:
1752 break;
1754 case REG:
1755 /* This cost for REG+REG must be greater than the cost
1756 for REG if we want autoincrement addressing modes. */
1757 return 2;
1759 case CONST_INT:
1760 /* The following tries to improve GIV combination
 1761 in strength reduction but appears not to help. */
1762 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1763 return 1;
1765 if (IS_DISP1_CONST (INTVAL (op1)))
1766 return 1;
1768 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
1769 return 2;
1771 return 3;
1774 default:
1775 break;
1778 return 4;
 1782 rtx
 1783 c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
1785 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1786 rtx cc_reg;
1788 if (mode == CC_NOOVmode
1789 && (code == LE || code == GE || code == LT || code == GT))
1790 return NULL_RTX;
1792 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1793 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1794 gen_rtx_COMPARE (mode, x, y)));
1795 return cc_reg;
1798 char *
1799 c4x_output_cbranch (const char *form, rtx seq)
1801 int delayed = 0;
1802 int annultrue = 0;
1803 int annulfalse = 0;
1804 rtx delay;
1805 char *cp;
1806 static char str[100];
1808 if (final_sequence)
1810 delay = XVECEXP (final_sequence, 0, 1);
1811 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1812 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1813 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1815 strcpy (str, form);
1816 cp = &str [strlen (str)];
1817 if (delayed)
1819 *cp++ = '%';
1820 *cp++ = '#';
1822 if (annultrue)
1824 *cp++ = 'a';
1825 *cp++ = 't';
1827 if (annulfalse)
1829 *cp++ = 'a';
1830 *cp++ = 'f';
1832 *cp++ = '\t';
1833 *cp++ = '%';
1834 *cp++ = 'l';
1835 *cp++ = '1';
1836 *cp = 0;
1837 return str;
1840 void
1841 c4x_print_operand (FILE *file, rtx op, int letter)
1843 rtx op1;
1844 enum rtx_code code;
1846 switch (letter)
1848 case '#': /* Delayed. */
1849 if (final_sequence)
1850 fprintf (file, "d");
1851 return;
1854 code = GET_CODE (op);
1855 switch (letter)
1857 case 'A': /* Direct address. */
1858 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1859 fprintf (file, "@");
1860 break;
1862 case 'H': /* Sethi. */
1863 output_addr_const (file, op);
1864 return;
1866 case 'I': /* Reversed condition. */
1867 code = reverse_condition (code);
1868 break;
1870 case 'L': /* Log 2 of constant. */
1871 if (code != CONST_INT)
1872 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1873 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1874 return;
1876 case 'N': /* Ones complement of small constant. */
1877 if (code != CONST_INT)
1878 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1879 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
1880 return;
1882 case 'K': /* Generate ldp(k) if direct address. */
1883 if (! TARGET_SMALL
1884 && code == MEM
1885 && GET_CODE (XEXP (op, 0)) == LO_SUM
1886 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1887 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1889 op1 = XEXP (XEXP (op, 0), 1);
1890 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1892 fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1893 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1894 fprintf (file, "\n");
1897 return;
1899 case 'M': /* Generate ldp(k) if direct address. */
1900 if (! TARGET_SMALL /* Only used in asm statements. */
1901 && code == MEM
1902 && (GET_CODE (XEXP (op, 0)) == CONST
1903 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1905 fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1906 output_address (XEXP (op, 0));
1907 fprintf (file, "\n\t");
1909 return;
1911 case 'O': /* Offset address. */
1912 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1913 break;
1914 else if (code == MEM)
1915 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1916 else if (code == REG)
1917 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1918 else
1919 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1920 return;
1922 case 'C': /* Call. */
1923 break;
1925 case 'U': /* Call/callu. */
1926 if (code != SYMBOL_REF)
1927 fprintf (file, "u");
1928 return;
1930 default:
1931 break;
1934 switch (code)
1936 case REG:
1937 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1938 && ! TARGET_TI)
1939 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1940 else
1941 fprintf (file, "%s", reg_names[REGNO (op)]);
1942 break;
1944 case MEM:
1945 output_address (XEXP (op, 0));
1946 break;
1948 case CONST_DOUBLE:
1950 char str[64];
1952 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
1953 sizeof (str), 0, 1);
1954 fprintf (file, "%s", str);
1956 break;
1958 case CONST_INT:
1959 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
1960 break;
1962 case NE:
1963 fprintf (file, "ne");
1964 break;
1966 case EQ:
1967 fprintf (file, "eq");
1968 break;
1970 case GE:
1971 fprintf (file, "ge");
1972 break;
1974 case GT:
1975 fprintf (file, "gt");
1976 break;
1978 case LE:
1979 fprintf (file, "le");
1980 break;
1982 case LT:
1983 fprintf (file, "lt");
1984 break;
1986 case GEU:
1987 fprintf (file, "hs");
1988 break;
1990 case GTU:
1991 fprintf (file, "hi");
1992 break;
1994 case LEU:
1995 fprintf (file, "ls");
1996 break;
1998 case LTU:
1999 fprintf (file, "lo");
2000 break;
2002 case SYMBOL_REF:
2003 output_addr_const (file, op);
2004 break;
2006 case CONST:
2007 output_addr_const (file, XEXP (op, 0));
2008 break;
2010 case CODE_LABEL:
2011 break;
2013 default:
2014 fatal_insn ("c4x_print_operand: Bad operand case", op);
2015 break;
2020 void
2021 c4x_print_operand_address (FILE *file, rtx addr)
2023 switch (GET_CODE (addr))
2025 case REG:
2026 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2027 break;
2029 case PRE_DEC:
2030 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2031 break;
2033 case POST_INC:
2034 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2035 break;
2037 case POST_MODIFY:
2039 rtx op0 = XEXP (XEXP (addr, 1), 0);
2040 rtx op1 = XEXP (XEXP (addr, 1), 1);
2042 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2043 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2044 reg_names[REGNO (op1)]);
2045 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2046 fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
2047 reg_names[REGNO (op0)], INTVAL (op1));
2048 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2049 fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
2050 reg_names[REGNO (op0)], -INTVAL (op1));
2051 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2052 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2053 reg_names[REGNO (op1)]);
2054 else
2055 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2057 break;
2059 case PRE_MODIFY:
2061 rtx op0 = XEXP (XEXP (addr, 1), 0);
2062 rtx op1 = XEXP (XEXP (addr, 1), 1);
2064 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2065 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2066 reg_names[REGNO (op1)]);
2067 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2068 fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
2069 reg_names[REGNO (op0)], INTVAL (op1));
2070 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2071 fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
2072 reg_names[REGNO (op0)], -INTVAL (op1));
2073 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2074 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2075 reg_names[REGNO (op1)]);
2076 else
2077 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2079 break;
2081 case PRE_INC:
2082 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2083 break;
2085 case POST_DEC:
2086 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2087 break;
2089 case PLUS: /* Indirect with displacement. */
2091 rtx op0 = XEXP (addr, 0);
2092 rtx op1 = XEXP (addr, 1);
2094 if (REG_P (op0))
2096 if (REG_P (op1))
2098 if (IS_INDEX_REG (op0))
2100 fprintf (file, "*+%s(%s)",
2101 reg_names[REGNO (op1)],
2102 reg_names[REGNO (op0)]); /* Index + base. */
2104 else
2106 fprintf (file, "*+%s(%s)",
2107 reg_names[REGNO (op0)],
2108 reg_names[REGNO (op1)]); /* Base + index. */
2111 else if (INTVAL (op1) < 0)
2113 fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
2114 reg_names[REGNO (op0)],
2115 -INTVAL (op1)); /* Base - displacement. */
2117 else
2119 fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
2120 reg_names[REGNO (op0)],
2121 INTVAL (op1)); /* Base + displacement. */
2124 else
2125 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2127 break;
2129 case LO_SUM:
2131 rtx op0 = XEXP (addr, 0);
2132 rtx op1 = XEXP (addr, 1);
2134 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2135 c4x_print_operand_address (file, op1);
2136 else
2137 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2139 break;
2141 case CONST:
2142 case SYMBOL_REF:
2143 case LABEL_REF:
2144 fprintf (file, "@");
2145 output_addr_const (file, addr);
2146 break;
2148 /* We shouldn't access CONST_INT addresses. */
2149 case CONST_INT:
2151 default:
2152 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2153 break;
2158 /* Return nonzero if the floating point operand will fit
2159 in the immediate field. */
2161 static int
2162 c4x_immed_float_p (rtx op)
2164 long convval[2];
2165 int exponent;
2166 REAL_VALUE_TYPE r;
2168 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2169 if (GET_MODE (op) == HFmode)
2170 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2171 else
2173 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2174 convval[1] = 0;
2177 /* Sign extend exponent. */
2178 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2179 if (exponent == -128)
2180 return 1; /* 0.0 */
2181 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2182 return 0; /* Precision doesn't fit. */
2183 return (exponent <= 7) /* Positive exp. */
2184 && (exponent >= -7); /* Negative exp. */
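/* For example, 1.5 fits: its exponent is 0 and its mantissa needs
   only the high-order bits.  0.1 does not fit, since its binary
   expansion leaves nonzero low-order mantissa bits, and 1.0e20 does
   not fit because its exponent lies outside [-7, 7].  */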
 2188 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
 2189 CALL, CALLcond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2191 None of the last four instructions from the bottom of the block can
2192 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2193 BcondAT or RETIcondD.
2195 This routine scans the four previous insns for a jump insn, and if
2196 one is found, returns 1 so that we bung in a nop instruction.
2197 This simple-minded strategy will add a nop even when one may not
2198 be required, say when there is a JUMP_INSN near the end of the
2199 block that doesn't get converted into a delayed branch.
2201 Note that we cannot have a call insn, since we don't generate
2202 repeat loops with calls in them (although I suppose we could, but
2203 there's no benefit).
2205 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2208 c4x_rptb_nop_p (rtx insn)
2210 rtx start_label;
2211 int i;
2213 /* Extract the start label from the jump pattern (rptb_end). */
2214 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2216 /* If there is a label at the end of the loop we must insert
2217 a NOP. */
2218 do {
2219 insn = previous_insn (insn);
2220 } while (GET_CODE (insn) == NOTE
2221 || GET_CODE (insn) == USE
2222 || GET_CODE (insn) == CLOBBER);
2223 if (GET_CODE (insn) == CODE_LABEL)
2224 return 1;
2226 for (i = 0; i < 4; i++)
2228 /* Search back for prev non-note and non-label insn. */
2229 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2230 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2232 if (insn == start_label)
2233 return i == 0;
2235 insn = previous_insn (insn);
2238 /* If we have a jump instruction we should insert a NOP. If we
2239 hit repeat block top we should only insert a NOP if the loop
2240 is empty. */
2241 if (GET_CODE (insn) == JUMP_INSN)
2242 return 1;
2243 insn = previous_insn (insn);
2245 return 0;
2249 /* The C4x looping instruction needs to be emitted at the top of the
2250 loop. Emitting the true RTL for a looping instruction at the top of
2251 the loop can cause problems with flow analysis. So instead, a dummy
2252 doloop insn is emitted at the end of the loop. This routine checks
2253 for the presence of this doloop insn and then searches back to the
2254 top of the loop, where it inserts the true looping insn (provided
2255 there are no instructions in the loop which would cause problems).
2256 Any additional labels can be emitted at this point. In addition, if
2257 the desired loop count register was not allocated, this routine does
2258 nothing.
2260 Before we can create a repeat block looping instruction we have to
2261 verify that there are no jumps out of the loop and that no jumps from
2262 outside the loop go into it. This can happen in the basic block
2263 reordering pass. The C4x cpu cannot handle this. */
2265 static int
2266 c4x_label_ref_used_p (rtx x, rtx code_label)
2268 enum rtx_code code;
2269 int i, j;
2270 const char *fmt;
2272 if (x == 0)
2273 return 0;
2275 code = GET_CODE (x);
2276 if (code == LABEL_REF)
2277 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2279 fmt = GET_RTX_FORMAT (code);
2280 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2282 if (fmt[i] == 'e')
2284 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2285 return 1;
2287 else if (fmt[i] == 'E')
2288 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2289 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2290 return 1;
2292 return 0;
2296 static int
2297 c4x_rptb_valid_p (rtx insn, rtx start_label)
2299 rtx end = insn;
2300 rtx start;
2301 rtx tmp;
2303 /* Find the start label. */
2304 for (; insn; insn = PREV_INSN (insn))
2305 if (insn == start_label)
2306 break;
2308 /* If the label was not found then we cannot use a rptb or rpts; it
2309 was probably moved by the basic block reorder pass. */
2310 if (! insn)
2311 return 0;
2313 start = insn;
2314 /* If any jump jumps inside this block then we must fail. */
2315 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2317 if (GET_CODE (insn) == CODE_LABEL)
2319 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2320 if (GET_CODE (tmp) == JUMP_INSN
2321 && c4x_label_ref_used_p (tmp, insn))
2322 return 0;
2325 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2327 if (GET_CODE (insn) == CODE_LABEL)
2329 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2330 if (GET_CODE (tmp) == JUMP_INSN
2331 && c4x_label_ref_used_p (tmp, insn))
2332 return 0;
2335 /* If any jump jumps outside this block then we must fail. */
2336 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2338 if (GET_CODE (insn) == CODE_LABEL)
2340 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2341 if (GET_CODE (tmp) == JUMP_INSN
2342 && c4x_label_ref_used_p (tmp, insn))
2343 return 0;
2344 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2345 if (GET_CODE (tmp) == JUMP_INSN
2346 && c4x_label_ref_used_p (tmp, insn))
2347 return 0;
2351 /* All checks OK. */
2352 return 1;
2356 void
2357 c4x_rptb_insert (rtx insn)
2359 rtx end_label;
2360 rtx start_label;
2361 rtx new_start_label;
2362 rtx count_reg;
2364 /* If the count register has not been allocated to RC, say if
2365 there is a movmem pattern in the loop, then do not insert a
2366 RPTB instruction. Instead we emit a decrement and branch
2367 at the end of the loop. */
2368 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2369 if (REGNO (count_reg) != RC_REGNO)
2370 return;
2372 /* Extract the start label from the jump pattern (rptb_end). */
2373 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2375 if (! c4x_rptb_valid_p (insn, start_label))
2377 /* We cannot use the rptb insn. Replace it so reorg can use
2378 the delay slots of the jump insn. */
2379 emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
2380 emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
2381 emit_insn_before (gen_bge (start_label), insn);
2382 LABEL_NUSES (start_label)++;
2383 delete_insn (insn);
2384 return;
2387 end_label = gen_label_rtx ();
2388 LABEL_NUSES (end_label)++;
2389 emit_label_after (end_label, insn);
2391 new_start_label = gen_label_rtx ();
2392 LABEL_NUSES (new_start_label)++;
2394 for (; insn; insn = PREV_INSN (insn))
2396 if (insn == start_label)
2397 break;
2398 if (GET_CODE (insn) == JUMP_INSN
2399 && JUMP_LABEL (insn) == start_label)
2400 redirect_jump (insn, new_start_label, 0);
2402 if (! insn)
2403 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2405 emit_label_after (new_start_label, insn);
2407 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2408 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2409 else
2410 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2411 if (LABEL_NUSES (start_label) == 0)
2412 delete_insn (start_label);
2416 /* We need to use direct addressing for large constants and addresses
2417 that cannot fit within an instruction. We must check for these
2418 after the final jump optimization pass, since this may
2419 introduce a local_move insn for a SYMBOL_REF. This pass
2420 must come before delayed branch slot filling since it can generate
2421 additional instructions.
2423 This function also fixes up RPTB style loops that didn't get RC
2424 allocated as the loop counter. */
2426 static void
2427 c4x_reorg (void)
2429 rtx insn;
2431 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2433 /* Only process real insns. */
2434 if (INSN_P (insn))
2436 int insn_code_number;
2437 rtx old;
2439 insn_code_number = recog_memoized (insn);
2441 if (insn_code_number < 0)
2442 continue;
2444 /* Insert the RTX for RPTB at the top of the loop
2445 and a label at the end of the loop. */
2446 if (insn_code_number == CODE_FOR_rptb_end)
2447 c4x_rptb_insert (insn);
2449 /* We need to split the insn here. Otherwise the calls to
2450 force_const_mem will not work for load_immed_address. */
2451 old = insn;
2453 /* Don't split the insn if it has been deleted. */
2454 if (! INSN_DELETED_P (old))
2455 insn = try_split (PATTERN(old), old, 1);
2457 /* When not optimizing, the old insn will still be left around
2458 with only the 'deleted' bit set. Transform it into a note
2459 to avoid confusion of subsequent processing. */
2460 if (INSN_DELETED_P (old))
2462 PUT_CODE (old, NOTE);
2463 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2464 NOTE_SOURCE_FILE (old) = 0;
2471 static int
2472 c4x_a_register (rtx op)
2474 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2478 static int
2479 c4x_x_register (rtx op)
2481 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2485 static int
2486 c4x_immed_int_constant (rtx op)
2488 if (GET_CODE (op) != CONST_INT)
2489 return 0;
2491 return GET_MODE (op) == VOIDmode
2492 || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2493 || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
2497 static int
2498 c4x_immed_float_constant (rtx op)
2500 if (GET_CODE (op) != CONST_DOUBLE)
2501 return 0;
2503 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2504 present this only means that a MEM rtx has been generated. It does
2505 not mean the rtx is really in memory. */
2507 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2512 c4x_shiftable_constant (rtx op)
2514 int i;
2515 int mask;
2516 int val = INTVAL (op);
2518 for (i = 0; i < 16; i++)
2520 if (val & (1 << i))
2521 break;
2523 mask = ((0xffff >> i) << 16) | 0xffff;
2524 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2525 : (val >> i) & mask))
2526 return i;
2527 return -1;
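/* Worked example, assuming 32 bit values: for op = 0x00230000 no bit
is set in the bottom 16 bits, so I reaches 16, MASK reduces to
0xffff, and (0x00230000 >> 16) == 0x23 is a valid 16 bit signed
constant. We therefore return 16: the constant can be loaded as
0x23 and then shifted left by 16. */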
2532 c4x_H_constant (rtx op)
2534 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2539 c4x_I_constant (rtx op)
2541 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2546 c4x_J_constant (rtx op)
2548 if (TARGET_C3X)
2549 return 0;
2550 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2554 static int
2555 c4x_K_constant (rtx op)
2557 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2558 return 0;
2559 return IS_INT5_CONST (INTVAL (op));
2564 c4x_L_constant (rtx op)
2566 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2570 static int
2571 c4x_N_constant (rtx op)
2573 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2577 static int
2578 c4x_O_constant (rtx op)
2580 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2584 /* The constraints do not have to check the register class,
2585 except when needed to discriminate between the constraints.
2586 The operand has been checked by the predicates to be valid. */
2588 /* ARx + 9-bit signed const or IRn
2589 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2590 We don't include the pre/post inc/dec forms here since
2591 they are handled by the <> constraints. */
2594 c4x_Q_constraint (rtx op)
2596 enum machine_mode mode = GET_MODE (op);
2598 if (GET_CODE (op) != MEM)
2599 return 0;
2600 op = XEXP (op, 0);
2601 switch (GET_CODE (op))
2603 case REG:
2604 return 1;
2606 case PLUS:
2608 rtx op0 = XEXP (op, 0);
2609 rtx op1 = XEXP (op, 1);
2611 if (! REG_P (op0))
2612 return 0;
2614 if (REG_P (op1))
2615 return 1;
2617 if (GET_CODE (op1) != CONST_INT)
2618 return 0;
2620 /* HImode and HFmode must be offsettable. */
2621 if (mode == HImode || mode == HFmode)
2622 return IS_DISP8_OFF_CONST (INTVAL (op1));
2624 return IS_DISP8_CONST (INTVAL (op1));
2626 break;
2628 default:
2629 break;
2631 return 0;
2635 /* ARx + 5-bit unsigned const
2636 *ARx, *+ARx(n) for n < 32. */
2639 c4x_R_constraint (rtx op)
2641 enum machine_mode mode = GET_MODE (op);
2643 if (TARGET_C3X)
2644 return 0;
2645 if (GET_CODE (op) != MEM)
2646 return 0;
2647 op = XEXP (op, 0);
2648 switch (GET_CODE (op))
2650 case REG:
2651 return 1;
2653 case PLUS:
2655 rtx op0 = XEXP (op, 0);
2656 rtx op1 = XEXP (op, 1);
2658 if (! REG_P (op0))
2659 return 0;
2661 if (GET_CODE (op1) != CONST_INT)
2662 return 0;
2664 /* HImode and HFmode must be offsettable. */
2665 if (mode == HImode || mode == HFmode)
2666 return IS_UINT5_CONST (INTVAL (op1) + 1);
2668 return IS_UINT5_CONST (INTVAL (op1));
2670 break;
2672 default:
2673 break;
2675 return 0;
2679 static int
2680 c4x_R_indirect (rtx op)
2682 enum machine_mode mode = GET_MODE (op);
2684 if (TARGET_C3X || GET_CODE (op) != MEM)
2685 return 0;
2687 op = XEXP (op, 0);
2688 switch (GET_CODE (op))
2690 case REG:
2691 return IS_ADDR_OR_PSEUDO_REG (op);
2693 case PLUS:
2695 rtx op0 = XEXP (op, 0);
2696 rtx op1 = XEXP (op, 1);
2698 /* HImode and HFmode must be offsettable. */
2699 if (mode == HImode || mode == HFmode)
2700 return IS_ADDR_OR_PSEUDO_REG (op0)
2701 && GET_CODE (op1) == CONST_INT
2702 && IS_UINT5_CONST (INTVAL (op1) + 1);
2704 return REG_P (op0)
2705 && IS_ADDR_OR_PSEUDO_REG (op0)
2706 && GET_CODE (op1) == CONST_INT
2707 && IS_UINT5_CONST (INTVAL (op1));
2709 break;
2711 default:
2712 break;
2714 return 0;
2718 /* ARx + 1-bit unsigned const or IRn
2719 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2720 We don't include the pre/post inc/dec forms here since
2721 they are handled by the <> constraints. */
2724 c4x_S_constraint (rtx op)
2726 enum machine_mode mode = GET_MODE (op);
2727 if (GET_CODE (op) != MEM)
2728 return 0;
2729 op = XEXP (op, 0);
2730 switch (GET_CODE (op))
2732 case REG:
2733 return 1;
2735 case PRE_MODIFY:
2736 case POST_MODIFY:
2738 rtx op0 = XEXP (op, 0);
2739 rtx op1 = XEXP (op, 1);
2741 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2742 || (op0 != XEXP (op1, 0)))
2743 return 0;
2745 op0 = XEXP (op1, 0);
2746 op1 = XEXP (op1, 1);
2747 return REG_P (op0) && REG_P (op1);
2748 /* Pre or post_modify with a displacement of 0 or 1
2749 should not be generated. */
2751 break;
2753 case PLUS:
2755 rtx op0 = XEXP (op, 0);
2756 rtx op1 = XEXP (op, 1);
2758 if (!REG_P (op0))
2759 return 0;
2761 if (REG_P (op1))
2762 return 1;
2764 if (GET_CODE (op1) != CONST_INT)
2765 return 0;
2767 /* HImode and HFmode must be offsettable. */
2768 if (mode == HImode || mode == HFmode)
2769 return IS_DISP1_OFF_CONST (INTVAL (op1));
2771 return IS_DISP1_CONST (INTVAL (op1));
2773 break;
2775 default:
2776 break;
2778 return 0;
2782 static int
2783 c4x_S_indirect (rtx op)
2785 enum machine_mode mode = GET_MODE (op);
2786 if (GET_CODE (op) != MEM)
2787 return 0;
2789 op = XEXP (op, 0);
2790 switch (GET_CODE (op))
2792 case PRE_DEC:
2793 case POST_DEC:
2794 if (mode != QImode && mode != QFmode)
2795 return 0;
2796 case PRE_INC:
2797 case POST_INC:
2798 op = XEXP (op, 0);
2800 case REG:
2801 return IS_ADDR_OR_PSEUDO_REG (op);
2803 case PRE_MODIFY:
2804 case POST_MODIFY:
2806 rtx op0 = XEXP (op, 0);
2807 rtx op1 = XEXP (op, 1);
2809 if (mode != QImode && mode != QFmode)
2810 return 0;
2812 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2813 || (op0 != XEXP (op1, 0)))
2814 return 0;
2816 op0 = XEXP (op1, 0);
2817 op1 = XEXP (op1, 1);
2818 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2819 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2820 /* Pre or post_modify with a displacement of 0 or 1
2821 should not be generated. */
2824 case PLUS:
2826 rtx op0 = XEXP (op, 0);
2827 rtx op1 = XEXP (op, 1);
2829 if (REG_P (op0))
2831 /* HImode and HFmode must be offsettable. */
2832 if (mode == HImode || mode == HFmode)
2833 return IS_ADDR_OR_PSEUDO_REG (op0)
2834 && GET_CODE (op1) == CONST_INT
2835 && IS_DISP1_OFF_CONST (INTVAL (op1));
2837 if (REG_P (op1))
2838 return (IS_INDEX_OR_PSEUDO_REG (op1)
2839 && IS_ADDR_OR_PSEUDO_REG (op0))
2840 || (IS_ADDR_OR_PSEUDO_REG (op1)
2841 && IS_INDEX_OR_PSEUDO_REG (op0));
2843 return IS_ADDR_OR_PSEUDO_REG (op0)
2844 && GET_CODE (op1) == CONST_INT
2845 && IS_DISP1_CONST (INTVAL (op1));
2848 break;
2850 default:
2851 break;
2853 return 0;
2857 /* Direct memory operand. */
2860 c4x_T_constraint (rtx op)
2862 if (GET_CODE (op) != MEM)
2863 return 0;
2864 op = XEXP (op, 0);
2866 if (GET_CODE (op) != LO_SUM)
2868 /* Allow call operands. */
2869 return GET_CODE (op) == SYMBOL_REF
2870 && GET_MODE (op) == Pmode
2871 && SYMBOL_REF_FUNCTION_P (op);
2874 /* HImode and HFmode are not offsettable. */
2875 if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2876 return 0;
2878 if ((GET_CODE (XEXP (op, 0)) == REG)
2879 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2880 return c4x_U_constraint (XEXP (op, 1));
2882 return 0;
2886 /* Symbolic operand. */
2889 c4x_U_constraint (rtx op)
2891 /* Don't allow direct addressing to an arbitrary constant. */
2892 return GET_CODE (op) == CONST
2893 || GET_CODE (op) == SYMBOL_REF
2894 || GET_CODE (op) == LABEL_REF;
2899 c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2901 if (GET_CODE (op) == MEM)
2903 enum rtx_code code = GET_CODE (XEXP (op, 0));
2905 if (code == PRE_INC
2906 || code == PRE_DEC
2907 || code == POST_INC
2908 || code == POST_DEC
2909 || code == PRE_MODIFY
2910 || code == POST_MODIFY
2912 return 1;
2914 return 0;
2918 /* Match any operand. */
2921 any_operand (register rtx op ATTRIBUTE_UNUSED,
2922 enum machine_mode mode ATTRIBUTE_UNUSED)
2924 return 1;
2928 /* Nonzero if OP is a floating point value with value 0.0. */
2931 fp_zero_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2933 REAL_VALUE_TYPE r;
2935 if (GET_CODE (op) != CONST_DOUBLE)
2936 return 0;
2937 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2938 return REAL_VALUES_EQUAL (r, dconst0);
2943 const_operand (register rtx op, register enum machine_mode mode)
2945 switch (mode)
2947 case QFmode:
2948 case HFmode:
2949 if (GET_CODE (op) != CONST_DOUBLE
2950 || GET_MODE (op) != mode
2951 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2952 return 0;
2954 return c4x_immed_float_p (op);
2956 #if Pmode != QImode
2957 case Pmode:
2958 #endif
2959 case QImode:
2960 if (GET_CODE (op) != CONST_INT
2961 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
2962 || GET_MODE_CLASS (mode) != MODE_INT)
2963 return 0;
2965 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
2967 case HImode:
2968 return 0;
2970 default:
2971 return 0;
2977 stik_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2979 return c4x_K_constant (op);
2984 not_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2986 return c4x_N_constant (op);
2991 reg_operand (rtx op, enum machine_mode mode)
2993 if (GET_CODE (op) == SUBREG
2994 && GET_MODE (op) == QFmode)
2995 return 0;
2996 return register_operand (op, mode);
3001 mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3003 /* Allow a (subreg:HF (reg:HI)) that may be generated for a union of an
3004 int and a long double. */
3005 if (GET_CODE (op) == SUBREG
3006 && (GET_MODE (op) == QFmode)
3007 && (GET_MODE (SUBREG_REG (op)) == QImode
3008 || GET_MODE (SUBREG_REG (op)) == HImode))
3009 return 1;
3010 return 0;
3015 reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3017 if (REG_P (op) || CONSTANT_P (op))
3018 return 1;
3019 return 0;
3024 not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3026 if (REG_P (op) || CONSTANT_P (op))
3027 return 1;
3028 if (GET_CODE (op) != MEM)
3029 return 0;
3030 op = XEXP (op, 0);
3031 switch (GET_CODE (op))
3033 case REG:
3034 return 1;
3036 case PLUS:
3038 rtx op0 = XEXP (op, 0);
3039 rtx op1 = XEXP (op, 1);
3041 if (! REG_P (op0))
3042 return 0;
3044 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3045 return 1;
3048 case LO_SUM:
3050 rtx op0 = XEXP (op, 0);
3052 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3053 return 1;
3055 break;
3057 case CONST:
3058 case SYMBOL_REF:
3059 case LABEL_REF:
3060 return 1;
3062 default:
3063 break;
3065 return 0;
3070 not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3072 if (REG_P (op) && REGNO (op) == RC_REGNO)
3073 return 0;
3074 return 1;
3078 /* Extended precision register R0-R1. */
3081 r0r1_reg_operand (rtx op, enum machine_mode mode)
3083 if (! reg_operand (op, mode))
3084 return 0;
3085 if (GET_CODE (op) == SUBREG)
3086 op = SUBREG_REG (op);
3087 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3091 /* Extended precision register R2-R3. */
3094 r2r3_reg_operand (rtx op, enum machine_mode mode)
3096 if (! reg_operand (op, mode))
3097 return 0;
3098 if (GET_CODE (op) == SUBREG)
3099 op = SUBREG_REG (op);
3100 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3104 /* Low extended precision register R0-R7. */
3107 ext_low_reg_operand (rtx op, enum machine_mode mode)
3109 if (! reg_operand (op, mode))
3110 return 0;
3111 if (GET_CODE (op) == SUBREG)
3112 op = SUBREG_REG (op);
3113 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3117 /* Extended precision register. */
3120 ext_reg_operand (rtx op, enum machine_mode mode)
3122 if (! reg_operand (op, mode))
3123 return 0;
3124 if (GET_CODE (op) == SUBREG)
3125 op = SUBREG_REG (op);
3126 if (! REG_P (op))
3127 return 0;
3128 return IS_EXT_OR_PSEUDO_REG (op);
3132 /* Standard precision register. */
3135 std_reg_operand (rtx op, enum machine_mode mode)
3137 if (! reg_operand (op, mode))
3138 return 0;
3139 if (GET_CODE (op) == SUBREG)
3140 op = SUBREG_REG (op);
3141 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3144 /* Standard precision register during reload, otherwise any register. */
3147 std_or_reg_operand (rtx op, enum machine_mode mode)
3149 if (reload_in_progress)
3150 return std_reg_operand (op, mode);
3151 return reg_operand (op, mode);
3154 /* Address register. */
3157 addr_reg_operand (rtx op, enum machine_mode mode)
3159 if (! reg_operand (op, mode))
3160 return 0;
3161 return c4x_a_register (op);
3165 /* Index register. */
3168 index_reg_operand (rtx op, enum machine_mode mode)
3170 if (! reg_operand (op, mode))
3171 return 0;
3172 if (GET_CODE (op) == SUBREG)
3173 op = SUBREG_REG (op);
3174 return c4x_x_register (op);
3178 /* DP register. */
3181 dp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3183 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3187 /* SP register. */
3190 sp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3192 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3196 /* ST register. */
3199 st_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3201 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3205 /* RC register. */
3208 rc_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3210 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3215 call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3217 return (REG_P (op) || symbolic_address_operand (op, mode));
3221 /* Symbolic address operand. */
3224 symbolic_address_operand (register rtx op,
3225 enum machine_mode mode ATTRIBUTE_UNUSED)
3227 switch (GET_CODE (op))
3229 case CONST:
3230 case SYMBOL_REF:
3231 case LABEL_REF:
3232 return 1;
3233 default:
3234 return 0;
3239 /* Check dst operand of a move instruction. */
3242 dst_operand (rtx op, enum machine_mode mode)
3244 if (GET_CODE (op) == SUBREG
3245 && mixed_subreg_operand (op, mode))
3246 return 0;
3248 if (REG_P (op))
3249 return reg_operand (op, mode);
3251 return nonimmediate_operand (op, mode);
3255 /* Check src operand of two operand arithmetic instructions. */
3258 src_operand (rtx op, enum machine_mode mode)
3260 if (GET_CODE (op) == SUBREG
3261 && mixed_subreg_operand (op, mode))
3262 return 0;
3264 if (REG_P (op))
3265 return reg_operand (op, mode);
3267 if (mode == VOIDmode)
3268 mode = GET_MODE (op);
3270 if (GET_CODE (op) == CONST_INT)
3271 return (mode == QImode || mode == Pmode || mode == HImode)
3272 && c4x_I_constant (op);
3274 /* We don't like CONST_DOUBLE integers. */
3275 if (GET_CODE (op) == CONST_DOUBLE)
3276 return c4x_H_constant (op);
3278 /* Disallow symbolic addresses. Only the predicate
3279 symbolic_address_operand will match these. */
3280 if (GET_CODE (op) == SYMBOL_REF
3281 || GET_CODE (op) == LABEL_REF
3282 || GET_CODE (op) == CONST)
3283 return 0;
3285 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3286 access to symbolic addresses. These operands will get forced
3287 into a register and the movqi expander will generate a
3288 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3289 if (GET_CODE (op) == MEM
3290 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3291 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3292 || GET_CODE (XEXP (op, 0)) == CONST)))
3293 return ! TARGET_EXPOSE_LDP
3294 && ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3296 return general_operand (op, mode);
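/* To summarize the cases above: registers go through reg_operand;
a CONST_INT must be a 16 bit signed immediate; a CONST_DOUBLE must
fit the immediate float format; bare symbolic addresses are always
rejected; and a direct memory reference to a symbol is accepted
only when neither TARGET_EXPOSE_LDP nor TARGET_LOAD_DIRECT_MEMS is
set and the modes agree. Everything else falls back to
general_operand. */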
3301 src_hi_operand (rtx op, enum machine_mode mode)
3303 if (c4x_O_constant (op))
3304 return 1;
3305 return src_operand (op, mode);
3309 /* Check src operand of two operand logical instructions. */
3312 lsrc_operand (rtx op, enum machine_mode mode)
3314 if (mode == VOIDmode)
3315 mode = GET_MODE (op);
3317 if (mode != QImode && mode != Pmode)
3318 fatal_insn ("mode not QImode", op);
3320 if (GET_CODE (op) == CONST_INT)
3321 return c4x_L_constant (op) || c4x_J_constant (op);
3323 return src_operand (op, mode);
3327 /* Check src operand of two operand tricky instructions. */
3330 tsrc_operand (rtx op, enum machine_mode mode)
3332 if (mode == VOIDmode)
3333 mode = GET_MODE (op);
3335 if (mode != QImode && mode != Pmode)
3336 fatal_insn ("mode not QImode", op);
3338 if (GET_CODE (op) == CONST_INT)
3339 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3341 return src_operand (op, mode);
3345 /* Check src operand of two operand non-immediate instructions. */
3348 nonimmediate_src_operand (rtx op, enum machine_mode mode)
3350 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3351 return 0;
3353 return src_operand (op, mode);
3357 /* Check logical src operand of two operand non-immediate instructions. */
3360 nonimmediate_lsrc_operand (rtx op, enum machine_mode mode)
3362 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3363 return 0;
3365 return lsrc_operand (op, mode);
3370 reg_or_const_operand (rtx op, enum machine_mode mode)
3372 return reg_operand (op, mode) || const_operand (op, mode);
3376 /* Check for indirect operands allowable in parallel instruction. */
3379 par_ind_operand (rtx op, enum machine_mode mode)
3381 if (mode != VOIDmode && mode != GET_MODE (op))
3382 return 0;
3384 return c4x_S_indirect (op);
3388 /* Check for operands allowable in parallel instruction. */
3391 parallel_operand (rtx op, enum machine_mode mode)
3393 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3397 static void
3398 c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
3400 *base = 0;
3401 *incdec = 0;
3402 *index = 0;
3403 *disp = 0;
3405 if (GET_CODE (op) != MEM)
3406 fatal_insn ("invalid indirect memory address", op);
3408 op = XEXP (op, 0);
3409 switch (GET_CODE (op))
3411 case PRE_DEC:
3412 *base = REGNO (XEXP (op, 0));
3413 *incdec = 1;
3414 *disp = -1;
3415 return;
3417 case POST_DEC:
3418 *base = REGNO (XEXP (op, 0));
3419 *incdec = 1;
3420 *disp = 0;
3421 return;
3423 case PRE_INC:
3424 *base = REGNO (XEXP (op, 0));
3425 *incdec = 1;
3426 *disp = 1;
3427 return;
3429 case POST_INC:
3430 *base = REGNO (XEXP (op, 0));
3431 *incdec = 1;
3432 *disp = 0;
3433 return;
3435 case POST_MODIFY:
3436 *base = REGNO (XEXP (op, 0));
3437 if (REG_P (XEXP (XEXP (op, 1), 1)))
3439 *index = REGNO (XEXP (XEXP (op, 1), 1));
3440 *disp = 0; /* ??? */
3442 else
3443 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3444 *incdec = 1;
3445 return;
3447 case PRE_MODIFY:
3448 *base = REGNO (XEXP (op, 0));
3449 if (REG_P (XEXP (XEXP (op, 1), 1)))
3451 *index = REGNO (XEXP (XEXP (op, 1), 1));
3452 *disp = 1; /* ??? */
3454 else
3455 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3456 *incdec = 1;
3458 return;
3460 case REG:
3461 *base = REGNO (op);
3462 return;
3464 case PLUS:
3466 rtx op0 = XEXP (op, 0);
3467 rtx op1 = XEXP (op, 1);
3469 if (c4x_a_register (op0))
3471 if (c4x_x_register (op1))
3473 *base = REGNO (op0);
3474 *index = REGNO (op1);
3475 return;
3477 else if ((GET_CODE (op1) == CONST_INT
3478 && IS_DISP1_CONST (INTVAL (op1))))
3480 *base = REGNO (op0);
3481 *disp = INTVAL (op1);
3482 return;
3485 else if (c4x_x_register (op0) && c4x_a_register (op1))
3487 *base = REGNO (op1);
3488 *index = REGNO (op0);
3489 return;
3492 /* Fall through. */
3494 default:
3495 fatal_insn ("invalid indirect (S) memory address", op);
3501 c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3503 int base0;
3504 int base1;
3505 int incdec0;
3506 int incdec1;
3507 int index0;
3508 int index1;
3509 int disp0;
3510 int disp1;
3512 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3513 return 1;
3515 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3516 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3518 if (store0 && store1)
3520 /* If we have two stores in parallel to the same address, then
3521 the C4x only executes one of the stores. This is unlikely to
3522 cause problems except when writing to a hardware device such
3523 as a FIFO since the second write will be lost. The user
3524 should flag the hardware location as being volatile so that
3525 we don't do this optimization. While it is unlikely that we
3526 have an aliased address if both locations are not marked
3527 volatile, it is probably safer to flag a potential conflict
3528 if either location is volatile. */
3529 if (! flag_argument_noalias)
3531 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3532 return 1;
3536 /* If we have a parallel load and a store to the same address, the load
3537 is performed first, so there is no conflict. Similarly, there is
3538 no conflict if we have parallel loads from the same address. */
3540 /* Cannot use auto increment or auto decrement twice for same
3541 base register. */
3542 if (base0 == base1 && incdec0 && incdec1)
3543 return 1;
3545 /* It might be too confusing for GCC if we use a base register
3546 with a side effect and a memory reference using the same register
3547 in parallel. */
3548 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3549 return 1;
3551 /* We cannot optimize the case where op0 and op1 refer to the same
3552 address. */
3553 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3554 return 1;
3556 /* No conflict. */
3557 return 0;
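/* For example, following the tests above: two parallel accesses
through *AR0++ conflict since they auto increment the same base
register, and *+AR0(2) conflicts with *+AR0(2) (same base, same
displacement), whereas *+AR0(1) and *+AR0(2) share a base but have
different displacements and so do not conflict. */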
3561 /* Check for while loop inside a decrement and branch loop. */
3564 c4x_label_conflict (rtx insn, rtx jump, rtx db)
3566 while (insn)
3568 if (GET_CODE (insn) == CODE_LABEL)
3570 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3571 return 1;
3572 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3573 return 0;
3575 insn = PREV_INSN (insn);
3577 return 1;
3581 /* Validate combination of operands for parallel load/store instructions. */
3584 valid_parallel_load_store (rtx *operands,
3585 enum machine_mode mode ATTRIBUTE_UNUSED)
3587 rtx op0 = operands[0];
3588 rtx op1 = operands[1];
3589 rtx op2 = operands[2];
3590 rtx op3 = operands[3];
3592 if (GET_CODE (op0) == SUBREG)
3593 op0 = SUBREG_REG (op0);
3594 if (GET_CODE (op1) == SUBREG)
3595 op1 = SUBREG_REG (op1);
3596 if (GET_CODE (op2) == SUBREG)
3597 op2 = SUBREG_REG (op2);
3598 if (GET_CODE (op3) == SUBREG)
3599 op3 = SUBREG_REG (op3);
3601 /* The patterns should only allow ext_low_reg_operand() or
3602 par_ind_operand() operands. Thus of the 4 operands, only 2
3603 should be REGs and the other 2 should be MEMs. */
3605 /* This test prevents the multipack pass from using this pattern if
3606 op0 is used as an index or base register in op2 or op3, since
3607 this combination will require reloading. */
3608 if (GET_CODE (op0) == REG
3609 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3610 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3611 return 0;
3613 /* LDI||LDI. */
3614 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3615 return (REGNO (op0) != REGNO (op2))
3616 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3617 && ! c4x_address_conflict (op1, op3, 0, 0);
3619 /* STI||STI. */
3620 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3621 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3622 && ! c4x_address_conflict (op0, op2, 1, 1);
3624 /* LDI||STI. */
3625 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3626 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3627 && ! c4x_address_conflict (op1, op2, 0, 1);
3629 /* STI||LDI. */
3630 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3631 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3632 && ! c4x_address_conflict (op0, op3, 1, 0);
3634 return 0;
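/* Illustration with hypothetical operands: the LDI||LDI case above
accepts a pairing such as

LDI *AR0,R0 || LDI *AR1,R1

provided R0 and R1 are distinct registers and the two addresses
cannot conflict, while the STI||STI case accepts

STI R0,*AR0 || STI R1,*AR1

with both accesses treated as stores by the conflict check. */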
3639 valid_parallel_operands_4 (rtx *operands,
3640 enum machine_mode mode ATTRIBUTE_UNUSED)
3642 rtx op0 = operands[0];
3643 rtx op2 = operands[2];
3645 if (GET_CODE (op0) == SUBREG)
3646 op0 = SUBREG_REG (op0);
3647 if (GET_CODE (op2) == SUBREG)
3648 op2 = SUBREG_REG (op2);
3650 /* This test prevents the multipack pass from using this pattern if
3651 op0 is used as an index or base register in op2, since this combination
3652 will require reloading. */
3653 if (GET_CODE (op0) == REG
3654 && GET_CODE (op2) == MEM
3655 && reg_mentioned_p (op0, XEXP (op2, 0)))
3656 return 0;
3658 return 1;
3663 valid_parallel_operands_5 (rtx *operands,
3664 enum machine_mode mode ATTRIBUTE_UNUSED)
3666 int regs = 0;
3667 rtx op0 = operands[0];
3668 rtx op1 = operands[1];
3669 rtx op2 = operands[2];
3670 rtx op3 = operands[3];
3672 if (GET_CODE (op0) == SUBREG)
3673 op0 = SUBREG_REG (op0);
3674 if (GET_CODE (op1) == SUBREG)
3675 op1 = SUBREG_REG (op1);
3676 if (GET_CODE (op2) == SUBREG)
3677 op2 = SUBREG_REG (op2);
3679 /* The patterns should only allow ext_low_reg_operand() or
3680 par_ind_operand() operands. Operands 1 and 2 may be commutative
3681 but only one of them can be a register. */
3682 if (GET_CODE (op1) == REG)
3683 regs++;
3684 if (GET_CODE (op2) == REG)
3685 regs++;
3687 if (regs != 1)
3688 return 0;
3690 /* This test prevents the multipack pass from using this pattern if
3691 op0 is used as an index or base register in op3, since this combination
3692 will require reloading. */
3693 if (GET_CODE (op0) == REG
3694 && GET_CODE (op3) == MEM
3695 && reg_mentioned_p (op0, XEXP (op3, 0)))
3696 return 0;
3698 return 1;
3703 valid_parallel_operands_6 (rtx *operands,
3704 enum machine_mode mode ATTRIBUTE_UNUSED)
3706 int regs = 0;
3707 rtx op0 = operands[0];
3708 rtx op1 = operands[1];
3709 rtx op2 = operands[2];
3710 rtx op4 = operands[4];
3711 rtx op5 = operands[5];
3713 if (GET_CODE (op1) == SUBREG)
3714 op1 = SUBREG_REG (op1);
3715 if (GET_CODE (op2) == SUBREG)
3716 op2 = SUBREG_REG (op2);
3717 if (GET_CODE (op4) == SUBREG)
3718 op4 = SUBREG_REG (op4);
3719 if (GET_CODE (op5) == SUBREG)
3720 op5 = SUBREG_REG (op5);
3722 /* The patterns should only allow ext_low_reg_operand() or
3723 par_ind_operand() operands. Thus of the 4 input operands, only 2
3724 should be REGs and the other 2 should be MEMs. */
3726 if (GET_CODE (op1) == REG)
3727 regs++;
3728 if (GET_CODE (op2) == REG)
3729 regs++;
3730 if (GET_CODE (op4) == REG)
3731 regs++;
3732 if (GET_CODE (op5) == REG)
3733 regs++;
3735 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3736 Perhaps we should count the MEMs as well? */
3737 if (regs != 2)
3738 return 0;
3740 /* This test prevents the multipack pass from using this pattern if
3741 op0 is used as an index or base register in op4 or op5, since
3742 this combination will require reloading. */
3743 if (GET_CODE (op0) == REG
3744 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3745 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3746 return 0;
3748 return 1;
3752 /* Validate combination of src operands. Note that the operands have
3753 been screened by the src_operand predicate. We just have to check
3754 that the combination of operands is valid. If FORCE is set, ensure
3755 that the destination regno is valid if we have a 2 operand insn. */
3757 static int
3758 c4x_valid_operands (enum rtx_code code, rtx *operands,
3759 enum machine_mode mode ATTRIBUTE_UNUSED,
3760 int force)
3762 rtx op0;
3763 rtx op1;
3764 rtx op2;
3765 enum rtx_code code1;
3766 enum rtx_code code2;
3769 /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
3770 if (code == IF_THEN_ELSE)
3771 return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);
3773 if (code == COMPARE)
3775 op1 = operands[0];
3776 op2 = operands[1];
3778 else
3780 op1 = operands[1];
3781 op2 = operands[2];
3784 op0 = operands[0];
3786 if (GET_CODE (op0) == SUBREG)
3787 op0 = SUBREG_REG (op0);
3788 if (GET_CODE (op1) == SUBREG)
3789 op1 = SUBREG_REG (op1);
3790 if (GET_CODE (op2) == SUBREG)
3791 op2 = SUBREG_REG (op2);
3793 code1 = GET_CODE (op1);
3794 code2 = GET_CODE (op2);
3797 if (code1 == REG && code2 == REG)
3798 return 1;
3800 if (code1 == MEM && code2 == MEM)
3802 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3803 return 1;
3804 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3807 /* We cannot handle two MEMs or two CONSTS, etc. */
3808 if (code1 == code2)
3809 return 0;
3811 if (code1 == REG)
3813 switch (code2)
3815 case CONST_INT:
3816 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3817 return 1;
3818 break;
3820 case CONST_DOUBLE:
3821 if (! c4x_H_constant (op2))
3822 return 0;
3823 break;
3825 /* Any valid memory operand screened by src_operand is OK. */
3826 case MEM:
3827 break;
3829 default:
3830 fatal_insn ("c4x_valid_operands: Internal error", op2);
3831 break;
3834 if (GET_CODE (op0) == SCRATCH)
3835 return 1;
3837 if (!REG_P (op0))
3838 return 0;
3840 /* Check that we have a valid destination register for a two operand
3841 instruction. */
3842 return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
3846 /* Check non-commutative operators. */
3847 if (code == ASHIFTRT || code == LSHIFTRT
3848 || code == ASHIFT || code == COMPARE)
3849 return code2 == REG
3850 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3853 /* Assume MINUS is commutative since the subtract patterns
3854 also support the reverse subtract instructions. Since op1
3855 is not a register, and op2 is a register, op1 can only
3856 be a restricted memory operand for a shift instruction. */
3857 if (code2 == REG)
3859 switch (code1)
3861 case CONST_INT:
3862 break;
3864 case CONST_DOUBLE:
3865 if (! c4x_H_constant (op1))
3866 return 0;
3867 break;
3869 /* Any valid memory operand screened by src_operand is OK. */
3870 case MEM:
3871 break;
3873 default:
3874 abort ();
3875 break;
3878 if (GET_CODE (op0) == SCRATCH)
3879 return 1;
3881 if (!REG_P (op0))
3882 return 0;
3884 /* Check that we have a valid destination register for a two operand
3885 instruction. */
3886 return ! force || REGNO (op1) == REGNO (op0);
3889 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3890 return 1;
3892 return 0;
3896 int
valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3899 /* If we are not optimizing then we have to let anything go and let
3900 reload fix things up. instantiate_decl in function.c can produce
3901 invalid insns by changing the offset of a memory operand from a
3902 valid one into an invalid one, when the second operand is also a
3903 memory operand. The alternative is not to allow two memory
3904 operands for an insn when not optimizing. The problem only rarely
3905 occurs, for example with the C-torture program DFcmp.c. */
3907 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
3912 legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3914 /* Compare only has 2 operands. */
3915 if (code == COMPARE)
3917 /* During RTL generation, force constants into pseudos so that
3918 they can get hoisted out of loops. This will tie up an extra
3919 register but can save an extra cycle. Only do this if loop
3920 optimization is enabled. (We cannot pull this trick for add and
3921 sub instructions since the flow pass won't find
3922 autoincrements etc.) This allows us to generate compare
3923 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
3924 of LDI *AR0++, R0; CMPI 42, R0.
3926 Note that expand_binops will try to load an expensive constant
3927 into a register if it is used within a loop. Unfortunately,
3928 the cost mechanism doesn't allow us to look at the other
3929 operand to decide whether the constant is expensive. */
3931 if (! reload_in_progress
3932 && TARGET_HOIST
3933 && optimize > 0
3934 && GET_CODE (operands[1]) == CONST_INT
3935 && rtx_cost (operands[1], code) > 1)
3936 operands[1] = force_reg (mode, operands[1]);
3938 if (! reload_in_progress
3939 && ! c4x_valid_operands (code, operands, mode, 0))
3940 operands[0] = force_reg (mode, operands[0]);
3941 return 1;
3944 /* We cannot do this for ADDI/SUBI insns since we will
3945 defeat the flow pass from finding autoincrement addressing
3946 opportunities. */
3947 if (! reload_in_progress
3948 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
3949 && TARGET_HOIST
3950 && optimize > 1
3951 && GET_CODE (operands[2]) == CONST_INT
3952 && rtx_cost (operands[2], code) > 1)
3953 operands[2] = force_reg (mode, operands[2]);
3955 /* We can get better code on a C30 if we force constant shift counts
3956 into a register. This way they can get hoisted out of loops,
3957 tying up a register but saving an instruction. The downside is
3958 that they may get allocated to an address or index register, and
3959 thus we will get a pipeline conflict if there is a nearby
3960 indirect address using an address register.
3962 Note that expand_binops will not try to load an expensive constant
3963 into a register if it is used within a loop for a shift insn. */
3965 if (! reload_in_progress
3966 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
3968 /* If the operand combination is invalid, we force operand1 into a
3969 register, preventing reload from having to do this at a
3970 later stage. */
3971 operands[1] = force_reg (mode, operands[1]);
3972 if (TARGET_FORCE)
3974 emit_move_insn (operands[0], operands[1]);
3975 operands[1] = copy_rtx (operands[0]);
3977 else
3979 /* Just in case... */
3980 if (! c4x_valid_operands (code, operands, mode, 0))
3981 operands[2] = force_reg (mode, operands[2]);
3985 /* Right shifts require a negative shift count, but GCC expects
3986 a positive count, so we emit a NEG. */
3987 if ((code == ASHIFTRT || code == LSHIFTRT)
3988 && (GET_CODE (operands[2]) != CONST_INT))
3989 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
3992 /* When the shift count is greater than 32 then the result
3993 can be implementation dependent. We truncate the result to
3994 fit in 5 bits so that we do not emit invalid code when
3995 optimizing---such as trying to generate lhu2 with 20021124-1.c. */
3996 if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
3997 && (GET_CODE (operands[2]) == CONST_INT))
3998 && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
3999 operands[2]
4000 = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));
4002 return 1;
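/* Worked example for the truncation above, using a hypothetical count:
on the c4x QImode is a 32 bit word, so a constant shift count of 40
exceeds GET_MODE_BITSIZE (mode) - 1 == 31 and is masked down to
40 & 31 == 8 before the insn is emitted. */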
4006 /* The following predicates are used for instruction scheduling. */
4009 group1_reg_operand (rtx op, enum machine_mode mode)
4011 if (mode != VOIDmode && mode != GET_MODE (op))
4012 return 0;
4013 if (GET_CODE (op) == SUBREG)
4014 op = SUBREG_REG (op);
4015 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4020 group1_mem_operand (rtx op, enum machine_mode mode)
4022 if (mode != VOIDmode && mode != GET_MODE (op))
4023 return 0;
4025 if (GET_CODE (op) == MEM)
4027 op = XEXP (op, 0);
4028 if (GET_CODE (op) == PLUS)
4030 rtx op0 = XEXP (op, 0);
4031 rtx op1 = XEXP (op, 1);
4033 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4034 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4035 return 1;
4037 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4038 return 1;
4041 return 0;
4045 /* Return true if OP is any one of the address registers. */
4048 arx_reg_operand (rtx op, enum machine_mode mode)
4050 if (mode != VOIDmode && mode != GET_MODE (op))
4051 return 0;
4052 if (GET_CODE (op) == SUBREG)
4053 op = SUBREG_REG (op);
4054 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4058 static int
4059 c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
4061 if (mode != VOIDmode && mode != GET_MODE (op))
4062 return 0;
4063 if (GET_CODE (op) == SUBREG)
4064 op = SUBREG_REG (op);
4065 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4069 static int
4070 c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
4072 if (mode != VOIDmode && mode != GET_MODE (op))
4073 return 0;
4075 if (GET_CODE (op) == MEM)
4077 op = XEXP (op, 0);
4078 switch (GET_CODE (op))
4080 case PRE_DEC:
4081 case POST_DEC:
4082 case PRE_INC:
4083 case POST_INC:
4084 op = XEXP (op, 0);
4086 case REG:
4087 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4089 case PRE_MODIFY:
4090 case POST_MODIFY:
4091 if (REG_P (XEXP (op, 0)) && (! reload_completed
4092 || (REGNO (XEXP (op, 0)) == regno)))
4093 return 1;
4094 if (REG_P (XEXP (XEXP (op, 1), 1))
4095 && (! reload_completed
4096 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4097 return 1;
4098 break;
4100 case PLUS:
4102 rtx op0 = XEXP (op, 0);
4103 rtx op1 = XEXP (op, 1);
4105 if ((REG_P (op0) && (! reload_completed
4106 || (REGNO (op0) == regno)))
4107 || (REG_P (op1) && (! reload_completed
4108 || (REGNO (op1) == regno))))
4109 return 1;
4111 break;
4113 default:
4114 break;
4117 return 0;
4122 ar0_reg_operand (rtx op, enum machine_mode mode)
4124 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4129 ar0_mem_operand (rtx op, enum machine_mode mode)
4131 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4136 ar1_reg_operand (rtx op, enum machine_mode mode)
4138 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4143 ar1_mem_operand (rtx op, enum machine_mode mode)
4145 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4150 ar2_reg_operand (rtx op, enum machine_mode mode)
4152 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4157 ar2_mem_operand (rtx op, enum machine_mode mode)
4159 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4164 ar3_reg_operand (rtx op, enum machine_mode mode)
4166 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4171 ar3_mem_operand (rtx op, enum machine_mode mode)
4173 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4178 ar4_reg_operand (rtx op, enum machine_mode mode)
4180 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4185 ar4_mem_operand (rtx op, enum machine_mode mode)
4187 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4192 ar5_reg_operand (rtx op, enum machine_mode mode)
4194 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4199 ar5_mem_operand (rtx op, enum machine_mode mode)
4201 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4206 ar6_reg_operand (rtx op, enum machine_mode mode)
4208 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4213 ar6_mem_operand (rtx op, enum machine_mode mode)
4215 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4220 ar7_reg_operand (rtx op, enum machine_mode mode)
4222 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4227 ar7_mem_operand (rtx op, enum machine_mode mode)
4229 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4234 ir0_reg_operand (rtx op, enum machine_mode mode)
4236 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4241 ir0_mem_operand (rtx op, enum machine_mode mode)
4243 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4248 ir1_reg_operand (rtx op, enum machine_mode mode)
4250 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4255 ir1_mem_operand (rtx op, enum machine_mode mode)
4257 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4261 /* This is similar to operand_subword but allows autoincrement
4262 addressing. */
4265 c4x_operand_subword (rtx op, int i, int validate_address,
4266 enum machine_mode mode)
4268 if (mode != HImode && mode != HFmode)
4269 fatal_insn ("c4x_operand_subword: invalid mode", op);
4271 if (mode == HFmode && REG_P (op))
4272 fatal_insn ("c4x_operand_subword: invalid operand", op);
4274 if (GET_CODE (op) == MEM)
4276 enum rtx_code code = GET_CODE (XEXP (op, 0));
4277 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4278 enum machine_mode submode;
4280 submode = mode;
4281 if (mode == HImode)
4282 submode = QImode;
4283 else if (mode == HFmode)
4284 submode = QFmode;
4286 switch (code)
4288 case POST_INC:
4289 case PRE_INC:
4290 return gen_rtx_MEM (submode, XEXP (op, 0));
4292 case POST_DEC:
4293 case PRE_DEC:
4294 case PRE_MODIFY:
4295 case POST_MODIFY:
4296 /* We could handle these with some difficulty.
4297 e.g., *p-- => *(p-=2); *(p+1). */
4298 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4300 case SYMBOL_REF:
4301 case LABEL_REF:
4302 case CONST:
4303 case CONST_INT:
4304 fatal_insn ("c4x_operand_subword: invalid address", op);
4306 /* Even though offsettable_address_p considers (MEM
4307 (LO_SUM)) to be offsettable, it is not safe if the
4308 address is at the end of the data page since we also have
4309 to fix up the associated HIGH part. In this case, where
4310 we are trying to split a HImode or HFmode memory
4311 reference, we would have to emit another insn to reload a
4312 new HIGH value. It's easier to disable LO_SUM memory references
4313 in HImode or HFmode and we probably get better code. */
4314 case LO_SUM:
4315 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4317 default:
4318 break;
4322 return operand_subword (op, i, validate_address, mode);
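/* For instance, an HImode reference through (post_inc AR0) is split by
the code above into two QImode references that share the same
POST_INC address, so both print as *AR0++ and the increment made by
the first access steps the address on to the second word. */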
4325 struct name_list
4327 struct name_list *next;
4328 const char *name;
4331 static struct name_list *global_head;
4332 static struct name_list *extern_head;
4335 /* Add NAME to list of global symbols and remove from external list if
4336 present on external list. */
4338 void
4339 c4x_global_label (const char *name)
4341 struct name_list *p, *last;
4343 /* Do not insert duplicate names, so linearly search through list of
4344 existing names. */
4345 p = global_head;
4346 while (p)
4348 if (strcmp (p->name, name) == 0)
4349 return;
4350 p = p->next;
4352 p = (struct name_list *) xmalloc (sizeof *p);
4353 p->next = global_head;
4354 p->name = name;
4355 global_head = p;
4357 /* Remove this name from ref list if present. */
4358 last = NULL;
4359 p = extern_head;
4360 while (p)
4362 if (strcmp (p->name, name) == 0)
4364 if (last)
4365 last->next = p->next;
4366 else
4367 extern_head = p->next;
4368 break;
4370 last = p;
4371 p = p->next;
4376 /* Add NAME to list of external symbols. */
4378 void
4379 c4x_external_ref (const char *name)
4381 struct name_list *p;
4383 /* Do not insert duplicate names. */
4384 p = extern_head;
4385 while (p)
4387 if (strcmp (p->name, name) == 0)
4388 return;
4389 p = p->next;
4392 /* Do not insert ref if global found. */
4393 p = global_head;
4394 while (p)
4396 if (strcmp (p->name, name) == 0)
4397 return;
4398 p = p->next;
4400 p = (struct name_list *) xmalloc (sizeof *p);
4401 p->next = extern_head;
4402 p->name = name;
4403 extern_head = p;
4406 /* We need to have a data section we can identify so that we can set
4407 the DP register back to a data pointer in the small memory model.
4408 This is only required for ISRs if we are paranoid that someone
4409 may have quietly changed this register on the sly. */
4410 static void
4411 c4x_file_start (void)
4413 int dspversion = 0;
4414 if (TARGET_C30) dspversion = 30;
4415 if (TARGET_C31) dspversion = 31;
4416 if (TARGET_C32) dspversion = 32;
4417 if (TARGET_C33) dspversion = 33;
4418 if (TARGET_C40) dspversion = 40;
4419 if (TARGET_C44) dspversion = 44;
4421 default_file_start ();
4422 fprintf (asm_out_file, "\t.version\t%d\n", dspversion);
4423 fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
4427 static void
4428 c4x_file_end (void)
4430 struct name_list *p;
4432 /* Output all external names that are not global. */
4433 p = extern_head;
4434 while (p)
4436 fprintf (asm_out_file, "\t.ref\t");
4437 assemble_name (asm_out_file, p->name);
4438 fprintf (asm_out_file, "\n");
4439 p = p->next;
4441 fprintf (asm_out_file, "\t.end\n");
4445 static void
4446 c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
4448 while (list != NULL_TREE
4449 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4450 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4451 list = TREE_CHAIN (list);
4452 if (list)
4453 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4454 *attributes);
4458 static void
4459 c4x_insert_attributes (tree decl, tree *attributes)
4461 switch (TREE_CODE (decl))
4463 case FUNCTION_DECL:
4464 c4x_check_attribute ("section", code_tree, decl, attributes);
4465 c4x_check_attribute ("const", pure_tree, decl, attributes);
4466 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4467 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4468 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4469 break;
4471 case VAR_DECL:
4472 c4x_check_attribute ("section", data_tree, decl, attributes);
4473 break;
4475 default:
4476 break;
4480 /* Table of valid machine attributes. */
4481 const struct attribute_spec c4x_attribute_table[] =
4483 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4484 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4485 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4486 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4487 { NULL, 0, 0, false, false, false, NULL }
4490 /* Handle an attribute requiring a FUNCTION_TYPE;
4491 arguments as in struct attribute_spec.handler. */
4492 static tree
4493 c4x_handle_fntype_attribute (tree *node, tree name,
4494 tree args ATTRIBUTE_UNUSED,
4495 int flags ATTRIBUTE_UNUSED,
4496 bool *no_add_attrs)
4498 if (TREE_CODE (*node) != FUNCTION_TYPE)
4500 warning ("`%s' attribute only applies to functions",
4501 IDENTIFIER_POINTER (name));
4502 *no_add_attrs = true;
4505 return NULL_TREE;
4509 /* !!! FIXME to emit RPTS correctly. */
4512 c4x_rptb_rpts_p (rtx insn, rtx op)
4514 /* The next insn should be our label marking where the
4515 repeat block starts. */
4516 insn = NEXT_INSN (insn);
4517 if (GET_CODE (insn) != CODE_LABEL)
4519 /* Some insns may have been shifted between the RPTB insn
4520 and the top label... They were probably destined to
4521 be moved out of the loop. For now, let's leave them
4522 where they are; when debugging we report a fatal error. We should
4523 probably move these insns before the repeat block insn. */
4524 if (TARGET_DEBUG)
4525 fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved\n",
4526 insn);
4527 return 0;
4530 /* Skip any notes. */
4531 insn = next_nonnote_insn (insn);
4533 /* This should be our first insn in the loop. */
4534 if (! INSN_P (insn))
4535 return 0;
4537 /* Skip any notes. */
4538 insn = next_nonnote_insn (insn);
4540 if (! INSN_P (insn))
4541 return 0;
4543 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4544 return 0;
4546 if (TARGET_RPTS)
4547 return 1;
4549 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4553 /* Check if register r11 is used as the destination of an insn. */
4555 static int
4556 c4x_r11_set_p (rtx x)
4558 rtx set;
4559 int i, j;
4560 const char *fmt;
4562 if (x == 0)
4563 return 0;
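/* If this is a filled delay slot SEQUENCE, the final insn in the
vector is the one executed immediately before our caller's laj
insn, so that is the insn we examine. */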
4565 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4566 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4568 if (INSN_P (x) && (set = single_set (x)))
4569 x = SET_DEST (set);
4571 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4572 return 1;
4574 fmt = GET_RTX_FORMAT (GET_CODE (x));
4575 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4577 if (fmt[i] == 'e')
4579 if (c4x_r11_set_p (XEXP (x, i)))
4580 return 1;
4582 else if (fmt[i] == 'E')
4583 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4584 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4585 return 1;
4587 return 0;
4591 /* The c4x sometimes has a problem when the insn before the laj insn
4592 sets the r11 register. Check for this situation. */
4595 c4x_check_laj_p (rtx insn)
4597 insn = prev_nonnote_insn (insn);
4599 /* If this is the start of the function, no nop is needed. */
4600 if (insn == 0)
4601 return 0;
4603 /* If the previous insn is a code label we have to insert a nop. This
4604 could be a jump or table jump. We can find the normal jumps by
4605 scanning the function but this will not find table jumps. */
4606 if (GET_CODE (insn) == CODE_LABEL)
4607 return 1;
4609 /* If the previous insn sets register r11 we have to insert a nop. */
4610 if (c4x_r11_set_p (insn))
4611 return 1;
4613 /* No nop needed. */
4614 return 0;
4618 /* Adjust the cost of a scheduling dependency. Return the new cost of
4619 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4620 A set of an address register followed by a use incurs a 2 cycle
4621 stall (reduced to a single cycle on the c40 using LDA), while
4622 a read of an address register followed by a use incurs a single cycle stall. */
4624 #define SET_USE_COST 3
4625 #define SETLDA_USE_COST 2
4626 #define READ_USE_COST 2
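/* These costs appear to be the stall length plus one cycle for the
dependent insn itself: a 2 cycle set-use stall gives 3, the single
cycle LDA variant on the c40 gives 2, and the single cycle read-use
stall also gives 2. */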
4628 static int
4629 c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4631 /* Don't worry about this until we know what registers have been
4632 assigned. */
4633 if (flag_schedule_insns == 0 && ! reload_completed)
4634 return 0;
4636 /* How do we handle dependencies where a read followed by another
4637 read causes a pipeline stall? For example, a read of ar0 followed
4638 by the use of ar0 for a memory reference. It looks like we
4639 need to extend the scheduler to handle this case. */
4641 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4642 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4643 so only deal with insns we know about. */
4644 if (recog_memoized (dep_insn) < 0)
4645 return 0;
4647 if (REG_NOTE_KIND (link) == 0)
4649 int max = 0;
4651 /* Data dependency; DEP_INSN writes a register that INSN reads some
4652 cycles later. */
4653 if (TARGET_C3X)
4655 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4656 max = SET_USE_COST > max ? SET_USE_COST : max;
4657 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4658 max = READ_USE_COST > max ? READ_USE_COST : max;
4660 else
4662 /* This could be significantly optimized. We should look
4663 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4664 insn uses ar0-ar7. We then test if the same register
4665 is used. The tricky bit is that some operands will
4666 use several registers... */
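          /* Each get_attr_setarN, get_attr_usearN, and get_attr_readarN
             accessor below is a separate function generated from an
             attribute defined in c4x.md, which is why these checks are
             written out longhand rather than looped.  */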
          if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

          if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
        }

      if (max)
        cost = max;

      /* For other data dependencies, the default cost specified in the
         md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
         cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
         cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
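
/* Register the c4x-specific builtin functions with the front end;
   each C4X_BUILTIN_* code is expanded into insns by
   c4x_expand_builtin below.  */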
void
c4x_init_builtins (void)
{
  tree endlink = void_list_node;

  lang_hooks.builtin_function ("fast_ftoi",
                               build_function_type
                               (integer_type_node,
                                tree_cons (NULL_TREE, double_type_node,
                                           endlink)),
                               C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
  lang_hooks.builtin_function ("ansi_ftoi",
                               build_function_type
                               (integer_type_node,
                                tree_cons (NULL_TREE, double_type_node,
                                           endlink)),
                               C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
                               NULL_TREE);
  if (TARGET_C3X)
    lang_hooks.builtin_function ("fast_imult",
                                 build_function_type
                                 (integer_type_node,
                                  tree_cons (NULL_TREE, integer_type_node,
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
                                 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
                                 NULL_TREE);
  else
    {
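      /* Only the c4x has hardware conversions between its native
         floating-point format and IEEE single precision; toieee and
         frieee expose those, and fast_invf the reciprocal estimate.  */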
      lang_hooks.builtin_function ("toieee",
                                   build_function_type
                                   (double_type_node,
                                    tree_cons (NULL_TREE, double_type_node,
                                               endlink)),
                                   C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
                                   NULL_TREE);
      lang_hooks.builtin_function ("frieee",
                                   build_function_type
                                   (double_type_node,
                                    tree_cons (NULL_TREE, double_type_node,
                                               endlink)),
                                   C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
                                   NULL_TREE);
      lang_hooks.builtin_function ("fast_invf",
                                   build_function_type
                                   (double_type_node,
                                    tree_cons (NULL_TREE, double_type_node,
                                               endlink)),
                                   C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
                                   NULL_TREE);
    }
}

rtx
c4x_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      if (! TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
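      /* The frieee pattern takes its input operand from memory, so a
         register operand must first be spilled to a stack slot.  */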
      if (register_operand (r0, QFmode))
        {
          r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
          emit_move_insn (r1, r0);
          r0 = r1;
        }
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  return NULL_RTX;
}
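
/* Give the library calls that the compiler generates implicitly the
   names used by the c4x run-time routines.  */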
static void
c4x_init_libfuncs (void)
{
  set_optab_libfunc (smul_optab, QImode, "__mulqi3");
  set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
  set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
  set_optab_libfunc (smod_optab, QImode, "__modqi3");
  set_optab_libfunc (umod_optab, QImode, "__umodqi3");
  set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
  set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
  set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
  set_optab_libfunc (smul_optab, HImode, "__mulhi3");
  set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
  set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
  set_optab_libfunc (smod_optab, HImode, "__modhi3");
  set_optab_libfunc (umod_optab, HImode, "__umodhi3");
  set_optab_libfunc (ffs_optab, QImode, "__ffs");
  smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
  umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
  fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
  fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
  fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
  fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
  floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
  floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
  floathihf2_libfunc = init_one_libfunc ("__floathihf2");
  floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
}
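
/* Output a directive switching to the named section NAME; the c4x
   assembler uses .sect rather than the more common .section.  */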
static void
c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
                       tree decl ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}
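
/* Globalize a label in the usual way, additionally recording the
   name via c4x_global_label for the c4x-specific assembler
   bookkeeping.  */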
static void
c4x_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}

#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */
static bool
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  HOST_WIDE_INT val;

  switch (code)
    {
      /* Some small integers are effectively free for the C40.  We should
         also consider if we are using the small memory model.  With
         the big memory model we require an extra insn for a constant
         loaded from memory.  */

    case CONST_INT:
      val = INTVAL (x);
      if (c4x_J_constant (x))
        *total = 0;
      else if (! TARGET_C3X
               && outer_code == AND
               && (val == 255 || val == 65535))
        *total = 0;
      else if (! TARGET_C3X
               && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
               && (val == 16 || val == 24))
        *total = 0;
      else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
        *total = 3;
      else if (LOGICAL_CODE_P (outer_code)
               ? c4x_L_constant (x) : c4x_I_constant (x))
        *total = 2;
      else
        *total = 4;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 4;
      return true;

    case CONST_DOUBLE:
      if (c4x_H_constant (x))
        *total = 2;
      else if (GET_MODE (x) == QFmode)
        *total = 4;
      else
        *total = 8;
      return true;

      /* ??? Note that we return true, rather than false so that rtx_cost
         doesn't include the constant costs.  Otherwise expand_mult will
         think that it is cheaper to synthesize a multiply rather than to
         use a multiply instruction.  I think this is because the algorithm
         synth_mult doesn't take into account the loading of the operands,
         whereas the calculation of mult_cost does.  */

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;
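
      /* Without a single-cycle hardware integer multiply (TARGET_MPYI),
         an integer multiply becomes a library call; 14 insns is only a
         rough estimate of its cost.  */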
    case MULT:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              || TARGET_MPYI ? 1 : 14);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              ? 15 : 50);
      return true;

    default:
      return false;
    }
}

/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  */

static void
c4x_external_libcall (rtx fun)
{
  /* This is only needed to keep asm30 happy for ___divqf3 etc.  */
  c4x_external_ref (XSTR (fun, 0));
}

/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, AR0_REGNO);
}