/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */

/* Some output-actions in c4x.md need these.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "loop.h"
#include "recog.h"
#include "ggc.h"
#include "cpplib.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"

rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;

/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  R0R1_REGS,                    /* R0           QI, QF, HF      No.  */
  R0R1_REGS,                    /* R1           QI, QF, HF      No.  */
  R2R3_REGS,                    /* R2           QI, QF, HF      No.  */
  R2R3_REGS,                    /* R3           QI, QF, HF      No.  */
  EXT_LOW_REGS,                 /* R4           QI, QF, HF      QI.  */
  EXT_LOW_REGS,                 /* R5           QI, QF, HF      QI.  */
  EXT_LOW_REGS,                 /* R6           QI, QF, HF      QF.  */
  EXT_LOW_REGS,                 /* R7           QI, QF, HF      QF.  */
  ADDR_REGS,                    /* AR0          QI              No.  */
  ADDR_REGS,                    /* AR1          QI              No.  */
  ADDR_REGS,                    /* AR2          QI              No.  */
  ADDR_REGS,                    /* AR3          QI              QI.  */
  ADDR_REGS,                    /* AR4          QI              QI.  */
  ADDR_REGS,                    /* AR5          QI              QI.  */
  ADDR_REGS,                    /* AR6          QI              QI.  */
  ADDR_REGS,                    /* AR7          QI              QI.  */
  DP_REG,                       /* DP           QI              No.  */
  INDEX_REGS,                   /* IR0          QI              No.  */
  INDEX_REGS,                   /* IR1          QI              No.  */
  BK_REG,                       /* BK           QI              QI.  */
  SP_REG,                       /* SP           QI              No.  */
  ST_REG,                       /* ST           CC              No.  */
  NO_REGS,                      /* DIE/IE                       No.  */
  NO_REGS,                      /* IIE/IF                       No.  */
  NO_REGS,                      /* IIF/IOF                      No.  */
  INT_REGS,                     /* RS           QI              No.  */
  INT_REGS,                     /* RE           QI              No.  */
  RC_REG,                       /* RC           QI              No.  */
  EXT_REGS,                     /* R8           QI, QF, HF      QI.  */
  EXT_REGS,                     /* R9           QI, QF, HF      No.  */
  EXT_REGS,                     /* R10          QI, QF, HF      No.  */
  EXT_REGS,                     /* R11          QI, QF, HF      No.  */
};

enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  HFmode,                       /* R0           QI, QF, HF      No.  */
  HFmode,                       /* R1           QI, QF, HF      No.  */
  HFmode,                       /* R2           QI, QF, HF      No.  */
  HFmode,                       /* R3           QI, QF, HF      No.  */
  QFmode,                       /* R4           QI, QF, HF      QI.  */
  QFmode,                       /* R5           QI, QF, HF      QI.  */
  QImode,                       /* R6           QI, QF, HF      QF.  */
  QImode,                       /* R7           QI, QF, HF      QF.  */
  QImode,                       /* AR0          QI              No.  */
  QImode,                       /* AR1          QI              No.  */
  QImode,                       /* AR2          QI              No.  */
  QImode,                       /* AR3          QI              QI.  */
  QImode,                       /* AR4          QI              QI.  */
  QImode,                       /* AR5          QI              QI.  */
  QImode,                       /* AR6          QI              QI.  */
  QImode,                       /* AR7          QI              QI.  */
  VOIDmode,                     /* DP           QI              No.  */
  QImode,                       /* IR0          QI              No.  */
  QImode,                       /* IR1          QI              No.  */
  QImode,                       /* BK           QI              QI.  */
  VOIDmode,                     /* SP           QI              No.  */
  VOIDmode,                     /* ST           CC              No.  */
  VOIDmode,                     /* DIE/IE                       No.  */
  VOIDmode,                     /* IIE/IF                       No.  */
  VOIDmode,                     /* IIF/IOF                      No.  */
  QImode,                       /* RS           QI              No.  */
  QImode,                       /* RE           QI              No.  */
  VOIDmode,                     /* RC           QI              No.  */
  QFmode,                       /* R8           QI, QF, HF      QI.  */
  HFmode,                       /* R9           QI, QF, HF      No.  */
  HFmode,                       /* R10          QI, QF, HF      No.  */
  HFmode,                       /* R11          QI, QF, HF      No.  */
};

/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

const char *c4x_rpts_cycles_string;
int c4x_rpts_cycles = 0;        /* Max. cycles for RPTS.  */
const char *c4x_cpu_version_string;
int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;

/* Forward declarations.  */
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_float_p (rtx);
static int c4x_a_register (rtx);
static int c4x_x_register (rtx);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_K_constant (rtx);
static int c4x_N_constant (rtx);
static int c4x_O_constant (rtx);
static int c4x_R_indirect (rtx);
static int c4x_S_indirect (rtx);
static void c4x_S_address_parse (rtx, int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
static void c4x_external_libcall (rtx);
static rtx c4x_struct_value_rtx (tree, int);

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE hook_int_void_1

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx

struct gcc_target targetm = TARGET_INITIALIZER;

/* Override command line options.
   Called once after all options have been parsed.
   Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
c4x_override_options (void)
{
  if (c4x_rpts_cycles_string)
    c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
  else
    c4x_rpts_cycles = 0;

  if (TARGET_C30)
    c4x_cpu_version = 30;
  else if (TARGET_C31)
    c4x_cpu_version = 31;
  else if (TARGET_C32)
    c4x_cpu_version = 32;
  else if (TARGET_C33)
    c4x_cpu_version = 33;
  else if (TARGET_C40)
    c4x_cpu_version = 40;
  else if (TARGET_C44)
    c4x_cpu_version = 44;
  else
    c4x_cpu_version = 40;

  /* -mcpu=xx overrides -m40 etc.  */
  if (c4x_cpu_version_string)
    {
      const char *p = c4x_cpu_version_string;

      /* Also allow -mcpu=c30 etc.  */
      if (*p == 'c' || *p == 'C')
        p++;
      c4x_cpu_version = atoi (p);
    }

  target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
                    C40_FLAG | C44_FLAG);

  switch (c4x_cpu_version)
    {
    case 30: target_flags |= C30_FLAG; break;
    case 31: target_flags |= C31_FLAG; break;
    case 32: target_flags |= C32_FLAG; break;
    case 33: target_flags |= C33_FLAG; break;
    case 40: target_flags |= C40_FLAG; break;
    case 44: target_flags |= C44_FLAG; break;
    default:
      warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
      c4x_cpu_version = 40;
      target_flags |= C40_FLAG;
    }

  if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
    target_flags |= C3X_FLAG;
  else
    target_flags &= ~C3X_FLAG;

  /* Convert foo / 8.0 into foo * 0.125, etc.  */
  set_fast_math_flags (1);

  /* We should phase out the following at some stage.
     This provides compatibility with the old -mno-aliases option.  */
  if (! TARGET_ALIASES && ! flag_argument_noalias)
    flag_argument_noalias = 1;
}

/* This is called before c4x_override_options.  */

void
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
                          int size ATTRIBUTE_UNUSED)
{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we gain by scheduling before
     register allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
}

/* Write an ASCII string.  */

#define C4X_ASCII_LIMIT 40

void
c4x_output_ascii (FILE *stream, const char *ptr, int len)
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

  if (len)
    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buff.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
        {
          if (special)
            sbuf[s++] = '\\';
          sbuf[s++] = *ptr;
          if (s < C4X_ASCII_LIMIT - 1)
            continue;
          onlys = 1;
        }
      if (s)
        {
          if (first)
            first = 0;
          else
            {
              fputc (',', stream);
              l++;
            }

          sbuf[s] = 0;
          fprintf (stream, "\"%s\"", sbuf);
          l += s + 2;
          if (TARGET_TI && l >= 80 && len > 1)
            {
              fprintf (stream, "\n\t.byte\t");
              first = 1;
              l = 0;
            }

          s = 0;
        }
      if (onlys)
        continue;

      if (first)
        first = 0;
      else
        {
          fputc (',', stream);
          l++;
        }

      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
        {
          fprintf (stream, "\n\t.byte\t");
          first = 1;
          l = 0;
        }
    }
  if (s)
    {
      if (! first)
        fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}
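
/* For example, emitting the string "AB\n" buffers the printable
   characters 'A' and 'B' and flushes the newline as a decimal byte
   value, producing

	.byte	"AB",10

   (an illustration; the exact line splitting also depends on
   TARGET_TI and the 80-column limit above).  */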

int
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (mode)
    {
#if Pmode != QImode
    case Pmode:                 /* Pointer (24/32 bits).  */
#endif
    case QImode:                /* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:                /* Float, Double (32 bits).  */
    case HFmode:                /* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:                /* Condition Codes.  */
    case CC_NOOVmode:           /* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:                /* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
         it is much easier to constrain the first register
         to start on an even boundary.  */
      return IS_INT_REGNO (regno)
        && IS_INT_REGNO (regno + 1)
        && (regno & 1) == 0;

    default:
      return 0;                 /* We don't support these modes.  */
    }

  return 0;
}

/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}
484 /* The TI C3x C compiler register argument runtime model uses 6 registers,
485 AR2, R2, R3, RC, RS, RE.
487 The first two floating point arguments (float, double, long double)
488 that are found scanning from left to right are assigned to R2 and R3.
490 The remaining integer (char, short, int, long) or pointer arguments
491 are assigned to the remaining registers in the order AR2, R2, R3,
492 RC, RS, RE when scanning left to right, except for the last named
493 argument prior to an ellipsis denoting variable number of
494 arguments. We don't have to worry about the latter condition since
495 function.c treats the last named argument as anonymous (unnamed).
497 All arguments that cannot be passed in registers are pushed onto
498 the stack in reverse order (right to left). GCC handles that for us.
500 c4x_init_cumulative_args() is called at the start, so we can parse
501 the args to see how many floating point arguments and how many
502 integer (or pointer) arguments there are. c4x_function_arg() is
503 then called (sometimes repeatedly) for each argument (parsed left
504 to right) to obtain the register to pass the argument in, or zero
505 if the argument is to be passed on the stack. Once the compiler is
506 happy, c4x_function_arg_advance() is called.
508 Don't use R0 to pass arguments in, we use 0 to indicate a stack
509 argument. */
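
/* Worked illustration of the rules above: for a prototyped function

	void f (float a, int b, float c, int d, int e);

   the floats A and C occupy R2 and R3, so two registers are reserved
   for floats, and the ints B, D and E take the first three entries of
   the third row of c4x_int_reglist below, i.e. AR2, RC and RS.  */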

static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};

/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);

          fprintf (stderr, "fntype code = %s, ret code = %s",
                   tree_code_name[(int) TREE_CODE (fntype)],
                   tree_code_name[(int) TREE_CODE (ret_type)]);
        }
      else
        fprintf (stderr, "no fntype");

      if (libname)
        fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
        {
          enum machine_mode mode;

          /* If the last arg doesn't have void type then we have
             variable arguments.  */
          if (! next_param)
            cum->var = 1;

          if ((mode = TYPE_MODE (type)))
            {
              if (! MUST_PASS_IN_STACK (mode, type))
                {
                  /* Look for float, double, or long double argument.  */
                  if (mode == QFmode || mode == HFmode)
                    cum->floats++;
                  /* Look for integer, enumeral, boolean, char, or pointer
                     argument.  */
                  else if (mode == QImode || mode == Pmode)
                    cum->ints++;
                }
            }
          cum->args++;
        }
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
             cum->prototype ? ", prototype" : "",
             cum->var ? ", variable args" : "",
             cum->args);
}

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                          tree type, int named)
{
  if (TARGET_DEBUG)
    fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
             GET_MODE_NAME (mode), named);
  if (! TARGET_MEMPARM
      && named
      && type
      && ! MUST_PASS_IN_STACK (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* Handle libcall arguments.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  return;
}

/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                  tree type, int named)
{
  int reg = 0;                  /* Default to passing argument on stack.  */

  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
         in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
        6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
        cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! MUST_PASS_IN_STACK (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        {
          if (cum->floats < cum->maxfloats)
            reg = c4x_fp_reglist[cum->floats];
        }
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        {
          if (cum->ints < cum->maxints)
            reg = c4x_int_reglist[cum->maxfloats][cum->ints];
        }
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
        reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
        reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
               GET_MODE_NAME (mode), named);
      if (reg)
        fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
        fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}

/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle.  */

rtx
c4x_va_arg (tree valist, tree type)
{
  tree t;

  t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
             build_int_2 (int_size_in_bytes (type), 0));
  TREE_SIDE_EFFECTS (t) = 1;

  return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
}

static int
c4x_isr_reg_used_p (unsigned int regno)
{
  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))
    return 0;

  /* We could be a little smarter about saving/restoring DP.
     We'll only save it for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* Only save/restore regs in leaf function that are used.  */
  if (c4x_leaf_function)
    return regs_ever_live[regno] && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((regs_ever_live[regno] || call_used_regs[regno])
        && fixed_regs[regno] == 0);
}

static int
c4x_leaf_function_p (void)
{
  /* A leaf function makes no calls, so we only need
     to save/restore the registers we actually use.
     For the global variable leaf_function to be set, we need
     to define LEAF_REGISTERS and all that it entails.
     Let's check ourselves....  */

  if (lookup_attribute ("leaf_pretend",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Use the leaf_pretend attribute at your own risk.  This is a hack
     to speed up ISRs that call a function infrequently where the
     overhead of saving and restoring the additional registers is not
     warranted.  You must save and restore the additional registers
     required by the called function.  Caveat emptor.  Here's enough
     rope...  */

  if (leaf_function_p ())
    return 1;

  return 0;
}

static int
c4x_naked_function_p (void)
{
  tree type;

  type = TREE_TYPE (current_function_decl);
  return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
}

int
c4x_interrupt_function_p (void)
{
  const char *cfun_name;
  if (lookup_attribute ("interrupt",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Look for TI style c_intnn.  */
  cfun_name = current_function_name ();
  return cfun_name[0] == 'c'
    && cfun_name[1] == '_'
    && cfun_name[2] == 'i'
    && cfun_name[3] == 'n'
    && cfun_name[4] == 't'
    && ISDIGIT (cfun_name[5])
    && ISDIGIT (cfun_name[6]);
}
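
/* Thus a function named c_int01 (TI style: "c_int" followed by two
   digits) is treated as an interrupt handler even without an
   __interrupt__ attribute.  */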

void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __naked__ function don't build a prologue.  */
  if (c4x_naked_function_p ())
    {
      return;
    }

  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
        {
          insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                       gen_rtx_REG (QImode, SP_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          /* We require that an ISR uses fewer than 32768 words of
             local variables, otherwise we have to go to lots of
             effort to save a register, load it with the desired size,
             adjust the stack pointer, and then restore the modified
             register.  Frankly, I think it is a poor ISR that
             requires more than 32767 words of local temporary
             storage!  */
          if (size > 32767)
            error ("ISR %s requires %d words of local vars, max is 32767",
                   current_function_name (), size);

          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (c4x_isr_reg_used_p (regno))
            {
              if (regno == DP_REGNO)
                {
                  insn = emit_insn (gen_push_dp ());
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (IS_EXT_REGNO (regno))
                    {
                      insn = emit_insn (gen_pushqf
                                        (gen_rtx_REG (QFmode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
            }
        }
      /* We need to clear the repeat mode flag if the ISR is
         going to use a RPTB instruction or uses the RC, RS, or RE
         registers.  */
      if (regs_ever_live[RC_REGNO]
          || regs_ever_live[RS_REGNO]
          || regs_ever_live[RE_REGNO])
        {
          insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* Reload DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
        {
          insn = emit_insn (gen_set_ldp_prologue
                            (gen_rtx_REG (QImode, DP_REGNO),
                             gen_rtx_SYMBOL_REF (QImode, "data_sec")));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                           gen_rtx_REG (QImode, SP_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              dont_push_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used, we don't need to push it.  */
              dont_push_ar3 = 1;
            }
        }
      else
        {
          /* If we use ar3, we need to push it.  */
          dont_push_ar3 = 0;
          if ((size != 0) || (current_function_args_size != 0))
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to increment SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT(size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                             gen_rtx_REG (QImode, R1_REGNO),
                                             GEN_INT(-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT(size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO),
                                        GEN_INT(size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32767 words, so we can directly
             add the number.  */
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_pushqi
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else if ((! dont_push_ar3) || (regno != AR3_REGNO))
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }
    }
}

void
c4x_expand_epilogue (void)
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For __naked__ function build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
        {
          if (! c4x_isr_reg_used_p (regno))
            continue;
          if (regno == DP_REGNO)
            {
              insn = emit_insn (gen_pop_dp ());
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /* We have to use unspec because the compiler will delete insns
                 that are not call-saved.  */
              if (IS_EXT_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }
      if (size)
        {
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_popqi
                            (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn
                (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
                            gen_rtx_MEM (QImode,
                                         gen_rtx_PLUS
                                         (QImode, gen_rtx_REG (QImode,
                                                               AR3_REGNO),
                                          constm1_rtx))));
              RTX_FRAME_RELATED_P (insn) = 1;

              /* We already have the return value and the fp,
                 so we need to add those to the stack.  */
              size += 2;
              jump = 1;
              dont_pop_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used for anything, we don't need to
                 pop it.  */
              dont_pop_ar3 = 1;
            }
        }
      else
        {
          dont_pop_ar3 = 0;     /* If we use ar3, we need to pop it.  */
          if (size || current_function_args_size)
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      /* Now restore the saved registers, putting in the delayed branch
         where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (regno == AR3_REGNO && dont_pop_ar3)
                continue;

              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_popqi_unspec
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
              else
                {
                  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }

      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              /* Restore the old FP.  */
              insn = emit_insn
                (gen_movqi
                 (gen_rtx_REG (QImode, AR3_REGNO),
                  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to decrement SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT(size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                             gen_rtx_REG (QImode, R3_REGNO),
                                             GEN_INT(-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT(size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO),
                                        GEN_INT(size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             subtract the number.  */
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (jump)
        {
          insn = emit_jump_insn (gen_return_indirect_internal
                                 (gen_rtx_REG (QImode, R2_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        {
          insn = emit_jump_insn (gen_return_from_epilogue ());
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}

int
c4x_null_epilogue_p (void)
{
  int regno;

  if (reload_completed
      && ! c4x_naked_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! (optimize < 2)
      && ! get_frame_size ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        if (regs_ever_live[regno] && ! call_used_regs[regno]
            && (regno != AR3_REGNO))
          return 1;
      return 0;
    }
  return 1;
}

int
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
         address.  Undo this silliness.  */
      op1 = XEXP (op1, 1);
    }

  if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
        {
          /* Alias analysis seems to do a better job if we force
             constant addresses to memory after reload.  */
          emit_insn (gen_load_immed_address (op0, op1));
          return 1;
        }
      else
        {
          /* Stick symbol or label address into the constant pool.  */
          op1 = force_const_mem (Pmode, op1);
        }
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
         constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
        op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
        {
          op0 = copy_rtx (op0);
          PUT_MODE (op0, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
        op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
        {
          op1 = copy_rtx (op1);
          PUT_MODE (op1, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}

void
c4x_emit_libcall (rtx libcall, enum rtx_code code,
                  enum machine_mode dmode, enum machine_mode smode,
                  int noperands, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
                                     operands[1], smode);
      equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
      break;

    case 3:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
                                     operands[1], smode, operands[2], smode);
      equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}

void
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
                   enum machine_mode mode, rtx *operands)
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}

void
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
                        enum machine_mode mode, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
                            gen_rtx_LSHIFTRT (HImode,
                                              gen_rtx_MULT (HImode,
                                                            gen_rtx_fmt_e (code, HImode, operands[1]),
                                                            gen_rtx_fmt_e (code, HImode, operands[2])),
                                              GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
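
/* The EQUIV rtx built above describes the libcall's value as the top
   32 bits of the 64-bit (HImode) product, i.e.
   (truncate (lshiftrt (mult a b) 32)), so the optimizers can treat
   the call like an ordinary high-part multiply.  */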

int
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
{
  rtx base = NULL_RTX;          /* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;          /* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;          /* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
         allow SP here---push_operand should recognize an operand
         being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
        return 0;

    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
        return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (mode != QImode && mode != QFmode)
          return 0;

        if (! REG_P (op0)
            || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
          return 0;
        base = XEXP (op1, 0);
        if (! REG_P (base))
          return 0;
        if (REGNO (base) != REGNO (op0))
          return 0;
        if (REG_P (XEXP (op1, 1)))
          indx = XEXP (op1, 1);
        else
          disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);
        enum rtx_code code0 = GET_CODE (op0);

        switch (code0)
          {
          case REG:
            if (REG_P (op1))
              {
                base = op0;     /* Base + index.  */
                indx = op1;
                if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
                  {
                    base = op1;
                    indx = op0;
                  }
              }
            else
              {
                base = op0;     /* Base + displacement.  */
                disp = op1;
              }
            break;

          default:
            return 0;
          }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        /* HImode and HFmode direct memory references aren't truly
           offsettable (consider case at end of data page).  We
           probably get better code by loading a pointer and using an
           indirect memory reference.  */
        if (mode == HImode || mode == HFmode)
          return 0;

        if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
          return 0;

        if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
          return 1;

        if (GET_CODE (op1) == CONST)
          return 1;
        return 0;
      }
      break;

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
        return 1;
      /* These need to be converted to a LO_SUM (...).
         LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
        return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
        return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
        return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
        return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
        return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
        return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
        return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
        return 0;
      if (mode == HImode || mode == HFmode)
        {
          /* The offset displacement must be legitimate.  */
          if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
            return 0;
        }
      else
        {
          if (! IS_DISP8_CONST (INTVAL (disp)))
            return 0;
        }
      /* Can't add an index with a disp.  */
      if (indx)
        return 0;
    }
  return 1;
}
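
/* By way of example, the checks above accept *AR0 (register
   indirect) and *+AR0(5) (base plus 8-bit displacement) for QImode,
   restrict predecrement forms such as *--AR0 to QImode and QFmode,
   and always reject a base register combined with both an index and
   a displacement.  */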

rtx
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
      if (mode == HImode || mode == HFmode)
        {
          /* We need to force the address into
             a register so that it is offsettable.  */
          rtx addr_reg = gen_reg_rtx (Pmode);
          emit_move_insn (addr_reg, orig);
          return addr_reg;
        }
      else
        {
          rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

          if (! TARGET_SMALL)
            emit_insn (gen_set_ldp (dp_reg, orig));

          return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
        }
    }

  return NULL_RTX;
}

/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.
   This is used in cse and loop optimization to determine
   if it is worthwhile storing a common address into a register.
   Unfortunately, the C4x address cost depends on other operands.  */

static int
c4x_address_cost (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
        rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
          return TARGET_SMALL ? 3 : 4;

        if (GET_CODE (op1) == CONST)
          {
            rtx offset = const0_rtx;

            op1 = eliminate_constant_term (op1, &offset);

            /* ??? These costs need rethinking...  */
            if (GET_CODE (op1) == LABEL_REF)
              return 3;

            if (GET_CODE (op1) != SYMBOL_REF)
              return 4;

            if (INTVAL (offset) == 0)
              return 3;

            return 4;
          }
        fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
        register rtx op0 = XEXP (addr, 0);
        register rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op0) != REG)
          break;

        switch (GET_CODE (op1))
          {
          default:
            break;

          case REG:
            /* This cost for REG+REG must be greater than the cost
               for REG if we want autoincrement addressing modes.  */
            return 2;

          case CONST_INT:
            /* The following tries to improve GIV combination
               in strength reduce but appears not to help.  */
            if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
              return 1;

            if (IS_DISP1_CONST (INTVAL (op1)))
              return 1;

            if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
              return 2;

            return 3;
          }
      }
    default:
      break;
    }

  return 4;
}

rtx
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  if (mode == CC_NOOVmode
      && (code == LE || code == GE || code == LT || code == GT))
    return NULL_RTX;

  cc_reg = gen_rtx_REG (mode, ST_REGNO);
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
                          gen_rtx_COMPARE (mode, x, y)));
  return cc_reg;
}

char *
c4x_output_cbranch (const char *form, rtx seq)
{
  int delayed = 0;
  int annultrue = 0;
  int annulfalse = 0;
  rtx delay;
  char *cp;
  static char str[100];

  if (final_sequence)
    {
      delay = XVECEXP (final_sequence, 0, 1);
      delayed = ! INSN_ANNULLED_BRANCH_P (seq);
      annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
      annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
    }
  strcpy (str, form);
  cp = &str [strlen (str)];
  if (delayed)
    {
      *cp++ = '%';
      *cp++ = '#';
    }
  if (annultrue)
    {
      *cp++ = 'a';
      *cp++ = 't';
    }
  if (annulfalse)
    {
      *cp++ = 'a';
      *cp++ = 'f';
    }
  *cp++ = '\t';
  *cp++ = '%';
  *cp++ = 'l';
  *cp++ = '1';
  *cp = 0;
  return str;
}
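
/* For instance, given FORM "b" this yields the template "b%#\t%l1"
   when the delay slot is filled without annulment (the %# prints "d"
   for a delayed branch), and "bat\t%l1" or "baf\t%l1" for slots
   annulled on the true or false path.  */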

void
c4x_print_operand (FILE *file, rtx op, int letter)
{
  rtx op1;
  enum rtx_code code;

  switch (letter)
    {
    case '#':                   /* Delayed.  */
      if (final_sequence)
        fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':                   /* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
        fprintf (file, "@");
      break;

    case 'H':                   /* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':                   /* Reversed condition.  */
      code = reverse_condition (code);
      break;

    case 'L':                   /* Log 2 of constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':                   /* Ones complement of small constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
      return;

    case 'K':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL
          && code == MEM
          && GET_CODE (XEXP (op, 0)) == LO_SUM
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
          && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
        {
          op1 = XEXP (XEXP (op, 0), 1);
          if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
            {
              fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
              output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
              fprintf (file, "\n");
            }
        }
      return;

    case 'M':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL        /* Only used in asm statements.  */
          && code == MEM
          && (GET_CODE (XEXP (op, 0)) == CONST
              || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
        {
          fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
          output_address (XEXP (op, 0));
          fprintf (file, "\n\t");
        }
      return;

    case 'O':                   /* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
        break;
      else if (code == MEM)
        output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
        fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
        fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':                   /* Call.  */
      break;

    case 'U':                   /* Call/callu.  */
      if (code != SYMBOL_REF)
        fprintf (file, "u");
      return;

    default:
      break;
    }

  switch (code)
    {
    case REG:
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && ! TARGET_TI)
        fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
        fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
        char str[64];

        real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
                         sizeof (str), 0, 1);
        fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
      break;

    case NE:
      fprintf (file, "ne");
      break;

    case EQ:
      fprintf (file, "eq");
      break;

    case GE:
      fprintf (file, "ge");
      break;

    case GT:
      fprintf (file, "gt");
      break;

    case LE:
      fprintf (file, "le");
      break;

    case LT:
      fprintf (file, "lt");
      break;

    case GEU:
      fprintf (file, "hs");
      break;

    case GTU:
      fprintf (file, "hi");
      break;

    case LEU:
      fprintf (file, "ls");
      break;

    case LTU:
      fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    case CODE_LABEL:
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}

void
c4x_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:                  /* Indirect with displacement.  */
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0))
          {
            if (REG_P (op1))
              {
                if (IS_INDEX_REG (op0))
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op1)],
                             reg_names[REGNO (op0)]);   /* Index + base.  */
                  }
                else
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op0)],
                             reg_names[REGNO (op1)]);   /* Base + index.  */
                  }
              }
            else if (INTVAL (op1) < 0)
              {
                fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         -INTVAL (op1));        /* Base - displacement.  */
              }
            else
              {
                fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         INTVAL (op1));         /* Base + displacement.  */
              }
          }
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
          c4x_print_operand_address (file, op1);
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}

/* Return nonzero if the floating point operand will fit
   in the immediate field.  */

static int
c4x_immed_float_p (rtx op)
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
      convval[1] = 0;
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;                   /* 0.0  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;                   /* Precision doesn't fit.  */
  return (exponent <= 7)        /* Positive exp.  */
    && (exponent >= -7);        /* Negative exp.  */
}
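
/* Under this test, constants such as 0.0, 1.0 and 2.0 fit the short
   immediate field (exponent between -7 and 7 and no mantissa bits
   lost to truncation), whereas a value like 1.0e10 does not.  */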

/* The last instruction in a repeat block cannot be a Bcond, DBcond,
   CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.

   None of the last four instructions from the bottom of the block can
   be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
   BcondAT or RETIcondD.

   This routine scans the four previous insns for a jump insn, and if
   one is found, returns 1 so that we bung in a nop instruction.
   This simple-minded strategy will add a nop even when it may not
   be required, say when there is a JUMP_INSN near the end of the
   block that doesn't get converted into a delayed branch.

   Note that we cannot have a call insn, since we don't generate
   repeat loops with calls in them (although I suppose we could, but
   there's no benefit.)

   !!! FIXME.  The rptb_top insn may be sucked into a SEQUENCE.  */
2194 c4x_rptb_nop_p (rtx insn)
2196 rtx start_label;
2197 int i;
2199 /* Extract the start label from the jump pattern (rptb_end). */
2200 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2202 /* If there is a label at the end of the loop we must insert
2203 a NOP. */
2204 do {
2205 insn = previous_insn (insn);
2206 } while (GET_CODE (insn) == NOTE
2207 || GET_CODE (insn) == USE
2208 || GET_CODE (insn) == CLOBBER);
2209 if (GET_CODE (insn) == CODE_LABEL)
2210 return 1;
2212 for (i = 0; i < 4; i++)
2214 /* Search back for prev non-note and non-label insn. */
2215 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2216 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2218 if (insn == start_label)
2219 return i == 0;
2221 insn = previous_insn (insn);
2224 /* If we have a jump instruction we should insert a NOP. If we
2225 hit the repeat block top we should only insert a NOP if the loop
2226 is empty. */
2227 if (GET_CODE (insn) == JUMP_INSN)
2228 return 1;
2229 insn = previous_insn (insn);
2231 return 0;
2235 /* The C4x looping instruction needs to be emitted at the top of the
2236 loop. Emitting the true RTL for a looping instruction at the top of
2237 the loop can cause problems with flow analysis. So instead, a dummy
2238 doloop insn is emitted at the end of the loop. This routine checks
2239 for the presence of this doloop insn and then searches back to the
2240 top of the loop, where it inserts the true looping insn (provided
2241 there are no instructions in the loop which would cause problems).
2242 Any additional labels can be emitted at this point. In addition, if
2243 the desired loop count register was not allocated, this routine does
2244 nothing.
2246 Before we can create a repeat block looping instruction we have to
2247 verify that there are no jumps out of the loop and that no jumps from
2248 outside the loop go into it. This can happen after the basic block
2249 reordering pass. The C4x CPU cannot handle this. */
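/* Illustrative example (assumed TI-style mnemonics, not from the
   original source): with the loop count in RC, a counted loop ideally
   becomes

           ldi   count-1, rc       ; iterations - 1
           rptb  end_loop          ; hardware repeat down to end_loop
   start:  ...loop body...
   end_loop:

   with no explicit decrement-and-branch at the bottom of the loop.  */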
2251 static int
2252 c4x_label_ref_used_p (rtx x, rtx code_label)
2254 enum rtx_code code;
2255 int i, j;
2256 const char *fmt;
2258 if (x == 0)
2259 return 0;
2261 code = GET_CODE (x);
2262 if (code == LABEL_REF)
2263 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2265 fmt = GET_RTX_FORMAT (code);
2266 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2268 if (fmt[i] == 'e')
2270 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2271 return 1;
2273 else if (fmt[i] == 'E')
2274 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2275 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2276 return 1;
2278 return 0;
2282 static int
2283 c4x_rptb_valid_p (rtx insn, rtx start_label)
2285 rtx end = insn;
2286 rtx start;
2287 rtx tmp;
2289 /* Find the start label. */
2290 for (; insn; insn = PREV_INSN (insn))
2291 if (insn == start_label)
2292 break;
2294 /* Not found, so we cannot use a RPTB or RPTS. The label was
2295 probably moved by the basic block reordering pass. */
2296 if (! insn)
2297 return 0;
2299 start = insn;
2300 /* If any jump inside this block jumps out of it then we must fail. */
2301 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2303 if (GET_CODE (insn) == CODE_LABEL)
2305 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2306 if (GET_CODE (tmp) == JUMP_INSN
2307 && c4x_label_ref_used_p (tmp, insn))
2308 return 0;
2311 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2313 if (GET_CODE (insn) == CODE_LABEL)
2315 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2316 if (GET_CODE (tmp) == JUMP_INSN
2317 && c4x_label_ref_used_p (tmp, insn))
2318 return 0;
2321 /* If any jump from outside this block jumps into it then we must fail. */
2322 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2324 if (GET_CODE (insn) == CODE_LABEL)
2326 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2327 if (GET_CODE (tmp) == JUMP_INSN
2328 && c4x_label_ref_used_p (tmp, insn))
2329 return 0;
2330 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2331 if (GET_CODE (tmp) == JUMP_INSN
2332 && c4x_label_ref_used_p (tmp, insn))
2333 return 0;
2337 /* All checks OK. */
2338 return 1;
2342 void
2343 c4x_rptb_insert (rtx insn)
2345 rtx end_label;
2346 rtx start_label;
2347 rtx new_start_label;
2348 rtx count_reg;
2350 /* If the count register has not been allocated to RC, say if
2351 there is a movmem pattern in the loop, then do not insert a
2352 RPTB instruction. Instead we emit a decrement and branch
2353 at the end of the loop. */
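/* Illustratively, the fallback emitted below corresponds to a
   decrement-and-branch sequence such as (assumed mnemonics)

       addi  -1, rc             ; gen_addqi3 (count_reg, count_reg, -1)
       cmpi  0, rc              ; gen_cmpqi (count_reg, 0)
       bge   start_label        ; gen_bge (start_label)

   rather than a hardware RPTB block.  */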
2354 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2355 if (REGNO (count_reg) != RC_REGNO)
2356 return;
2358 /* Extract the start label from the jump pattern (rptb_end). */
2359 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2361 if (! c4x_rptb_valid_p (insn, start_label))
2363 /* We can not use the rptb insn. Replace it so reorg can use
2364 the delay slots of the jump insn. */
2365 emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
2366 emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
2367 emit_insn_before (gen_bge (start_label), insn);
2368 LABEL_NUSES (start_label)++;
2369 delete_insn (insn);
2370 return;
2373 end_label = gen_label_rtx ();
2374 LABEL_NUSES (end_label)++;
2375 emit_label_after (end_label, insn);
2377 new_start_label = gen_label_rtx ();
2378 LABEL_NUSES (new_start_label)++;
2380 for (; insn; insn = PREV_INSN (insn))
2382 if (insn == start_label)
2383 break;
2384 if (GET_CODE (insn) == JUMP_INSN &&
2385 JUMP_LABEL (insn) == start_label)
2386 redirect_jump (insn, new_start_label, 0);
2388 if (! insn)
2389 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2391 emit_label_after (new_start_label, insn);
2393 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2394 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2395 else
2396 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2397 if (LABEL_NUSES (start_label) == 0)
2398 delete_insn (start_label);
2402 /* We need to use direct addressing for large constants and addresses
2403 that cannot fit within an instruction. We must check for these
2404 after the final jump optimization pass, since this may
2405 introduce a local_move insn for a SYMBOL_REF. This pass
2406 must come before delayed branch slot filling since it can generate
2407 additional instructions.
2409 This function also fixes up RPTB style loops that didn't get RC
2410 allocated as the loop counter. */
2412 static void
2413 c4x_reorg (void)
2415 rtx insn;
2417 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2419 /* Only process real insns. */
2420 if (INSN_P (insn))
2422 int insn_code_number;
2423 rtx old;
2425 insn_code_number = recog_memoized (insn);
2427 if (insn_code_number < 0)
2428 continue;
2430 /* Insert the RTX for RPTB at the top of the loop
2431 and a label at the end of the loop. */
2432 if (insn_code_number == CODE_FOR_rptb_end)
2433 c4x_rptb_insert (insn);
2435 /* We need to split the insn here. Otherwise the calls to
2436 force_const_mem will not work for load_immed_address. */
2437 old = insn;
2439 /* Don't split the insn if it has been deleted. */
2440 if (! INSN_DELETED_P (old))
2441 insn = try_split (PATTERN(old), old, 1);
2443 /* When not optimizing, the old insn will be still left around
2444 with only the 'deleted' bit set. Transform it into a note
2445 to avoid confusion of subsequent processing. */
2446 if (INSN_DELETED_P (old))
2448 PUT_CODE (old, NOTE);
2449 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2450 NOTE_SOURCE_FILE (old) = 0;
2457 static int
2458 c4x_a_register (rtx op)
2460 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2464 static int
2465 c4x_x_register (rtx op)
2467 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2471 static int
2472 c4x_immed_int_constant (rtx op)
2474 if (GET_CODE (op) != CONST_INT)
2475 return 0;
2477 return GET_MODE (op) == VOIDmode
2478 || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2479 || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
2483 static int
2484 c4x_immed_float_constant (rtx op)
2486 if (GET_CODE (op) != CONST_DOUBLE)
2487 return 0;
2489 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2490 present this only means that a MEM rtx has been generated. It does
2491 not mean the rtx is really in memory. */
2493 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2498 c4x_shiftable_constant (rtx op)
2500 int i;
2501 int mask;
2502 int val = INTVAL (op);
2504 for (i = 0; i < 16; i++)
2506 if (val & (1 << i))
2507 break;
2509 mask = ((0xffff >> i) << 16) | 0xffff;
2510 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2511 : (val >> i) & mask))
2512 return i;
2513 return -1;
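/* Worked example (illustrative): for op = 0x00050000 no bit is set in
   the low 16 bits, so i ends up as 16 and mask = 0xffff; the shifted
   value (0x00050000 >> 16) & 0xffff = 5 is a valid 16-bit immediate,
   so we return 16: the constant can be loaded as an LDI of 5 followed
   by a left shift of 16 (assumed use of the return value).  */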
2518 c4x_H_constant (rtx op)
2520 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2525 c4x_I_constant (rtx op)
2527 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2532 c4x_J_constant (rtx op)
2534 if (TARGET_C3X)
2535 return 0;
2536 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2540 static int
2541 c4x_K_constant (rtx op)
2543 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2544 return 0;
2545 return IS_INT5_CONST (INTVAL (op));
2550 c4x_L_constant (rtx op)
2552 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2556 static int
2557 c4x_N_constant (rtx op)
2559 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2563 static int
2564 c4x_O_constant (rtx op)
2566 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2570 /* The constraints do not have to check the register class,
2571 except when needed to discriminate between the constraints.
2572 The operand has been checked by the predicates to be valid. */
2574 /* ARx + 9-bit signed const or IRn
2575 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2576 We don't include the pre/post inc/dec forms here since
2577 they are handled by the <> constraints. */
2580 c4x_Q_constraint (rtx op)
2582 enum machine_mode mode = GET_MODE (op);
2584 if (GET_CODE (op) != MEM)
2585 return 0;
2586 op = XEXP (op, 0);
2587 switch (GET_CODE (op))
2589 case REG:
2590 return 1;
2592 case PLUS:
2594 rtx op0 = XEXP (op, 0);
2595 rtx op1 = XEXP (op, 1);
2597 if (! REG_P (op0))
2598 return 0;
2600 if (REG_P (op1))
2601 return 1;
2603 if (GET_CODE (op1) != CONST_INT)
2604 return 0;
2606 /* HImode and HFmode must be offsettable. */
2607 if (mode == HImode || mode == HFmode)
2608 return IS_DISP8_OFF_CONST (INTVAL (op1));
2610 return IS_DISP8_CONST (INTVAL (op1));
2612 break;
2614 default:
2615 break;
2617 return 0;
2621 /* ARx + 5-bit unsigned const
2622 *ARx, *+ARx(n) for n < 32. */
2625 c4x_R_constraint (rtx op)
2627 enum machine_mode mode = GET_MODE (op);
2629 if (TARGET_C3X)
2630 return 0;
2631 if (GET_CODE (op) != MEM)
2632 return 0;
2633 op = XEXP (op, 0);
2634 switch (GET_CODE (op))
2636 case REG:
2637 return 1;
2639 case PLUS:
2641 rtx op0 = XEXP (op, 0);
2642 rtx op1 = XEXP (op, 1);
2644 if (! REG_P (op0))
2645 return 0;
2647 if (GET_CODE (op1) != CONST_INT)
2648 return 0;
2650 /* HImode and HFmode must be offsettable. */
2651 if (mode == HImode || mode == HFmode)
2652 return IS_UINT5_CONST (INTVAL (op1) + 1);
2654 return IS_UINT5_CONST (INTVAL (op1));
2656 break;
2658 default:
2659 break;
2661 return 0;
2665 static int
2666 c4x_R_indirect (rtx op)
2668 enum machine_mode mode = GET_MODE (op);
2670 if (TARGET_C3X || GET_CODE (op) != MEM)
2671 return 0;
2673 op = XEXP (op, 0);
2674 switch (GET_CODE (op))
2676 case REG:
2677 return IS_ADDR_OR_PSEUDO_REG (op);
2679 case PLUS:
2681 rtx op0 = XEXP (op, 0);
2682 rtx op1 = XEXP (op, 1);
2684 /* HImode and HFmode must be offsettable. */
2685 if (mode == HImode || mode == HFmode)
2686 return IS_ADDR_OR_PSEUDO_REG (op0)
2687 && GET_CODE (op1) == CONST_INT
2688 && IS_UINT5_CONST (INTVAL (op1) + 1);
2690 return REG_P (op0)
2691 && IS_ADDR_OR_PSEUDO_REG (op0)
2692 && GET_CODE (op1) == CONST_INT
2693 && IS_UINT5_CONST (INTVAL (op1));
2695 break;
2697 default:
2698 break;
2700 return 0;
2704 /* ARx + 1-bit unsigned const or IRn
2705 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2706 We don't include the pre/post inc/dec forms here since
2707 they are handled by the <> constraints. */
2710 c4x_S_constraint (rtx op)
2712 enum machine_mode mode = GET_MODE (op);
2713 if (GET_CODE (op) != MEM)
2714 return 0;
2715 op = XEXP (op, 0);
2716 switch (GET_CODE (op))
2718 case REG:
2719 return 1;
2721 case PRE_MODIFY:
2722 case POST_MODIFY:
2724 rtx op0 = XEXP (op, 0);
2725 rtx op1 = XEXP (op, 1);
2727 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2728 || (op0 != XEXP (op1, 0)))
2729 return 0;
2731 op0 = XEXP (op1, 0);
2732 op1 = XEXP (op1, 1);
2733 return REG_P (op0) && REG_P (op1);
2734 /* Pre or post_modify with a displacement of 0 or 1
2735 should not be generated. */
2737 break;
2739 case PLUS:
2741 rtx op0 = XEXP (op, 0);
2742 rtx op1 = XEXP (op, 1);
2744 if (!REG_P (op0))
2745 return 0;
2747 if (REG_P (op1))
2748 return 1;
2750 if (GET_CODE (op1) != CONST_INT)
2751 return 0;
2753 /* HImode and HFmode must be offsettable. */
2754 if (mode == HImode || mode == HFmode)
2755 return IS_DISP1_OFF_CONST (INTVAL (op1));
2757 return IS_DISP1_CONST (INTVAL (op1));
2759 break;
2761 default:
2762 break;
2764 return 0;
2768 static int
2769 c4x_S_indirect (rtx op)
2771 enum machine_mode mode = GET_MODE (op);
2772 if (GET_CODE (op) != MEM)
2773 return 0;
2775 op = XEXP (op, 0);
2776 switch (GET_CODE (op))
2778 case PRE_DEC:
2779 case POST_DEC:
2780 if (mode != QImode && mode != QFmode)
2781 return 0;
2782 case PRE_INC:
2783 case POST_INC:
2784 op = XEXP (op, 0);
2786 case REG:
2787 return IS_ADDR_OR_PSEUDO_REG (op);
2789 case PRE_MODIFY:
2790 case POST_MODIFY:
2792 rtx op0 = XEXP (op, 0);
2793 rtx op1 = XEXP (op, 1);
2795 if (mode != QImode && mode != QFmode)
2796 return 0;
2798 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2799 || (op0 != XEXP (op1, 0)))
2800 return 0;
2802 op0 = XEXP (op1, 0);
2803 op1 = XEXP (op1, 1);
2804 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2805 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2806 /* Pre or post_modify with a displacement of 0 or 1
2807 should not be generated. */
2810 case PLUS:
2812 rtx op0 = XEXP (op, 0);
2813 rtx op1 = XEXP (op, 1);
2815 if (REG_P (op0))
2817 /* HImode and HFmode must be offsettable. */
2818 if (mode == HImode || mode == HFmode)
2819 return IS_ADDR_OR_PSEUDO_REG (op0)
2820 && GET_CODE (op1) == CONST_INT
2821 && IS_DISP1_OFF_CONST (INTVAL (op1));
2823 if (REG_P (op1))
2824 return (IS_INDEX_OR_PSEUDO_REG (op1)
2825 && IS_ADDR_OR_PSEUDO_REG (op0))
2826 || (IS_ADDR_OR_PSEUDO_REG (op1)
2827 && IS_INDEX_OR_PSEUDO_REG (op0));
2829 return IS_ADDR_OR_PSEUDO_REG (op0)
2830 && GET_CODE (op1) == CONST_INT
2831 && IS_DISP1_CONST (INTVAL (op1));
2834 break;
2836 default:
2837 break;
2839 return 0;
2843 /* Direct memory operand. */
2846 c4x_T_constraint (rtx op)
2848 if (GET_CODE (op) != MEM)
2849 return 0;
2850 op = XEXP (op, 0);
2852 if (GET_CODE (op) != LO_SUM)
2854 /* Allow call operands. */
2855 return GET_CODE (op) == SYMBOL_REF
2856 && GET_MODE (op) == Pmode
2857 && SYMBOL_REF_FUNCTION_P (op);
2860 /* HImode and HFmode are not offsettable. */
2861 if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2862 return 0;
2864 if ((GET_CODE (XEXP (op, 0)) == REG)
2865 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2866 return c4x_U_constraint (XEXP (op, 1));
2868 return 0;
2872 /* Symbolic operand. */
2875 c4x_U_constraint (rtx op)
2877 /* Don't allow direct addressing to an arbitrary constant. */
2878 return GET_CODE (op) == CONST
2879 || GET_CODE (op) == SYMBOL_REF
2880 || GET_CODE (op) == LABEL_REF;
2885 c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2887 if (GET_CODE (op) == MEM)
2889 enum rtx_code code = GET_CODE (XEXP (op, 0));
2891 if (code == PRE_INC
2892 || code == PRE_DEC
2893 || code == POST_INC
2894 || code == POST_DEC
2895 || code == PRE_MODIFY
2896 || code == POST_MODIFY
2898 return 1;
2900 return 0;
2904 /* Match any operand. */
2907 any_operand (register rtx op ATTRIBUTE_UNUSED,
2908 enum machine_mode mode ATTRIBUTE_UNUSED)
2910 return 1;
2914 /* Nonzero if OP is a floating point value with value 0.0. */
2917 fp_zero_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2919 REAL_VALUE_TYPE r;
2921 if (GET_CODE (op) != CONST_DOUBLE)
2922 return 0;
2923 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2924 return REAL_VALUES_EQUAL (r, dconst0);
2929 const_operand (register rtx op, register enum machine_mode mode)
2931 switch (mode)
2933 case QFmode:
2934 case HFmode:
2935 if (GET_CODE (op) != CONST_DOUBLE
2936 || GET_MODE (op) != mode
2937 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2938 return 0;
2940 return c4x_immed_float_p (op);
2942 #if Pmode != QImode
2943 case Pmode:
2944 #endif
2945 case QImode:
2946 if (GET_CODE (op) != CONST_INT
2947 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
2948 || GET_MODE_CLASS (mode) != MODE_INT)
2949 return 0;
2951 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
2953 case HImode:
2954 return 0;
2956 default:
2957 return 0;
2963 stik_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2965 return c4x_K_constant (op);
2970 not_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2972 return c4x_N_constant (op);
2977 reg_operand (rtx op, enum machine_mode mode)
2979 if (GET_CODE (op) == SUBREG
2980 && GET_MODE (op) == QFmode)
2981 return 0;
2982 return register_operand (op, mode);
2987 mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2989 /* Allow (subreg:HF (reg:HI)) that may be generated for a union of an
2990 int and a long double. */
2991 if (GET_CODE (op) == SUBREG
2992 && (GET_MODE (op) == QFmode)
2993 && (GET_MODE (SUBREG_REG (op)) == QImode
2994 || GET_MODE (SUBREG_REG (op)) == HImode))
2995 return 1;
2996 return 0;
3001 reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3003 if (REG_P (op) || CONSTANT_P (op))
3004 return 1;
3005 return 0;
3010 not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3012 if (REG_P (op) || CONSTANT_P (op))
3013 return 1;
3014 if (GET_CODE (op) != MEM)
3015 return 0;
3016 op = XEXP (op, 0);
3017 switch (GET_CODE (op))
3019 case REG:
3020 return 1;
3022 case PLUS:
3024 rtx op0 = XEXP (op, 0);
3025 rtx op1 = XEXP (op, 1);
3027 if (! REG_P (op0))
3028 return 0;
3030 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3031 return 1;
3034 case LO_SUM:
3036 rtx op0 = XEXP (op, 0);
3038 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3039 return 1;
3041 break;
3043 case CONST:
3044 case SYMBOL_REF:
3045 case LABEL_REF:
3046 return 1;
3048 default:
3049 break;
3051 return 0;
3056 not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3058 if (REG_P (op) && REGNO (op) == RC_REGNO)
3059 return 0;
3060 return 1;
3064 /* Extended precision register R0-R1. */
3067 r0r1_reg_operand (rtx op, enum machine_mode mode)
3069 if (! reg_operand (op, mode))
3070 return 0;
3071 if (GET_CODE (op) == SUBREG)
3072 op = SUBREG_REG (op);
3073 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3077 /* Extended precision register R2-R3. */
3080 r2r3_reg_operand (rtx op, enum machine_mode mode)
3082 if (! reg_operand (op, mode))
3083 return 0;
3084 if (GET_CODE (op) == SUBREG)
3085 op = SUBREG_REG (op);
3086 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3090 /* Low extended precision register R0-R7. */
3093 ext_low_reg_operand (rtx op, enum machine_mode mode)
3095 if (! reg_operand (op, mode))
3096 return 0;
3097 if (GET_CODE (op) == SUBREG)
3098 op = SUBREG_REG (op);
3099 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3103 /* Extended precision register. */
3106 ext_reg_operand (rtx op, enum machine_mode mode)
3108 if (! reg_operand (op, mode))
3109 return 0;
3110 if (GET_CODE (op) == SUBREG)
3111 op = SUBREG_REG (op);
3112 if (! REG_P (op))
3113 return 0;
3114 return IS_EXT_OR_PSEUDO_REG (op);
3118 /* Standard precision register. */
3121 std_reg_operand (rtx op, enum machine_mode mode)
3123 if (! reg_operand (op, mode))
3124 return 0;
3125 if (GET_CODE (op) == SUBREG)
3126 op = SUBREG_REG (op);
3127 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3130 /* Standard precision or normal register. */
3133 std_or_reg_operand (rtx op, enum machine_mode mode)
3135 if (reload_in_progress)
3136 return std_reg_operand (op, mode);
3137 return reg_operand (op, mode);
3140 /* Address register. */
3143 addr_reg_operand (rtx op, enum machine_mode mode)
3145 if (! reg_operand (op, mode))
3146 return 0;
3147 return c4x_a_register (op);
3151 /* Index register. */
3154 index_reg_operand (rtx op, enum machine_mode mode)
3156 if (! reg_operand (op, mode))
3157 return 0;
3158 if (GET_CODE (op) == SUBREG)
3159 op = SUBREG_REG (op);
3160 return c4x_x_register (op);
3164 /* DP register. */
3167 dp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3169 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3173 /* SP register. */
3176 sp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3178 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3182 /* ST register. */
3185 st_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3187 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3191 /* RC register. */
3194 rc_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3196 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3201 call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3203 return (REG_P (op) || symbolic_address_operand (op, mode));
3207 /* Symbolic address operand. */
3210 symbolic_address_operand (register rtx op,
3211 enum machine_mode mode ATTRIBUTE_UNUSED)
3213 switch (GET_CODE (op))
3215 case CONST:
3216 case SYMBOL_REF:
3217 case LABEL_REF:
3218 return 1;
3219 default:
3220 return 0;
3225 /* Check dst operand of a move instruction. */
3228 dst_operand (rtx op, enum machine_mode mode)
3230 if (GET_CODE (op) == SUBREG
3231 && mixed_subreg_operand (op, mode))
3232 return 0;
3234 if (REG_P (op))
3235 return reg_operand (op, mode);
3237 return nonimmediate_operand (op, mode);
3241 /* Check src operand of two operand arithmetic instructions. */
3244 src_operand (rtx op, enum machine_mode mode)
3246 if (GET_CODE (op) == SUBREG
3247 && mixed_subreg_operand (op, mode))
3248 return 0;
3250 if (REG_P (op))
3251 return reg_operand (op, mode);
3253 if (mode == VOIDmode)
3254 mode = GET_MODE (op);
3256 if (GET_CODE (op) == CONST_INT)
3257 return (mode == QImode || mode == Pmode || mode == HImode)
3258 && c4x_I_constant (op);
3260 /* We don't like CONST_DOUBLE integers. */
3261 if (GET_CODE (op) == CONST_DOUBLE)
3262 return c4x_H_constant (op);
3264 /* Disallow symbolic addresses. Only the predicate
3265 symbolic_address_operand will match these. */
3266 if (GET_CODE (op) == SYMBOL_REF
3267 || GET_CODE (op) == LABEL_REF
3268 || GET_CODE (op) == CONST)
3269 return 0;
3271 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3272 access to symbolic addresses. These operands will get forced
3273 into a register and the movqi expander will generate a
3274 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3275 if (GET_CODE (op) == MEM
3276 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3277 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3278 || GET_CODE (XEXP (op, 0)) == CONST)))
3279 return !TARGET_EXPOSE_LDP &&
3280 ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
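/* Illustrative example (assumed RTL, following the comment above):
   with TARGET_EXPOSE_LDP nonzero a symbolic load may be split as

       (set (reg:QI dp) (high:QI (symbol_ref "foo")))
       (set (reg:QI r0) (mem:QI (lo_sum:QI (reg:QI dp)
                                           (symbol_ref "foo"))))

   so a raw symbolic MEM is accepted here only when neither
   TARGET_EXPOSE_LDP nor TARGET_LOAD_DIRECT_MEMS is set.  */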
3282 return general_operand (op, mode);
3287 src_hi_operand (rtx op, enum machine_mode mode)
3289 if (c4x_O_constant (op))
3290 return 1;
3291 return src_operand (op, mode);
3295 /* Check src operand of two operand logical instructions. */
3298 lsrc_operand (rtx op, enum machine_mode mode)
3300 if (mode == VOIDmode)
3301 mode = GET_MODE (op);
3303 if (mode != QImode && mode != Pmode)
3304 fatal_insn ("mode not QImode", op);
3306 if (GET_CODE (op) == CONST_INT)
3307 return c4x_L_constant (op) || c4x_J_constant (op);
3309 return src_operand (op, mode);
3313 /* Check src operand of two operand tricky instructions. */
3316 tsrc_operand (rtx op, enum machine_mode mode)
3318 if (mode == VOIDmode)
3319 mode = GET_MODE (op);
3321 if (mode != QImode && mode != Pmode)
3322 fatal_insn ("mode not QImode", op);
3324 if (GET_CODE (op) == CONST_INT)
3325 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3327 return src_operand (op, mode);
3331 /* Check src operand of two operand non-immediate instructions. */
3334 nonimmediate_src_operand (rtx op, enum machine_mode mode)
3336 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3337 return 0;
3339 return src_operand (op, mode);
3343 /* Check logical src operand of two operand non-immediate instructions. */
3346 nonimmediate_lsrc_operand (rtx op, enum machine_mode mode)
3348 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3349 return 0;
3351 return lsrc_operand (op, mode);
3356 reg_or_const_operand (rtx op, enum machine_mode mode)
3358 return reg_operand (op, mode) || const_operand (op, mode);
3362 /* Check for indirect operands allowable in parallel instruction. */
3365 par_ind_operand (rtx op, enum machine_mode mode)
3367 if (mode != VOIDmode && mode != GET_MODE (op))
3368 return 0;
3370 return c4x_S_indirect (op);
3374 /* Check for operands allowable in parallel instruction. */
3377 parallel_operand (rtx op, enum machine_mode mode)
3379 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3383 static void
3384 c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
3386 *base = 0;
3387 *incdec = 0;
3388 *index = 0;
3389 *disp = 0;
3391 if (GET_CODE (op) != MEM)
3392 fatal_insn ("invalid indirect memory address", op);
3394 op = XEXP (op, 0);
3395 switch (GET_CODE (op))
3397 case PRE_DEC:
3398 *base = REGNO (XEXP (op, 0));
3399 *incdec = 1;
3400 *disp = -1;
3401 return;
3403 case POST_DEC:
3404 *base = REGNO (XEXP (op, 0));
3405 *incdec = 1;
3406 *disp = 0;
3407 return;
3409 case PRE_INC:
3410 *base = REGNO (XEXP (op, 0));
3411 *incdec = 1;
3412 *disp = 1;
3413 return;
3415 case POST_INC:
3416 *base = REGNO (XEXP (op, 0));
3417 *incdec = 1;
3418 *disp = 0;
3419 return;
3421 case POST_MODIFY:
3422 *base = REGNO (XEXP (op, 0));
3423 if (REG_P (XEXP (XEXP (op, 1), 1)))
3425 *index = REGNO (XEXP (XEXP (op, 1), 1));
3426 *disp = 0; /* ??? */
3428 else
3429 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3430 *incdec = 1;
3431 return;
3433 case PRE_MODIFY:
3434 *base = REGNO (XEXP (op, 0));
3435 if (REG_P (XEXP (XEXP (op, 1), 1)))
3437 *index = REGNO (XEXP (XEXP (op, 1), 1));
3438 *disp = 1; /* ??? */
3440 else
3441 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3442 *incdec = 1;
3444 return;
3446 case REG:
3447 *base = REGNO (op);
3448 return;
3450 case PLUS:
3452 rtx op0 = XEXP (op, 0);
3453 rtx op1 = XEXP (op, 1);
3455 if (c4x_a_register (op0))
3457 if (c4x_x_register (op1))
3459 *base = REGNO (op0);
3460 *index = REGNO (op1);
3461 return;
3463 else if ((GET_CODE (op1) == CONST_INT
3464 && IS_DISP1_CONST (INTVAL (op1))))
3466 *base = REGNO (op0);
3467 *disp = INTVAL (op1);
3468 return;
3471 else if (c4x_x_register (op0) && c4x_a_register (op1))
3473 *base = REGNO (op1);
3474 *index = REGNO (op0);
3475 return;
3478 /* Fall through. */
3480 default:
3481 fatal_insn ("invalid indirect (S) memory address", op);
3487 c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3489 int base0;
3490 int base1;
3491 int incdec0;
3492 int incdec1;
3493 int index0;
3494 int index1;
3495 int disp0;
3496 int disp1;
3498 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3499 return 1;
3501 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3502 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3504 if (store0 && store1)
3506 /* If we have two stores in parallel to the same address, then
3507 the C4x only executes one of the stores. This is unlikely to
3508 cause problems except when writing to a hardware device such
3509 as a FIFO since the second write will be lost. The user
3510 should flag the hardware location as being volatile so that
3511 we don't do this optimization. While it is unlikely that we
3512 have an aliased address if both locations are not marked
3513 volatile, it is probably safer to flag a potential conflict
3514 if either location is volatile. */
3515 if (! flag_argument_noalias)
3517 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3518 return 1;
3522 /* If we have a parallel load and a store to the same address, the load
3523 is performed first, so there is no conflict. Similarly, there is
3524 no conflict if we have parallel loads from the same address. */
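/* Illustrative example (assumed mnemonics): STI R0,*AR0 || STI R1,*AR0
   writes the same word twice but only one store is performed, so two
   stores to one address must be flagged as a conflict, whereas
   LDI *AR0,R0 || STI R1,*AR0 is safe because the load completes
   first.  */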
3526 /* Cannot use auto increment or auto decrement twice for same
3527 base register. */
3528 if (base0 == base1 && incdec0 && incdec1)
3529 return 1;
3531 /* It might be too confusing for GCC if we use a base register
3532 with a side effect and a memory reference using the same register
3533 in parallel. */
3534 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3535 return 1;
3537 /* We cannot optimize the case where op0 and op1 refer to the same
3538 address. */
3539 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3540 return 1;
3542 /* No conflict. */
3543 return 0;
3547 /* Check for while loop inside a decrement and branch loop. */
3550 c4x_label_conflict (rtx insn, rtx jump, rtx db)
3552 while (insn)
3554 if (GET_CODE (insn) == CODE_LABEL)
3556 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3557 return 1;
3558 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3559 return 0;
3561 insn = PREV_INSN (insn);
3563 return 1;
3567 /* Validate combination of operands for parallel load/store instructions. */
3570 valid_parallel_load_store (rtx *operands,
3571 enum machine_mode mode ATTRIBUTE_UNUSED)
3573 rtx op0 = operands[0];
3574 rtx op1 = operands[1];
3575 rtx op2 = operands[2];
3576 rtx op3 = operands[3];
3578 if (GET_CODE (op0) == SUBREG)
3579 op0 = SUBREG_REG (op0);
3580 if (GET_CODE (op1) == SUBREG)
3581 op1 = SUBREG_REG (op1);
3582 if (GET_CODE (op2) == SUBREG)
3583 op2 = SUBREG_REG (op2);
3584 if (GET_CODE (op3) == SUBREG)
3585 op3 = SUBREG_REG (op3);
3587 /* The patterns should only allow ext_low_reg_operand() or
3588 par_ind_operand() operands. Thus of the 4 operands, only 2
3589 should be REGs and the other 2 should be MEMs. */
3591 /* This test prevents the multipack pass from using this pattern if
3592 op0 is used as an index or base register in op2 or op3, since
3593 this combination will require reloading. */
3594 if (GET_CODE (op0) == REG
3595 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3596 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3597 return 0;
3599 /* LDI||LDI. */
3600 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3601 return (REGNO (op0) != REGNO (op2))
3602 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3603 && ! c4x_address_conflict (op1, op3, 0, 0);
3605 /* STI||STI. */
3606 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3607 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3608 && ! c4x_address_conflict (op0, op2, 1, 1);
3610 /* LDI||STI. */
3611 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3612 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3613 && ! c4x_address_conflict (op1, op2, 0, 1);
3615 /* STI||LDI. */
3616 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3617 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3618 && ! c4x_address_conflict (op0, op3, 1, 0);
3620 return 0;
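/* Illustrative example (assumed mnemonics): an LDI||LDI pairing such as

       ldi  *ar0, r0
   ||  ldi  *ar1, r1        ; distinct destinations, no address conflict

   is accepted, while loading twice into the same register, or using
   op0 as a base or index register of one of the memory operands, is
   rejected above.  */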
3625 valid_parallel_operands_4 (rtx *operands,
3626 enum machine_mode mode ATTRIBUTE_UNUSED)
3628 rtx op0 = operands[0];
3629 rtx op2 = operands[2];
3631 if (GET_CODE (op0) == SUBREG)
3632 op0 = SUBREG_REG (op0);
3633 if (GET_CODE (op2) == SUBREG)
3634 op2 = SUBREG_REG (op2);
3636 /* This test prevents the multipack pass from using this pattern if
3637 op0 is used as an index or base register in op2, since this combination
3638 will require reloading. */
3639 if (GET_CODE (op0) == REG
3640 && GET_CODE (op2) == MEM
3641 && reg_mentioned_p (op0, XEXP (op2, 0)))
3642 return 0;
3644 return 1;
3649 valid_parallel_operands_5 (rtx *operands,
3650 enum machine_mode mode ATTRIBUTE_UNUSED)
3652 int regs = 0;
3653 rtx op0 = operands[0];
3654 rtx op1 = operands[1];
3655 rtx op2 = operands[2];
3656 rtx op3 = operands[3];
3658 if (GET_CODE (op0) == SUBREG)
3659 op0 = SUBREG_REG (op0);
3660 if (GET_CODE (op1) == SUBREG)
3661 op1 = SUBREG_REG (op1);
3662 if (GET_CODE (op2) == SUBREG)
3663 op2 = SUBREG_REG (op2);
3665 /* The patterns should only allow ext_low_reg_operand() or
3666 par_ind_operand() operands. Operands 1 and 2 may be commutative
3667 but only one of them can be a register. */
3668 if (GET_CODE (op1) == REG)
3669 regs++;
3670 if (GET_CODE (op2) == REG)
3671 regs++;
3673 if (regs != 1)
3674 return 0;
3676 /* This test prevents the multipack pass from using this pattern if
3677 op0 is used as an index or base register in op3, since this combination
3678 will require reloading. */
3679 if (GET_CODE (op0) == REG
3680 && GET_CODE (op3) == MEM
3681 && reg_mentioned_p (op0, XEXP (op3, 0)))
3682 return 0;
3684 return 1;
3689 valid_parallel_operands_6 (rtx *operands,
3690 enum machine_mode mode ATTRIBUTE_UNUSED)
3692 int regs = 0;
3693 rtx op0 = operands[0];
3694 rtx op1 = operands[1];
3695 rtx op2 = operands[2];
3696 rtx op4 = operands[4];
3697 rtx op5 = operands[5];
3699 if (GET_CODE (op1) == SUBREG)
3700 op1 = SUBREG_REG (op1);
3701 if (GET_CODE (op2) == SUBREG)
3702 op2 = SUBREG_REG (op2);
3703 if (GET_CODE (op4) == SUBREG)
3704 op4 = SUBREG_REG (op4);
3705 if (GET_CODE (op5) == SUBREG)
3706 op5 = SUBREG_REG (op5);
3708 /* The patterns should only allow ext_low_reg_operand() or
3709 par_ind_operand() operands. Thus of the 4 input operands, only 2
3710 should be REGs and the other 2 should be MEMs. */
3712 if (GET_CODE (op1) == REG)
3713 regs++;
3714 if (GET_CODE (op2) == REG)
3715 regs++;
3716 if (GET_CODE (op4) == REG)
3717 regs++;
3718 if (GET_CODE (op5) == REG)
3719 regs++;
3721 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3722 Perhaps we should count the MEMs as well? */
3723 if (regs != 2)
3724 return 0;
3726 /* This test prevents the multipack pass from using this pattern if
3727 op0 is used as an index or base register in op4 or op5, since
3728 this combination will require reloading. */
3729 if (GET_CODE (op0) == REG
3730 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3731 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3732 return 0;
3734 return 1;
3738 /* Validate combination of src operands. Note that the operands have
3739 been screened by the src_operand predicate. We just have to check
3740 that the combination of operands is valid. If FORCE is set, ensure
3741 that the destination regno is valid if we have a 2 operand insn. */
3743 static int
3744 c4x_valid_operands (enum rtx_code code, rtx *operands,
3745 enum machine_mode mode ATTRIBUTE_UNUSED,
3746 int force)
3748 rtx op0;
3749 rtx op1;
3750 rtx op2;
3751 enum rtx_code code1;
3752 enum rtx_code code2;
3755 /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
3756 if (code == IF_THEN_ELSE)
3757 return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);
3759 if (code == COMPARE)
3761 op1 = operands[0];
3762 op2 = operands[1];
3764 else
3766 op1 = operands[1];
3767 op2 = operands[2];
3770 op0 = operands[0];
3772 if (GET_CODE (op0) == SUBREG)
3773 op0 = SUBREG_REG (op0);
3774 if (GET_CODE (op1) == SUBREG)
3775 op1 = SUBREG_REG (op1);
3776 if (GET_CODE (op2) == SUBREG)
3777 op2 = SUBREG_REG (op2);
3779 code1 = GET_CODE (op1);
3780 code2 = GET_CODE (op2);
3783 if (code1 == REG && code2 == REG)
3784 return 1;
3786 if (code1 == MEM && code2 == MEM)
3788 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3789 return 1;
3790 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3793 /* We cannot handle two MEMs or two CONSTS, etc. */
3794 if (code1 == code2)
3795 return 0;
3797 if (code1 == REG)
3799 switch (code2)
3801 case CONST_INT:
3802 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3803 return 1;
3804 break;
3806 case CONST_DOUBLE:
3807 if (! c4x_H_constant (op2))
3808 return 0;
3809 break;
3811 /* Any valid memory operand screened by src_operand is OK. */
3812 case MEM:
3813 break;
3815 default:
3816 fatal_insn ("c4x_valid_operands: Internal error", op2);
3817 break;
3820 if (GET_CODE (op0) == SCRATCH)
3821 return 1;
3823 if (!REG_P (op0))
3824 return 0;
3826 /* Check that we have a valid destination register for a two operand
3827 instruction. */
3828 return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
3832 /* Check non-commutative operators. */
3833 if (code == ASHIFTRT || code == LSHIFTRT
3834 || code == ASHIFT || code == COMPARE)
3835 return code2 == REG
3836 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3839 /* Assume MINUS is commutative since the subtract patterns
3840 also support the reverse subtract instructions. Since op1
3841 is not a register, and op2 is a register, op1 can only
3842 be a restricted memory operand for a shift instruction. */
3843 if (code2 == REG)
3845 switch (code1)
3847 case CONST_INT:
3848 break;
3850 case CONST_DOUBLE:
3851 if (! c4x_H_constant (op1))
3852 return 0;
3853 break;
3855 /* Any valid memory operand screened by src_operand is OK. */
3856 case MEM:
3857 break;
3859 default:
3860 abort ();
3861 break;
3864 if (GET_CODE (op0) == SCRATCH)
3865 return 1;
3867 if (!REG_P (op0))
3868 return 0;
3870 /* Check that we have a valid destination register for a two operand
3871 instruction. */
3872 return ! force || REGNO (op1) == REGNO (op0);
3875 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3876 return 1;
3878 return 0;
3882 int valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3885 /* If we are not optimizing then we have to let anything go and let
3886 reload fix things up. instantiate_decl in function.c can produce
3887 invalid insns by changing the offset of a memory operand from a
3888 valid one into an invalid one, when the second operand is also a
3889 memory operand. The alternative is not to allow two memory
3890 operands for an insn when not optimizing. The problem only rarely
3891 occurs, for example with the C-torture program DFcmp.c. */
3893 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
3898 legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3900 /* Compare only has 2 operands. */
3901 if (code == COMPARE)
3903 /* During RTL generation, force constants into pseudos so that
3904 they can get hoisted out of loops. This will tie up an extra
3905 register but can save an extra cycle. Only do this if loop
3906 optimization is enabled. (We cannot pull this trick for add and
3907 sub instructions since the flow pass won't find
3908 autoincrements etc.) This allows us to generate compare
3909 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
3910 of LDI *AR0++, R0; CMPI 42, R0.
3912 Note that expand_binops will try to load an expensive constant
3913 into a register if it is used within a loop. Unfortunately,
3914 the cost mechanism doesn't allow us to look at the other
3915 operand to decide whether the constant is expensive. */
3917 if (! reload_in_progress
3918 && TARGET_HOIST
3919 && optimize > 0
3920 && GET_CODE (operands[1]) == CONST_INT
3921 && preserve_subexpressions_p ()
3922 && rtx_cost (operands[1], code) > 1)
3923 operands[1] = force_reg (mode, operands[1]);
3925 if (! reload_in_progress
3926 && ! c4x_valid_operands (code, operands, mode, 0))
3927 operands[0] = force_reg (mode, operands[0]);
3928 return 1;
3931 /* We cannot do this for ADDI/SUBI insns since we will
3932 defeat the flow pass from finding autoincrement addressing
3933 opportunities. */
3934 if (! reload_in_progress
3935 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
3936 && TARGET_HOIST
3937 && optimize > 1
3938 && GET_CODE (operands[2]) == CONST_INT
3939 && preserve_subexpressions_p ()
3940 && rtx_cost (operands[2], code) > 1)
3941 operands[2] = force_reg (mode, operands[2]);
3943 /* We can get better code on a C30 if we force constant shift counts
3944 into a register. This way they can get hoisted out of loops,
3945 tying up a register but saving an instruction. The downside is
3946 that they may get allocated to an address or index register, and
3947 thus we will get a pipeline conflict if there is a nearby
3948 indirect address using an address register.
3950 Note that expand_binops will not try to load an expensive constant
3951 into a register if it is used within a loop for a shift insn. */
3953 if (! reload_in_progress
3954 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
3956 /* If the operand combination is invalid, we force operand1 into a
3957 register, preventing reload from having to do this at a
3958 later stage. */
3959 operands[1] = force_reg (mode, operands[1]);
3960 if (TARGET_FORCE)
3962 emit_move_insn (operands[0], operands[1]);
3963 operands[1] = copy_rtx (operands[0]);
3965 else
3967 /* Just in case... */
3968 if (! c4x_valid_operands (code, operands, mode, 0))
3969 operands[2] = force_reg (mode, operands[2]);
3973 /* Right shifts require a negative shift count, but GCC expects
3974 a positive count, so we emit a NEG. */
3975 if ((code == ASHIFTRT || code == LSHIFTRT)
3976 && (GET_CODE (operands[2]) != CONST_INT))
3977 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
3980 /* When the shift count is greater than 32 then the result
3981 can be implementation dependent. We truncate the result to
3982 fit in 5 bits so that we do not emit invalid code when
3983 optimizing---such as trying to generate lhu2 with 20021124-1.c. */
3984 if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
3985 && (GET_CODE (operands[2]) == CONST_INT))
3986 && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
3987 operands[2]
3988 = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));
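/* Worked example (illustrative): QImode is 32 bits wide on the C4x, so
   a constant shift count of 35 is truncated to 35 & 31 = 3, keeping
   the emitted count within the 5 bits the hardware honors.  */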
3990 return 1;
3994 /* The following predicates are used for instruction scheduling. */
3997 group1_reg_operand (rtx op, enum machine_mode mode)
3999 if (mode != VOIDmode && mode != GET_MODE (op))
4000 return 0;
4001 if (GET_CODE (op) == SUBREG)
4002 op = SUBREG_REG (op);
4003 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4008 group1_mem_operand (rtx op, enum machine_mode mode)
4010 if (mode != VOIDmode && mode != GET_MODE (op))
4011 return 0;
4013 if (GET_CODE (op) == MEM)
4015 op = XEXP (op, 0);
4016 if (GET_CODE (op) == PLUS)
4018 rtx op0 = XEXP (op, 0);
4019 rtx op1 = XEXP (op, 1);
4021 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4022 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4023 return 1;
4025 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4026 return 1;
4029 return 0;
4033 /* Return true if OP is any one of the address registers. */
4036 arx_reg_operand (rtx op, enum machine_mode mode)
4038 if (mode != VOIDmode && mode != GET_MODE (op))
4039 return 0;
4040 if (GET_CODE (op) == SUBREG)
4041 op = SUBREG_REG (op);
4042 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4046 static int
4047 c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
4049 if (mode != VOIDmode && mode != GET_MODE (op))
4050 return 0;
4051 if (GET_CODE (op) == SUBREG)
4052 op = SUBREG_REG (op);
4053 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4057 static int
4058 c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
4060 if (mode != VOIDmode && mode != GET_MODE (op))
4061 return 0;
4063 if (GET_CODE (op) == MEM)
4065 op = XEXP (op, 0);
4066 switch (GET_CODE (op))
4068 case PRE_DEC:
4069 case POST_DEC:
4070 case PRE_INC:
4071 case POST_INC:
4072 op = XEXP (op, 0);
4074 case REG:
4075 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4077 case PRE_MODIFY:
4078 case POST_MODIFY:
4079 if (REG_P (XEXP (op, 0)) && (! reload_completed
4080 || (REGNO (XEXP (op, 0)) == regno)))
4081 return 1;
4082 if (REG_P (XEXP (XEXP (op, 1), 1))
4083 && (! reload_completed
4084 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4085 return 1;
4086 break;
4088 case PLUS:
4090 rtx op0 = XEXP (op, 0);
4091 rtx op1 = XEXP (op, 1);
4093 if ((REG_P (op0) && (! reload_completed
4094 || (REGNO (op0) == regno)))
4095 || (REG_P (op1) && (! reload_completed
4096 || (REGNO (op1) == regno))))
4097 return 1;
4099 break;
4101 default:
4102 break;
4105 return 0;
4110 ar0_reg_operand (rtx op, enum machine_mode mode)
4112 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4117 ar0_mem_operand (rtx op, enum machine_mode mode)
4119 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4124 ar1_reg_operand (rtx op, enum machine_mode mode)
4126 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4131 ar1_mem_operand (rtx op, enum machine_mode mode)
4133 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4138 ar2_reg_operand (rtx op, enum machine_mode mode)
4140 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4145 ar2_mem_operand (rtx op, enum machine_mode mode)
4147 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4152 ar3_reg_operand (rtx op, enum machine_mode mode)
4154 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4159 ar3_mem_operand (rtx op, enum machine_mode mode)
4161 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4166 ar4_reg_operand (rtx op, enum machine_mode mode)
4168 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4173 ar4_mem_operand (rtx op, enum machine_mode mode)
4175 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4180 ar5_reg_operand (rtx op, enum machine_mode mode)
4182 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4187 ar5_mem_operand (rtx op, enum machine_mode mode)
4189 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4194 ar6_reg_operand (rtx op, enum machine_mode mode)
4196 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4201 ar6_mem_operand (rtx op, enum machine_mode mode)
4203 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4208 ar7_reg_operand (rtx op, enum machine_mode mode)
4210 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4215 ar7_mem_operand (rtx op, enum machine_mode mode)
4217 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4222 ir0_reg_operand (rtx op, enum machine_mode mode)
4224 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4229 ir0_mem_operand (rtx op, enum machine_mode mode)
4231 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4236 ir1_reg_operand (rtx op, enum machine_mode mode)
4238 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4243 ir1_mem_operand (rtx op, enum machine_mode mode)
4245 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4249 /* This is similar to operand_subword but allows autoincrement
4250 addressing. */
4253 c4x_operand_subword (rtx op, int i, int validate_address,
4254 enum machine_mode mode)
4256 if (mode != HImode && mode != HFmode)
4257 fatal_insn ("c4x_operand_subword: invalid mode", op);
4259 if (mode == HFmode && REG_P (op))
4260 fatal_insn ("c4x_operand_subword: invalid operand", op);
4262 if (GET_CODE (op) == MEM)
4264 enum rtx_code code = GET_CODE (XEXP (op, 0));
4265 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4266 enum machine_mode submode;
4268 submode = mode;
4269 if (mode == HImode)
4270 submode = QImode;
4271 else if (mode == HFmode)
4272 submode = QFmode;
4274 switch (code)
4276 case POST_INC:
4277 case PRE_INC:
4278 return gen_rtx_MEM (submode, XEXP (op, 0));
4280 case POST_DEC:
4281 case PRE_DEC:
4282 case PRE_MODIFY:
4283 case POST_MODIFY:
4284 /* We could handle these with some difficulty.
4285 e.g., *p-- => *(p-=2); *(p+1). */
4286 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4288 case SYMBOL_REF:
4289 case LABEL_REF:
4290 case CONST:
4291 case CONST_INT:
4292 fatal_insn ("c4x_operand_subword: invalid address", op);
4294 /* Even though offsettable_address_p considers (MEM
4295 (LO_SUM)) to be offsettable, it is not safe if the
4296 address is at the end of the data page since we also have
4297 to fix up the associated HIGH part. In this case where
4298 we are trying to split a HImode or HFmode memory
4299 reference, we would have to emit another insn to reload a
4300 new HIGH value. It's easier to disable LO_SUM memory references
4301 in HImode or HFmode and we probably get better code. */
4302 case LO_SUM:
4303 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4305 default:
4306 break;
4310 return operand_subword (op, i, validate_address, mode);
4313 struct name_list
4315 struct name_list *next;
4316 const char *name;
4319 static struct name_list *global_head;
4320 static struct name_list *extern_head;
4323 /* Add NAME to list of global symbols and remove from external list if
4324 present on external list. */
4326 void
4327 c4x_global_label (const char *name)
4329 struct name_list *p, *last;
4331 /* Do not insert duplicate names, so linearly search through list of
4332 existing names. */
4333 p = global_head;
4334 while (p)
4336 if (strcmp (p->name, name) == 0)
4337 return;
4338 p = p->next;
4340 p = (struct name_list *) xmalloc (sizeof *p);
4341 p->next = global_head;
4342 p->name = name;
4343 global_head = p;
4345 /* Remove this name from ref list if present. */
4346 last = NULL;
4347 p = extern_head;
4348 while (p)
4350 if (strcmp (p->name, name) == 0)
4352 if (last)
4353 last->next = p->next;
4354 else
4355 extern_head = p->next;
4356 break;
4358 last = p;
4359 p = p->next;
4364 /* Add NAME to list of external symbols. */
4366 void
4367 c4x_external_ref (const char *name)
4369 struct name_list *p;
4371 /* Do not insert duplicate names. */
4372 p = extern_head;
4373 while (p)
4375 if (strcmp (p->name, name) == 0)
4376 return;
4377 p = p->next;
4380 /* Do not insert ref if global found. */
4381 p = global_head;
4382 while (p)
4384 if (strcmp (p->name, name) == 0)
4385 return;
4386 p = p->next;
4388 p = (struct name_list *) xmalloc (sizeof *p);
4389 p->next = extern_head;
4390 p->name = name;
4391 extern_head = p;
4394 /* We need to have a data section we can identify so that we can set
4395 the DP register back to a data pointer in the small memory model.
4396 This is only required for ISRs if we are paranoid that someone
4397 may have quietly changed this register on the sly. */
4398 static void
4399 c4x_file_start (void)
4401 int dspversion = 0;
4402 if (TARGET_C30) dspversion = 30;
4403 if (TARGET_C31) dspversion = 31;
4404 if (TARGET_C32) dspversion = 32;
4405 if (TARGET_C33) dspversion = 33;
4406 if (TARGET_C40) dspversion = 40;
4407 if (TARGET_C44) dspversion = 44;
4409 default_file_start ();
4410 fprintf (asm_out_file, "\t.version\t%d\n", dspversion);
4411 fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
4415 static void
4416 c4x_file_end (void)
4418 struct name_list *p;
4420 /* Output all external names that are not global. */
4421 p = extern_head;
4422 while (p)
4424 fprintf (asm_out_file, "\t.ref\t");
4425 assemble_name (asm_out_file, p->name);
4426 fprintf (asm_out_file, "\n");
4427 p = p->next;
4429 fprintf (asm_out_file, "\t.end\n");
4433 static void
4434 c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
4436 while (list != NULL_TREE
4437 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4438 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4439 list = TREE_CHAIN (list);
4440 if (list)
4441 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4442 *attributes);
4446 static void
4447 c4x_insert_attributes (tree decl, tree *attributes)
4449 switch (TREE_CODE (decl))
4451 case FUNCTION_DECL:
4452 c4x_check_attribute ("section", code_tree, decl, attributes);
4453 c4x_check_attribute ("const", pure_tree, decl, attributes);
4454 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4455 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4456 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4457 break;
4459 case VAR_DECL:
4460 c4x_check_attribute ("section", data_tree, decl, attributes);
4461 break;
4463 default:
4464 break;
4468 /* Table of valid machine attributes. */
4469 const struct attribute_spec c4x_attribute_table[] =
4471 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4472 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4473 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4474 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4475 { NULL, 0, 0, false, false, false, NULL }
4478 /* Handle an attribute requiring a FUNCTION_TYPE;
4479 arguments as in struct attribute_spec.handler. */
4480 static tree
4481 c4x_handle_fntype_attribute (tree *node, tree name,
4482 tree args ATTRIBUTE_UNUSED,
4483 int flags ATTRIBUTE_UNUSED,
4484 bool *no_add_attrs)
4486 if (TREE_CODE (*node) != FUNCTION_TYPE)
4488 warning ("`%s' attribute only applies to functions",
4489 IDENTIFIER_POINTER (name));
4490 *no_add_attrs = true;
4493 return NULL_TREE;
4497 /* !!! FIXME to emit RPTS correctly. */
4500 c4x_rptb_rpts_p (rtx insn, rtx op)
4502 /* The next insn should be our label marking where the
4503 repeat block starts. */
4504 insn = NEXT_INSN (insn);
4505 if (GET_CODE (insn) != CODE_LABEL)
4507 /* Some insns may have been shifted between the RPTB insn
4508 and the top label... They were probably destined to
4509 be moved out of the loop. For now, let's leave them
4510 where they are and complain when debugging. We should
4511 probably move these insns before the repeat block insn. */
4512 if (TARGET_DEBUG)
4513 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4514 insn);
4515 return 0;
4518 /* Skip any notes. */
4519 insn = next_nonnote_insn (insn);
4521 /* This should be our first insn in the loop. */
4522 if (! INSN_P (insn))
4523 return 0;
4525 /* Skip any notes. */
4526 insn = next_nonnote_insn (insn);
4528 if (! INSN_P (insn))
4529 return 0;
4531 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4532 return 0;
4534 if (TARGET_RPTS)
4535 return 1;
4537 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4541 /* Check if register r11 is used as the destination of an insn. */
4543 static int
4544 c4x_r11_set_p (rtx x)
4546 rtx set;
4547 int i, j;
4548 const char *fmt;
4550 if (x == 0)
4551 return 0;
4553 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4554 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4556 if (INSN_P (x) && (set = single_set (x)))
4557 x = SET_DEST (set);
4559 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4560 return 1;
4562 fmt = GET_RTX_FORMAT (GET_CODE (x));
4563 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4565 if (fmt[i] == 'e')
4567 if (c4x_r11_set_p (XEXP (x, i)))
4568 return 1;
4570 else if (fmt[i] == 'E')
4571 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4572 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4573 return 1;
4575 return 0;
4579 /* The c4x sometimes has a problem when the insn before the laj insn
4580 sets the r11 register. Check for this situation. */
4583 c4x_check_laj_p (rtx insn)
4585 insn = prev_nonnote_insn (insn);
4587 /* If this is the start of the function no nop is needed. */
4588 if (insn == 0)
4589 return 0;
4591 /* If the previous insn is a code label we have to insert a nop. This
4592 could be a jump or table jump. We can find the normal jumps by
4593 scanning the function but this will not find table jumps. */
4594 if (GET_CODE (insn) == CODE_LABEL)
4595 return 1;
4597 /* If the previous insn sets register r11 we have to insert a nop. */
4598 if (c4x_r11_set_p (insn))
4599 return 1;
4601 /* No nop needed. */
4602 return 0;
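/* Illustrative example (LAJ is assumed here to use R11 as its link
   register, as the R11 check above suggests): in

       ldi   r0, r11            ; previous insn writes R11
       laj   _func              ; LAJ needs R11 for the return address

   the back-to-back use of R11 is a hazard, so a NOP is requested
   between the two insns.  */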
4606 /* Adjust the cost of a scheduling dependency. Return the new cost of
4607 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4608 A set of an address register followed by a use incurs a 2 cycle
4609 stall (reduced to a single cycle on the c40 using LDA), while
4610 a read of an address register followed by a use incurs a single cycle stall. */
4612 #define SET_USE_COST 3
4613 #define SETLDA_USE_COST 2
4614 #define READ_USE_COST 2
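/* Illustrative example: for the dependent pair

       ldi   r0, ar0            ; dep_insn sets AR0
       ldi   *ar0, r1           ; insn uses AR0 for addressing

   the set-then-use stall gives the dependency a cost of SET_USE_COST
   (3); an LDA setter on the c40 gives SETLDA_USE_COST (2), and a mere
   read of AR0 before the use gives READ_USE_COST (2).  */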
4616 static int
4617 c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4619 /* Don't worry about this until we know what registers have been
4620 assigned. */
4621 if (flag_schedule_insns == 0 && ! reload_completed)
4622 return 0;
4624 /* How do we handle dependencies where a read followed by another
4625 read causes a pipeline stall? For example, a read of ar0 followed
4626 by the use of ar0 for a memory reference. It looks like we
4627 need to extend the scheduler to handle this case. */
4629 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4630 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4631 so only deal with insns we know about. */
4632 if (recog_memoized (dep_insn) < 0)
4633 return 0;
4635 if (REG_NOTE_KIND (link) == 0)
4636 {
4637 int max = 0;
4639 /* Data dependency; DEP_INSN writes a register that INSN reads some
4640 cycles later. */
4641 if (TARGET_C3X)
4642 {
4643 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4644 max = SET_USE_COST > max ? SET_USE_COST : max;
4645 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4646 max = READ_USE_COST > max ? READ_USE_COST : max;
4647 }
4648 else
4649 {
4650 /* This could be significantly optimized. We should look
4651 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4652 insn uses ar0-ar7. We then test if the same register
4653 is used. The tricky bit is that some operands will
4654 use several registers... */
4655 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4656 max = SET_USE_COST > max ? SET_USE_COST : max;
4657 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4658 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4659 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4660 max = READ_USE_COST > max ? READ_USE_COST : max;
4662 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4663 max = SET_USE_COST > max ? SET_USE_COST : max;
4664 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4665 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4666 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4667 max = READ_USE_COST > max ? READ_USE_COST : max;
4669 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4670 max = SET_USE_COST > max ? SET_USE_COST : max;
4671 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4672 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4673 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4674 max = READ_USE_COST > max ? READ_USE_COST : max;
4676 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4677 max = SET_USE_COST > max ? SET_USE_COST : max;
4678 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4679 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4680 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4681 max = READ_USE_COST > max ? READ_USE_COST : max;
4683 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4684 max = SET_USE_COST > max ? SET_USE_COST : max;
4685 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4686 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4687 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4688 max = READ_USE_COST > max ? READ_USE_COST : max;
4690 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4691 max = SET_USE_COST > max ? SET_USE_COST : max;
4692 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4693 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4694 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4695 max = READ_USE_COST > max ? READ_USE_COST : max;
4697 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4698 max = SET_USE_COST > max ? SET_USE_COST : max;
4699 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4700 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4701 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4702 max = READ_USE_COST > max ? READ_USE_COST : max;
4704 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4705 max = SET_USE_COST > max ? SET_USE_COST : max;
4706 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4707 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4708 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4709 max = READ_USE_COST > max ? READ_USE_COST : max;
4711 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4712 max = SET_USE_COST > max ? SET_USE_COST : max;
4713 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4714 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4716 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4717 max = SET_USE_COST > max ? SET_USE_COST : max;
4718 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4719 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4720 }
4722 if (max)
4723 cost = max;
4725 /* For other data dependencies, the default cost specified in the
4726 md is correct. */
4727 return cost;
4728 }
4729 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4730 {
4731 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4732 cycles later. */
4734 /* For c4x anti dependencies, the cost is 0. */
4735 return 0;
4736 }
4737 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4738 {
4739 /* Output dependency; DEP_INSN writes a register that INSN writes some
4740 cycles later. */
4742 /* For c4x output dependencies, the cost is 0. */
4743 return 0;
4744 }
4745 else
4746 abort ();
4747 }
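#if 0
/* Sketch (not part of the original code): the eight near-identical
   ar0-ar7 blocks above could be generated with a hypothetical helper
   macro like the one below, invoked as C4X_AR_DEP_COST (0) ...
   C4X_AR_DEP_COST (7).  It assumes the get_attr_* accessors that
   genattrtab generates from c4x.md, and trades grep-ability of the
   attribute names for brevity.  */
#define C4X_AR_DEP_COST(N)						\
  do									\
    {									\
      if (get_attr_setar##N (dep_insn) && get_attr_usear##N (insn))	\
	max = SET_USE_COST > max ? SET_USE_COST : max;			\
      if (get_attr_setlda_ar##N (dep_insn) && get_attr_usear##N (insn)) \
	max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;		\
      if (get_attr_readar##N (dep_insn) && get_attr_usear##N (insn))	\
	max = READ_USE_COST > max ? READ_USE_COST : max;		\
    }									\
  while (0)
#endif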
4749 void
4750 c4x_init_builtins (void)
4751 {
4752 tree endlink = void_list_node;
4754 builtin_function ("fast_ftoi",
4755 build_function_type
4756 (integer_type_node,
4757 tree_cons (NULL_TREE, double_type_node, endlink)),
4758 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
4759 builtin_function ("ansi_ftoi",
4760 build_function_type
4761 (integer_type_node,
4762 tree_cons (NULL_TREE, double_type_node, endlink)),
4763 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
4764 if (TARGET_C3X)
4765 builtin_function ("fast_imult",
4766 build_function_type
4767 (integer_type_node,
4768 tree_cons (NULL_TREE, integer_type_node,
4769 tree_cons (NULL_TREE,
4770 integer_type_node, endlink))),
4771 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
4772 else
4773 {
4774 builtin_function ("toieee",
4775 build_function_type
4776 (double_type_node,
4777 tree_cons (NULL_TREE, double_type_node, endlink)),
4778 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4779 builtin_function ("frieee",
4780 build_function_type
4781 (double_type_node,
4782 tree_cons (NULL_TREE, double_type_node, endlink)),
4783 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4784 builtin_function ("fast_invf",
4785 build_function_type
4786 (double_type_node,
4787 tree_cons (NULL_TREE, double_type_node, endlink)),
4788 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
4789 }
4790 }
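/* Example (illustrative, not from the original source): with these
   builtins registered, user code can write

     int    i = fast_ftoi (x);    -- truncating conversion, C4X_BUILTIN_FIX
     int    j = ansi_ftoi (x);    -- ANSI semantics, C4X_BUILTIN_FIX_ANSI
     double y = toieee (z);       -- native float to IEEE, c4x only
     double w = frieee (z);       -- IEEE back to native, c4x only

   while fast_imult is only registered for the c3x, matching the
   TARGET_C3X split above.  */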
4793 rtx
4794 c4x_expand_builtin (tree exp, rtx target,
4795 rtx subtarget ATTRIBUTE_UNUSED,
4796 enum machine_mode mode ATTRIBUTE_UNUSED,
4797 int ignore ATTRIBUTE_UNUSED)
4798 {
4799 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4800 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4801 tree arglist = TREE_OPERAND (exp, 1);
4802 tree arg0, arg1;
4803 rtx r0, r1;
4805 switch (fcode)
4806 {
4807 case C4X_BUILTIN_FIX:
4808 arg0 = TREE_VALUE (arglist);
4809 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4810 r0 = protect_from_queue (r0, 0);
4811 if (! target || ! register_operand (target, QImode))
4812 target = gen_reg_rtx (QImode);
4813 emit_insn (gen_fixqfqi_clobber (target, r0));
4814 return target;
4816 case C4X_BUILTIN_FIX_ANSI:
4817 arg0 = TREE_VALUE (arglist);
4818 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4819 r0 = protect_from_queue (r0, 0);
4820 if (! target || ! register_operand (target, QImode))
4821 target = gen_reg_rtx (QImode);
4822 emit_insn (gen_fix_truncqfqi2 (target, r0));
4823 return target;
4825 case C4X_BUILTIN_MPYI:
4826 if (! TARGET_C3X)
4827 break;
4828 arg0 = TREE_VALUE (arglist);
4829 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4830 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
4831 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
4832 r0 = protect_from_queue (r0, 0);
4833 r1 = protect_from_queue (r1, 0);
4834 if (! target || ! register_operand (target, QImode))
4835 target = gen_reg_rtx (QImode);
4836 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
4837 return target;
4839 case C4X_BUILTIN_TOIEEE:
4840 if (TARGET_C3X)
4841 break;
4842 arg0 = TREE_VALUE (arglist);
4843 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4844 r0 = protect_from_queue (r0, 0);
4845 if (! target || ! register_operand (target, QFmode))
4846 target = gen_reg_rtx (QFmode);
4847 emit_insn (gen_toieee (target, r0));
4848 return target;
4850 case C4X_BUILTIN_FRIEEE:
4851 if (TARGET_C3X)
4852 break;
4853 arg0 = TREE_VALUE (arglist);
4854 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4855 r0 = protect_from_queue (r0, 0);
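      /* Added note (an inference from the spill below, not original
	 text): the frieee pattern apparently needs its input operand
	 in memory, so a register argument is first copied out to a
	 stack slot.  */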
4856 if (register_operand (r0, QFmode))
4857 {
4858 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
4859 emit_move_insn (r1, r0);
4860 r0 = r1;
4861 }
4862 if (! target || ! register_operand (target, QFmode))
4863 target = gen_reg_rtx (QFmode);
4864 emit_insn (gen_frieee (target, r0));
4865 return target;
4867 case C4X_BUILTIN_RCPF:
4868 if (TARGET_C3X)
4869 break;
4870 arg0 = TREE_VALUE (arglist);
4871 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4872 r0 = protect_from_queue (r0, 0);
4873 if (! target || ! register_operand (target, QFmode))
4874 target = gen_reg_rtx (QFmode);
4875 emit_insn (gen_rcpfqf_clobber (target, r0));
4876 return target;
4877 }
4878 return NULL_RTX;
4879 }
4881 static void
4882 c4x_init_libfuncs (void)
4883 {
4884 set_optab_libfunc (smul_optab, QImode, "__mulqi3");
4885 set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
4886 set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
4887 set_optab_libfunc (smod_optab, QImode, "__modqi3");
4888 set_optab_libfunc (umod_optab, QImode, "__umodqi3");
4889 set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
4890 set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
4891 set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
4892 set_optab_libfunc (smul_optab, HImode, "__mulhi3");
4893 set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
4894 set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
4895 set_optab_libfunc (smod_optab, HImode, "__modhi3");
4896 set_optab_libfunc (umod_optab, HImode, "__umodhi3");
4897 set_optab_libfunc (ffs_optab, QImode, "__ffs");
4898 smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
4899 umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
4900 fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
4901 fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
4902 fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
4903 fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
4904 floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
4905 floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
4906 floathihf2_libfunc = init_one_libfunc ("__floathihf2");
4907 floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
4908 }
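/* Added note (illustrative, not original text): on the c4x a QImode
   value is one 32-bit word and HImode is 64 bits, so for example a
   signed 64-bit division expands into a call to the __divhi3 routine
   named above; the *_libfunc handles are what the expanders use for
   the remaining widening and conversion helpers.  */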
4910 static void
4911 c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED)
4912 {
4913 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
4914 }
4916 static void
4917 c4x_globalize_label (FILE *stream, const char *name)
4918 {
4919 default_globalize_label (stream, name);
4920 c4x_global_label (name);
4921 }
4923 #define SHIFT_CODE_P(C) \
4924 ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
4925 #define LOGICAL_CODE_P(C) \
4926 ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
4928 /* Compute a (partial) cost for rtx X. Return true if the complete
4929 cost has been computed, and false if subexpressions should be
4930 scanned. In either case, *TOTAL contains the cost result. */
4932 static bool
4933 c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
4934 {
4935 HOST_WIDE_INT val;
4937 switch (code)
4938 {
4939 /* Some small integers are effectively free for the C40. We should
4940 also consider whether we are using the small memory model. With
4941 the big memory model we require an extra insn for a constant
4942 loaded from memory. */
4944 case CONST_INT:
4945 val = INTVAL (x);
4946 if (c4x_J_constant (x))
4947 *total = 0;
4948 else if (! TARGET_C3X
4949 && outer_code == AND
4950 && (val == 255 || val == 65535))
4951 *total = 0;
4952 else if (! TARGET_C3X
4953 && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
4954 && (val == 16 || val == 24))
4955 *total = 0;
4956 else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
4957 *total = 3;
4958 else if (LOGICAL_CODE_P (outer_code)
4959 ? c4x_L_constant (x) : c4x_I_constant (x))
4960 *total = 2;
4961 else
4962 *total = 4;
4963 return true;
4965 case CONST:
4966 case LABEL_REF:
4967 case SYMBOL_REF:
4968 *total = 4;
4969 return true;
4971 case CONST_DOUBLE:
4972 if (c4x_H_constant (x))
4973 *total = 2;
4974 else if (GET_MODE (x) == QFmode)
4975 *total = 4;
4976 else
4977 *total = 8;
4978 return true;
4980 /* ??? Note that we return true, rather than false so that rtx_cost
4981 doesn't include the constant costs. Otherwise expand_mult will
4982 think that it is cheaper to synthesize a multiply rather than to
4983 use a multiply instruction. I think this is because the algorithm
4984 synth_mult doesn't take into account the loading of the operands,
4985 whereas the calculation of mult_cost does. */
4986 case PLUS:
4987 case MINUS:
4988 case AND:
4989 case IOR:
4990 case XOR:
4991 case ASHIFT:
4992 case ASHIFTRT:
4993 case LSHIFTRT:
4994 *total = COSTS_N_INSNS (1);
4995 return true;
4997 case MULT:
4998 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
4999 || TARGET_MPYI ? 1 : 14);
5000 return true;
5002 case DIV:
5003 case UDIV:
5004 case MOD:
5005 case UMOD:
5006 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
5007 ? 15 : 50);
5008 return true;
5010 default:
5011 return false;
5012 }
5013 }
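/* Worked example (illustrative, not in the original): on a c4x,
   costing the constant in (and:QI (reg:QI 0) (const_int 65535))
   reaches the CONST_INT case with outer_code == AND, so *total = 0 and
   the masking comes for free.  The same constant on a c3x instead
   falls through to the c4x_L_constant/c4x_I_constant classification
   and is charged 2 or 4.  */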
5015 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
5017 static void
5018 c4x_external_libcall (rtx fun)
5019 {
5020 /* This is only needed to keep asm30 happy for ___divqf3 etc. */
5021 c4x_external_ref (XSTR (fun, 0));
5022 }
5024 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
5026 static rtx
5027 c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
5028 int incoming ATTRIBUTE_UNUSED)
5029 {
5030 return gen_rtx_REG (Pmode, AR0_REGNO);
5031 }