/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
   2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* Some output-actions in c4x.md need these.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "recog.h"
#include "ggc.h"
#include "cpplib.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg  Modes           Saved.  */
  R0R1_REGS,                    /* R0   QI, QF, HF      No.  */
  R0R1_REGS,                    /* R1   QI, QF, HF      No.  */
  R2R3_REGS,                    /* R2   QI, QF, HF      No.  */
  R2R3_REGS,                    /* R3   QI, QF, HF      No.  */
  EXT_LOW_REGS,                 /* R4   QI, QF, HF      QI.  */
  EXT_LOW_REGS,                 /* R5   QI, QF, HF      QI.  */
  EXT_LOW_REGS,                 /* R6   QI, QF, HF      QF.  */
  EXT_LOW_REGS,                 /* R7   QI, QF, HF      QF.  */
  ADDR_REGS,                    /* AR0  QI              No.  */
  ADDR_REGS,                    /* AR1  QI              No.  */
  ADDR_REGS,                    /* AR2  QI              No.  */
  ADDR_REGS,                    /* AR3  QI              QI.  */
  ADDR_REGS,                    /* AR4  QI              QI.  */
  ADDR_REGS,                    /* AR5  QI              QI.  */
  ADDR_REGS,                    /* AR6  QI              QI.  */
  ADDR_REGS,                    /* AR7  QI              QI.  */
  DP_REG,                       /* DP   QI              No.  */
  INDEX_REGS,                   /* IR0  QI              No.  */
  INDEX_REGS,                   /* IR1  QI              No.  */
  BK_REG,                       /* BK   QI              QI.  */
  SP_REG,                       /* SP   QI              No.  */
  ST_REG,                       /* ST   CC              No.  */
  NO_REGS,                      /* DIE/IE               No.  */
  NO_REGS,                      /* IIE/IF               No.  */
  NO_REGS,                      /* IIF/IOF              No.  */
  INT_REGS,                     /* RS   QI              No.  */
  INT_REGS,                     /* RE   QI              No.  */
  RC_REG,                       /* RC   QI              No.  */
  EXT_REGS,                     /* R8   QI, QF, HF      QI.  */
  EXT_REGS,                     /* R9   QI, QF, HF      No.  */
  EXT_REGS,                     /* R10  QI, QF, HF      No.  */
  EXT_REGS,                     /* R11  QI, QF, HF      No.  */
};
enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg  Modes           Saved.  */
  HFmode,                       /* R0   QI, QF, HF      No.  */
  HFmode,                       /* R1   QI, QF, HF      No.  */
  HFmode,                       /* R2   QI, QF, HF      No.  */
  HFmode,                       /* R3   QI, QF, HF      No.  */
  QFmode,                       /* R4   QI, QF, HF      QI.  */
  QFmode,                       /* R5   QI, QF, HF      QI.  */
  QImode,                       /* R6   QI, QF, HF      QF.  */
  QImode,                       /* R7   QI, QF, HF      QF.  */
  QImode,                       /* AR0  QI              No.  */
  QImode,                       /* AR1  QI              No.  */
  QImode,                       /* AR2  QI              No.  */
  QImode,                       /* AR3  QI              QI.  */
  QImode,                       /* AR4  QI              QI.  */
  QImode,                       /* AR5  QI              QI.  */
  QImode,                       /* AR6  QI              QI.  */
  QImode,                       /* AR7  QI              QI.  */
  VOIDmode,                     /* DP   QI              No.  */
  QImode,                       /* IR0  QI              No.  */
  QImode,                       /* IR1  QI              No.  */
  QImode,                       /* BK   QI              QI.  */
  VOIDmode,                     /* SP   QI              No.  */
  VOIDmode,                     /* ST   CC              No.  */
  VOIDmode,                     /* DIE/IE               No.  */
  VOIDmode,                     /* IIE/IF               No.  */
  VOIDmode,                     /* IIF/IOF              No.  */
  QImode,                       /* RS   QI              No.  */
  QImode,                       /* RE   QI              No.  */
  VOIDmode,                     /* RC   QI              No.  */
  QFmode,                       /* R8   QI, QF, HF      QI.  */
  HFmode,                       /* R9   QI, QF, HF      No.  */
  HFmode,                       /* R10  QI, QF, HF      No.  */
  HFmode,                       /* R11  QI, QF, HF      No.  */
};
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;
/* Forward declarations.  */

static bool c4x_handle_option (size_t, const char *, int);
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_R_indirect (rtx);
static void c4x_S_address_parse (rtx, int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int, tree);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
static void c4x_external_libcall (rtx);
static rtx c4x_struct_value_rtx (tree, int);
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall

/* Play safe, not the fastest code.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
                                     | MASK_PARALLEL_MPY | MASK_RPTB)

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION c4x_handle_option

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr

struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_HANDLE_OPTION.  */

static bool
c4x_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_m30: c4x_cpu_version = 30; return true;
    case OPT_m31: c4x_cpu_version = 31; return true;
    case OPT_m32: c4x_cpu_version = 32; return true;
    case OPT_m33: c4x_cpu_version = 33; return true;
    case OPT_m40: c4x_cpu_version = 40; return true;
    case OPT_m44: c4x_cpu_version = 44; return true;

    case OPT_mcpu_:
      if (arg[0] == 'c' || arg[0] == 'C')
        arg++;
      value = atoi (arg);
      switch (value)
        {
        case 30: case 31: case 32: case 33: case 40: case 44:
          c4x_cpu_version = value;
          return true;
        }
      return false;

    default:
      return true;
    }
}
/* Override command line options.
   Called once after all options have been parsed.
   Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
c4x_override_options (void)
{
  /* Convert foo / 8.0 into foo * 0.125, etc.  */
  set_fast_math_flags (1);

  /* We should phase out the following at some stage.
     This provides compatibility with the old -mno-aliases option.  */
  if (! TARGET_ALIASES && ! flag_argument_noalias)
    flag_argument_noalias = 1;

  if (!TARGET_C3X)
    target_flags |= MASK_MPYI | MASK_DB;

  if (optimize < 2)
    target_flags &= ~(MASK_RPTB | MASK_PARALLEL);

  if (!TARGET_PARALLEL)
    target_flags &= ~MASK_PARALLEL_MPY;
}
/* This is called before c4x_override_options.  */

void
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
                          int size ATTRIBUTE_UNUSED)
{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we gain by scheduling before
     register allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
}
/* Write an ASCII string.  */

#define C4X_ASCII_LIMIT 40
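
/* For instance (illustrative, not from the original sources), for the
   three-character input "ok\n" the routine below emits

        .byte   "ok",10

   printable characters are batched into a quoted string, and anything
   else is emitted as a decimal byte value.  */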
void
c4x_output_ascii (FILE *stream, const char *ptr, int len)
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

  if (len)
    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buff.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
        {
          if (special)
            sbuf[s++] = '\\';
          sbuf[s++] = *ptr;
          if (s < C4X_ASCII_LIMIT - 1)
            continue;
          onlys = 1;
        }
      if (s)
        {
          if (first)
            first = 0;
          else
            {
              fputc (',', stream);
              l++;
            }

          sbuf[s] = 0;
          fprintf (stream, "\"%s\"", sbuf);
          l += s + 2;
          if (TARGET_TI && l >= 80 && len > 1)
            {
              fprintf (stream, "\n\t.byte\t");
              first = 1;
              l = 0;
            }

          s = 0;
        }
      if (onlys)
        continue;

      if (first)
        first = 0;
      else
        {
          fputc (',', stream);
          l++;
        }

      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
        {
          fprintf (stream, "\n\t.byte\t");
          first = 1;
          l = 0;
        }
    }
  if (s)
    {
      if (! first)
        fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}
int
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (mode)
    {
#if Pmode != QImode
    case Pmode:                 /* Pointer (24/32 bits).  */
#endif
    case QImode:                /* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:                /* Float, Double (32 bits).  */
    case HFmode:                /* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:                /* Condition Codes.  */
    case CC_NOOVmode:           /* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:                /* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
         it is much easier to constrain the first register
         to start on an even boundary.  */
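      /* For example (illustrative): an HImode value may live in R0:R1
         (regno 0 is even) but not in R1:R2, since regno 1 is odd.  */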
      return IS_INT_REGNO (regno)
        && IS_INT_REGNO (regno + 1)
        && (regno & 1) == 0;

    default:
      return 0;                 /* We don't support these modes.  */
    }

  return 0;
}
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}
/* The TI C3x C compiler register argument runtime model uses 6 registers,
   AR2, R2, R3, RC, RS, RE.

   The first two floating point arguments (float, double, long double)
   that are found scanning from left to right are assigned to R2 and R3.

   The remaining integer (char, short, int, long) or pointer arguments
   are assigned to the remaining registers in the order AR2, R2, R3,
   RC, RS, RE when scanning left to right, except for the last named
   argument prior to an ellipsis denoting variable number of
   arguments.  We don't have to worry about the latter condition since
   function.c treats the last named argument as anonymous (unnamed).

   All arguments that cannot be passed in registers are pushed onto
   the stack in reverse order (right to left).  GCC handles that for us.

   c4x_init_cumulative_args() is called at the start, so we can parse
   the args to see how many floating point arguments and how many
   integer (or pointer) arguments there are.  c4x_function_arg() is
   then called (sometimes repeatedly) for each argument (parsed left
   to right) to obtain the register to pass the argument in, or zero
   if the argument is to be passed on the stack.  Once the compiler is
   happy, c4x_function_arg_advance() is called.

   Don't use R0 to pass arguments in, we use 0 to indicate a stack
   argument.  */

static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
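
/* A worked example (illustrative, not part of the original sources):
   given a prototype

      void f (int a, float b, int c, float d, int e);

   the two floats b and d are assigned R2 and R3, so two rows of
   c4x_int_reglist are consumed and the integers a, c and e land in
   AR2, RC and RS respectively (the third row above).  */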
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);

          fprintf (stderr, "fntype code = %s, ret code = %s",
                   tree_code_name[(int) TREE_CODE (fntype)],
                   tree_code_name[(int) TREE_CODE (ret_type)]);
        }
      else
        fprintf (stderr, "no fntype");

      if (libname)
        fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
        {
          enum machine_mode mode;

          /* If the last arg doesn't have void type then we have
             variable arguments.  */
          if (! next_param)
            cum->var = 1;

          if ((mode = TYPE_MODE (type)))
            {
              if (! targetm.calls.must_pass_in_stack (mode, type))
                {
                  /* Look for float, double, or long double argument.  */
                  if (mode == QFmode || mode == HFmode)
                    cum->floats++;
                  /* Look for integer, enumeral, boolean, char, or pointer
                     argument.  */
                  else if (mode == QImode || mode == Pmode)
                    cum->ints++;
                }
            }
          cum->args++;
        }
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
             cum->prototype ? ", prototype" : "",
             cum->var ? ", variable args" : "",
             cum->args);
}
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                          tree type, int named)
{
  if (TARGET_DEBUG)
    fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
             GET_MODE_NAME (mode), named);
  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* Handle libcall arguments.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  return;
}
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                  tree type, int named)
{
  int reg = 0;                  /* Default to passing argument on stack.  */

  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
         in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
        6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
        cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        {
          if (cum->floats < cum->maxfloats)
            reg = c4x_fp_reglist[cum->floats];
        }
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        {
          if (cum->ints < cum->maxints)
            reg = c4x_int_reglist[cum->maxfloats][cum->ints];
        }
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
        reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
        reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
               GET_MODE_NAME (mode), named);
      if (reg)
        fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
        fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}
/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle.  */

static tree
c4x_gimplify_va_arg_expr (tree valist, tree type,
                          tree *pre_p ATTRIBUTE_UNUSED,
                          tree *post_p ATTRIBUTE_UNUSED)
{
  tree t;
  bool indirect;

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  t = build2 (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
              build_int_cst (NULL_TREE, int_size_in_bytes (type)));
  t = fold_convert (build_pointer_type (type), t);
  t = build_va_arg_indirect_ref (t);

  if (indirect)
    t = build_va_arg_indirect_ref (t);

  return t;
}
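
/* A worked example (illustrative): on this word-addressed target an
   int occupies one word, so va_arg (ap, int) gimplifies to roughly
   *(int *) (ap -= 1); the argument pointer is predecremented and then
   dereferenced, matching the downward-growing argument area described
   above.  */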
static int
c4x_isr_reg_used_p (unsigned int regno)
{
  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))
    return 0;

  /* We could be a little smarter about saving/restoring DP.
     We'll only save it for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* Only save/restore regs in leaf function that are used.  */
  if (c4x_leaf_function)
    return df_regs_ever_live_p (regno) && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((df_regs_ever_live_p (regno) || call_used_regs[regno])
        && fixed_regs[regno] == 0);
}
static int
c4x_leaf_function_p (void)
{
  /* A leaf function makes no calls, so we only need
     to save/restore the registers we actually use.
     For the global variable leaf_function to be set, we need
     to define LEAF_REGISTERS and all that it entails.
     Let's check ourselves....  */

  if (lookup_attribute ("leaf_pretend",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Use the leaf_pretend attribute at your own risk.  This is a hack
     to speed up ISRs that call a function infrequently where the
     overhead of saving and restoring the additional registers is not
     warranted.  You must save and restore the additional registers
     required by the called function.  Caveat emptor.  Here's enough
     rope...  */

  if (leaf_function_p ())
    return 1;

  return 0;
}
static int
c4x_naked_function_p (void)
{
  tree type;

  type = TREE_TYPE (current_function_decl);
  return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
}
int
c4x_interrupt_function_p (void)
{
  const char *cfun_name;

  if (lookup_attribute ("interrupt",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Look for TI style c_intnn.  */
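  /* (For instance, a function named c_int01 is treated as an
     interrupt handler even without the interrupt attribute.)  */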
  cfun_name = current_function_name ();
  return cfun_name[0] == 'c'
    && cfun_name[1] == '_'
    && cfun_name[2] == 'i'
    && cfun_name[3] == 'n'
    && cfun_name[4] == 't'
    && ISDIGIT (cfun_name[5])
    && ISDIGIT (cfun_name[6]);
}
void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     flag is used so that we won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For a __naked__ function we don't build a prologue.  */
  if (c4x_naked_function_p ())
    {
      return;
    }

  /* For an __interrupt__ function we build a specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
        {
          insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                       gen_rtx_REG (QImode, SP_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          /* We require that an ISR uses fewer than 32768 words of
             local variables, otherwise we have to go to lots of
             effort to save a register, load it with the desired size,
             adjust the stack pointer, and then restore the modified
             register.  Frankly, I think it is a poor ISR that
             requires more than 32767 words of local temporary
             storage!  */
          if (size > 32767)
            error ("ISR %s requires %d words of local vars, max is 32767",
                   current_function_name (), size);

          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (c4x_isr_reg_used_p (regno))
            {
              if (regno == DP_REGNO)
                {
                  insn = emit_insn (gen_push_dp ());
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (IS_EXT_REGNO (regno))
                    {
                      insn = emit_insn (gen_pushqf
                                        (gen_rtx_REG (QFmode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
            }
        }
      /* We need to clear the repeat mode flag if the ISR is
         going to use a RPTB instruction or uses the RC, RS, or RE
         registers.  */
      if (df_regs_ever_live_p (RC_REGNO)
          || df_regs_ever_live_p (RS_REGNO)
          || df_regs_ever_live_p (RE_REGNO))
        {
          insn = emit_insn (gen_andn_st (GEN_INT (~0x100)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* Reload DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
        {
          insn = emit_insn (gen_set_ldp_prologue
                            (gen_rtx_REG (QImode, DP_REGNO),
                             gen_rtx_SYMBOL_REF (QImode, "data_sec")));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                           gen_rtx_REG (QImode, SP_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              dont_push_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used, we don't need to push it.  */
              dont_push_ar3 = 1;
            }
        }
      else
        {
          /* If we use ar3, we need to push it.  */
          dont_push_ar3 = 0;
          if ((size != 0) || (current_function_args_size != 0))
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to increment SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT (size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                             gen_rtx_REG (QImode, R1_REGNO),
                                             GEN_INT (-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT (size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO),
                                        GEN_INT (size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up no more than 32767 words, so we can
             directly add the number.  */
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
            {
              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_pushqi
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else if ((! dont_push_ar3) || (regno != AR3_REGNO))
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }
    }
}
void
c4x_expand_epilogue (void)
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For a __naked__ function we build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For an __interrupt__ function we build a specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
        {
          if (! c4x_isr_reg_used_p (regno))
            continue;
          if (regno == DP_REGNO)
            {
              insn = emit_insn (gen_pop_dp ());
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /* We have to use unspec because the compiler will delete insns
                 that are not call-saved.  */
              if (IS_EXT_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }
      if (size)
        {
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_popqi
                            (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn
                (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
                            gen_rtx_MEM (QImode,
                                         gen_rtx_PLUS
                                         (QImode, gen_rtx_REG (QImode,
                                                               AR3_REGNO),
                                          constm1_rtx))));
              RTX_FRAME_RELATED_P (insn) = 1;

              /* We already have the return value and the fp,
                 so we need to add those to the stack.  */
              size += 2;
              jump = 1;
              dont_pop_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used for anything, we don't need to
                 pop it.  */
              dont_pop_ar3 = 1;
            }
        }
      else
        {
          dont_pop_ar3 = 0;     /* If we use ar3, we need to pop it.  */
          if (size || current_function_args_size)
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      /* Now restore the saved registers, putting in the delayed branch
         where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        {
          if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
            {
              if (regno == AR3_REGNO && dont_pop_ar3)
                continue;

              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_popqi_unspec
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
              else
                {
                  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }

      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              /* Restore the old FP.  */
              insn = emit_insn
                (gen_movqi
                 (gen_rtx_REG (QImode, AR3_REGNO),
                  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to decrement SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT (size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                             gen_rtx_REG (QImode, R3_REGNO),
                                             GEN_INT (-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT (size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO),
                                        GEN_INT (size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             subtract the number.  */
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (jump)
        {
          insn = emit_jump_insn (gen_return_indirect_internal
                                 (gen_rtx_REG (QImode, R2_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        {
          insn = emit_jump_insn (gen_return_from_epilogue ());
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}
int
c4x_null_epilogue_p (void)
{
  int regno;

  if (reload_completed
      && ! c4x_naked_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! (optimize < 2)
      && ! get_frame_size ())
    {
      /* If any call-saved register (other than AR3) is live, the
         epilogue must restore it, so it is not null.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        if (df_regs_ever_live_p (regno) && ! call_used_regs[regno]
            && (regno != AR3_REGNO))
          return 0;
      return 1;
    }
  return 0;
}
int
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
         address.  Undo this silliness.  */
      op1 = XEXP (op1, 1);
    }

  if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
        {
          /* Alias analysis seems to do a better job if we force
             constant addresses to memory after reload.  */
          emit_insn (gen_load_immed_address (op0, op1));
          return 1;
        }
      else
        {
          /* Stick symbol or label address into the constant pool.  */
          op1 = force_const_mem (Pmode, op1);
        }
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
         constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
        op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
        {
          op0 = copy_rtx (op0);
          PUT_MODE (op0, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
        op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
        {
          op1 = copy_rtx (op1);
          PUT_MODE (op1, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}
void
c4x_emit_libcall (rtx libcall, enum rtx_code code,
                  enum machine_mode dmode, enum machine_mode smode,
                  int noperands, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
                                     operands[1], smode);
      equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
      break;

    case 3:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
                                     operands[1], smode, operands[2], smode);
      equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
void
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
                   enum machine_mode mode, rtx *operands)
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}
void
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
                        enum machine_mode mode, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
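  /* The equivalent RTL below records that the libcall computes the
     high 32 bits of the widened 64-bit product, i.e. (a * b) >> 32
     truncated back to a single word.  */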
  equiv = gen_rtx_TRUNCATE (mode,
                            gen_rtx_LSHIFTRT (HImode,
                                              gen_rtx_MULT (HImode,
                                                            gen_rtx_fmt_e (code, HImode, operands[1]),
                                                            gen_rtx_fmt_e (code, HImode, operands[2])),
                                              GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
int
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
{
  rtx base = NULL_RTX;          /* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;          /* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;          /* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
         allow SP here---push_operand should recognize an operand
         being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
        return 0;
    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
        return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (mode != QImode && mode != QFmode)
          return 0;

        if (! REG_P (op0)
            || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
          return 0;
        base = XEXP (op1, 0);
        if (! REG_P (base))
          return 0;
        if (REGNO (base) != REGNO (op0))
          return 0;
        if (REG_P (XEXP (op1, 1)))
          indx = XEXP (op1, 1);
        else
          disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);
        enum rtx_code code0 = GET_CODE (op0);

        switch (code0)
          {
          case REG:
            if (REG_P (op1))
              {
                base = op0;     /* Base + index.  */
                indx = op1;
                if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
                  {
                    base = op1;
                    indx = op0;
                  }
              }
            else
              {
                base = op0;     /* Base + displacement.  */
                disp = op1;
              }
            break;

          default:
            return 0;
          }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        /* HImode and HFmode direct memory references aren't truly
           offsettable (consider case at end of data page).  We
           probably get better code by loading a pointer and using an
           indirect memory reference.  */
        if (mode == HImode || mode == HFmode)
          return 0;

        if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
          return 0;

        if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
          return 1;

        if (GET_CODE (op1) == CONST)
          return 1;
        return 0;
      }
      break;

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
        return 1;
      /* These need to be converted to a LO_SUM (...).
         LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
        return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
        return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
        return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
        return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
        return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
        return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
        return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
        return 0;
      if (mode == HImode || mode == HFmode)
        {
          /* The offset displacement must be legitimate.  */
          if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
            return 0;
        }
      else
        {
          if (! IS_DISP8_CONST (INTVAL (disp)))
            return 0;
        }
      /* Can't add an index with a disp.  */
      if (indx)
        return 0;
    }
  return 1;
}
rtx
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
      if (mode == HImode || mode == HFmode)
        {
          /* We need to force the address into
             a register so that it is offsettable.  */
          rtx addr_reg = gen_reg_rtx (Pmode);
          emit_move_insn (addr_reg, orig);
          return addr_reg;
        }
      else
        {
          rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

          if (! TARGET_SMALL)
            emit_insn (gen_set_ldp (dp_reg, orig));

          return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
        }
    }

  return NULL_RTX;
}
/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.
   This is used in cse and loop optimization to determine
   if it is worthwhile storing a common address into a register.
   Unfortunately, the C4x address cost depends on other operands.  */

static int
c4x_address_cost (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
        rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
          return TARGET_SMALL ? 3 : 4;

        if (GET_CODE (op1) == CONST)
          {
            rtx offset = const0_rtx;

            op1 = eliminate_constant_term (op1, &offset);

            /* ??? These costs need rethinking...  */
            if (GET_CODE (op1) == LABEL_REF)
              return 3;

            if (GET_CODE (op1) != SYMBOL_REF)
              return 4;

            if (INTVAL (offset) == 0)
              return 3;

            return 4;
          }
        fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
        register rtx op0 = XEXP (addr, 0);
        register rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op0) != REG)
          break;

        switch (GET_CODE (op1))
          {
          default:
            break;

          case REG:
            /* This cost for REG+REG must be greater than the cost
               for REG if we want autoincrement addressing modes.  */
            return 2;

          case CONST_INT:
            /* The following tries to improve GIV combination
               in strength reduce but appears not to help.  */
            if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
              return 1;

            if (IS_DISP1_CONST (INTVAL (op1)))
              return 1;

            if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
              return 2;

            return 3;
          }
      }
    default:
      break;
    }

  return 4;
}
rtx
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  if (mode == CC_NOOVmode
      && (code == LE || code == GE || code == LT || code == GT))
    return NULL_RTX;

  cc_reg = gen_rtx_REG (mode, ST_REGNO);
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
                          gen_rtx_COMPARE (mode, x, y)));
  return cc_reg;
}
char *
c4x_output_cbranch (const char *form, rtx seq)
{
  int delayed = 0;
  int annultrue = 0;
  int annulfalse = 0;
  rtx delay;
  char *cp;
  static char str[100];

  if (final_sequence)
    {
      delay = XVECEXP (final_sequence, 0, 1);
      delayed = ! INSN_ANNULLED_BRANCH_P (seq);
      annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
      annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
    }
  strcpy (str, form);
  cp = &str[strlen (str)];
  if (delayed)
    {
      *cp++ = '%';
      *cp++ = '#';
    }
  if (annultrue)
    {
      *cp++ = 'a';
      *cp++ = 't';
    }
  if (annulfalse)
    {
      *cp++ = 'a';
      *cp++ = 'f';
    }
  *cp++ = '\t';
  *cp++ = '%';
  *cp++ = 'l';
  *cp++ = '1';
  *cp = 0;
  return str;
}
void
c4x_print_operand (FILE *file, rtx op, int letter)
{
  rtx op1;
  enum rtx_code code;

  switch (letter)
    {
    case '#':                   /* Delayed.  */
      if (final_sequence)
        fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':                   /* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
        fprintf (file, "@");
      break;

    case 'H':                   /* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':                   /* Reversed condition.  */
      code = reverse_condition (code);
      break;

    case 'L':                   /* Log 2 of constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':                   /* Ones complement of small constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
      return;

    case 'K':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL
          && code == MEM
          && GET_CODE (XEXP (op, 0)) == LO_SUM
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
          && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
        {
          op1 = XEXP (XEXP (op, 0), 1);
          if (GET_CODE (op1) == CONST_INT || GET_CODE (op1) == SYMBOL_REF)
            {
              fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
              output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
              fprintf (file, "\n");
            }
        }
      return;

    case 'M':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL        /* Only used in asm statements.  */
          && code == MEM
          && (GET_CODE (XEXP (op, 0)) == CONST
              || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
        {
          fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
          output_address (XEXP (op, 0));
          fprintf (file, "\n\t");
        }
      return;

    case 'O':                   /* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
        break;
      else if (code == MEM)
        output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
        fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
        fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':                   /* Call.  */
      break;

    case 'U':                   /* Call/callu.  */
      if (code != SYMBOL_REF)
        fprintf (file, "u");
      return;

    default:
      break;
    }

  switch (code)
    {
    case REG:
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && ! TARGET_TI)
        fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
        fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
        char str[64];

        real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
                         sizeof (str), 0, 1);
        fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
      break;

    case NE:
      fprintf (file, "ne");
      break;

    case EQ:
      fprintf (file, "eq");
      break;

    case GE:
      fprintf (file, "ge");
      break;

    case GT:
      fprintf (file, "gt");
      break;

    case LE:
      fprintf (file, "le");
      break;

    case LT:
      fprintf (file, "lt");
      break;

    case GEU:
      fprintf (file, "hs");
      break;

    case GTU:
      fprintf (file, "hi");
      break;

    case LEU:
      fprintf (file, "ls");
      break;

    case LTU:
      fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    case CODE_LABEL:
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}
void
c4x_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:                  /* Indirect with displacement.  */
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0))
          {
            if (REG_P (op1))
              {
                if (IS_INDEX_REG (op0))
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op1)],
                             reg_names[REGNO (op0)]);  /* Index + base.  */
                  }
                else
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op0)],
                             reg_names[REGNO (op1)]);  /* Base + index.  */
                  }
              }
            else if (INTVAL (op1) < 0)
              {
                fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         -INTVAL (op1));  /* Base - displacement.  */
              }
            else
              {
                fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         INTVAL (op1));  /* Base + displacement.  */
              }
          }
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
          c4x_print_operand_address (file, op1);
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}
/* Return nonzero if the floating point operand will fit
   in the immediate field.  */

int
c4x_immed_float_p (rtx op)
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
      convval[1] = 0;
    }

  /* Sign extend exponent.  */
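  /* (Worked example, for illustration: a stored exponent byte of 0x82
     gives (0x82 ^ 0x80) - 0x80 = 0x02 - 0x80 = -126, the correct
     sign-extended value, while a byte of 0x05 gives 0x85 - 0x80 = 5.)  */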
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;                   /* 0.0  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;                   /* Precision doesn't fit.  */
  return (exponent <= 7)        /* Positive exp.  */
    && (exponent >= -7);        /* Negative exp.  */
}
2172 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2173 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2175 None of the last four instructions from the bottom of the block can
2176 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2177 BcondAT or RETIcondD.
2179 This routine scans the four previous insns for a jump insn, and if
2180 one is found, returns 1 so that we bung in a nop instruction.
2181 This simple minded strategy will add a nop, when it may not
2182 be required. Say when there is a JUMP_INSN near the end of the
2183 block that doesn't get converted into a delayed branch.
2185 Note that we cannot have a call insn, since we don't generate
2186 repeat loops with calls in them (although I suppose we could, but
2187 there's no benefit).
2189 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2192 c4x_rptb_nop_p (rtx insn)
2194 rtx start_label;
2195 int i;
2197 /* Extract the start label from the jump pattern (rptb_end). */
2198 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2200 /* If there is a label at the end of the loop we must insert
2201 a NOP. */
2202 do {
2203 insn = previous_insn (insn);
2204 } while (GET_CODE (insn) == NOTE
2205 || GET_CODE (insn) == USE
2206 || GET_CODE (insn) == CLOBBER);
2207 if (GET_CODE (insn) == CODE_LABEL)
2208 return 1;
2210 for (i = 0; i < 4; i++)
2212 /* Search back for prev non-note and non-label insn. */
2213 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2214 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2216 if (insn == start_label)
2217 return i == 0;
2219 insn = previous_insn (insn);
2222 /* If we have a jump instruction we should insert a NOP. If we
2223 hit repeat block top we should only insert a NOP if the loop
2224 is empty. */
2225 if (GET_CODE (insn) == JUMP_INSN)
2226 return 1;
2227 insn = previous_insn (insn);
2229 return 0;
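/* Illustrative sketch (assembly syntax and label names are invented):
   if the last real insn of a repeat block is a conditional branch,

	rptb	.Lend
	...
	bne	.Lsomewhere
   .Lend:

   the scan above finds the JUMP_INSN within the final four insns and
   returns 1, so a nop is emitted to pad the end of the block.  */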
2233 /* The C4x looping instruction needs to be emitted at the top of the
2234 loop. Emitting the true RTL for a looping instruction at the top of
2235 the loop can cause problems with flow analysis. So instead, a dummy
2236 doloop insn is emitted at the end of the loop. This routine checks
2237 for the presence of this doloop insn and then searches back to the
2238 top of the loop, where it inserts the true looping insn (provided
2239 there are no instructions in the loop which would cause problems).
2240 Any additional labels can be emitted at this point. In addition, if
2241 the desired loop count register was not allocated, this routine does
2242 nothing.
2244 Before we can create a repeat block looping instruction we have to
2245 verify that there are no jumps out of the loop and that no jumps
2246 from outside the loop jump into it. This can happen after the basic
2247 block reordering pass. The C4x cpu cannot handle this. */
2249 static int
2250 c4x_label_ref_used_p (rtx x, rtx code_label)
2252 enum rtx_code code;
2253 int i, j;
2254 const char *fmt;
2256 if (x == 0)
2257 return 0;
2259 code = GET_CODE (x);
2260 if (code == LABEL_REF)
2261 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2263 fmt = GET_RTX_FORMAT (code);
2264 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2266 if (fmt[i] == 'e')
2268 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2269 return 1;
2271 else if (fmt[i] == 'E')
2272 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2273 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2274 return 1;
2276 return 0;
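/* The recursion above is the standard RTL walking idiom: the format
   string from GET_RTX_FORMAT describes each operand slot of the rtx,
   with 'e' marking a single sub-expression (accessed via XEXP) and
   'E' marking a vector of sub-expressions (accessed via XVECEXP).
   For example, a (set (pc) (label_ref L)) has format "ee", so both
   operands are visited and the LABEL_REF is found in the source.  */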
2280 static int
2281 c4x_rptb_valid_p (rtx insn, rtx start_label)
2283 rtx end = insn;
2284 rtx start;
2285 rtx tmp;
2287 /* Find the start label. */
2288 for (; insn; insn = PREV_INSN (insn))
2289 if (insn == start_label)
2290 break;
2292 /* If the label was not found then we cannot use a rptb or rpts.
2293 The label was probably moved by the basic block reorder pass. */
2294 if (! insn)
2295 return 0;
2297 start = insn;
2298 /* If any jump located inside this block targets a label outside it, we must fail. */
2299 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2301 if (GET_CODE (insn) == CODE_LABEL)
2303 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2304 if (GET_CODE (tmp) == JUMP_INSN
2305 && c4x_label_ref_used_p (tmp, insn))
2306 return 0;
2309 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2311 if (GET_CODE (insn) == CODE_LABEL)
2313 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2314 if (GET_CODE (tmp) == JUMP_INSN
2315 && c4x_label_ref_used_p (tmp, insn))
2316 return 0;
2319 /* If any jump located outside this block targets a label inside it, we must fail. */
2320 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2322 if (GET_CODE (insn) == CODE_LABEL)
2324 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2325 if (GET_CODE (tmp) == JUMP_INSN
2326 && c4x_label_ref_used_p (tmp, insn))
2327 return 0;
2328 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2329 if (GET_CODE (tmp) == JUMP_INSN
2330 && c4x_label_ref_used_p (tmp, insn))
2331 return 0;
2335 /* All checks OK. */
2336 return 1;
2340 void
2341 c4x_rptb_insert (rtx insn)
2343 rtx end_label;
2344 rtx start_label;
2345 rtx new_start_label;
2346 rtx count_reg;
2348 /* If the count register has not been allocated to RC, say if
2349 there is a movmem pattern in the loop, then do not insert a
2350 RPTB instruction. Instead we emit a decrement and branch
2351 at the end of the loop. */
2352 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2353 if (REGNO (count_reg) != RC_REGNO)
2354 return;
2356 /* Extract the start label from the jump pattern (rptb_end). */
2357 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2359 if (! c4x_rptb_valid_p (insn, start_label))
2361 /* We cannot use the rptb insn. Replace it so reorg can use
2362 the delay slots of the jump insn. */
2363 emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
2364 emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
2365 emit_insn_before (gen_bge (start_label), insn);
2366 LABEL_NUSES (start_label)++;
2367 delete_insn (insn);
2368 return;
2371 end_label = gen_label_rtx ();
2372 LABEL_NUSES (end_label)++;
2373 emit_label_after (end_label, insn);
2375 new_start_label = gen_label_rtx ();
2376 LABEL_NUSES (new_start_label)++;
2378 for (; insn; insn = PREV_INSN (insn))
2380 if (insn == start_label)
2381 break;
2382 if (GET_CODE (insn) == JUMP_INSN &&
2383 JUMP_LABEL (insn) == start_label)
2384 redirect_jump (insn, new_start_label, 0);
2386 if (! insn)
2387 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2389 emit_label_after (new_start_label, insn);
2391 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2392 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2393 else
2394 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2395 if (LABEL_NUSES (start_label) == 0)
2396 delete_insn (start_label);
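/* Illustrative sketch (assembly mnemonics and labels are invented):
   when RC holds the count, the loop becomes a zero-overhead repeat
   block,

	rptb	.Lnew_end
   .Lnew_start:
	...loop body...
   .Lnew_end:

   whereas the fallback path above emits the equivalent of

	subi	1, count
	cmpi	0, count
	bge	.Lstart

   at the bottom of the loop, leaving reorg to fill the branch's
   delay slots.  */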
2400 /* We need to use direct addressing for large constants and addresses
2401 that cannot fit within an instruction. We must check for these
2402 after the final jump optimization pass, since this may
2403 introduce a local_move insn for a SYMBOL_REF. This pass
2404 must come before delayed branch slot filling since it can generate
2405 additional instructions.
2407 This function also fixes up RPTB style loops that didn't get RC
2408 allocated as the loop counter. */
2410 static void
2411 c4x_reorg (void)
2413 rtx insn;
2415 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2417 /* Look for insn. */
2418 if (INSN_P (insn))
2420 int insn_code_number;
2421 rtx old;
2423 insn_code_number = recog_memoized (insn);
2425 if (insn_code_number < 0)
2426 continue;
2428 /* Insert the RTX for RPTB at the top of the loop
2429 and a label at the end of the loop. */
2430 if (insn_code_number == CODE_FOR_rptb_end)
2431 c4x_rptb_insert(insn);
2433 /* We need to split the insn here. Otherwise the calls to
2434 force_const_mem will not work for load_immed_address. */
2435 old = insn;
2437 /* Don't split the insn if it has been deleted. */
2438 if (! INSN_DELETED_P (old))
2439 insn = try_split (PATTERN(old), old, 1);
2441 /* When not optimizing, the old insn will still be left around
2442 with only the 'deleted' bit set. Transform it into a note
2443 to avoid confusion of subsequent processing. */
2444 if (INSN_DELETED_P (old))
2445 SET_INSN_DELETED (old);
2452 c4x_a_register (rtx op)
2454 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2459 c4x_x_register (rtx op)
2461 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2465 static int
2466 c4x_immed_int_constant (rtx op)
2468 if (GET_CODE (op) != CONST_INT)
2469 return 0;
2471 return GET_MODE (op) == VOIDmode
2472 || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2473 || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
2477 static int
2478 c4x_immed_float_constant (rtx op)
2480 if (GET_CODE (op) != CONST_DOUBLE)
2481 return 0;
2483 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2484 present this only means that a MEM rtx has been generated. It does
2485 not mean the rtx is really in memory. */
2487 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2492 c4x_shiftable_constant (rtx op)
2494 int i;
2495 int mask;
2496 int val = INTVAL (op);
2498 for (i = 0; i < 16; i++)
2500 if (val & (1 << i))
2501 break;
2503 mask = ((0xffff >> i) << 16) | 0xffff;
2504 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2505 : (val >> i) & mask))
2506 return i;
2507 return -1;
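/* Worked example (illustrative): for val = 0x00012300 the lowest set
   bit is bit 8, so i = 8 and mask = ((0xffff >> 8) << 16) | 0xffff =
   0x00ffffff.  Then (val >> 8) & mask = 0x123, a valid 16-bit signed
   immediate, so 8 is returned: the constant can be built by loading
   0x123 and shifting it left by 8.  */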
2512 c4x_H_constant (rtx op)
2514 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2519 c4x_I_constant (rtx op)
2521 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2526 c4x_J_constant (rtx op)
2528 if (TARGET_C3X)
2529 return 0;
2530 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2535 c4x_K_constant (rtx op)
2537 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2538 return 0;
2539 return IS_INT5_CONST (INTVAL (op));
2544 c4x_L_constant (rtx op)
2546 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2551 c4x_N_constant (rtx op)
2553 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2558 c4x_O_constant (rtx op)
2560 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2564 /* The constraints do not have to check the register class,
2565 except when needed to discriminate between the constraints.
2566 The operand has been checked by the predicates to be valid. */
2568 /* ARx + 9-bit signed const or IRn
2569 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2570 We don't include the pre/post inc/dec forms here since
2571 they are handled by the <> constraints. */
2574 c4x_Q_constraint (rtx op)
2576 enum machine_mode mode = GET_MODE (op);
2578 if (GET_CODE (op) != MEM)
2579 return 0;
2580 op = XEXP (op, 0);
2581 switch (GET_CODE (op))
2583 case REG:
2584 return 1;
2586 case PLUS:
2588 rtx op0 = XEXP (op, 0);
2589 rtx op1 = XEXP (op, 1);
2591 if (! REG_P (op0))
2592 return 0;
2594 if (REG_P (op1))
2595 return 1;
2597 if (GET_CODE (op1) != CONST_INT)
2598 return 0;
2600 /* HImode and HFmode must be offsettable. */
2601 if (mode == HImode || mode == HFmode)
2602 return IS_DISP8_OFF_CONST (INTVAL (op1));
2604 return IS_DISP8_CONST (INTVAL (op1));
2606 break;
2608 default:
2609 break;
2611 return 0;
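/* For example (illustrative RTL, assuming registers AR0 and IR0):
     (mem:QI (reg AR0))                        matches as *AR0
     (mem:QI (plus (reg AR0) (reg IR0)))       matches as *+AR0(IR0)
     (mem:QI (plus (reg AR0) (const_int 5)))   matches as *+AR0(5)
   while a constant displacement outside the 8-bit range is
   rejected.  */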
2615 /* ARx + 5-bit unsigned const
2616 *ARx, *+ARx(n) for n < 32. */
2619 c4x_R_constraint (rtx op)
2621 enum machine_mode mode = GET_MODE (op);
2623 if (TARGET_C3X)
2624 return 0;
2625 if (GET_CODE (op) != MEM)
2626 return 0;
2627 op = XEXP (op, 0);
2628 switch (GET_CODE (op))
2630 case REG:
2631 return 1;
2633 case PLUS:
2635 rtx op0 = XEXP (op, 0);
2636 rtx op1 = XEXP (op, 1);
2638 if (! REG_P (op0))
2639 return 0;
2641 if (GET_CODE (op1) != CONST_INT)
2642 return 0;
2644 /* HImode and HFmode must be offsettable. */
2645 if (mode == HImode || mode == HFmode)
2646 return IS_UINT5_CONST (INTVAL (op1) + 1);
2648 return IS_UINT5_CONST (INTVAL (op1));
2650 break;
2652 default:
2653 break;
2655 return 0;
2659 static int
2660 c4x_R_indirect (rtx op)
2662 enum machine_mode mode = GET_MODE (op);
2664 if (TARGET_C3X || GET_CODE (op) != MEM)
2665 return 0;
2667 op = XEXP (op, 0);
2668 switch (GET_CODE (op))
2670 case REG:
2671 return IS_ADDR_OR_PSEUDO_REG (op);
2673 case PLUS:
2675 rtx op0 = XEXP (op, 0);
2676 rtx op1 = XEXP (op, 1);
2678 /* HImode and HFmode must be offsettable. */
2679 if (mode == HImode || mode == HFmode)
2680 return IS_ADDR_OR_PSEUDO_REG (op0)
2681 && GET_CODE (op1) == CONST_INT
2682 && IS_UINT5_CONST (INTVAL (op1) + 1);
2684 return REG_P (op0)
2685 && IS_ADDR_OR_PSEUDO_REG (op0)
2686 && GET_CODE (op1) == CONST_INT
2687 && IS_UINT5_CONST (INTVAL (op1));
2689 break;
2691 default:
2692 break;
2694 return 0;
2698 /* ARx + 1-bit unsigned const or IRn
2699 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2700 We don't include the pre/post inc/dec forms here since
2701 they are handled by the <> constraints. */
2704 c4x_S_constraint (rtx op)
2706 enum machine_mode mode = GET_MODE (op);
2707 if (GET_CODE (op) != MEM)
2708 return 0;
2709 op = XEXP (op, 0);
2710 switch (GET_CODE (op))
2712 case REG:
2713 return 1;
2715 case PRE_MODIFY:
2716 case POST_MODIFY:
2718 rtx op0 = XEXP (op, 0);
2719 rtx op1 = XEXP (op, 1);
2721 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2722 || (op0 != XEXP (op1, 0)))
2723 return 0;
2725 op0 = XEXP (op1, 0);
2726 op1 = XEXP (op1, 1);
2727 return REG_P (op0) && REG_P (op1);
2728 /* Pre or post_modify with a displacement of 0 or 1
2729 should not be generated. */
2731 break;
2733 case PLUS:
2735 rtx op0 = XEXP (op, 0);
2736 rtx op1 = XEXP (op, 1);
2738 if (!REG_P (op0))
2739 return 0;
2741 if (REG_P (op1))
2742 return 1;
2744 if (GET_CODE (op1) != CONST_INT)
2745 return 0;
2747 /* HImode and HFmode must be offsettable. */
2748 if (mode == HImode || mode == HFmode)
2749 return IS_DISP1_OFF_CONST (INTVAL (op1));
2751 return IS_DISP1_CONST (INTVAL (op1));
2753 break;
2755 default:
2756 break;
2758 return 0;
2763 c4x_S_indirect (rtx op)
2765 enum machine_mode mode = GET_MODE (op);
2766 if (GET_CODE (op) != MEM)
2767 return 0;
2769 op = XEXP (op, 0);
2770 switch (GET_CODE (op))
2772 case PRE_DEC:
2773 case POST_DEC:
2774 if (mode != QImode && mode != QFmode)
2775 return 0;
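/* Fall through. */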
2776 case PRE_INC:
2777 case POST_INC:
2778 op = XEXP (op, 0);
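/* Fall through. */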
2780 case REG:
2781 return IS_ADDR_OR_PSEUDO_REG (op);
2783 case PRE_MODIFY:
2784 case POST_MODIFY:
2786 rtx op0 = XEXP (op, 0);
2787 rtx op1 = XEXP (op, 1);
2789 if (mode != QImode && mode != QFmode)
2790 return 0;
2792 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2793 || (op0 != XEXP (op1, 0)))
2794 return 0;
2796 op0 = XEXP (op1, 0);
2797 op1 = XEXP (op1, 1);
2798 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2799 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2800 /* Pre or post_modify with a displacement of 0 or 1
2801 should not be generated. */
2804 case PLUS:
2806 rtx op0 = XEXP (op, 0);
2807 rtx op1 = XEXP (op, 1);
2809 if (REG_P (op0))
2811 /* HImode and HFmode must be offsettable. */
2812 if (mode == HImode || mode == HFmode)
2813 return IS_ADDR_OR_PSEUDO_REG (op0)
2814 && GET_CODE (op1) == CONST_INT
2815 && IS_DISP1_OFF_CONST (INTVAL (op1));
2817 if (REG_P (op1))
2818 return (IS_INDEX_OR_PSEUDO_REG (op1)
2819 && IS_ADDR_OR_PSEUDO_REG (op0))
2820 || (IS_ADDR_OR_PSEUDO_REG (op1)
2821 && IS_INDEX_OR_PSEUDO_REG (op0));
2823 return IS_ADDR_OR_PSEUDO_REG (op0)
2824 && GET_CODE (op1) == CONST_INT
2825 && IS_DISP1_CONST (INTVAL (op1));
2828 break;
2830 default:
2831 break;
2833 return 0;
2837 /* Direct memory operand. */
2840 c4x_T_constraint (rtx op)
2842 if (GET_CODE (op) != MEM)
2843 return 0;
2844 op = XEXP (op, 0);
2846 if (GET_CODE (op) != LO_SUM)
2848 /* Allow call operands. */
2849 return GET_CODE (op) == SYMBOL_REF
2850 && GET_MODE (op) == Pmode
2851 && SYMBOL_REF_FUNCTION_P (op);
2854 /* HImode and HFmode are not offsettable. */
2855 if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2856 return 0;
2858 if ((GET_CODE (XEXP (op, 0)) == REG)
2859 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2860 return c4x_U_constraint (XEXP (op, 1));
2862 return 0;
2866 /* Symbolic operand. */
2869 c4x_U_constraint (rtx op)
2871 /* Don't allow direct addressing to an arbitrary constant. */
2872 return GET_CODE (op) == CONST
2873 || GET_CODE (op) == SYMBOL_REF
2874 || GET_CODE (op) == LABEL_REF;
2879 c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2881 if (GET_CODE (op) == MEM)
2883 enum rtx_code code = GET_CODE (XEXP (op, 0));
2885 if (code == PRE_INC
2886 || code == PRE_DEC
2887 || code == POST_INC
2888 || code == POST_DEC
2889 || code == PRE_MODIFY
2890 || code == POST_MODIFY
2892 return 1;
2894 return 0;
2899 mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2901 /* Allow (subreg:HF (reg:HI)) that may be generated for a union of an
2902 int and a long double. */
2903 if (GET_CODE (op) == SUBREG
2904 && (GET_MODE (op) == QFmode)
2905 && (GET_MODE (SUBREG_REG (op)) == QImode
2906 || GET_MODE (SUBREG_REG (op)) == HImode))
2907 return 1;
2908 return 0;
2913 reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2915 if (REG_P (op) || CONSTANT_P (op))
2916 return 1;
2917 return 0;
2922 not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2924 if (REG_P (op) || CONSTANT_P (op))
2925 return 1;
2926 if (GET_CODE (op) != MEM)
2927 return 0;
2928 op = XEXP (op, 0);
2929 switch (GET_CODE (op))
2931 case REG:
2932 return 1;
2934 case PLUS:
2936 rtx op0 = XEXP (op, 0);
2937 rtx op1 = XEXP (op, 1);
2939 if (! REG_P (op0))
2940 return 0;
2942 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
2943 return 1;
2946 case LO_SUM:
2948 rtx op0 = XEXP (op, 0);
2950 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2951 return 1;
2953 break;
2955 case CONST:
2956 case SYMBOL_REF:
2957 case LABEL_REF:
2958 return 1;
2960 default:
2961 break;
2963 return 0;
2968 not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2970 if (REG_P (op) && REGNO (op) == RC_REGNO)
2971 return 0;
2972 return 1;
2976 static void
2977 c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
2979 *base = 0;
2980 *incdec = 0;
2981 *index = 0;
2982 *disp = 0;
2984 if (GET_CODE (op) != MEM)
2985 fatal_insn ("invalid indirect memory address", op);
2987 op = XEXP (op, 0);
2988 switch (GET_CODE (op))
2990 case PRE_DEC:
2991 *base = REGNO (XEXP (op, 0));
2992 *incdec = 1;
2993 *disp = -1;
2994 return;
2996 case POST_DEC:
2997 *base = REGNO (XEXP (op, 0));
2998 *incdec = 1;
2999 *disp = 0;
3000 return;
3002 case PRE_INC:
3003 *base = REGNO (XEXP (op, 0));
3004 *incdec = 1;
3005 *disp = 1;
3006 return;
3008 case POST_INC:
3009 *base = REGNO (XEXP (op, 0));
3010 *incdec = 1;
3011 *disp = 0;
3012 return;
3014 case POST_MODIFY:
3015 *base = REGNO (XEXP (op, 0));
3016 if (REG_P (XEXP (XEXP (op, 1), 1)))
3018 *index = REGNO (XEXP (XEXP (op, 1), 1));
3019 *disp = 0; /* ??? */
3021 else
3022 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3023 *incdec = 1;
3024 return;
3026 case PRE_MODIFY:
3027 *base = REGNO (XEXP (op, 0));
3028 if (REG_P (XEXP (XEXP (op, 1), 1)))
3030 *index = REGNO (XEXP (XEXP (op, 1), 1));
3031 *disp = 1; /* ??? */
3033 else
3034 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3035 *incdec = 1;
3037 return;
3039 case REG:
3040 *base = REGNO (op);
3041 return;
3043 case PLUS:
3045 rtx op0 = XEXP (op, 0);
3046 rtx op1 = XEXP (op, 1);
3048 if (c4x_a_register (op0))
3050 if (c4x_x_register (op1))
3052 *base = REGNO (op0);
3053 *index = REGNO (op1);
3054 return;
3056 else if ((GET_CODE (op1) == CONST_INT
3057 && IS_DISP1_CONST (INTVAL (op1))))
3059 *base = REGNO (op0);
3060 *disp = INTVAL (op1);
3061 return;
3064 else if (c4x_x_register (op0) && c4x_a_register (op1))
3066 *base = REGNO (op1);
3067 *index = REGNO (op0);
3068 return;
3071 /* Fall through. */
3073 default:
3074 fatal_insn ("invalid indirect (S) memory address", op);
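/* Worked example (illustrative): *AR3++(IR1), i.e. (post_modify
   (reg AR3) (plus (reg AR3) (reg IR1))), parses to base = AR3,
   index = IR1, incdec = 1, disp = 0, while *-AR3(1), i.e.
   (plus (reg AR3) (const_int -1)), parses to base = AR3, disp = -1
   with no index and no side effect.  */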
3080 c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3082 int base0;
3083 int base1;
3084 int incdec0;
3085 int incdec1;
3086 int index0;
3087 int index1;
3088 int disp0;
3089 int disp1;
3091 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3092 return 1;
3094 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3095 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3097 if (store0 && store1)
3099 /* If we have two stores in parallel to the same address, then
3100 the C4x only executes one of the stores. This is unlikely to
3101 cause problems except when writing to a hardware device such
3102 as a FIFO since the second write will be lost. The user
3103 should flag the hardware location as being volatile so that
3104 we don't do this optimization. While it is unlikely that we
3105 have an aliased address if both locations are not marked
3106 volatile, it is probably safer to flag a potential conflict
3107 if either location is volatile. */
3108 if (! flag_argument_noalias)
3110 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3111 return 1;
3115 /* If we have a parallel load and a store to the same address, the load
3116 is performed first, so there is no conflict. Similarly, there is
3117 no conflict if we have parallel loads from the same address. */
3119 /* Cannot use auto increment or auto decrement twice for same
3120 base register. */
3121 if (base0 == base1 && incdec0 && incdec1)
3122 return 1;
3124 /* It might be too confusing for GCC if we use a base register
3125 with a side effect and a memory reference using the same register
3126 in parallel. */
3127 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3128 return 1;
3130 /* We cannot optimize the case where op0 and op1 refer to the same
3131 address. */
3132 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3133 return 1;
3135 /* No conflict. */
3136 return 0;
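/* For example (illustrative): a store through *AR0++ in parallel
   with a load from *AR0 is flagged as a conflict, since both
   addresses share the base register and one has a side effect,
   whereas *+AR0(2) in parallel with *+AR3(2) uses different base
   registers and no conflict is reported.  */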
3140 /* Check for while loop inside a decrement and branch loop. */
3143 c4x_label_conflict (rtx insn, rtx jump, rtx db)
3145 while (insn)
3147 if (GET_CODE (insn) == CODE_LABEL)
3149 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3150 return 1;
3151 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3152 return 0;
3154 insn = PREV_INSN (insn);
3156 return 1;
3160 /* Validate combination of operands for parallel load/store instructions. */
3163 valid_parallel_load_store (rtx *operands,
3164 enum machine_mode mode ATTRIBUTE_UNUSED)
3166 rtx op0 = operands[0];
3167 rtx op1 = operands[1];
3168 rtx op2 = operands[2];
3169 rtx op3 = operands[3];
3171 if (GET_CODE (op0) == SUBREG)
3172 op0 = SUBREG_REG (op0);
3173 if (GET_CODE (op1) == SUBREG)
3174 op1 = SUBREG_REG (op1);
3175 if (GET_CODE (op2) == SUBREG)
3176 op2 = SUBREG_REG (op2);
3177 if (GET_CODE (op3) == SUBREG)
3178 op3 = SUBREG_REG (op3);
3180 /* The patterns should only allow ext_low_reg_operand() or
3181 par_ind_operand() operands. Thus of the 4 operands, only 2
3182 should be REGs and the other 2 should be MEMs. */
3184 /* This test prevents the multipack pass from using this pattern if
3185 op0 is used as an index or base register in op2 or op3, since
3186 this combination will require reloading. */
3187 if (GET_CODE (op0) == REG
3188 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3189 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3190 return 0;
3192 /* LDI||LDI. */
3193 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3194 return (REGNO (op0) != REGNO (op2))
3195 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3196 && ! c4x_address_conflict (op1, op3, 0, 0);
3198 /* STI||STI. */
3199 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3200 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3201 && ! c4x_address_conflict (op0, op2, 1, 1);
3203 /* LDI||STI. */
3204 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3205 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3206 && ! c4x_address_conflict (op1, op2, 0, 1);
3208 /* STI||LDI. */
3209 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3210 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3211 && ! c4x_address_conflict (op0, op3, 1, 0);
3213 return 0;
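/* For instance (illustrative): LDI *AR0, R0 || STI R1, *AR1 is a
   valid LDI||STI pairing provided the two addresses do not conflict,
   while LDI *AR0, R0 || LDI *AR2, R0 is rejected above because both
   loads write the same register.  */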
3218 valid_parallel_operands_4 (rtx *operands,
3219 enum machine_mode mode ATTRIBUTE_UNUSED)
3221 rtx op0 = operands[0];
3222 rtx op2 = operands[2];
3224 if (GET_CODE (op0) == SUBREG)
3225 op0 = SUBREG_REG (op0);
3226 if (GET_CODE (op2) == SUBREG)
3227 op2 = SUBREG_REG (op2);
3229 /* This test prevents the multipack pass from using this pattern if
3230 op0 is used as an index or base register in op2, since this combination
3231 will require reloading. */
3232 if (GET_CODE (op0) == REG
3233 && GET_CODE (op2) == MEM
3234 && reg_mentioned_p (op0, XEXP (op2, 0)))
3235 return 0;
3237 return 1;
3242 valid_parallel_operands_5 (rtx *operands,
3243 enum machine_mode mode ATTRIBUTE_UNUSED)
3245 int regs = 0;
3246 rtx op0 = operands[0];
3247 rtx op1 = operands[1];
3248 rtx op2 = operands[2];
3249 rtx op3 = operands[3];
3251 if (GET_CODE (op0) == SUBREG)
3252 op0 = SUBREG_REG (op0);
3253 if (GET_CODE (op1) == SUBREG)
3254 op1 = SUBREG_REG (op1);
3255 if (GET_CODE (op2) == SUBREG)
3256 op2 = SUBREG_REG (op2);
3258 /* The patterns should only allow ext_low_reg_operand() or
3259 par_ind_operand() operands. Operands 1 and 2 may be commutative
3260 but only one of them can be a register. */
3261 if (GET_CODE (op1) == REG)
3262 regs++;
3263 if (GET_CODE (op2) == REG)
3264 regs++;
3266 if (regs != 1)
3267 return 0;
3269 /* This test prevents the multipack pass from using this pattern if
3270 op0 is used as an index or base register in op3, since this combination
3271 will require reloading. */
3272 if (GET_CODE (op0) == REG
3273 && GET_CODE (op3) == MEM
3274 && reg_mentioned_p (op0, XEXP (op3, 0)))
3275 return 0;
3277 return 1;
3282 valid_parallel_operands_6 (rtx *operands,
3283 enum machine_mode mode ATTRIBUTE_UNUSED)
3285 int regs = 0;
3286 rtx op0 = operands[0];
3287 rtx op1 = operands[1];
3288 rtx op2 = operands[2];
3289 rtx op4 = operands[4];
3290 rtx op5 = operands[5];
3292 if (GET_CODE (op1) == SUBREG)
3293 op1 = SUBREG_REG (op1);
3294 if (GET_CODE (op2) == SUBREG)
3295 op2 = SUBREG_REG (op2);
3296 if (GET_CODE (op4) == SUBREG)
3297 op4 = SUBREG_REG (op4);
3298 if (GET_CODE (op5) == SUBREG)
3299 op5 = SUBREG_REG (op5);
3301 /* The patterns should only allow ext_low_reg_operand() or
3302 par_ind_operand() operands. Thus of the 4 input operands, only 2
3303 should be REGs and the other 2 should be MEMs. */
3305 if (GET_CODE (op1) == REG)
3306 regs++;
3307 if (GET_CODE (op2) == REG)
3308 regs++;
3309 if (GET_CODE (op4) == REG)
3310 regs++;
3311 if (GET_CODE (op5) == REG)
3312 regs++;
3314 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3315 Perhaps we should count the MEMs as well? */
3316 if (regs != 2)
3317 return 0;
3319 /* This test prevents the multipack pass from using this pattern if
3320 op0 is used as an index or base register in op4 or op5, since
3321 this combination will require reloading. */
3322 if (GET_CODE (op0) == REG
3323 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3324 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3325 return 0;
3327 return 1;
3331 /* Validate combination of src operands. Note that the operands have
3332 been screened by the src_operand predicate. We just have to check
3333 that the combination of operands is valid. If FORCE is set, ensure
3334 that the destination regno is valid if we have a 2 operand insn. */
3336 static int
3337 c4x_valid_operands (enum rtx_code code, rtx *operands,
3338 enum machine_mode mode ATTRIBUTE_UNUSED,
3339 int force)
3341 rtx op0;
3342 rtx op1;
3343 rtx op2;
3344 enum rtx_code code1;
3345 enum rtx_code code2;
3348 /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
3349 if (code == IF_THEN_ELSE)
3350 return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);
3352 if (code == COMPARE)
3354 op1 = operands[0];
3355 op2 = operands[1];
3357 else
3359 op1 = operands[1];
3360 op2 = operands[2];
3363 op0 = operands[0];
3365 if (GET_CODE (op0) == SUBREG)
3366 op0 = SUBREG_REG (op0);
3367 if (GET_CODE (op1) == SUBREG)
3368 op1 = SUBREG_REG (op1);
3369 if (GET_CODE (op2) == SUBREG)
3370 op2 = SUBREG_REG (op2);
3372 code1 = GET_CODE (op1);
3373 code2 = GET_CODE (op2);
3376 if (code1 == REG && code2 == REG)
3377 return 1;
3379 if (code1 == MEM && code2 == MEM)
3381 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3382 return 1;
3383 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3386 /* We cannot handle two MEMs or two CONSTS, etc. */
3387 if (code1 == code2)
3388 return 0;
3390 if (code1 == REG)
3392 switch (code2)
3394 case CONST_INT:
3395 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3396 return 1;
3397 break;
3399 case CONST_DOUBLE:
3400 if (! c4x_H_constant (op2))
3401 return 0;
3402 break;
3404 /* Any valid memory operand screened by src_operand is OK. */
3405 case MEM:
3406 break;
3408 default:
3409 fatal_insn ("c4x_valid_operands: Internal error", op2);
3410 break;
3413 if (GET_CODE (op0) == SCRATCH)
3414 return 1;
3416 if (!REG_P (op0))
3417 return 0;
3419 /* Check that we have a valid destination register for a two operand
3420 instruction. */
3421 return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
3425 /* Check non-commutative operators. */
3426 if (code == ASHIFTRT || code == LSHIFTRT
3427 || code == ASHIFT || code == COMPARE)
3428 return code2 == REG
3429 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3432 /* Assume MINUS is commutative since the subtract patterns
3433 also support the reverse subtract instructions. Since op1
3434 is not a register, and op2 is a register, op1 can only
3435 be a restricted memory operand for a shift instruction. */
3436 if (code2 == REG)
3438 switch (code1)
3440 case CONST_INT:
3441 break;
3443 case CONST_DOUBLE:
3444 if (! c4x_H_constant (op1))
3445 return 0;
3446 break;
3448 /* Any valid memory operand screened by src_operand is OK. */
3449 case MEM:
3450 break;
3452 default:
3453 abort ();
3454 break;
3457 if (GET_CODE (op0) == SCRATCH)
3458 return 1;
3460 if (!REG_P (op0))
3461 return 0;
3463 /* Check that we have a valid destination register for a two operand
3464 instruction. */
3465 return ! force || REGNO (op1) == REGNO (op0);
3468 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3469 return 1;
3471 return 0;
3475 int valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3478 /* If we are not optimizing then we have to let anything go and let
3479 reload fix things up. instantiate_decl in function.c can produce
3480 invalid insns by changing the offset of a memory operand from a
3481 valid one into an invalid one, when the second operand is also a
3482 memory operand. The alternative is not to allow two memory
3483 operands for an insn when not optimizing. The problem only rarely
3484 occurs, for example with the C-torture program DFcmp.c. */
3486 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
3491 legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3493 /* Compare only has 2 operands. */
3494 if (code == COMPARE)
3496 /* During RTL generation, force constants into pseudos so that
3497 they can get hoisted out of loops. This will tie up an extra
3498 register but can save an extra cycle. Only do this if loop
3499 optimization is enabled. (We cannot pull this trick for add and
3500 sub instructions since the flow pass won't find
3501 autoincrements etc.) This allows us to generate compare
3502 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
3503 of LDI *AR0++, R0; CMPI 42, R0.
3505 Note that expand_binops will try to load an expensive constant
3506 into a register if it is used within a loop. Unfortunately,
3507 the cost mechanism doesn't allow us to look at the other
3508 operand to decide whether the constant is expensive. */
3510 if (! reload_in_progress
3511 && TARGET_HOIST
3512 && optimize > 0
3513 && GET_CODE (operands[1]) == CONST_INT
3514 && rtx_cost (operands[1], code) > 1)
3515 operands[1] = force_reg (mode, operands[1]);
3517 if (! reload_in_progress
3518 && ! c4x_valid_operands (code, operands, mode, 0))
3519 operands[0] = force_reg (mode, operands[0]);
3520 return 1;
3523 /* We cannot do this for ADDI/SUBI insns since we will
3524 prevent the flow pass from finding autoincrement addressing
3525 opportunities. */
3526 if (! reload_in_progress
3527 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
3528 && TARGET_HOIST
3529 && optimize > 1
3530 && GET_CODE (operands[2]) == CONST_INT
3531 && rtx_cost (operands[2], code) > 1)
3532 operands[2] = force_reg (mode, operands[2]);
3534 /* We can get better code on a C30 if we force constant shift counts
3535 into a register. This way they can get hoisted out of loops,
3536 tying up a register but saving an instruction. The downside is
3537 that they may get allocated to an address or index register, and
3538 thus we will get a pipeline conflict if there is a nearby
3539 indirect address using an address register.
3541 Note that expand_binops will not try to load an expensive constant
3542 into a register if it is used within a loop for a shift insn. */
3544 if (! reload_in_progress
3545 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
3547 /* If the operand combination is invalid, we force operand1 into a
3548 register, preventing reload from having to do this at a
3549 later stage. */
3550 operands[1] = force_reg (mode, operands[1]);
3551 if (TARGET_FORCE)
3553 emit_move_insn (operands[0], operands[1]);
3554 operands[1] = copy_rtx (operands[0]);
3556 else
3558 /* Just in case... */
3559 if (! c4x_valid_operands (code, operands, mode, 0))
3560 operands[2] = force_reg (mode, operands[2]);
3564 /* Right shifts require a negative shift count, but GCC expects
3565 a positive count, so we emit a NEG. */
3566 if ((code == ASHIFTRT || code == LSHIFTRT)
3567 && (GET_CODE (operands[2]) != CONST_INT))
3568 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
3571 /* When the shift count is greater than or equal to the mode width
3572 then the result can be implementation dependent. We truncate the
3573 count to fit in 5 bits so that we do not emit invalid code when
3574 optimizing---such as trying to generate lhu2 with 20021124-1.c. */
3575 if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
3576 && (GET_CODE (operands[2]) == CONST_INT))
3577 && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
3578 operands[2]
3579 = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));
3581 return 1;
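/* Worked example (illustrative): QImode is 32 bits wide on the C4x,
   so a constant shift count of 40 is rewritten as 40 & 31 = 8 before
   the insn is generated.  */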
3585 /* The following predicates are used for instruction scheduling. */
3588 group1_reg_operand (rtx op, enum machine_mode mode)
3590 if (mode != VOIDmode && mode != GET_MODE (op))
3591 return 0;
3592 if (GET_CODE (op) == SUBREG)
3593 op = SUBREG_REG (op);
3594 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
3599 group1_mem_operand (rtx op, enum machine_mode mode)
3601 if (mode != VOIDmode && mode != GET_MODE (op))
3602 return 0;
3604 if (GET_CODE (op) == MEM)
3606 op = XEXP (op, 0);
3607 if (GET_CODE (op) == PLUS)
3609 rtx op0 = XEXP (op, 0);
3610 rtx op1 = XEXP (op, 1);
3612 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
3613 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
3614 return 1;
3616 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
3617 return 1;
3620 return 0;
3624 /* Return true if OP is any one of the address registers. */
3627 arx_reg_operand (rtx op, enum machine_mode mode)
3629 if (mode != VOIDmode && mode != GET_MODE (op))
3630 return 0;
3631 if (GET_CODE (op) == SUBREG)
3632 op = SUBREG_REG (op);
3633 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
3637 static int
3638 c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
3640 if (mode != VOIDmode && mode != GET_MODE (op))
3641 return 0;
3642 if (GET_CODE (op) == SUBREG)
3643 op = SUBREG_REG (op);
3644 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
3648 static int
3649 c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
3651 if (mode != VOIDmode && mode != GET_MODE (op))
3652 return 0;
3654 if (GET_CODE (op) == MEM)
3656 op = XEXP (op, 0);
3657 switch (GET_CODE (op))
3659 case PRE_DEC:
3660 case POST_DEC:
3661 case PRE_INC:
3662 case POST_INC:
3663 op = XEXP (op, 0);
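/* Fall through. */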
3665 case REG:
3666 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
3668 case PRE_MODIFY:
3669 case POST_MODIFY:
3670 if (REG_P (XEXP (op, 0)) && (! reload_completed
3671 || (REGNO (XEXP (op, 0)) == regno)))
3672 return 1;
3673 if (REG_P (XEXP (XEXP (op, 1), 1))
3674 && (! reload_completed
3675 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
3676 return 1;
3677 break;
3679 case PLUS:
3681 rtx op0 = XEXP (op, 0);
3682 rtx op1 = XEXP (op, 1);
3684 if ((REG_P (op0) && (! reload_completed
3685 || (REGNO (op0) == regno)))
3686 || (REG_P (op1) && (! reload_completed
3687 || (REGNO (op1) == regno))))
3688 return 1;
3690 break;
3692 default:
3693 break;
3696 return 0;
3701 ar0_reg_operand (rtx op, enum machine_mode mode)
3703 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
3708 ar0_mem_operand (rtx op, enum machine_mode mode)
3710 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
3715 ar1_reg_operand (rtx op, enum machine_mode mode)
3717 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
3722 ar1_mem_operand (rtx op, enum machine_mode mode)
3724 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
3729 ar2_reg_operand (rtx op, enum machine_mode mode)
3731 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
3736 ar2_mem_operand (rtx op, enum machine_mode mode)
3738 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
3743 ar3_reg_operand (rtx op, enum machine_mode mode)
3745 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
3750 ar3_mem_operand (rtx op, enum machine_mode mode)
3752 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
3757 ar4_reg_operand (rtx op, enum machine_mode mode)
3759 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
3764 ar4_mem_operand (rtx op, enum machine_mode mode)
3766 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
3771 ar5_reg_operand (rtx op, enum machine_mode mode)
3773 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
3778 ar5_mem_operand (rtx op, enum machine_mode mode)
3780 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
3785 ar6_reg_operand (rtx op, enum machine_mode mode)
3787 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
3792 ar6_mem_operand (rtx op, enum machine_mode mode)
3794 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
3799 ar7_reg_operand (rtx op, enum machine_mode mode)
3801 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
3806 ar7_mem_operand (rtx op, enum machine_mode mode)
3808 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
3813 ir0_reg_operand (rtx op, enum machine_mode mode)
3815 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
3820 ir0_mem_operand (rtx op, enum machine_mode mode)
3822 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
3827 ir1_reg_operand (rtx op, enum machine_mode mode)
3829 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
3834 ir1_mem_operand (rtx op, enum machine_mode mode)
3836 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
3840 /* This is similar to operand_subword but allows autoincrement
3841 addressing. */
3844 c4x_operand_subword (rtx op, int i, int validate_address,
3845 enum machine_mode mode)
3847 if (mode != HImode && mode != HFmode)
3848 fatal_insn ("c4x_operand_subword: invalid mode", op);
3850 if (mode == HFmode && REG_P (op))
3851 fatal_insn ("c4x_operand_subword: invalid operand", op);
3853 if (GET_CODE (op) == MEM)
3855 enum rtx_code code = GET_CODE (XEXP (op, 0));
3856 enum machine_mode mode = GET_MODE (XEXP (op, 0));
3857 enum machine_mode submode;
3859 submode = mode;
3860 if (mode == HImode)
3861 submode = QImode;
3862 else if (mode == HFmode)
3863 submode = QFmode;
3865 switch (code)
3867 case POST_INC:
3868 case PRE_INC:
3869 return gen_rtx_MEM (submode, XEXP (op, 0));
3871 case POST_DEC:
3872 case PRE_DEC:
3873 case PRE_MODIFY:
3874 case POST_MODIFY:
3875 /* We could handle these with some difficulty.
3876 e.g., *p-- => *(p-=2); *(p+1). */
3877 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
3879 case SYMBOL_REF:
3880 case LABEL_REF:
3881 case CONST:
3882 case CONST_INT:
3883 fatal_insn ("c4x_operand_subword: invalid address", op);
3885 /* Even though offsettable_address_p considers (MEM
3886 (LO_SUM)) to be offsettable, it is not safe if the
3887 address is at the end of the data page since we also have
3888 to fix up the associated HIGH part. In this case where
3889 we are trying to split a HImode or HFmode memory
3890 reference, we would have to emit another insn to reload a
3891 new HIGH value. It's easier to disable LO_SUM memory references
3892 in HImode or HFmode and we probably get better code. */
3893 case LO_SUM:
3894 fatal_insn ("c4x_operand_subword: address not offsettable", op);
3896 default:
3897 break;
3901 return operand_subword (op, i, validate_address, mode);
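/* For example (illustrative): splitting (mem:HI (post_inc (reg AR0)))
   yields (mem:QI (post_inc (reg AR0))) for each half, so executing
   the two QImode references advances AR0 across the whole HImode
   value, while a plain (mem:HI (reg AR0)) is handled by
   operand_subword with an explicit offset for subword I.  */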
3904 struct name_list
3906 struct name_list *next;
3907 const char *name;
3910 static struct name_list *global_head;
3911 static struct name_list *extern_head;
3914 /* Add NAME to the list of global symbols and remove it from the
3915 external list if present there. */
3917 void
3918 c4x_global_label (const char *name)
3920 struct name_list *p, *last;
3922 /* Do not insert duplicate names; linearly search through the list of
3923 existing names. */
3924 p = global_head;
3925 while (p)
3927 if (strcmp (p->name, name) == 0)
3928 return;
3929 p = p->next;
3931 p = (struct name_list *) xmalloc (sizeof *p);
3932 p->next = global_head;
3933 p->name = name;
3934 global_head = p;
3936 /* Remove this name from ref list if present. */
3937 last = NULL;
3938 p = extern_head;
3939 while (p)
3941 if (strcmp (p->name, name) == 0)
3943 if (last)
3944 last->next = p->next;
3945 else
3946 extern_head = p->next;
3947 break;
3949 last = p;
3950 p = p->next;
3955 /* Add NAME to list of external symbols. */
3957 void
3958 c4x_external_ref (const char *name)
3960 struct name_list *p;
3962 /* Do not insert duplicate names. */
3963 p = extern_head;
3964 while (p)
3966 if (strcmp (p->name, name) == 0)
3967 return;
3968 p = p->next;
3971 /* Do not insert ref if global found. */
3972 p = global_head;
3973 while (p)
3975 if (strcmp (p->name, name) == 0)
3976 return;
3977 p = p->next;
3979 p = (struct name_list *) xmalloc (sizeof *p);
3980 p->next = extern_head;
3981 p->name = name;
3982 extern_head = p;
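/* Usage sketch (hypothetical symbol names): a file that calls foo()
   records "foo" via c4x_external_ref; if foo is later defined and
   globalized, c4x_global_label removes it from the extern list so
   that c4x_file_end below does not emit a stray ".ref foo"
   directive.  */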
3985 /* We need to have a data section we can identify so that we can set
3986 the DP register back to a data pointer in the small memory model.
3987 This is only required for ISRs if we are paranoid that someone
3988 may have quietly changed this register on the sly. */
3989 static void
3990 c4x_file_start (void)
3992 default_file_start ();
3993 fprintf (asm_out_file, "\t.version\t%d\n", c4x_cpu_version);
3994 fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
3998 static void
3999 c4x_file_end (void)
4001 struct name_list *p;
4003 /* Output all external names that are not global. */
4004 p = extern_head;
4005 while (p)
4007 fprintf (asm_out_file, "\t.ref\t");
4008 assemble_name (asm_out_file, p->name);
4009 fprintf (asm_out_file, "\n");
4010 p = p->next;
4012 fprintf (asm_out_file, "\t.end\n");
4016 static void
4017 c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
4019 while (list != NULL_TREE
4020 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4021 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4022 list = TREE_CHAIN (list);
4023 if (list)
4024 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4025 *attributes);
4029 static void
4030 c4x_insert_attributes (tree decl, tree *attributes)
4032 switch (TREE_CODE (decl))
4034 case FUNCTION_DECL:
4035 c4x_check_attribute ("section", code_tree, decl, attributes);
4036 c4x_check_attribute ("const", pure_tree, decl, attributes);
4037 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4038 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4039 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4040 break;
4042 case VAR_DECL:
4043 c4x_check_attribute ("section", data_tree, decl, attributes);
4044 break;
4046 default:
4047 break;
4051 /* Table of valid machine attributes. */
4052 const struct attribute_spec c4x_attribute_table[] =
4054 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4055 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4056 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4057 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4058 { NULL, 0, 0, false, false, false, NULL }
4061 /* Handle an attribute requiring a FUNCTION_TYPE;
4062 arguments as in struct attribute_spec.handler. */
4063 static tree
4064 c4x_handle_fntype_attribute (tree *node, tree name,
4065 tree args ATTRIBUTE_UNUSED,
4066 int flags ATTRIBUTE_UNUSED,
4067 bool *no_add_attrs)
4069 if (TREE_CODE (*node) != FUNCTION_TYPE)
4071 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4072 IDENTIFIER_POINTER (name));
4073 *no_add_attrs = true;
4076 return NULL_TREE;
4080 /* !!! FIXME to emit RPTS correctly. */
4083 c4x_rptb_rpts_p (rtx insn, rtx op)
4085 /* The next insn should be our label marking where the
4086 repeat block starts. */
4087 insn = NEXT_INSN (insn);
4088 if (GET_CODE (insn) != CODE_LABEL)
4090 /* Some insns may have been shifted between the RPTB insn
4091 and the top label... They were probably destined to
4092 be moved out of the loop. For now, let's leave them
4093 where they are and print a warning. We should
4094 probably move these insns before the repeat block insn. */
4095 if (TARGET_DEBUG)
4096 fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
4097 insn);
4098 return 0;
4101 /* Skip any notes. */
4102 insn = next_nonnote_insn (insn);
4104 /* This should be our first insn in the loop. */
4105 if (! INSN_P (insn))
4106 return 0;
4108 /* Skip any notes. */
4109 insn = next_nonnote_insn (insn);
4111 if (! INSN_P (insn))
4112 return 0;
4114 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4115 return 0;
4117 if (TARGET_RPTS)
4118 return 1;
4120 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4124 /* Check if register r11 is used as the destination of an insn. */
4126 static int
4127 c4x_r11_set_p(rtx x)
4129 rtx set;
4130 int i, j;
4131 const char *fmt;
4133 if (x == 0)
4134 return 0;
4136 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4137 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4139 if (INSN_P (x) && (set = single_set (x)))
4140 x = SET_DEST (set);
4142 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4143 return 1;
4145 fmt = GET_RTX_FORMAT (GET_CODE (x));
4146 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4148 if (fmt[i] == 'e')
4150 if (c4x_r11_set_p (XEXP (x, i)))
4151 return 1;
4153 else if (fmt[i] == 'E')
4154 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4155 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4156 return 1;
4158 return 0;
4162 /* The c4x sometimes has a problem when the insn before the laj insn
4163 sets the r11 register. Check for this situation. */
4166 c4x_check_laj_p (rtx insn)
4168 insn = prev_nonnote_insn (insn);
4170 /* If this is the start of the function no nop is needed. */
4171 if (insn == 0)
4172 return 0;
4174 /* If the previous insn is a code label we have to insert a nop. This
4175 could be a jump or table jump. We can find the normal jumps by
4176 scanning the function but this will not find table jumps. */
4177 if (GET_CODE (insn) == CODE_LABEL)
4178 return 1;
4180 /* If the previous insn sets register r11 we have to insert a nop. */
4181 if (c4x_r11_set_p (insn))
4182 return 1;
4184 /* No nop needed. */
4185 return 0;
4189 /* Adjust the cost of a scheduling dependency. Return the new cost of
4190 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4191 A set of an address register followed by a use incurs a 2 cycle
4192 stall (reduced to a single cycle on the c40 using LDA), while
4193 a read of an address register followed by a use incurs a single cycle stall. */
4195 #define SET_USE_COST 3
4196 #define SETLDA_USE_COST 2
4197 #define READ_USE_COST 2
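/* Illustrative reading (assuming a base latency of one cycle):
   SET_USE_COST = 3 models the 2 cycle set/use stall described above,
   SETLDA_USE_COST = 2 the single cycle LDA case on the c40, and
   READ_USE_COST = 2 the single cycle read/use stall.  */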
4199 static int
4200 c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4202 /* Don't worry about this until we know what registers have been
4203 assigned. */
4204 if (flag_schedule_insns == 0 && ! reload_completed)
4205 return 0;
4207 /* How do we handle dependencies where a read followed by another
4208 read causes a pipeline stall? For example, a read of ar0 followed
4209 by the use of ar0 for a memory reference. It looks like we
4210 need to extend the scheduler to handle this case. */
4212 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4213 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4214 so only deal with insns we know about. */
4215 if (recog_memoized (dep_insn) < 0)
4216 return 0;
4218 if (REG_NOTE_KIND (link) == 0)
4220 int max = 0;
4222 /* Data dependency; DEP_INSN writes a register that INSN reads some
4223 cycles later. */
4224 if (TARGET_C3X)
4226 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4227 max = SET_USE_COST > max ? SET_USE_COST : max;
4228 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4229 max = READ_USE_COST > max ? READ_USE_COST : max;
4231 else
4233 /* This could be significantly optimized. We should look
4234 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4235 insn uses ar0-ar7. We then test if the same register
4236 is used. The tricky bit is that some operands will
4237 use several registers... */
4238 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4239 max = SET_USE_COST > max ? SET_USE_COST : max;
4240 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4241 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4242 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4243 max = READ_USE_COST > max ? READ_USE_COST : max;
4245 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4246 max = SET_USE_COST > max ? SET_USE_COST : max;
4247 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4248 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4249 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4250 max = READ_USE_COST > max ? READ_USE_COST : max;
4252 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4253 max = SET_USE_COST > max ? SET_USE_COST : max;
4254 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4255 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4256 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4257 max = READ_USE_COST > max ? READ_USE_COST : max;
4259 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4260 max = SET_USE_COST > max ? SET_USE_COST : max;
4261 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4262 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4263 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4264 max = READ_USE_COST > max ? READ_USE_COST : max;
4266 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4267 max = SET_USE_COST > max ? SET_USE_COST : max;
4268 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4269 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4270 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4271 max = READ_USE_COST > max ? READ_USE_COST : max;
4273 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4274 max = SET_USE_COST > max ? SET_USE_COST : max;
4275 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4276 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4277 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4278 max = READ_USE_COST > max ? READ_USE_COST : max;
4280 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4281 max = SET_USE_COST > max ? SET_USE_COST : max;
4282 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4283 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4284 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4285 max = READ_USE_COST > max ? READ_USE_COST : max;
4287 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4288 max = SET_USE_COST > max ? SET_USE_COST : max;
4289 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4290 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4291 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4292 max = READ_USE_COST > max ? READ_USE_COST : max;
4294 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4295 max = SET_USE_COST > max ? SET_USE_COST : max;
4296 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4297 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4299 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4300 max = SET_USE_COST > max ? SET_USE_COST : max;
4301 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4302 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4305 if (max)
4306 cost = max;
4308 /* For other data dependencies, the default cost specified in the
4309 md is correct. */
4310 return cost;
4312 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4314 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4315 cycles later. */
4317 /* For c4x anti dependencies, the cost is 0. */
4318 return 0;
4320 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4322 /* Output dependency; DEP_INSN writes a register that INSN writes some
4323 cycles later. */
4325 /* For c4x output dependencies, the cost is 0. */
4326 return 0;
4328 else
4329 abort ();
4332 void
4333 c4x_init_builtins (void)
4335 tree endlink = void_list_node;
4337 add_builtin_function ("fast_ftoi",
4338 build_function_type
4339 (integer_type_node,
4340 tree_cons (NULL_TREE, double_type_node,
4341 endlink)),
4342 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
4343 add_builtin_function ("ansi_ftoi",
4344 build_function_type
4345 (integer_type_node,
4346 tree_cons (NULL_TREE, double_type_node,
4347 endlink)),
4348 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
4349 NULL_TREE);
4350 if (TARGET_C3X)
4351 add_builtin_function ("fast_imult",
4352 build_function_type
4353 (integer_type_node,
4354 tree_cons (NULL_TREE, integer_type_node,
4355 tree_cons (NULL_TREE,
4356 integer_type_node,
4357 endlink))),
4358 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
4359 NULL_TREE);
4360 else
4362 add_builtin_function ("toieee",
4363 build_function_type
4364 (double_type_node,
4365 tree_cons (NULL_TREE, double_type_node,
4366 endlink)),
4367 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
4368 NULL_TREE);
4369 add_builtin_function ("frieee",
4370 build_function_type
4371 (double_type_node,
4372 tree_cons (NULL_TREE, double_type_node,
4373 endlink)),
4374 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
4375 NULL_TREE);
4376 add_builtin_function ("fast_invf",
4377 build_function_type
4378 (double_type_node,
4379 tree_cons (NULL_TREE, double_type_node,
4380 endlink)),
4381 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
4382 NULL_TREE);
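/* Usage sketch (illustrative user code, not part of this file):

	int i = fast_ftoi (3.5);	   fast float to int truncation
	double r = fast_invf (2.0);	   reciprocal estimate, C4x only

   Such calls are expanded inline by c4x_expand_builtin below instead
   of being emitted as library calls.  */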
4388 c4x_expand_builtin (tree exp, rtx target,
4389 rtx subtarget ATTRIBUTE_UNUSED,
4390 enum machine_mode mode ATTRIBUTE_UNUSED,
4391 int ignore ATTRIBUTE_UNUSED)
4393 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
4394 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4395 tree arg0, arg1;
4396 rtx r0, r1;
4398 switch (fcode)
4400 case C4X_BUILTIN_FIX:
4401 arg0 = CALL_EXPR_ARG (exp, 0);
4402 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4403 if (! target || ! register_operand (target, QImode))
4404 target = gen_reg_rtx (QImode);
4405 emit_insn (gen_fixqfqi_clobber (target, r0));
4406 return target;
4408 case C4X_BUILTIN_FIX_ANSI:
4409 arg0 = CALL_EXPR_ARG (exp, 0);
4410 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4411 if (! target || ! register_operand (target, QImode))
4412 target = gen_reg_rtx (QImode);
4413 emit_insn (gen_fix_truncqfqi2 (target, r0));
4414 return target;
4416 case C4X_BUILTIN_MPYI:
4417 if (! TARGET_C3X)
4418 break;
4419 arg0 = CALL_EXPR_ARG (exp, 0);
4420 arg1 = CALL_EXPR_ARG (exp, 1);
4421 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
4422 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
4423 if (! target || ! register_operand (target, QImode))
4424 target = gen_reg_rtx (QImode);
4425 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
4426 return target;
4428 case C4X_BUILTIN_TOIEEE:
4429 if (TARGET_C3X)
4430 break;
4431 arg0 = CALL_EXPR_ARG (exp, 0);
4432 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4433 if (! target || ! register_operand (target, QFmode))
4434 target = gen_reg_rtx (QFmode);
4435 emit_insn (gen_toieee (target, r0));
4436 return target;
4438 case C4X_BUILTIN_FRIEEE:
4439 if (TARGET_C3X)
4440 break;
4441 arg0 = CALL_EXPR_ARG (exp, 0);
4442 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4443 if (register_operand (r0, QFmode))
4445 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
4446 emit_move_insn (r1, r0);
4447 r0 = r1;
4449 if (! target || ! register_operand (target, QFmode))
4450 target = gen_reg_rtx (QFmode);
4451 emit_insn (gen_frieee (target, r0));
4452 return target;
4454 case C4X_BUILTIN_RCPF:
4455 if (TARGET_C3X)
4456 break;
4457 arg0 = CALL_EXPR_ARG (exp, 0);
4458 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4459 if (! target || ! register_operand (target, QFmode))
4460 target = gen_reg_rtx (QFmode);
4461 emit_insn (gen_rcpfqf_clobber (target, r0));
4462 return target;
4464 return NULL_RTX;
static void
c4x_init_libfuncs (void)
{
  set_optab_libfunc (smul_optab, QImode, "__mulqi3");
  set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
  set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
  set_optab_libfunc (smod_optab, QImode, "__modqi3");
  set_optab_libfunc (umod_optab, QImode, "__umodqi3");
  set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
  set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
  set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
  set_optab_libfunc (smul_optab, HImode, "__mulhi3");
  set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
  set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
  set_optab_libfunc (smod_optab, HImode, "__modhi3");
  set_optab_libfunc (umod_optab, HImode, "__umodhi3");
  set_optab_libfunc (ffs_optab, QImode, "__ffs");
  smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
  umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
  fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
  fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
  fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
  fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
  floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
  floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
  floathihf2_libfunc = init_one_libfunc ("__floathihf2");
  floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
}
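/* Presumably the worker function for TARGET_ASM_NAMED_SECTION: switch
   to section NAME using the TI-style ".sect" directive.  */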
static void
c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
		       tree decl ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}
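/* Presumably the worker function for TARGET_ASM_GLOBALIZE_LABEL: emit
   the default directive for NAME and also record it as a C4x global.  */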
static void
c4x_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  HOST_WIDE_INT val;

  switch (code)
    {
      /* Some small integers are effectively free for the C40.  We
	 should also take the memory model into account: with the big
	 memory model an extra insn is needed to load a constant from
	 memory.  */
    case CONST_INT:
      val = INTVAL (x);
      if (c4x_J_constant (x))
	*total = 0;
      else if (! TARGET_C3X
	       && outer_code == AND
	       && (val == 255 || val == 65535))
	*total = 0;
      else if (! TARGET_C3X
	       && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
	       && (val == 16 || val == 24))
	*total = 0;
      else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
	*total = 3;
      else if (LOGICAL_CODE_P (outer_code)
	       ? c4x_L_constant (x) : c4x_I_constant (x))
	*total = 2;
      else
	*total = 4;
      return true;
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 4;
      return true;

    case CONST_DOUBLE:
      if (c4x_H_constant (x))
	*total = 2;
      else if (GET_MODE (x) == QFmode)
	*total = 4;
      else
	*total = 8;
      return true;
      /* ??? Note that we return true, rather than false, so that
	 rtx_cost doesn't include the constant costs.  Otherwise
	 expand_mult will think that it is cheaper to synthesize a
	 multiply rather than to use a multiply instruction.  I think
	 this is because synth_mult doesn't take the cost of loading
	 the operands into account, whereas the calculation of
	 mult_cost does.  */
    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
			      || TARGET_MPYI ? 1 : 14);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
			      ? 15 : 50);
      return true;

    default:
      return false;
    }
}
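/* Worked example (illustrative only): for (and:QI (reg:QI r)
   (const_int 255)) on a C40, the CONST_INT case above sets *TOTAL to
   0, so the mask is treated as free, while a constant that fits no
   immediate field costs 4.  An integer multiply is costed at one insn
   when TARGET_MPYI is set and at 14 insns otherwise, per the MULT
   case.  */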
/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  */

static void
c4x_external_libcall (rtx fun)
{
  /* This is only needed to keep asm30 happy for ___divqf3 etc.  */
  c4x_external_ref (XSTR (fun, 0));
}
/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, AR0_REGNO);
}
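/* Aggregates are thus returned via a pointer passed in AR0 for both
   incoming and outgoing calls, since INCOMING is ignored above.  */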