/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
   2004, 2005 Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 51 Franklin Street, Fifth Floor,
   Boston, MA 02110-1301, USA.  */
/* Some output-actions in c4x.md need these.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "recog.h"
#include "ggc.h"
#include "cpplib.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg   Modes          Saved.  */
  R0R1_REGS,                    /* R0    QI, QF, HF     No.  */
  R0R1_REGS,                    /* R1    QI, QF, HF     No.  */
  R2R3_REGS,                    /* R2    QI, QF, HF     No.  */
  R2R3_REGS,                    /* R3    QI, QF, HF     No.  */
  EXT_LOW_REGS,                 /* R4    QI, QF, HF     QI.  */
  EXT_LOW_REGS,                 /* R5    QI, QF, HF     QI.  */
  EXT_LOW_REGS,                 /* R6    QI, QF, HF     QF.  */
  EXT_LOW_REGS,                 /* R7    QI, QF, HF     QF.  */
  ADDR_REGS,                    /* AR0   QI             No.  */
  ADDR_REGS,                    /* AR1   QI             No.  */
  ADDR_REGS,                    /* AR2   QI             No.  */
  ADDR_REGS,                    /* AR3   QI             QI.  */
  ADDR_REGS,                    /* AR4   QI             QI.  */
  ADDR_REGS,                    /* AR5   QI             QI.  */
  ADDR_REGS,                    /* AR6   QI             QI.  */
  ADDR_REGS,                    /* AR7   QI             QI.  */
  DP_REG,                       /* DP    QI             No.  */
  INDEX_REGS,                   /* IR0   QI             No.  */
  INDEX_REGS,                   /* IR1   QI             No.  */
  BK_REG,                       /* BK    QI             QI.  */
  SP_REG,                       /* SP    QI             No.  */
  ST_REG,                       /* ST    CC             No.  */
  NO_REGS,                      /* DIE/IE               No.  */
  NO_REGS,                      /* IIE/IF               No.  */
  NO_REGS,                      /* IIF/IOF              No.  */
  INT_REGS,                     /* RS    QI             No.  */
  INT_REGS,                     /* RE    QI             No.  */
  RC_REG,                       /* RC    QI             No.  */
  EXT_REGS,                     /* R8    QI, QF, HF     QI.  */
  EXT_REGS,                     /* R9    QI, QF, HF     No.  */
  EXT_REGS,                     /* R10   QI, QF, HF     No.  */
  EXT_REGS,                     /* R11   QI, QF, HF     No.  */
};
enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg   Modes          Saved.  */
  HFmode,                       /* R0    QI, QF, HF     No.  */
  HFmode,                       /* R1    QI, QF, HF     No.  */
  HFmode,                       /* R2    QI, QF, HF     No.  */
  HFmode,                       /* R3    QI, QF, HF     No.  */
  QFmode,                       /* R4    QI, QF, HF     QI.  */
  QFmode,                       /* R5    QI, QF, HF     QI.  */
  QImode,                       /* R6    QI, QF, HF     QF.  */
  QImode,                       /* R7    QI, QF, HF     QF.  */
  QImode,                       /* AR0   QI             No.  */
  QImode,                       /* AR1   QI             No.  */
  QImode,                       /* AR2   QI             No.  */
  QImode,                       /* AR3   QI             QI.  */
  QImode,                       /* AR4   QI             QI.  */
  QImode,                       /* AR5   QI             QI.  */
  QImode,                       /* AR6   QI             QI.  */
  QImode,                       /* AR7   QI             QI.  */
  VOIDmode,                     /* DP    QI             No.  */
  QImode,                       /* IR0   QI             No.  */
  QImode,                       /* IR1   QI             No.  */
  QImode,                       /* BK    QI             QI.  */
  VOIDmode,                     /* SP    QI             No.  */
  VOIDmode,                     /* ST    CC             No.  */
  VOIDmode,                     /* DIE/IE               No.  */
  VOIDmode,                     /* IIE/IF               No.  */
  VOIDmode,                     /* IIF/IOF              No.  */
  QImode,                       /* RS    QI             No.  */
  QImode,                       /* RE    QI             No.  */
  VOIDmode,                     /* RC    QI             No.  */
  QFmode,                       /* R8    QI, QF, HF     QI.  */
  HFmode,                       /* R9    QI, QF, HF     No.  */
  HFmode,                       /* R10   QI, QF, HF     No.  */
  HFmode,                       /* R11   QI, QF, HF     No.  */
};
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;
/* Forward declarations */
static bool c4x_handle_option (size_t, const char *, int);
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_R_indirect (rtx);
static void c4x_S_address_parse (rtx, int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int, tree);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
static void c4x_external_libcall (rtx);
static rtx c4x_struct_value_rtx (tree, int);
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall

/* Play safe, not the fastest code.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
                                     | MASK_PARALLEL_MPY | MASK_RPTB)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION c4x_handle_option

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr

struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_HANDLE_OPTION.  */

static bool
c4x_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_m30: c4x_cpu_version = 30; return true;
    case OPT_m31: c4x_cpu_version = 31; return true;
    case OPT_m32: c4x_cpu_version = 32; return true;
    case OPT_m33: c4x_cpu_version = 33; return true;
    case OPT_m40: c4x_cpu_version = 40; return true;
    case OPT_m44: c4x_cpu_version = 44; return true;

    case OPT_mcpu_:
      if (arg[0] == 'c' || arg[0] == 'C')
        arg++;
      value = atoi (arg);
      switch (value)
        {
        case 30: case 31: case 32: case 33: case 40: case 44:
          c4x_cpu_version = value;
          return true;
        }
      return false;

    default:
      return true;
    }
}
/* Override command line options.
   Called once after all options have been parsed.
   Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
c4x_override_options (void)
{
  /* Convert foo / 8.0 into foo * 0.125, etc.  */
  set_fast_math_flags (1);

  /* We should phase out the following at some stage.
     This provides compatibility with the old -mno-aliases option.  */
  if (! TARGET_ALIASES && ! flag_argument_noalias)
    flag_argument_noalias = 1;

  if (!TARGET_C3X)
    target_flags |= MASK_MPYI | MASK_DB;

  if (optimize < 2)
    target_flags &= ~(MASK_RPTB | MASK_PARALLEL);

  if (!TARGET_PARALLEL)
    target_flags &= ~MASK_PARALLEL_MPY;
}
/* This is called before c4x_override_options.  */

void
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
                          int size ATTRIBUTE_UNUSED)
{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we gain by scheduling before
     register allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
}
/* Write an ASCII string.  */

#define C4X_ASCII_LIMIT 40

void
c4x_output_ascii (FILE *stream, const char *ptr, int len)
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

  if (len)
    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buff.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
        {
          if (special)
            sbuf[s++] = '\\';
          sbuf[s++] = *ptr;
          if (s < C4X_ASCII_LIMIT - 1)
            continue;
          onlys = 1;
        }
      if (s)
        {
          if (first)
            first = 0;
          else
            {
              fputc (',', stream);
              l++;
            }

          sbuf[s] = 0;
          fprintf (stream, "\"%s\"", sbuf);
          l += s + 2;
          if (TARGET_TI && l >= 80 && len > 1)
            {
              fprintf (stream, "\n\t.byte\t");
              first = 1;
              l = 0;
            }

          s = 0;
        }
      if (onlys)
        continue;

      if (first)
        first = 0;
      else
        {
          fputc (',', stream);
          l++;
        }

      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
        {
          fprintf (stream, "\n\t.byte\t");
          first = 1;
          l = 0;
        }
    }
  if (s)
    {
      if (! first)
        fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}
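
/* Return nonzero if hard register REGNO can hold a value of machine
   mode MODE.  */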
int
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (mode)
    {
#if Pmode != QImode
    case Pmode:                 /* Pointer (24/32 bits).  */
#endif
    case QImode:                /* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:                /* Float, Double (32 bits).  */
    case HFmode:                /* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:                /* Condition Codes.  */
    case CC_NOOVmode:           /* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:                /* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
         it is much easier to constrain the first register
         to start on an even boundary.  */
      return IS_INT_REGNO (regno)
        && IS_INT_REGNO (regno + 1)
        && (regno & 1) == 0;

    default:
      return 0;                 /* We don't support these modes.  */
    }

  return 0;
}
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}
/* The TI C3x C compiler register argument runtime model uses 6 registers,
   AR2, R2, R3, RC, RS, RE.

   The first two floating point arguments (float, double, long double)
   that are found scanning from left to right are assigned to R2 and R3.

   The remaining integer (char, short, int, long) or pointer arguments
   are assigned to the remaining registers in the order AR2, R2, R3,
   RC, RS, RE when scanning left to right, except for the last named
   argument prior to an ellipsis denoting variable number of
   arguments.  We don't have to worry about the latter condition since
   function.c treats the last named argument as anonymous (unnamed).

   All arguments that cannot be passed in registers are pushed onto
   the stack in reverse order (right to left).  GCC handles that for us.

   c4x_init_cumulative_args() is called at the start, so we can parse
   the args to see how many floating point arguments and how many
   integer (or pointer) arguments there are.  c4x_function_arg() is
   then called (sometimes repeatedly) for each argument (parsed left
   to right) to obtain the register to pass the argument in, or zero
   if the argument is to be passed on the stack.  Once the compiler is
   happy, c4x_function_arg_advance() is called.

   Don't use R0 to pass arguments in, we use 0 to indicate a stack
   argument.  */

static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);

          fprintf (stderr, "fntype code = %s, ret code = %s",
                   tree_code_name[(int) TREE_CODE (fntype)],
                   tree_code_name[(int) TREE_CODE (ret_type)]);
        }
      else
        fprintf (stderr, "no fntype");

      if (libname)
        fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
        {
          enum machine_mode mode;

          /* If the last arg doesn't have void type then we have
             variable arguments.  */
          if (! next_param)
            cum->var = 1;

          if ((mode = TYPE_MODE (type)))
            {
              if (! targetm.calls.must_pass_in_stack (mode, type))
                {
                  /* Look for float, double, or long double argument.  */
                  if (mode == QFmode || mode == HFmode)
                    cum->floats++;
                  /* Look for integer, enumeral, boolean, char, or pointer
                     argument.  */
                  else if (mode == QImode || mode == Pmode)
                    cum->ints++;
                }
            }
          cum->args++;
        }
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
             cum->prototype ? ", prototype" : "",
             cum->var ? ", variable args" : "",
             cum->args);
}
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                          tree type, int named)
{
  if (TARGET_DEBUG)
    fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
             GET_MODE_NAME (mode), named);
  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* Handle libcall arguments.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  return;
}
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                  tree type, int named)
{
  int reg = 0;                  /* Default to passing argument on stack.  */

  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
         in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
        6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
        cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        {
          if (cum->floats < cum->maxfloats)
            reg = c4x_fp_reglist[cum->floats];
        }
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        {
          if (cum->ints < cum->maxints)
            reg = c4x_int_reglist[cum->maxfloats][cum->ints];
        }
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
        reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
        reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
               GET_MODE_NAME (mode), named);
      if (reg)
        fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
        fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}
/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle.  */

static tree
c4x_gimplify_va_arg_expr (tree valist, tree type,
                          tree *pre_p ATTRIBUTE_UNUSED,
                          tree *post_p ATTRIBUTE_UNUSED)
{
  tree t;
  bool indirect;

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  t = build2 (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
              build_int_cst (NULL_TREE, int_size_in_bytes (type)));
  t = fold_convert (build_pointer_type (type), t);
  t = build_va_arg_indirect_ref (t);

  if (indirect)
    t = build_va_arg_indirect_ref (t);

  return t;
}
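
/* Return nonzero if register REGNO should be saved and restored by an
   interrupt service routine's prologue and epilogue.  */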
static int
c4x_isr_reg_used_p (unsigned int regno)
{
  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))
    return 0;

  /* We could be a little smarter about saving/restoring DP.
     We'll only save it for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* Only save/restore regs in leaf function that are used.  */
  if (c4x_leaf_function)
    return regs_ever_live[regno] && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((regs_ever_live[regno] || call_used_regs[regno])
        && fixed_regs[regno] == 0);
}
static int
c4x_leaf_function_p (void)
{
  /* A leaf function makes no calls, so we only need
     to save/restore the registers we actually use.
     For the global variable leaf_function to be set, we need
     to define LEAF_REGISTERS and all that it entails.
     Let's check ourselves....  */

  if (lookup_attribute ("leaf_pretend",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Use the leaf_pretend attribute at your own risk.  This is a hack
     to speed up ISRs that call a function infrequently where the
     overhead of saving and restoring the additional registers is not
     warranted.  You must save and restore the additional registers
     required by the called function.  Caveat emptor.  Here's enough
     rope...  */

  if (leaf_function_p ())
    return 1;

  return 0;
}
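
/* Return nonzero if the current function carries the naked attribute.  */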
static int
c4x_naked_function_p (void)
{
  tree type;

  type = TREE_TYPE (current_function_decl);
  return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
}
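
/* Return nonzero if the current function is an interrupt handler,
   either marked with the interrupt attribute or named using the TI
   style c_intNN convention.  */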
int
c4x_interrupt_function_p (void)
{
  const char *cfun_name;
  if (lookup_attribute ("interrupt",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Look for TI style c_intnn.  */
  cfun_name = current_function_name ();
  return cfun_name[0] == 'c'
    && cfun_name[1] == '_'
    && cfun_name[2] == 'i'
    && cfun_name[3] == 'n'
    && cfun_name[4] == 't'
    && ISDIGIT (cfun_name[5])
    && ISDIGIT (cfun_name[6]);
}
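
/* Emit RTL for the function prologue.  Naked functions get no
   prologue at all and interrupt functions get a special one.  */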
void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __naked__ function don't build a prologue.  */
  if (c4x_naked_function_p ())
    {
      return;
    }

  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
        {
          insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                       gen_rtx_REG (QImode, SP_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          /* We require that an ISR uses fewer than 32768 words of
             local variables, otherwise we have to go to lots of
             effort to save a register, load it with the desired size,
             adjust the stack pointer, and then restore the modified
             register.  Frankly, I think it is a poor ISR that
             requires more than 32767 words of local temporary
             storage!  */
          if (size > 32767)
            error ("ISR %s requires %d words of local vars, max is 32767",
                   current_function_name (), size);

          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (c4x_isr_reg_used_p (regno))
            {
              if (regno == DP_REGNO)
                {
                  insn = emit_insn (gen_push_dp ());
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (IS_EXT_REGNO (regno))
                    {
                      insn = emit_insn (gen_pushqf
                                        (gen_rtx_REG (QFmode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
            }
        }
      /* We need to clear the repeat mode flag if the ISR is
         going to use a RPTB instruction or uses the RC, RS, or RE
         registers.  */
      if (regs_ever_live[RC_REGNO]
          || regs_ever_live[RS_REGNO]
          || regs_ever_live[RE_REGNO])
        {
          insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* Reload DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
        {
          insn = emit_insn (gen_set_ldp_prologue
                            (gen_rtx_REG (QImode, DP_REGNO),
                             gen_rtx_SYMBOL_REF (QImode, "data_sec")));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                           gen_rtx_REG (QImode, SP_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              dont_push_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used, we don't need to push it.  */
              dont_push_ar3 = 1;
            }
        }
      else
        {
          /* If we use ar3, we need to push it.  */
          dont_push_ar3 = 0;
          if ((size != 0) || (current_function_args_size != 0))
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to increment SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT(size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                             gen_rtx_REG (QImode, R1_REGNO),
                                             GEN_INT(-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT(size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO),
                                        GEN_INT(size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             add the number.  */
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_pushqi
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else if ((! dont_push_ar3) || (regno != AR3_REGNO))
                {
                  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }
    }
}
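
/* Emit RTL for the function epilogue, undoing the work of the
   prologue above.  */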
void
c4x_expand_epilogue(void)
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For __naked__ function build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
        {
          if (! c4x_isr_reg_used_p (regno))
            continue;
          if (regno == DP_REGNO)
            {
              insn = emit_insn (gen_pop_dp ());
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /* We have to use unspec because the compiler will delete insns
                 that are not call-saved.  */
              if (IS_EXT_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }
      if (size)
        {
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_popqi
                            (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn
                (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
                            gen_rtx_MEM (QImode,
                                         gen_rtx_PLUS
                                         (QImode, gen_rtx_REG (QImode,
                                                               AR3_REGNO),
                                          constm1_rtx))));
              RTX_FRAME_RELATED_P (insn) = 1;

              /* We already have the return value and the fp,
                 so we need to add those to the stack.  */
              size += 2;
              jump = 1;
              dont_pop_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used for anything, we don't need to
                 pop it.  */
              dont_pop_ar3 = 1;
            }
        }
      else
        {
          dont_pop_ar3 = 0;     /* If we use ar3, we need to pop it.  */
          if (size || current_function_args_size)
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      /* Now restore the saved registers, putting in the delayed branch
         where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (regno == AR3_REGNO && dont_pop_ar3)
                continue;

              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_popqi_unspec
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
              else
                {
                  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }

      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              /* Restore the old FP.  */
              insn = emit_insn
                (gen_movqi
                 (gen_rtx_REG (QImode, AR3_REGNO),
                  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to decrement SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT(size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                             gen_rtx_REG (QImode, R3_REGNO),
                                             GEN_INT(-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT(size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO),
                                        GEN_INT(size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             subtract the number.  */
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (jump)
        {
          insn = emit_jump_insn (gen_return_indirect_internal
                                 (gen_rtx_REG (QImode, R2_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        {
          insn = emit_jump_insn (gen_return_from_epilogue ());
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}
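
/* Decide whether the current function can do without a full epilogue.
   This is keyed off the frame size and call-saved register usage
   after reload.  */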
int
c4x_null_epilogue_p (void)
{
  int regno;

  if (reload_completed
      && ! c4x_naked_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! (optimize < 2)
      && ! get_frame_size ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        if (regs_ever_live[regno] && ! call_used_regs[regno]
            && (regno != AR3_REGNO))
          return 1;
      return 0;
    }
  return 1;
}
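
/* Prepare a move of OPERANDS in mode MODE, legitimizing addresses and
   constants as needed.  Return nonzero if this routine has emitted the
   complete sequence itself, zero if the caller should emit the normal
   move pattern.  */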
int
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
         address.  Undo this silliness.  */
      op1 = XEXP (op1, 1);
    }

  if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
        {
          /* Alias analysis seems to do a better job if we force
             constant addresses to memory after reload.  */
          emit_insn (gen_load_immed_address (op0, op1));
          return 1;
        }
      else
        {
          /* Stick symbol or label address into the constant pool.  */
          op1 = force_const_mem (Pmode, op1);
        }
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
         constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
        op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
        {
          op0 = copy_rtx (op0);
          PUT_MODE (op0, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
        op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
        {
          op1 = copy_rtx (op1);
          PUT_MODE (op1, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}
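
/* Emit a library call to LIBCALL with NOPERANDS operands, wrapping the
   insns with emit_libcall_block so the optimizers see an equivalent
   rtx of code CODE for the result.  */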
void
c4x_emit_libcall (rtx libcall, enum rtx_code code,
                  enum machine_mode dmode, enum machine_mode smode,
                  int noperands, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
                                     operands[1], smode);
      equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
      break;

    case 3:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
                                     operands[1], smode, operands[2], smode);
      equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
void
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
                   enum machine_mode mode, rtx *operands)
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}
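
/* Emit a library call that computes the high part of a widening
   multiply; the equivalent rtx is the 64-bit product shifted right
   by 32 bits and truncated.  */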
void
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
                        enum machine_mode mode, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
                            gen_rtx_LSHIFTRT (HImode,
                                              gen_rtx_MULT (HImode,
                                                            gen_rtx_fmt_e (code, HImode, operands[1]),
                                                            gen_rtx_fmt_e (code, HImode, operands[2])),
                                              GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
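
/* Return nonzero if ADDR is a legitimate address for mode MODE.  If
   STRICT is nonzero, only hard registers are acceptable as base or
   index registers.  */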
int
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
{
  rtx base = NULL_RTX;          /* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;          /* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;          /* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
         allow SP here---push_operand should recognize an operand
         being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
        return 0;

    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
        return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (mode != QImode && mode != QFmode)
          return 0;

        if (! REG_P (op0)
            || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
          return 0;
        base = XEXP (op1, 0);
        if (! REG_P (base))
          return 0;
        if (REGNO (base) != REGNO (op0))
          return 0;
        if (REG_P (XEXP (op1, 1)))
          indx = XEXP (op1, 1);
        else
          disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);
        enum rtx_code code0 = GET_CODE (op0);

        switch (code0)
          {
          case REG:
            if (REG_P (op1))
              {
                base = op0;     /* Base + index.  */
                indx = op1;
                if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
                  {
                    base = op1;
                    indx = op0;
                  }
              }
            else
              {
                base = op0;     /* Base + displacement.  */
                disp = op1;
              }
            break;

          default:
            return 0;
          }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        /* HImode and HFmode direct memory references aren't truly
           offsettable (consider case at end of data page).  We
           probably get better code by loading a pointer and using an
           indirect memory reference.  */
        if (mode == HImode || mode == HFmode)
          return 0;

        if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
          return 0;

        if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
          return 1;

        if (GET_CODE (op1) == CONST)
          return 1;
        return 0;
      }
      break;

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
        return 1;

      /* These need to be converted to a LO_SUM (...).
         LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
        return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
        return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
        return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
        return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
        return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
        return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
        return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
        return 0;
      if (mode == HImode || mode == HFmode)
        {
          /* The offset displacement must be legitimate.  */
          if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
            return 0;
        }
      else
        {
          if (! IS_DISP8_CONST (INTVAL (disp)))
            return 0;
        }
      /* Can't add an index with a disp.  */
      if (indx)
        return 0;
    }
  return 1;
}
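
/* Try to legitimize the address ORIG for mode MODE.  Return the new
   address, or NULL_RTX if nothing needed to be done.  */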
rtx
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
      if (mode == HImode || mode == HFmode)
        {
          /* We need to force the address into
             a register so that it is offsettable.  */
          rtx addr_reg = gen_reg_rtx (Pmode);
          emit_move_insn (addr_reg, orig);
          return addr_reg;
        }
      else
        {
          rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

          if (! TARGET_SMALL)
            emit_insn (gen_set_ldp (dp_reg, orig));

          return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
        }
    }

  return NULL_RTX;
}
/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.
   This is used in cse and loop optimization to determine
   if it is worthwhile storing a common address into a register.
   Unfortunately, the C4x address cost depends on other operands.  */

static int
c4x_address_cost (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
        rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
          return TARGET_SMALL ? 3 : 4;

        if (GET_CODE (op1) == CONST)
          {
            rtx offset = const0_rtx;

            op1 = eliminate_constant_term (op1, &offset);

            /* ??? These costs need rethinking...  */
            if (GET_CODE (op1) == LABEL_REF)
              return 3;

            if (GET_CODE (op1) != SYMBOL_REF)
              return 4;

            if (INTVAL (offset) == 0)
              return 3;

            return 4;
          }
        fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
        register rtx op0 = XEXP (addr, 0);
        register rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op0) != REG)
          break;

        switch (GET_CODE (op1))
          {
          default:
            break;

          case REG:
            /* This cost for REG+REG must be greater than the cost
               for REG if we want autoincrement addressing modes.  */
            return 2;

          case CONST_INT:
            /* The following tries to improve GIV combination
               in strength reduce but appears not to help.  */
            if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
              return 1;

            if (IS_DISP1_CONST (INTVAL (op1)))
              return 1;

            if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
              return 2;

            return 3;
          }
      }
    default:
      break;
    }

  return 4;
}
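
/* Emit a comparison of X and Y into the status register and return
   the CC register, or NULL_RTX if the comparison cannot be performed
   in the selected CC mode.  */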
rtx
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  if (mode == CC_NOOVmode
      && (code == LE || code == GE || code == LT || code == GT))
    return NULL_RTX;

  cc_reg = gen_rtx_REG (mode, ST_REGNO);
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
                          gen_rtx_COMPARE (mode, x, y)));
  return cc_reg;
}
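
/* Build the assembler template for a conditional branch: FORM gets
   delay-slot and annul annotations plus the label operand appended.  */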
char *
c4x_output_cbranch (const char *form, rtx seq)
{
  int delayed = 0;
  int annultrue = 0;
  int annulfalse = 0;
  rtx delay;
  char *cp;
  static char str[100];

  if (final_sequence)
    {
      delay = XVECEXP (final_sequence, 0, 1);
      delayed = ! INSN_ANNULLED_BRANCH_P (seq);
      annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
      annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
    }
  strcpy (str, form);
  cp = &str [strlen (str)];
  if (delayed)
    {
      *cp++ = '%';
      *cp++ = '#';
    }
  if (annultrue)
    {
      *cp++ = 'a';
      *cp++ = 't';
    }
  if (annulfalse)
    {
      *cp++ = 'a';
      *cp++ = 'f';
    }
  *cp++ = '\t';
  *cp++ = '%';
  *cp++ = 'l';
  *cp++ = '1';
  *cp = 0;
  return str;
}
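
/* Print operand OP to FILE.  LETTER selects the target-specific
   modifiers handled below (%#, %A, %C, %H, %I, %K, %L, %M, %N, %O,
   %U).  */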
void
c4x_print_operand (FILE *file, rtx op, int letter)
{
  rtx op1;
  enum rtx_code code;

  switch (letter)
    {
    case '#':                   /* Delayed.  */
      if (final_sequence)
        fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':                   /* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
        fprintf (file, "@");
      break;

    case 'H':                   /* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':                   /* Reversed condition.  */
      code = reverse_condition (code);
      break;

    case 'L':                   /* Log 2 of constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':                   /* Ones complement of small constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
      return;

    case 'K':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL
          && code == MEM
          && GET_CODE (XEXP (op, 0)) == LO_SUM
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
          && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
        {
          op1 = XEXP (XEXP (op, 0), 1);
          if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
            {
              fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
              output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
              fprintf (file, "\n");
            }
        }
      return;

    case 'M':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL        /* Only used in asm statements.  */
          && code == MEM
          && (GET_CODE (XEXP (op, 0)) == CONST
              || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
        {
          fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
          output_address (XEXP (op, 0));
          fprintf (file, "\n\t");
        }
      return;

    case 'O':                   /* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
        break;
      else if (code == MEM)
        output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
        fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
        fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':                   /* Call.  */
      break;

    case 'U':                   /* Call/callu.  */
      if (code != SYMBOL_REF)
        fprintf (file, "u");
      return;

    default:
      break;
    }

  switch (code)
    {
    case REG:
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && ! TARGET_TI)
        fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
        fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
        char str[64];

        real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
                         sizeof (str), 0, 1);
        fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
      break;

    case NE:
      fprintf (file, "ne");
      break;

    case EQ:
      fprintf (file, "eq");
      break;

    case GE:
      fprintf (file, "ge");
      break;

    case GT:
      fprintf (file, "gt");
      break;

    case LE:
      fprintf (file, "le");
      break;

    case LT:
      fprintf (file, "lt");
      break;

    case GEU:
      fprintf (file, "hs");
      break;

    case GTU:
      fprintf (file, "hi");
      break;

    case LEU:
      fprintf (file, "ls");
      break;

    case LTU:
      fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    case CODE_LABEL:
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}
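
/* Output the assembler syntax for a memory address ADDR, covering the
   C4x register indirect, auto-increment/decrement, displacement, and
   direct addressing forms.  */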
void
c4x_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:                  /* Indirect with displacement.  */
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0))
          {
            if (REG_P (op1))
              {
                if (IS_INDEX_REG (op0))
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op1)],
                             reg_names[REGNO (op0)]);   /* Index + base.  */
                  }
                else
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op0)],
                             reg_names[REGNO (op1)]);   /* Base + index.  */
                  }
              }
            else if (INTVAL (op1) < 0)
              {
                fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         -INTVAL (op1));        /* Base - displacement.  */
              }
            else
              {
                fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         INTVAL (op1));         /* Base + displacement.  */
              }
          }
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
          c4x_print_operand_address (file, op1);
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}
/* Return nonzero if the floating point operand will fit
   in the immediate field.  */

int
c4x_immed_float_p (rtx op)
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
      convval[1] = 0;
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;                   /* 0.0  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;                   /* Precision doesn't fit.  */
  return (exponent <= 7)        /* Positive exp.  */
    && (exponent >= -7);        /* Negative exp.  */
}
2173 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2174 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2176 None of the last four instructions from the bottom of the block can
2177 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2178 BcondAT or RETIcondD.
2180 This routine scans the four previous insns for a jump insn, and if
2181 one is found, returns 1 so that we bung in a nop instruction.
2182 This simple minded strategy will add a nop, when it may not
2183 be required. Say when there is a JUMP_INSN near the end of the
2184 block that doesn't get converted into a delayed branch.
2186 Note that we cannot have a call insn, since we don't generate
2187 repeat loops with calls in them (although I suppose we could, but
2188 there's no benefit.)
2190 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2193 c4x_rptb_nop_p (rtx insn)
2195 rtx start_label;
2196 int i;
2198 /* Extract the start label from the jump pattern (rptb_end). */
2199 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2201 /* If there is a label at the end of the loop we must insert
2202 a NOP. */
2203 do {
2204 insn = previous_insn (insn);
2205 } while (GET_CODE (insn) == NOTE
2206 || GET_CODE (insn) == USE
2207 || GET_CODE (insn) == CLOBBER);
2208 if (GET_CODE (insn) == CODE_LABEL)
2209 return 1;
2211 for (i = 0; i < 4; i++)
2213 /* Search back for prev non-note and non-label insn. */
2214 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2215 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2217 if (insn == start_label)
2218 return i == 0;
2220 insn = previous_insn (insn);
2223 /* If we have a jump instruction we should insert a NOP. If we
2224 hit the repeat block top we should only insert a NOP if the loop
2225 is empty. */
2226 if (GET_CODE (insn) == JUMP_INSN)
2227 return 1;
2228 insn = previous_insn (insn);
2230 return 0;
2234 /* The C4x looping instruction needs to be emitted at the top of the
2235 loop. Emitting the true RTL for a looping instruction at the top of
2236 the loop can cause problems with flow analysis. So instead, a dummy
2237 doloop insn is emitted at the end of the loop. This routine checks
2238 for the presence of this doloop insn and then searches back to the
2239 top of the loop, where it inserts the true looping insn (provided
2240 there are no instructions in the loop which would cause problems).
2241 Any additional labels can be emitted at this point. In addition, if
2242 the desired loop count register was not allocated, this routine does
2243 nothing.
2245 Before we can create a repeat block looping instruction we have to
2246 verify that there are no jumps out of the loop and that no jumps
2247 from outside jump into it. Such jumps can be created by the basic
2248 block reordering pass. The C4x cpu cannot handle this. */
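/* Schematically, assuming RC was allocated as the loop counter, the
   rewrite performed below turns

	start: <body> ... rptb_end (RC, start)

   into

	rptb_top (new_start, end)
	new_start: <body> ...
	end:

   with any jumps to the old start label redirected to new_start.  */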
2250 static int
2251 c4x_label_ref_used_p (rtx x, rtx code_label)
2253 enum rtx_code code;
2254 int i, j;
2255 const char *fmt;
2257 if (x == 0)
2258 return 0;
2260 code = GET_CODE (x);
2261 if (code == LABEL_REF)
2262 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2264 fmt = GET_RTX_FORMAT (code);
2265 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2267 if (fmt[i] == 'e')
2269 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2270 return 1;
2272 else if (fmt[i] == 'E')
2273 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2274 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2275 return 1;
2277 return 0;
2281 static int
2282 c4x_rptb_valid_p (rtx insn, rtx start_label)
2284 rtx end = insn;
2285 rtx start;
2286 rtx tmp;
2288 /* Find the start label. */
2289 for (; insn; insn = PREV_INSN (insn))
2290 if (insn == start_label)
2291 break;
2293 /* Not found, so we cannot use a rptb or rpts. The label was
2294 probably moved by the basic block reordering pass. */
2295 if (! insn)
2296 return 0;
2298 start = insn;
2299 /* If any jump inside this block jumps to a label outside it then we must fail. */
2300 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2302 if (GET_CODE (insn) == CODE_LABEL)
2304 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2305 if (GET_CODE (tmp) == JUMP_INSN
2306 && c4x_label_ref_used_p (tmp, insn))
2307 return 0;
2310 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2312 if (GET_CODE (insn) == CODE_LABEL)
2314 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2315 if (GET_CODE (tmp) == JUMP_INSN
2316 && c4x_label_ref_used_p (tmp, insn))
2317 return 0;
2320 /* If any jump outside this block jumps to a label inside it then we must fail. */
2321 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2323 if (GET_CODE (insn) == CODE_LABEL)
2325 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2326 if (GET_CODE (tmp) == JUMP_INSN
2327 && c4x_label_ref_used_p (tmp, insn))
2328 return 0;
2329 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2330 if (GET_CODE (tmp) == JUMP_INSN
2331 && c4x_label_ref_used_p (tmp, insn))
2332 return 0;
2336 /* All checks OK. */
2337 return 1;
2341 void
2342 c4x_rptb_insert (rtx insn)
2344 rtx end_label;
2345 rtx start_label;
2346 rtx new_start_label;
2347 rtx count_reg;
2349 /* If the count register has not been allocated to RC, say if
2350 there is a movmem pattern in the loop, then do not insert an
2351 RPTB instruction. Instead we emit a decrement and branch
2352 at the end of the loop. */
2353 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2354 if (REGNO (count_reg) != RC_REGNO)
2355 return;
2357 /* Extract the start label from the jump pattern (rptb_end). */
2358 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2360 if (! c4x_rptb_valid_p (insn, start_label))
2362 /* We cannot use the rptb insn. Replace it so reorg can use
2363 the delay slots of the jump insn. */
2364 emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
2365 emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
2366 emit_insn_before (gen_bge (start_label), insn);
2367 LABEL_NUSES (start_label)++;
2368 delete_insn (insn);
2369 return;
2372 end_label = gen_label_rtx ();
2373 LABEL_NUSES (end_label)++;
2374 emit_label_after (end_label, insn);
2376 new_start_label = gen_label_rtx ();
2377 LABEL_NUSES (new_start_label)++;
2379 for (; insn; insn = PREV_INSN (insn))
2381 if (insn == start_label)
2382 break;
2383 if (GET_CODE (insn) == JUMP_INSN
2384 && JUMP_LABEL (insn) == start_label)
2385 redirect_jump (insn, new_start_label, 0);
2387 if (! insn)
2388 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2390 emit_label_after (new_start_label, insn);
2392 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2393 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2394 else
2395 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2396 if (LABEL_NUSES (start_label) == 0)
2397 delete_insn (start_label);
2401 /* We need to use direct addressing for large constants and addresses
2402 that cannot fit within an instruction. We must check for these
2403 after the final jump optimization pass, since this may
2404 introduce a local_move insn for a SYMBOL_REF. This pass
2405 must come before delayed branch slot filling since it can generate
2406 additional instructions.
2408 This function also fixes up RPTB style loops that didn't get RC
2409 allocated as the loop counter. */
2411 static void
2412 c4x_reorg (void)
2414 rtx insn;
2416 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2418 /* Only process real insns. */
2419 if (INSN_P (insn))
2421 int insn_code_number;
2422 rtx old;
2424 insn_code_number = recog_memoized (insn);
2426 if (insn_code_number < 0)
2427 continue;
2429 /* Insert the RTX for RPTB at the top of the loop
2430 and a label at the end of the loop. */
2431 if (insn_code_number == CODE_FOR_rptb_end)
2432 c4x_rptb_insert(insn);
2434 /* We need to split the insn here. Otherwise the calls to
2435 force_const_mem will not work for load_immed_address. */
2436 old = insn;
2438 /* Don't split the insn if it has been deleted. */
2439 if (! INSN_DELETED_P (old))
2440 insn = try_split (PATTERN(old), old, 1);
2442 /* When not optimizing, the old insn will still be left around
2443 with only the 'deleted' bit set. Transform it into a note
2444 to avoid confusion of subsequent processing. */
2445 if (INSN_DELETED_P (old))
2447 PUT_CODE (old, NOTE);
2448 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2449 NOTE_SOURCE_FILE (old) = 0;
2457 c4x_a_register (rtx op)
2459 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2464 c4x_x_register (rtx op)
2466 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2470 static int
2471 c4x_immed_int_constant (rtx op)
2473 if (GET_CODE (op) != CONST_INT)
2474 return 0;
2476 return GET_MODE (op) == VOIDmode
2477 || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2478 || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
2482 static int
2483 c4x_immed_float_constant (rtx op)
2485 if (GET_CODE (op) != CONST_DOUBLE)
2486 return 0;
2488 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2489 present this only means that a MEM rtx has been generated. It does
2490 not mean the rtx is really in memory. */
2492 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
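/* Return the left shift needed to reconstruct OP from a 16-bit
   immediate, or -1 if there is none.  For example, 0x30000 yields 16,
   since 0x30000 can be materialized by loading 3 and shifting left
   by 16.  */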
2497 c4x_shiftable_constant (rtx op)
2499 int i;
2500 int mask;
2501 int val = INTVAL (op);
2503 for (i = 0; i < 16; i++)
2505 if (val & (1 << i))
2506 break;
2508 mask = ((0xffff >> i) << 16) | 0xffff;
2509 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2510 : (val >> i) & mask))
2511 return i;
2512 return -1;
2517 c4x_H_constant (rtx op)
2519 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2524 c4x_I_constant (rtx op)
2526 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2531 c4x_J_constant (rtx op)
2533 if (TARGET_C3X)
2534 return 0;
2535 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2540 c4x_K_constant (rtx op)
2542 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2543 return 0;
2544 return IS_INT5_CONST (INTVAL (op));
2549 c4x_L_constant (rtx op)
2551 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2556 c4x_N_constant (rtx op)
2558 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2563 c4x_O_constant (rtx op)
2565 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2569 /* The constraints do not have to check the register class,
2570 except when needed to discriminate between the constraints.
2571 The operand has been checked by the predicates to be valid. */
2573 /* ARx + 9-bit signed const or IRn
2574 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2575 We don't include the pre/post inc/dec forms here since
2576 they are handled by the <> constraints. */
2579 c4x_Q_constraint (rtx op)
2581 enum machine_mode mode = GET_MODE (op);
2583 if (GET_CODE (op) != MEM)
2584 return 0;
2585 op = XEXP (op, 0);
2586 switch (GET_CODE (op))
2588 case REG:
2589 return 1;
2591 case PLUS:
2593 rtx op0 = XEXP (op, 0);
2594 rtx op1 = XEXP (op, 1);
2596 if (! REG_P (op0))
2597 return 0;
2599 if (REG_P (op1))
2600 return 1;
2602 if (GET_CODE (op1) != CONST_INT)
2603 return 0;
2605 /* HImode and HFmode must be offsettable. */
2606 if (mode == HImode || mode == HFmode)
2607 return IS_DISP8_OFF_CONST (INTVAL (op1));
2609 return IS_DISP8_CONST (INTVAL (op1));
2611 break;
2613 default:
2614 break;
2616 return 0;
2620 /* ARx + 5-bit unsigned const
2621 *ARx, *+ARx(n) for n < 32. */
2624 c4x_R_constraint (rtx op)
2626 enum machine_mode mode = GET_MODE (op);
2628 if (TARGET_C3X)
2629 return 0;
2630 if (GET_CODE (op) != MEM)
2631 return 0;
2632 op = XEXP (op, 0);
2633 switch (GET_CODE (op))
2635 case REG:
2636 return 1;
2638 case PLUS:
2640 rtx op0 = XEXP (op, 0);
2641 rtx op1 = XEXP (op, 1);
2643 if (! REG_P (op0))
2644 return 0;
2646 if (GET_CODE (op1) != CONST_INT)
2647 return 0;
2649 /* HImode and HFmode must be offsettable. */
2650 if (mode == HImode || mode == HFmode)
2651 return IS_UINT5_CONST (INTVAL (op1) + 1);
2653 return IS_UINT5_CONST (INTVAL (op1));
2655 break;
2657 default:
2658 break;
2660 return 0;
2664 static int
2665 c4x_R_indirect (rtx op)
2667 enum machine_mode mode = GET_MODE (op);
2669 if (TARGET_C3X || GET_CODE (op) != MEM)
2670 return 0;
2672 op = XEXP (op, 0);
2673 switch (GET_CODE (op))
2675 case REG:
2676 return IS_ADDR_OR_PSEUDO_REG (op);
2678 case PLUS:
2680 rtx op0 = XEXP (op, 0);
2681 rtx op1 = XEXP (op, 1);
2683 /* HImode and HFmode must be offsettable. */
2684 if (mode == HImode || mode == HFmode)
2685 return IS_ADDR_OR_PSEUDO_REG (op0)
2686 && GET_CODE (op1) == CONST_INT
2687 && IS_UINT5_CONST (INTVAL (op1) + 1);
2689 return REG_P (op0)
2690 && IS_ADDR_OR_PSEUDO_REG (op0)
2691 && GET_CODE (op1) == CONST_INT
2692 && IS_UINT5_CONST (INTVAL (op1));
2694 break;
2696 default:
2697 break;
2699 return 0;
2703 /* ARx + 1-bit unsigned const or IRn
2704 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2705 We don't include the pre/post inc/dec forms here since
2706 they are handled by the <> constraints. */
2709 c4x_S_constraint (rtx op)
2711 enum machine_mode mode = GET_MODE (op);
2712 if (GET_CODE (op) != MEM)
2713 return 0;
2714 op = XEXP (op, 0);
2715 switch (GET_CODE (op))
2717 case REG:
2718 return 1;
2720 case PRE_MODIFY:
2721 case POST_MODIFY:
2723 rtx op0 = XEXP (op, 0);
2724 rtx op1 = XEXP (op, 1);
2726 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2727 || (op0 != XEXP (op1, 0)))
2728 return 0;
2730 op0 = XEXP (op1, 0);
2731 op1 = XEXP (op1, 1);
2732 return REG_P (op0) && REG_P (op1);
2733 /* Pre or post_modify with a displacement of 0 or 1
2734 should not be generated. */
2736 break;
2738 case PLUS:
2740 rtx op0 = XEXP (op, 0);
2741 rtx op1 = XEXP (op, 1);
2743 if (!REG_P (op0))
2744 return 0;
2746 if (REG_P (op1))
2747 return 1;
2749 if (GET_CODE (op1) != CONST_INT)
2750 return 0;
2752 /* HImode and HFmode must be offsettable. */
2753 if (mode == HImode || mode == HFmode)
2754 return IS_DISP1_OFF_CONST (INTVAL (op1));
2756 return IS_DISP1_CONST (INTVAL (op1));
2758 break;
2760 default:
2761 break;
2763 return 0;
2768 c4x_S_indirect (rtx op)
2770 enum machine_mode mode = GET_MODE (op);
2771 if (GET_CODE (op) != MEM)
2772 return 0;
2774 op = XEXP (op, 0);
2775 switch (GET_CODE (op))
2777 case PRE_DEC:
2778 case POST_DEC:
2779 if (mode != QImode && mode != QFmode)
2780 return 0;
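/* Fall through. */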
2781 case PRE_INC:
2782 case POST_INC:
2783 op = XEXP (op, 0);
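/* Fall through. */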
2785 case REG:
2786 return IS_ADDR_OR_PSEUDO_REG (op);
2788 case PRE_MODIFY:
2789 case POST_MODIFY:
2791 rtx op0 = XEXP (op, 0);
2792 rtx op1 = XEXP (op, 1);
2794 if (mode != QImode && mode != QFmode)
2795 return 0;
2797 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2798 || (op0 != XEXP (op1, 0)))
2799 return 0;
2801 op0 = XEXP (op1, 0);
2802 op1 = XEXP (op1, 1);
2803 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2804 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2805 /* Pre or post_modify with a displacement of 0 or 1
2806 should not be generated. */
2809 case PLUS:
2811 rtx op0 = XEXP (op, 0);
2812 rtx op1 = XEXP (op, 1);
2814 if (REG_P (op0))
2816 /* HImode and HFmode must be offsettable. */
2817 if (mode == HImode || mode == HFmode)
2818 return IS_ADDR_OR_PSEUDO_REG (op0)
2819 && GET_CODE (op1) == CONST_INT
2820 && IS_DISP1_OFF_CONST (INTVAL (op1));
2822 if (REG_P (op1))
2823 return (IS_INDEX_OR_PSEUDO_REG (op1)
2824 && IS_ADDR_OR_PSEUDO_REG (op0))
2825 || (IS_ADDR_OR_PSEUDO_REG (op1)
2826 && IS_INDEX_OR_PSEUDO_REG (op0));
2828 return IS_ADDR_OR_PSEUDO_REG (op0)
2829 && GET_CODE (op1) == CONST_INT
2830 && IS_DISP1_CONST (INTVAL (op1));
2833 break;
2835 default:
2836 break;
2838 return 0;
2842 /* Direct memory operand. */
2845 c4x_T_constraint (rtx op)
2847 if (GET_CODE (op) != MEM)
2848 return 0;
2849 op = XEXP (op, 0);
2851 if (GET_CODE (op) != LO_SUM)
2853 /* Allow call operands. */
2854 return GET_CODE (op) == SYMBOL_REF
2855 && GET_MODE (op) == Pmode
2856 && SYMBOL_REF_FUNCTION_P (op);
2859 /* HImode and HFmode are not offsettable. */
2860 if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2861 return 0;
2863 if ((GET_CODE (XEXP (op, 0)) == REG)
2864 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2865 return c4x_U_constraint (XEXP (op, 1));
2867 return 0;
2871 /* Symbolic operand. */
2874 c4x_U_constraint (rtx op)
2876 /* Don't allow direct addressing to an arbitrary constant. */
2877 return GET_CODE (op) == CONST
2878 || GET_CODE (op) == SYMBOL_REF
2879 || GET_CODE (op) == LABEL_REF;
2884 c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2886 if (GET_CODE (op) == MEM)
2888 enum rtx_code code = GET_CODE (XEXP (op, 0));
2890 if (code == PRE_INC
2891 || code == PRE_DEC
2892 || code == POST_INC
2893 || code == POST_DEC
2894 || code == PRE_MODIFY
2895 || code == POST_MODIFY
2897 return 1;
2899 return 0;
2904 mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2906 /* Allow (subreg:HF (reg:HI)) that may be generated for a union of an
2907 int and a long double. */
2908 if (GET_CODE (op) == SUBREG
2909 && (GET_MODE (op) == QFmode)
2910 && (GET_MODE (SUBREG_REG (op)) == QImode
2911 || GET_MODE (SUBREG_REG (op)) == HImode))
2912 return 1;
2913 return 0;
2918 reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2920 if (REG_P (op) || CONSTANT_P (op))
2921 return 1;
2922 return 0;
2927 not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2929 if (REG_P (op) || CONSTANT_P (op))
2930 return 1;
2931 if (GET_CODE (op) != MEM)
2932 return 0;
2933 op = XEXP (op, 0);
2934 switch (GET_CODE (op))
2936 case REG:
2937 return 1;
2939 case PLUS:
2941 rtx op0 = XEXP (op, 0);
2942 rtx op1 = XEXP (op, 1);
2944 if (! REG_P (op0))
2945 return 0;
2947 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
2948 return 1;
2951 case LO_SUM:
2953 rtx op0 = XEXP (op, 0);
2955 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2956 return 1;
2958 break;
2960 case CONST:
2961 case SYMBOL_REF:
2962 case LABEL_REF:
2963 return 1;
2965 default:
2966 break;
2968 return 0;
2973 not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2975 if (REG_P (op) && REGNO (op) == RC_REGNO)
2976 return 0;
2977 return 1;
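/* Decompose the S-class memory address in OP into base register,
   increment/decrement flag, index register and displacement.  For
   example, *AR0 parses to base = AR0; *+AR0(5) to base = AR0 with
   disp = 5; and *AR0++ to base = AR0 with incdec = 1, disp = 0.  */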
2981 static void
2982 c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
2984 *base = 0;
2985 *incdec = 0;
2986 *index = 0;
2987 *disp = 0;
2989 if (GET_CODE (op) != MEM)
2990 fatal_insn ("invalid indirect memory address", op);
2992 op = XEXP (op, 0);
2993 switch (GET_CODE (op))
2995 case PRE_DEC:
2996 *base = REGNO (XEXP (op, 0));
2997 *incdec = 1;
2998 *disp = -1;
2999 return;
3001 case POST_DEC:
3002 *base = REGNO (XEXP (op, 0));
3003 *incdec = 1;
3004 *disp = 0;
3005 return;
3007 case PRE_INC:
3008 *base = REGNO (XEXP (op, 0));
3009 *incdec = 1;
3010 *disp = 1;
3011 return;
3013 case POST_INC:
3014 *base = REGNO (XEXP (op, 0));
3015 *incdec = 1;
3016 *disp = 0;
3017 return;
3019 case POST_MODIFY:
3020 *base = REGNO (XEXP (op, 0));
3021 if (REG_P (XEXP (XEXP (op, 1), 1)))
3023 *index = REGNO (XEXP (XEXP (op, 1), 1));
3024 *disp = 0; /* ??? */
3026 else
3027 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3028 *incdec = 1;
3029 return;
3031 case PRE_MODIFY:
3032 *base = REGNO (XEXP (op, 0));
3033 if (REG_P (XEXP (XEXP (op, 1), 1)))
3035 *index = REGNO (XEXP (XEXP (op, 1), 1));
3036 *disp = 1; /* ??? */
3038 else
3039 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3040 *incdec = 1;
3042 return;
3044 case REG:
3045 *base = REGNO (op);
3046 return;
3048 case PLUS:
3050 rtx op0 = XEXP (op, 0);
3051 rtx op1 = XEXP (op, 1);
3053 if (c4x_a_register (op0))
3055 if (c4x_x_register (op1))
3057 *base = REGNO (op0);
3058 *index = REGNO (op1);
3059 return;
3061 else if ((GET_CODE (op1) == CONST_INT
3062 && IS_DISP1_CONST (INTVAL (op1))))
3064 *base = REGNO (op0);
3065 *disp = INTVAL (op1);
3066 return;
3069 else if (c4x_x_register (op0) && c4x_a_register (op1))
3071 *base = REGNO (op1);
3072 *index = REGNO (op0);
3073 return;
3076 /* Fall through. */
3078 default:
3079 fatal_insn ("invalid indirect (S) memory address", op);
3085 c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3087 int base0;
3088 int base1;
3089 int incdec0;
3090 int incdec1;
3091 int index0;
3092 int index1;
3093 int disp0;
3094 int disp1;
3096 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3097 return 1;
3099 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3100 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3102 if (store0 && store1)
3104 /* If we have two stores in parallel to the same address, then
3105 the C4x only executes one of the stores. This is unlikely to
3106 cause problems except when writing to a hardware device such
3107 as a FIFO since the second write will be lost. The user
3108 should flag the hardware location as being volatile so that
3109 we don't do this optimization. While it is unlikely that we
3110 have an aliased address if both locations are not marked
3111 volatile, it is probably safer to flag a potential conflict
3112 if either location is volatile. */
3113 if (! flag_argument_noalias)
3115 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3116 return 1;
3120 /* If we have a parallel load and a store to the same address, the load
3121 is performed first, so there is no conflict. Similarly, there is
3122 no conflict if we have parallel loads from the same address. */
3124 /* Cannot use auto increment or auto decrement twice for same
3125 base register. */
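/* E.g., a parallel pair where both operands use *AR0++ must be
   rejected here.  */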
3126 if (base0 == base1 && incdec0 && incdec1)
3127 return 1;
3129 /* It might be too confusing for GCC if we use a base register
3130 with a side effect and a memory reference using the same register
3131 in parallel. */
3132 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3133 return 1;
3135 /* We cannot optimize the case where op0 and op1 refer to the same
3136 address. */
3137 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3138 return 1;
3140 /* No conflict. */
3141 return 0;
3145 /* Check for while loop inside a decrement and branch loop. */
3148 c4x_label_conflict (rtx insn, rtx jump, rtx db)
3150 while (insn)
3152 if (GET_CODE (insn) == CODE_LABEL)
3154 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3155 return 1;
3156 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3157 return 0;
3159 insn = PREV_INSN (insn);
3161 return 1;
3165 /* Validate combination of operands for parallel load/store instructions. */
3168 valid_parallel_load_store (rtx *operands,
3169 enum machine_mode mode ATTRIBUTE_UNUSED)
3171 rtx op0 = operands[0];
3172 rtx op1 = operands[1];
3173 rtx op2 = operands[2];
3174 rtx op3 = operands[3];
3176 if (GET_CODE (op0) == SUBREG)
3177 op0 = SUBREG_REG (op0);
3178 if (GET_CODE (op1) == SUBREG)
3179 op1 = SUBREG_REG (op1);
3180 if (GET_CODE (op2) == SUBREG)
3181 op2 = SUBREG_REG (op2);
3182 if (GET_CODE (op3) == SUBREG)
3183 op3 = SUBREG_REG (op3);
3185 /* The patterns should only allow ext_low_reg_operand() or
3186 par_ind_operand() operands. Thus of the 4 operands, only 2
3187 should be REGs and the other 2 should be MEMs. */
3189 /* This test prevents the multipack pass from using this pattern if
3190 op0 is used as an index or base register in op2 or op3, since
3191 this combination will require reloading. */
3192 if (GET_CODE (op0) == REG
3193 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3194 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3195 return 0;
3197 /* LDI||LDI. */
3198 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3199 return (REGNO (op0) != REGNO (op2))
3200 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3201 && ! c4x_address_conflict (op1, op3, 0, 0);
3203 /* STI||STI. */
3204 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3205 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3206 && ! c4x_address_conflict (op0, op2, 1, 1);
3208 /* LDI||STI. */
3209 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3210 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3211 && ! c4x_address_conflict (op1, op2, 0, 1);
3213 /* STI||LDI. */
3214 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3215 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3216 && ! c4x_address_conflict (op0, op3, 1, 0);
3218 return 0;
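/* For instance, an LDI||LDI pairing such as

	LDI *+AR0(1),R0
     || LDI *+AR1(1),R1

   passes the checks above: distinct destination registers, two MEM
   sources and no address conflict.  (The mnemonic spelling here is
   illustrative.)  */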
3223 valid_parallel_operands_4 (rtx *operands,
3224 enum machine_mode mode ATTRIBUTE_UNUSED)
3226 rtx op0 = operands[0];
3227 rtx op2 = operands[2];
3229 if (GET_CODE (op0) == SUBREG)
3230 op0 = SUBREG_REG (op0);
3231 if (GET_CODE (op2) == SUBREG)
3232 op2 = SUBREG_REG (op2);
3234 /* This test prevents the multipack pass from using this pattern if
3235 op0 is used as an index or base register in op2, since this combination
3236 will require reloading. */
3237 if (GET_CODE (op0) == REG
3238 && GET_CODE (op2) == MEM
3239 && reg_mentioned_p (op0, XEXP (op2, 0)))
3240 return 0;
3242 return 1;
3247 valid_parallel_operands_5 (rtx *operands,
3248 enum machine_mode mode ATTRIBUTE_UNUSED)
3250 int regs = 0;
3251 rtx op0 = operands[0];
3252 rtx op1 = operands[1];
3253 rtx op2 = operands[2];
3254 rtx op3 = operands[3];
3256 if (GET_CODE (op0) == SUBREG)
3257 op0 = SUBREG_REG (op0);
3258 if (GET_CODE (op1) == SUBREG)
3259 op1 = SUBREG_REG (op1);
3260 if (GET_CODE (op2) == SUBREG)
3261 op2 = SUBREG_REG (op2);
3263 /* The patterns should only allow ext_low_reg_operand() or
3264 par_ind_operand() operands. Operands 1 and 2 may be commutative
3265 but only one of them can be a register. */
3266 if (GET_CODE (op1) == REG)
3267 regs++;
3268 if (GET_CODE (op2) == REG)
3269 regs++;
3271 if (regs != 1)
3272 return 0;
3274 /* This test prevents the multipack pass from using this pattern if
3275 op0 is used as an index or base register in op3, since this combination
3276 will require reloading. */
3277 if (GET_CODE (op0) == REG
3278 && GET_CODE (op3) == MEM
3279 && reg_mentioned_p (op0, XEXP (op3, 0)))
3280 return 0;
3282 return 1;
3287 valid_parallel_operands_6 (rtx *operands,
3288 enum machine_mode mode ATTRIBUTE_UNUSED)
3290 int regs = 0;
3291 rtx op0 = operands[0];
3292 rtx op1 = operands[1];
3293 rtx op2 = operands[2];
3294 rtx op4 = operands[4];
3295 rtx op5 = operands[5];
3297 if (GET_CODE (op1) == SUBREG)
3298 op1 = SUBREG_REG (op1);
3299 if (GET_CODE (op2) == SUBREG)
3300 op2 = SUBREG_REG (op2);
3301 if (GET_CODE (op4) == SUBREG)
3302 op4 = SUBREG_REG (op4);
3303 if (GET_CODE (op5) == SUBREG)
3304 op5 = SUBREG_REG (op5);
3306 /* The patterns should only allow ext_low_reg_operand() or
3307 par_ind_operand() operands. Thus of the 4 input operands, only 2
3308 should be REGs and the other 2 should be MEMs. */
3310 if (GET_CODE (op1) == REG)
3311 regs++;
3312 if (GET_CODE (op2) == REG)
3313 regs++;
3314 if (GET_CODE (op4) == REG)
3315 regs++;
3316 if (GET_CODE (op5) == REG)
3317 regs++;
3319 /* The new C30/C40 silicon dies allow 3 of the 4 input operands to
3320 be regs. Perhaps we should count the MEMs as well? */
3321 if (regs != 2)
3322 return 0;
3324 /* This test prevents the multipack pass from using this pattern if
3325 op0 is used as an index or base register in op4 or op5, since
3326 this combination will require reloading. */
3327 if (GET_CODE (op0) == REG
3328 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3329 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3330 return 0;
3332 return 1;
3336 /* Validate combination of src operands. Note that the operands have
3337 been screened by the src_operand predicate. We just have to check
3338 that the combination of operands is valid. If FORCE is set, ensure
3339 that the destination regno is valid if we have a 2 operand insn. */
3341 static int
3342 c4x_valid_operands (enum rtx_code code, rtx *operands,
3343 enum machine_mode mode ATTRIBUTE_UNUSED,
3344 int force)
3346 rtx op0;
3347 rtx op1;
3348 rtx op2;
3349 enum rtx_code code1;
3350 enum rtx_code code2;
3353 /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
3354 if (code == IF_THEN_ELSE)
3355 return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);
3357 if (code == COMPARE)
3359 op1 = operands[0];
3360 op2 = operands[1];
3362 else
3364 op1 = operands[1];
3365 op2 = operands[2];
3368 op0 = operands[0];
3370 if (GET_CODE (op0) == SUBREG)
3371 op0 = SUBREG_REG (op0);
3372 if (GET_CODE (op1) == SUBREG)
3373 op1 = SUBREG_REG (op1);
3374 if (GET_CODE (op2) == SUBREG)
3375 op2 = SUBREG_REG (op2);
3377 code1 = GET_CODE (op1);
3378 code2 = GET_CODE (op2);
3381 if (code1 == REG && code2 == REG)
3382 return 1;
3384 if (code1 == MEM && code2 == MEM)
3386 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3387 return 1;
3388 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3391 /* We cannot handle two MEMs or two CONSTS, etc. */
3392 if (code1 == code2)
3393 return 0;
3395 if (code1 == REG)
3397 switch (code2)
3399 case CONST_INT:
3400 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3401 return 1;
3402 break;
3404 case CONST_DOUBLE:
3405 if (! c4x_H_constant (op2))
3406 return 0;
3407 break;
3409 /* Any valid memory operand screened by src_operand is OK. */
3410 case MEM:
3411 break;
3413 default:
3414 fatal_insn ("c4x_valid_operands: Internal error", op2);
3415 break;
3418 if (GET_CODE (op0) == SCRATCH)
3419 return 1;
3421 if (!REG_P (op0))
3422 return 0;
3424 /* Check that we have a valid destination register for a two operand
3425 instruction. */
3426 return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
3430 /* Check non-commutative operators. */
3431 if (code == ASHIFTRT || code == LSHIFTRT
3432 || code == ASHIFT || code == COMPARE)
3433 return code2 == REG
3434 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3437 /* Assume MINUS is commutative since the subtract patterns
3438 also support the reverse subtract instructions. Since op1
3439 is not a register, and op2 is a register, op1 can only
3440 be a restricted memory operand for a shift instruction. */
3441 if (code2 == REG)
3443 switch (code1)
3445 case CONST_INT:
3446 break;
3448 case CONST_DOUBLE:
3449 if (! c4x_H_constant (op1))
3450 return 0;
3451 break;
3453 /* Any valid memory operand screened by src_operand is OK. */
3454 case MEM:
3455 break;
3457 default:
3458 abort ();
3459 break;
3462 if (GET_CODE (op0) == SCRATCH)
3463 return 1;
3465 if (!REG_P (op0))
3466 return 0;
3468 /* Check that we have a valid destination register for a two operand
3469 instruction. */
3470 return ! force || REGNO (op1) == REGNO (op0);
3473 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3474 return 1;
3476 return 0;
3480 int valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3483 /* If we are not optimizing then we have to let anything go and let
3484 reload fix things up. instantiate_decl in function.c can produce
3485 invalid insns by changing the offset of a memory operand from a
3486 valid one into an invalid one, when the second operand is also a
3487 memory operand. The alternative is not to allow two memory
3488 operands for an insn when not optimizing. The problem only rarely
3489 occurs, for example with the C-torture program DFcmp.c. */
3491 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
3496 legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3498 /* Compare only has 2 operands. */
3499 if (code == COMPARE)
3501 /* During RTL generation, force constants into pseudos so that
3502 they can get hoisted out of loops. This will tie up an extra
3503 register but can save an extra cycle. Only do this if loop
3504 optimization is enabled. (We cannot pull this trick for add and
3505 sub instructions since the flow pass won't find
3506 autoincrements etc.) This allows us to generate compare
3507 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
3508 of LDI *AR0++, R0; CMPI 42, R0.
3510 Note that expand_binops will try to load an expensive constant
3511 into a register if it is used within a loop. Unfortunately,
3512 the cost mechanism doesn't allow us to look at the other
3513 operand to decide whether the constant is expensive. */
3515 if (! reload_in_progress
3516 && TARGET_HOIST
3517 && optimize > 0
3518 && GET_CODE (operands[1]) == CONST_INT
3519 && rtx_cost (operands[1], code) > 1)
3520 operands[1] = force_reg (mode, operands[1]);
3522 if (! reload_in_progress
3523 && ! c4x_valid_operands (code, operands, mode, 0))
3524 operands[0] = force_reg (mode, operands[0]);
3525 return 1;
3528 /* We cannot do this for ADDI/SUBI insns since we would
3529 prevent the flow pass from finding autoincrement addressing
3530 opportunities. */
3531 if (! reload_in_progress
3532 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
3533 && TARGET_HOIST
3534 && optimize > 1
3535 && GET_CODE (operands[2]) == CONST_INT
3536 && rtx_cost (operands[2], code) > 1)
3537 operands[2] = force_reg (mode, operands[2]);
3539 /* We can get better code on a C30 if we force constant shift counts
3540 into a register. This way they can get hoisted out of loops,
3541 tying up a register but saving an instruction. The downside is
3542 that they may get allocated to an address or index register, and
3543 thus we will get a pipeline conflict if there is a nearby
3544 indirect address using an address register.
3546 Note that expand_binops will not try to load an expensive constant
3547 into a register if it is used within a loop for a shift insn. */
3549 if (! reload_in_progress
3550 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
3552 /* If the operand combination is invalid, we force operand1 into a
3553 register, preventing reload from having to do this at a
3554 later stage. */
3555 operands[1] = force_reg (mode, operands[1]);
3556 if (TARGET_FORCE)
3558 emit_move_insn (operands[0], operands[1]);
3559 operands[1] = copy_rtx (operands[0]);
3561 else
3563 /* Just in case... */
3564 if (! c4x_valid_operands (code, operands, mode, 0))
3565 operands[2] = force_reg (mode, operands[2]);
3569 /* Right shifts require a negative shift count, but GCC expects
3570 a positive count, so we emit a NEG. */
3571 if ((code == ASHIFTRT || code == LSHIFTRT)
3572 && (GET_CODE (operands[2]) != CONST_INT))
3573 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
3576 /* When the shift count is the word size or greater, the result
3577 is implementation dependent. We truncate the shift count to
3578 fit in 5 bits so that we do not emit invalid code when
3579 optimizing---such as trying to generate lhu2 with 20021124-1.c. */
3580 if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
3581 && (GET_CODE (operands[2]) == CONST_INT))
3582 && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
3583 operands[2]
3584 = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));
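/* E.g., with 32-bit QImode a constant shift count of 36
   becomes 36 & 31 == 4.  */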
3586 return 1;
3590 /* The following predicates are used for instruction scheduling. */
3593 group1_reg_operand (rtx op, enum machine_mode mode)
3595 if (mode != VOIDmode && mode != GET_MODE (op))
3596 return 0;
3597 if (GET_CODE (op) == SUBREG)
3598 op = SUBREG_REG (op);
3599 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
3604 group1_mem_operand (rtx op, enum machine_mode mode)
3606 if (mode != VOIDmode && mode != GET_MODE (op))
3607 return 0;
3609 if (GET_CODE (op) == MEM)
3611 op = XEXP (op, 0);
3612 if (GET_CODE (op) == PLUS)
3614 rtx op0 = XEXP (op, 0);
3615 rtx op1 = XEXP (op, 1);
3617 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
3618 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
3619 return 1;
3621 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
3622 return 1;
3625 return 0;
3629 /* Return true if OP is any one of the address registers. */
3632 arx_reg_operand (rtx op, enum machine_mode mode)
3634 if (mode != VOIDmode && mode != GET_MODE (op))
3635 return 0;
3636 if (GET_CODE (op) == SUBREG)
3637 op = SUBREG_REG (op);
3638 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
3642 static int
3643 c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
3645 if (mode != VOIDmode && mode != GET_MODE (op))
3646 return 0;
3647 if (GET_CODE (op) == SUBREG)
3648 op = SUBREG_REG (op);
3649 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
3653 static int
3654 c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
3656 if (mode != VOIDmode && mode != GET_MODE (op))
3657 return 0;
3659 if (GET_CODE (op) == MEM)
3661 op = XEXP (op, 0);
3662 switch (GET_CODE (op))
3664 case PRE_DEC:
3665 case POST_DEC:
3666 case PRE_INC:
3667 case POST_INC:
3668 op = XEXP (op, 0);
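/* Fall through. */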
3670 case REG:
3671 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
3673 case PRE_MODIFY:
3674 case POST_MODIFY:
3675 if (REG_P (XEXP (op, 0)) && (! reload_completed
3676 || (REGNO (XEXP (op, 0)) == regno)))
3677 return 1;
3678 if (REG_P (XEXP (XEXP (op, 1), 1))
3679 && (! reload_completed
3680 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
3681 return 1;
3682 break;
3684 case PLUS:
3686 rtx op0 = XEXP (op, 0);
3687 rtx op1 = XEXP (op, 1);
3689 if ((REG_P (op0) && (! reload_completed
3690 || (REGNO (op0) == regno)))
3691 || (REG_P (op1) && (! reload_completed
3692 || (REGNO (op1) == regno))))
3693 return 1;
3695 break;
3697 default:
3698 break;
3701 return 0;
3706 ar0_reg_operand (rtx op, enum machine_mode mode)
3708 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
3713 ar0_mem_operand (rtx op, enum machine_mode mode)
3715 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
3720 ar1_reg_operand (rtx op, enum machine_mode mode)
3722 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
3727 ar1_mem_operand (rtx op, enum machine_mode mode)
3729 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
3734 ar2_reg_operand (rtx op, enum machine_mode mode)
3736 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
3741 ar2_mem_operand (rtx op, enum machine_mode mode)
3743 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
3748 ar3_reg_operand (rtx op, enum machine_mode mode)
3750 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
3755 ar3_mem_operand (rtx op, enum machine_mode mode)
3757 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
3762 ar4_reg_operand (rtx op, enum machine_mode mode)
3764 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
3769 ar4_mem_operand (rtx op, enum machine_mode mode)
3771 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
3776 ar5_reg_operand (rtx op, enum machine_mode mode)
3778 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
3783 ar5_mem_operand (rtx op, enum machine_mode mode)
3785 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
3790 ar6_reg_operand (rtx op, enum machine_mode mode)
3792 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
3797 ar6_mem_operand (rtx op, enum machine_mode mode)
3799 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
3804 ar7_reg_operand (rtx op, enum machine_mode mode)
3806 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
3811 ar7_mem_operand (rtx op, enum machine_mode mode)
3813 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
3818 ir0_reg_operand (rtx op, enum machine_mode mode)
3820 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
3825 ir0_mem_operand (rtx op, enum machine_mode mode)
3827 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
3832 ir1_reg_operand (rtx op, enum machine_mode mode)
3834 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
3839 ir1_mem_operand (rtx op, enum machine_mode mode)
3841 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
3845 /* This is similar to operand_subword but allows autoincrement
3846 addressing. */
3849 c4x_operand_subword (rtx op, int i, int validate_address,
3850 enum machine_mode mode)
3852 if (mode != HImode && mode != HFmode)
3853 fatal_insn ("c4x_operand_subword: invalid mode", op);
3855 if (mode == HFmode && REG_P (op))
3856 fatal_insn ("c4x_operand_subword: invalid operand", op);
3858 if (GET_CODE (op) == MEM)
3860 enum rtx_code code = GET_CODE (XEXP (op, 0));
3861 enum machine_mode mode = GET_MODE (XEXP (op, 0));
3862 enum machine_mode submode;
3864 submode = mode;
3865 if (mode == HImode)
3866 submode = QImode;
3867 else if (mode == HFmode)
3868 submode = QFmode;
3870 switch (code)
3872 case POST_INC:
3873 case PRE_INC:
3874 return gen_rtx_MEM (submode, XEXP (op, 0));
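/* Each subword access through the autoincrement address above
   performs its own increment, so the two halves of an HImode or
   HFmode reference should pick up consecutive words.  */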
3876 case POST_DEC:
3877 case PRE_DEC:
3878 case PRE_MODIFY:
3879 case POST_MODIFY:
3880 /* We could handle these with some difficulty.
3881 e.g., *p-- => *(p-=2); *(p+1). */
3882 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
3884 case SYMBOL_REF:
3885 case LABEL_REF:
3886 case CONST:
3887 case CONST_INT:
3888 fatal_insn ("c4x_operand_subword: invalid address", op);
3890 /* Even though offsettable_address_p considers (MEM
3891 (LO_SUM)) to be offsettable, it is not safe if the
3892 address is at the end of the data page since we also have
3893 to fix up the associated HIGH part. In this case where
3894 we are trying to split a HImode or HFmode memory
3895 reference, we would have to emit another insn to reload a
3896 new HIGH value. It's easier to disable LO_SUM memory references
3897 in HImode or HFmode and we probably get better code. */
3898 case LO_SUM:
3899 fatal_insn ("c4x_operand_subword: address not offsettable", op);
3901 default:
3902 break;
3906 return operand_subword (op, i, validate_address, mode);
3909 struct name_list
3911 struct name_list *next;
3912 const char *name;
3915 static struct name_list *global_head;
3916 static struct name_list *extern_head;
3919 /* Add NAME to list of global symbols and remove from external list if
3920 present on external list. */
3922 void
3923 c4x_global_label (const char *name)
3925 struct name_list *p, *last;
3927 /* Do not insert duplicate names, so linearly search through list of
3928 existing names. */
3929 p = global_head;
3930 while (p)
3932 if (strcmp (p->name, name) == 0)
3933 return;
3934 p = p->next;
3936 p = (struct name_list *) xmalloc (sizeof *p);
3937 p->next = global_head;
3938 p->name = name;
3939 global_head = p;
3941 /* Remove this name from ref list if present. */
3942 last = NULL;
3943 p = extern_head;
3944 while (p)
3946 if (strcmp (p->name, name) == 0)
3948 if (last)
3949 last->next = p->next;
3950 else
3951 extern_head = p->next;
3952 break;
3954 last = p;
3955 p = p->next;
3960 /* Add NAME to list of external symbols. */
3962 void
3963 c4x_external_ref (const char *name)
3965 struct name_list *p;
3967 /* Do not insert duplicate names. */
3968 p = extern_head;
3969 while (p)
3971 if (strcmp (p->name, name) == 0)
3972 return;
3973 p = p->next;
3976 /* Do not insert ref if global found. */
3977 p = global_head;
3978 while (p)
3980 if (strcmp (p->name, name) == 0)
3981 return;
3982 p = p->next;
3984 p = (struct name_list *) xmalloc (sizeof *p);
3985 p->next = extern_head;
3986 p->name = name;
3987 extern_head = p;
3990 /* We need to have a data section we can identify so that we can set
3991 the DP register back to a data pointer in the small memory model.
3992 This is only required for ISRs if we are paranoid that someone
3993 may have changed this register on the sly. */
3994 static void
3995 c4x_file_start (void)
3997 default_file_start ();
3998 fprintf (asm_out_file, "\t.version\t%d\n", c4x_cpu_version);
3999 fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
4003 static void
4004 c4x_file_end (void)
4006 struct name_list *p;
4008 /* Output all external names that are not global. */
4009 p = extern_head;
4010 while (p)
4012 fprintf (asm_out_file, "\t.ref\t");
4013 assemble_name (asm_out_file, p->name);
4014 fprintf (asm_out_file, "\n");
4015 p = p->next;
4017 fprintf (asm_out_file, "\t.end\n");
4021 static void
4022 c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
4024 while (list != NULL_TREE
4025 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4026 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4027 list = TREE_CHAIN (list);
4028 if (list)
4029 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4030 *attributes);
4034 static void
4035 c4x_insert_attributes (tree decl, tree *attributes)
4037 switch (TREE_CODE (decl))
4039 case FUNCTION_DECL:
4040 c4x_check_attribute ("section", code_tree, decl, attributes);
4041 c4x_check_attribute ("const", pure_tree, decl, attributes);
4042 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4043 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4044 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4045 break;
4047 case VAR_DECL:
4048 c4x_check_attribute ("section", data_tree, decl, attributes);
4049 break;
4051 default:
4052 break;
4056 /* Table of valid machine attributes. */
4057 const struct attribute_spec c4x_attribute_table[] =
4059 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4060 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4061 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4062 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4063 { NULL, 0, 0, false, false, false, NULL }
4066 /* Handle an attribute requiring a FUNCTION_TYPE;
4067 arguments as in struct attribute_spec.handler. */
4068 static tree
4069 c4x_handle_fntype_attribute (tree *node, tree name,
4070 tree args ATTRIBUTE_UNUSED,
4071 int flags ATTRIBUTE_UNUSED,
4072 bool *no_add_attrs)
4074 if (TREE_CODE (*node) != FUNCTION_TYPE)
4076 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4077 IDENTIFIER_POINTER (name));
4078 *no_add_attrs = true;
4081 return NULL_TREE;
4085 /* !!! FIXME to emit RPTS correctly. */
4088 c4x_rptb_rpts_p (rtx insn, rtx op)
4090 /* The next insn should be our label marking where the
4091 repeat block starts. */
4092 insn = NEXT_INSN (insn);
4093 if (GET_CODE (insn) != CODE_LABEL)
4095 /* Some insns may have been shifted between the RPTB insn
4096 and the top label... They were probably destined to
4097 be moved out of the loop. For now, let's leave them
4098 where they are and print a warning. We should
4099 probably move these insns before the repeat block insn. */
4100 if (TARGET_DEBUG)
4101 fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
4102 insn);
4103 return 0;
4106 /* Skip any notes. */
4107 insn = next_nonnote_insn (insn);
4109 /* This should be our first insn in the loop. */
4110 if (! INSN_P (insn))
4111 return 0;
4113 /* Skip any notes. */
4114 insn = next_nonnote_insn (insn);
4116 if (! INSN_P (insn))
4117 return 0;
4119 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4120 return 0;
4122 if (TARGET_RPTS)
4123 return 1;
4125 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4129 /* Check if register r11 is used as the destination of an insn. */
4131 static int
4132 c4x_r11_set_p(rtx x)
4134 rtx set;
4135 int i, j;
4136 const char *fmt;
4138 if (x == 0)
4139 return 0;
4141 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4142 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4144 if (INSN_P (x) && (set = single_set (x)))
4145 x = SET_DEST (set);
4147 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4148 return 1;
4150 fmt = GET_RTX_FORMAT (GET_CODE (x));
4151 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4153 if (fmt[i] == 'e')
4155 if (c4x_r11_set_p (XEXP (x, i)))
4156 return 1;
4158 else if (fmt[i] == 'E')
4159 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4160 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4161 return 1;
4163 return 0;
4167 /* The c4x sometimes has a problem when the insn before the laj insn
4168 sets the r11 register. Check for this situation. */
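/* (LAJ links through R11: it stores the return address there, so a
   preceding write of R11 creates a pipeline hazard.)  */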
4171 c4x_check_laj_p (rtx insn)
4173 insn = prev_nonnote_insn (insn);
4175 /* If this is the start of the function, no nop is needed. */
4176 if (insn == 0)
4177 return 0;
4179 /* If the previous insn is a code label we have to insert a nop. This
4180 could be a jump or table jump. We can find the normal jumps by
4181 scanning the function but this will not find table jumps. */
4182 if (GET_CODE (insn) == CODE_LABEL)
4183 return 1;
4185 /* If the previous insn sets register r11 we have to insert a nop. */
4186 if (c4x_r11_set_p (insn))
4187 return 1;
4189 /* No nop needed. */
4190 return 0;
4194 /* Adjust the cost of a scheduling dependency. Return the new cost of
4195 a dependency LINK of INSN on DEP_INSN. COST is the current cost.
4196 A set of an address register followed by a use incurs a 2 cycle
4197 stall (reduced to a single cycle on the c40 using LDA), while
4198 a read of an address register followed by a use incurs a single cycle stall. */
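/* For example, LDI 4,AR0 followed immediately by LDI *AR0,R0 incurs
   the 2 cycle set-then-use stall; SET_USE_COST below is presumably
   that stall plus the single issue cycle.  */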
4200 #define SET_USE_COST 3
4201 #define SETLDA_USE_COST 2
4202 #define READ_USE_COST 2
4204 static int
4205 c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4207 /* Don't worry about this until we know what registers have been
4208 assigned. */
4209 if (flag_schedule_insns == 0 && ! reload_completed)
4210 return 0;
4212 /* How do we handle dependencies where a read followed by another
4213 read causes a pipeline stall? For example, a read of ar0 followed
4214 by the use of ar0 for a memory reference. It looks like we
4215 need to extend the scheduler to handle this case. */
4217 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4218 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4219 so only deal with insns we know about. */
4220 if (recog_memoized (dep_insn) < 0)
4221 return 0;
4223 if (REG_NOTE_KIND (link) == 0)
4225 int max = 0;
4227 /* Data dependency; DEP_INSN writes a register that INSN reads some
4228 cycles later. */
4229 if (TARGET_C3X)
4231 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4232 max = SET_USE_COST > max ? SET_USE_COST : max;
4233 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4234 max = READ_USE_COST > max ? READ_USE_COST : max;
4236 else
4238 /* This could be significantly optimized. We should look
4239 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4240 insn uses ar0-ar7. We then test if the same register
4241 is used. The tricky bit is that some operands will
4242 use several registers... */
4243 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4244 max = SET_USE_COST > max ? SET_USE_COST : max;
4245 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4246 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4247 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4248 max = READ_USE_COST > max ? READ_USE_COST : max;
4250 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4251 max = SET_USE_COST > max ? SET_USE_COST : max;
4252 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4253 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4254 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4255 max = READ_USE_COST > max ? READ_USE_COST : max;
4257 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4258 max = SET_USE_COST > max ? SET_USE_COST : max;
4259 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4260 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4261 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4262 max = READ_USE_COST > max ? READ_USE_COST : max;
4264 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4265 max = SET_USE_COST > max ? SET_USE_COST : max;
4266 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4267 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4268 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4269 max = READ_USE_COST > max ? READ_USE_COST : max;
4271 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4272 max = SET_USE_COST > max ? SET_USE_COST : max;
4273 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4274 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4275 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4276 max = READ_USE_COST > max ? READ_USE_COST : max;
4278 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4279 max = SET_USE_COST > max ? SET_USE_COST : max;
4280 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4281 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4282 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4283 max = READ_USE_COST > max ? READ_USE_COST : max;
4285 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4286 max = SET_USE_COST > max ? SET_USE_COST : max;
4287 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4288 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4289 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4290 max = READ_USE_COST > max ? READ_USE_COST : max;
4292 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4293 max = SET_USE_COST > max ? SET_USE_COST : max;
4294 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4295 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4296 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4297 max = READ_USE_COST > max ? READ_USE_COST : max;
4299 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4300 max = SET_USE_COST > max ? SET_USE_COST : max;
4301 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4302 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4304 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4305 max = SET_USE_COST > max ? SET_USE_COST : max;
4306 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4307 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4310 if (max)
4311 cost = max;
4313 /* For other data dependencies, the default cost specified in the
4314 md is correct. */
4315 return cost;
4317 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4319 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4320 cycles later. */
4322 /* For c4x anti dependencies, the cost is 0. */
4323 return 0;
4325 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4327 /* Output dependency; DEP_INSN writes a register that INSN writes some
4328 cycles later. */
4330 /* For c4x output dependencies, the cost is 0. */
4331 return 0;
4333 else
4334 abort ();
4337 void
4338 c4x_init_builtins (void)
4340 tree endlink = void_list_node;
4342 add_builtin_function ("fast_ftoi",
4343 build_function_type
4344 (integer_type_node,
4345 tree_cons (NULL_TREE, double_type_node,
4346 endlink)),
4347 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
4348 add_builtin_function ("ansi_ftoi",
4349 build_function_type
4350 (integer_type_node,
4351 tree_cons (NULL_TREE, double_type_node,
4352 endlink)),
4353 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
4354 NULL_TREE);
4355 if (TARGET_C3X)
4356 add_builtin_function ("fast_imult",
4357 build_function_type
4358 (integer_type_node,
4359 tree_cons (NULL_TREE, integer_type_node,
4360 tree_cons (NULL_TREE,
4361 integer_type_node,
4362 endlink))),
4363 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
4364 NULL_TREE);
4365 else
4367 add_builtin_function ("toieee",
4368 build_function_type
4369 (double_type_node,
4370 tree_cons (NULL_TREE, double_type_node,
4371 endlink)),
4372 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
4373 NULL_TREE);
4374 add_builtin_function ("frieee",
4375 build_function_type
4376 (double_type_node,
4377 tree_cons (NULL_TREE, double_type_node,
4378 endlink)),
4379 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
4380 NULL_TREE);
4381 add_builtin_function ("fast_invf",
4382 build_function_type
4383 (double_type_node,
4384 tree_cons (NULL_TREE, double_type_node,
4385 endlink)),
4386 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
4387 NULL_TREE);
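/* From user code these builtins look like ordinary functions, e.g.
   (illustrative usage only):

	int i = fast_ftoi (f);
	double y = fast_invf (x);	(C4x only)  */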
4393 c4x_expand_builtin (tree exp, rtx target,
4394 rtx subtarget ATTRIBUTE_UNUSED,
4395 enum machine_mode mode ATTRIBUTE_UNUSED,
4396 int ignore ATTRIBUTE_UNUSED)
4398 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4399 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4400 tree arglist = TREE_OPERAND (exp, 1);
4401 tree arg0, arg1;
4402 rtx r0, r1;
4404 switch (fcode)
4406 case C4X_BUILTIN_FIX:
4407 arg0 = TREE_VALUE (arglist);
4408 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4409 if (! target || ! register_operand (target, QImode))
4410 target = gen_reg_rtx (QImode);
4411 emit_insn (gen_fixqfqi_clobber (target, r0));
4412 return target;
4414 case C4X_BUILTIN_FIX_ANSI:
4415 arg0 = TREE_VALUE (arglist);
4416 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4417 if (! target || ! register_operand (target, QImode))
4418 target = gen_reg_rtx (QImode);
4419 emit_insn (gen_fix_truncqfqi2 (target, r0));
4420 return target;
4422 case C4X_BUILTIN_MPYI:
4423 if (! TARGET_C3X)
4424 break;
4425 arg0 = TREE_VALUE (arglist);
4426 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4427 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
4428 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
4429 if (! target || ! register_operand (target, QImode))
4430 target = gen_reg_rtx (QImode);
4431 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
4432 return target;
4434 case C4X_BUILTIN_TOIEEE:
4435 if (TARGET_C3X)
4436 break;
4437 arg0 = TREE_VALUE (arglist);
4438 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4439 if (! target || ! register_operand (target, QFmode))
4440 target = gen_reg_rtx (QFmode);
4441 emit_insn (gen_toieee (target, r0));
4442 return target;
4444 case C4X_BUILTIN_FRIEEE:
4445 if (TARGET_C3X)
4446 break;
4447 arg0 = TREE_VALUE (arglist);
4448 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4449 if (register_operand (r0, QFmode))
4451 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
4452 emit_move_insn (r1, r0);
4453 r0 = r1;
4455 if (! target || ! register_operand (target, QFmode))
4456 target = gen_reg_rtx (QFmode);
4457 emit_insn (gen_frieee (target, r0));
4458 return target;
    case C4X_BUILTIN_RCPF:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  return NULL_RTX;
}
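/* Set up the c4x run-time library names.  The optab entries override
   the default libcall names; the conversion routines that have no
   optab slot are kept in the file-scope libfunc handles declared at
   the top of this file.  */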
static void
c4x_init_libfuncs (void)
{
  set_optab_libfunc (smul_optab, QImode, "__mulqi3");
  set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
  set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
  set_optab_libfunc (smod_optab, QImode, "__modqi3");
  set_optab_libfunc (umod_optab, QImode, "__umodqi3");
  set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
  set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
  set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
  set_optab_libfunc (smul_optab, HImode, "__mulhi3");
  set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
  set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
  set_optab_libfunc (smod_optab, HImode, "__modhi3");
  set_optab_libfunc (umod_optab, HImode, "__umodhi3");
  set_optab_libfunc (ffs_optab, QImode, "__ffs");
  smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
  umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
  fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
  fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
  fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
  fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
  floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
  floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
  floathihf2_libfunc = init_one_libfunc ("__floathihf2");
  floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
}
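/* Worker function for TARGET_ASM_NAMED_SECTION.  The TI assembler
   switches to an arbitrary named section with the .sect directive.  */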
static void
c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
                       tree decl ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}
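/* Worker function for TARGET_ASM_GLOBALIZE_LABEL: emit the standard
   directive and also record the symbol via c4x_global_label so the
   back end can track globally visible names.  */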
static void
c4x_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
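/* Predicates used by c4x_rtx_costs below to classify the outer rtx
   code of a constant operand.  */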
#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)

#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  HOST_WIDE_INT val;

  switch (code)
    {
    /* Some small integers are effectively free for the C40.  We should
       also consider whether we are using the small memory model.  With
       the big memory model we require an extra insn for a constant
       loaded from memory.  */
    case CONST_INT:
      val = INTVAL (x);
      if (c4x_J_constant (x))
        *total = 0;
      else if (! TARGET_C3X
               && outer_code == AND
               && (val == 255 || val == 65535))
        *total = 0;
      else if (! TARGET_C3X
               && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
               && (val == 16 || val == 24))
        *total = 0;
      else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
        *total = 3;
      else if (LOGICAL_CODE_P (outer_code)
               ? c4x_L_constant (x) : c4x_I_constant (x))
        *total = 2;
      else
        *total = 4;
      return true;
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 4;
      return true;
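    /* Presumably: constants matching c4x_H_constant fit an immediate
       float field and are cheap; other QFmode constants take a memory
       load, and two-word HFmode constants take two.  */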
    case CONST_DOUBLE:
      if (c4x_H_constant (x))
        *total = 2;
      else if (GET_MODE (x) == QFmode)
        *total = 4;
      else
        *total = 8;
      return true;
    /* ??? Note that we return true rather than false, so that rtx_cost
       doesn't include the constant costs.  Otherwise expand_mult will
       think that it is cheaper to synthesize a multiply rather than to
       use a multiply instruction.  I think this is because the algorithm
       synth_mult doesn't take into account the loading of the operands,
       whereas the calculation of mult_cost does.  */
    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;
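    /* A multiply is a single insn for floating-point modes or when the
       hardware integer multiply is available (TARGET_MPYI); otherwise
       it is a run-time call estimated at 14 insns.  */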
    case MULT:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              || TARGET_MPYI ? 1 : 14);
      return true;
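    /* There is no divide instruction; division and modulus go through
       the run-time routines set up in c4x_init_libfuncs, estimated at
       15 insns for floating point and 50 for integers.  */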
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              ? 15 : 50);
      return true;
    default:
      return false;
    }
}
/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  */

static void
c4x_external_libcall (rtx fun)
{
  /* This is only needed to keep asm30 happy for ___divqf3 etc.  */
  c4x_external_ref (XSTR (fun, 0));
}
/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
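  /* Aggregate return values are handed back through a pointer kept in
     AR0; INCOMING is ignored, so the same register serves both the
     caller and the callee.  */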
  return gen_rtx_REG (Pmode, AR0_REGNO);