/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
   2004, 2005
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
/* Some output-actions in c4x.md need these.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "recog.h"
#include "ggc.h"
#include "cpplib.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                        /* Reg   Modes        Saved.  */
  R0R1_REGS,            /* R0    QI, QF, HF   No.  */
  R0R1_REGS,            /* R1    QI, QF, HF   No.  */
  R2R3_REGS,            /* R2    QI, QF, HF   No.  */
  R2R3_REGS,            /* R3    QI, QF, HF   No.  */
  EXT_LOW_REGS,         /* R4    QI, QF, HF   QI.  */
  EXT_LOW_REGS,         /* R5    QI, QF, HF   QI.  */
  EXT_LOW_REGS,         /* R6    QI, QF, HF   QF.  */
  EXT_LOW_REGS,         /* R7    QI, QF, HF   QF.  */
  ADDR_REGS,            /* AR0   QI           No.  */
  ADDR_REGS,            /* AR1   QI           No.  */
  ADDR_REGS,            /* AR2   QI           No.  */
  ADDR_REGS,            /* AR3   QI           QI.  */
  ADDR_REGS,            /* AR4   QI           QI.  */
  ADDR_REGS,            /* AR5   QI           QI.  */
  ADDR_REGS,            /* AR6   QI           QI.  */
  ADDR_REGS,            /* AR7   QI           QI.  */
  DP_REG,               /* DP    QI           No.  */
  INDEX_REGS,           /* IR0   QI           No.  */
  INDEX_REGS,           /* IR1   QI           No.  */
  BK_REG,               /* BK    QI           QI.  */
  SP_REG,               /* SP    QI           No.  */
  ST_REG,               /* ST    CC           No.  */
  NO_REGS,              /* DIE/IE             No.  */
  NO_REGS,              /* IIE/IF             No.  */
  NO_REGS,              /* IIF/IOF            No.  */
  INT_REGS,             /* RS    QI           No.  */
  INT_REGS,             /* RE    QI           No.  */
  RC_REG,               /* RC    QI           No.  */
  EXT_REGS,             /* R8    QI, QF, HF   QI.  */
  EXT_REGS,             /* R9    QI, QF, HF   No.  */
  EXT_REGS,             /* R10   QI, QF, HF   No.  */
  EXT_REGS,             /* R11   QI, QF, HF   No.  */
};
enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                        /* Reg   Modes        Saved.  */
  HFmode,               /* R0    QI, QF, HF   No.  */
  HFmode,               /* R1    QI, QF, HF   No.  */
  HFmode,               /* R2    QI, QF, HF   No.  */
  HFmode,               /* R3    QI, QF, HF   No.  */
  QFmode,               /* R4    QI, QF, HF   QI.  */
  QFmode,               /* R5    QI, QF, HF   QI.  */
  QImode,               /* R6    QI, QF, HF   QF.  */
  QImode,               /* R7    QI, QF, HF   QF.  */
  QImode,               /* AR0   QI           No.  */
  QImode,               /* AR1   QI           No.  */
  QImode,               /* AR2   QI           No.  */
  QImode,               /* AR3   QI           QI.  */
  QImode,               /* AR4   QI           QI.  */
  QImode,               /* AR5   QI           QI.  */
  QImode,               /* AR6   QI           QI.  */
  QImode,               /* AR7   QI           QI.  */
  VOIDmode,             /* DP    QI           No.  */
  QImode,               /* IR0   QI           No.  */
  QImode,               /* IR1   QI           No.  */
  QImode,               /* BK    QI           QI.  */
  VOIDmode,             /* SP    QI           No.  */
  VOIDmode,             /* ST    CC           No.  */
  VOIDmode,             /* DIE/IE             No.  */
  VOIDmode,             /* IIE/IF             No.  */
  VOIDmode,             /* IIF/IOF            No.  */
  QImode,               /* RS    QI           No.  */
  QImode,               /* RE    QI           No.  */
  VOIDmode,             /* RC    QI           No.  */
  QFmode,               /* R8    QI, QF, HF   QI.  */
  HFmode,               /* R9    QI, QF, HF   No.  */
  HFmode,               /* R10   QI, QF, HF   No.  */
  HFmode,               /* R11   QI, QF, HF   No.  */
};
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

int c4x_rpts_cycles = 0;        /* Max. cycles for RPTS.  */
int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;
/* Forward declarations */
static bool c4x_handle_option (size_t, const char *, int);
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_float_p (rtx);
static int c4x_a_register (rtx);
static int c4x_x_register (rtx);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_K_constant (rtx);
static int c4x_N_constant (rtx);
static int c4x_O_constant (rtx);
static int c4x_R_indirect (rtx);
static int c4x_S_indirect (rtx);
static void c4x_S_address_parse (rtx, int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int, tree);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
static void c4x_external_libcall (rtx);
static rtx c4x_struct_value_rtx (tree, int);
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall

/* Play safe, not the fastest code.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
                                     | MASK_PARALLEL_MPY | MASK_RPTB)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION c4x_handle_option

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr

struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_HANDLE_OPTION.  */

static bool
c4x_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_m30: c4x_cpu_version = 30; return true;
    case OPT_m31: c4x_cpu_version = 31; return true;
    case OPT_m32: c4x_cpu_version = 32; return true;
    case OPT_m33: c4x_cpu_version = 33; return true;
    case OPT_m40: c4x_cpu_version = 40; return true;
    case OPT_m44: c4x_cpu_version = 44; return true;

    case OPT_mcpu_:
      if (arg[0] == 'c' || arg[0] == 'C')
        arg++;
      value = atoi (arg);
      switch (value)
        {
        case 30: case 31: case 32: case 33: case 40: case 44:
          c4x_cpu_version = value;
          return true;
        }
      return false;

    case OPT_mrpts_:
      c4x_rpts_cycles = value;
      return true;

    default:
      return true;
    }
}
/* Override command line options.
   Called once after all options have been parsed.
   Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
c4x_override_options (void)
{
  /* Convert foo / 8.0 into foo * 0.125, etc.  */
  set_fast_math_flags (1);

  /* We should phase out the following at some stage.
     This provides compatibility with the old -mno-aliases option.  */
  if (! TARGET_ALIASES && ! flag_argument_noalias)
    flag_argument_noalias = 1;

  if (!TARGET_C3X)
    target_flags |= MASK_MPYI | MASK_DB;

  if (optimize < 2)
    target_flags &= ~(MASK_RPTB | MASK_PARALLEL);

  if (!TARGET_PARALLEL)
    target_flags &= ~MASK_PARALLEL_MPY;
}
/* This is called before c4x_override_options.  */

void
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
                          int size ATTRIBUTE_UNUSED)
{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we get by scheduling before
     register allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
}
/* Write an ASCII string.  */

#define C4X_ASCII_LIMIT 40

void
c4x_output_ascii (FILE *stream, const char *ptr, int len)
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

  if (len)
    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable, add to the buffer.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
        {
          if (special)
            sbuf[s++] = '\\';
          sbuf[s++] = *ptr;
          if (s < C4X_ASCII_LIMIT - 1)
            continue;
          onlys = 1;
        }
      if (s)
        {
          if (first)
            first = 0;
          else
            {
              fputc (',', stream);
              l++;
            }

          sbuf[s] = 0;
          fprintf (stream, "\"%s\"", sbuf);
          l += s + 2;
          if (TARGET_TI && l >= 80 && len > 1)
            {
              fprintf (stream, "\n\t.byte\t");
              first = 1;
              l = 0;
            }

          s = 0;
        }
      if (onlys)
        continue;

      if (first)
        first = 0;
      else
        {
          fputc (',', stream);
          l++;
        }

      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
        {
          fprintf (stream, "\n\t.byte\t");
          first = 1;
          l = 0;
        }
    }
  if (s)
    {
      if (! first)
        fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}
int
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (mode)
    {
#if Pmode != QImode
    case Pmode:                 /* Pointer (24/32 bits).  */
#endif
    case QImode:                /* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:                /* Float, Double (32 bits).  */
    case HFmode:                /* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:                /* Condition Codes.  */
    case CC_NOOVmode:           /* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:                /* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
         it is much easier to constrain the first register
         to start on an even boundary.  */
      return IS_INT_REGNO (regno)
        && IS_INT_REGNO (regno + 1)
        && (regno & 1) == 0;

    default:
      return 0;                 /* We don't support these modes.  */
    }

  return 0;
}
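
/* Editor's illustrative note (a sketch, not from the original sources):
   given the register map above, a 64-bit HImode value may live in the
   pair (R0, R1), since R0 is an even-numbered integer-capable register
   and R1 is also integer capable, but never in (R1, R2), which fails
   the even-start test in c4x_hard_regno_mode_ok.  */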

/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}

/* The TI C3x C compiler register argument runtime model uses 6 registers,
   AR2, R2, R3, RC, RS, RE.

   The first two floating point arguments (float, double, long double)
   that are found scanning from left to right are assigned to R2 and R3.

   The remaining integer (char, short, int, long) or pointer arguments
   are assigned to the remaining registers in the order AR2, R2, R3,
   RC, RS, RE when scanning left to right, except for the last named
   argument prior to an ellipsis denoting variable number of
   arguments.  We don't have to worry about the latter condition since
   function.c treats the last named argument as anonymous (unnamed).

   All arguments that cannot be passed in registers are pushed onto
   the stack in reverse order (right to left).  GCC handles that for us.

   c4x_init_cumulative_args() is called at the start, so we can parse
   the args to see how many floating point arguments and how many
   integer (or pointer) arguments there are.  c4x_function_arg() is
   then called (sometimes repeatedly) for each argument (parsed left
   to right) to obtain the register to pass the argument in, or zero
   if the argument is to be passed on the stack.  Once the compiler is
   happy, c4x_function_arg_advance() is called.

   Don't use R0 to pass arguments in, we use 0 to indicate a stack
   argument.  */

static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
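
/* Editor's illustrative sketch (not from the original sources): for a
   prototyped call such as

       void f (int a, float b, int c, float d, int e);

   the first two floats scanned left to right (b, d) land in R2 and R3,
   so c4x_function_arg selects row 2 of c4x_int_reglist above, and the
   integer arguments (a, c, e) are assigned AR2, RC and RS in that
   order.  */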

/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);

          fprintf (stderr, "fntype code = %s, ret code = %s",
                   tree_code_name[(int) TREE_CODE (fntype)],
                   tree_code_name[(int) TREE_CODE (ret_type)]);
        }
      else
        fprintf (stderr, "no fntype");

      if (libname)
        fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
        {
          enum machine_mode mode;

          /* If the last arg doesn't have void type then we have
             variable arguments.  */
          if (! next_param)
            cum->var = 1;

          if ((mode = TYPE_MODE (type)))
            {
              if (! targetm.calls.must_pass_in_stack (mode, type))
                {
                  /* Look for float, double, or long double argument.  */
                  if (mode == QFmode || mode == HFmode)
                    cum->floats++;
                  /* Look for integer, enumeral, boolean, char, or pointer
                     argument.  */
                  else if (mode == QImode || mode == Pmode)
                    cum->ints++;
                }
            }
          cum->args++;
        }
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
             cum->prototype ? ", prototype" : "",
             cum->var ? ", variable args" : "",
             cum->args);
}

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                          tree type, int named)
{
  if (TARGET_DEBUG)
    fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
             GET_MODE_NAME (mode), named);
  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* Handle libcall arguments.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  return;
}

/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                  tree type, int named)
{
  int reg = 0;                  /* Default to passing argument on stack.  */

  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
         in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
        6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
        cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        {
          if (cum->floats < cum->maxfloats)
            reg = c4x_fp_reglist[cum->floats];
        }
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        {
          if (cum->ints < cum->maxints)
            reg = c4x_int_reglist[cum->maxfloats][cum->ints];
        }
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
        reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
        reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
               GET_MODE_NAME (mode), named);
      if (reg)
        fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
        fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}

/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs machinery can't handle.  */

static tree
c4x_gimplify_va_arg_expr (tree valist, tree type,
                          tree *pre_p ATTRIBUTE_UNUSED,
                          tree *post_p ATTRIBUTE_UNUSED)
{
  tree t;
  bool indirect;

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
             build_int_cst (NULL_TREE, int_size_in_bytes (type)));
  t = fold_convert (build_pointer_type (type), t);
  t = build_fold_indirect_ref (t);

  if (indirect)
    t = build_fold_indirect_ref (t);

  return t;
}
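
/* Editor's note (a sketch, not from the original sources): the
   PREDECREMENT_EXPR above means each va_arg first steps the argument
   pointer down by the argument's size and then dereferences it, so
   successive va_arg calls walk downwards through memory, matching the
   downward argument growth described above.  */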

static int
c4x_isr_reg_used_p (unsigned int regno)
{
  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))
    return 0;

  /* We could be a little smarter about saving/restoring DP.
     We only need to save it for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* Only save/restore regs in leaf function that are used.  */
  if (c4x_leaf_function)
    return regs_ever_live[regno] && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((regs_ever_live[regno] || call_used_regs[regno])
        && fixed_regs[regno] == 0);
}

static int
c4x_leaf_function_p (void)
{
  /* A leaf function makes no calls, so we only need
     to save/restore the registers we actually use.
     For the global variable leaf_function to be set, we need
     to define LEAF_REGISTERS and all that it entails.
     Let's check ourselves....  */

  if (lookup_attribute ("leaf_pretend",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Use the leaf_pretend attribute at your own risk.  This is a hack
     to speed up ISRs that call a function infrequently where the
     overhead of saving and restoring the additional registers is not
     warranted.  You must save and restore the additional registers
     required by the called function.  Caveat emptor.  Here's enough
     rope...  */

  if (leaf_function_p ())
    return 1;

  return 0;
}

static int
c4x_naked_function_p (void)
{
  tree type;

  type = TREE_TYPE (current_function_decl);
  return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
}

int
c4x_interrupt_function_p (void)
{
  const char *cfun_name;
  if (lookup_attribute ("interrupt",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Look for TI style c_intnn.  */
  cfun_name = current_function_name ();
  return cfun_name[0] == 'c'
    && cfun_name[1] == '_'
    && cfun_name[2] == 'i'
    && cfun_name[3] == 'n'
    && cfun_name[4] == 't'
    && ISDIGIT (cfun_name[5])
    && ISDIGIT (cfun_name[6]);
}

void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     flag is used so we won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __naked__ function don't build a prologue.  */
  if (c4x_naked_function_p ())
    {
      return;
    }

  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
        {
          insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                       gen_rtx_REG (QImode, SP_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          /* We require that an ISR uses fewer than 32768 words of
             local variables, otherwise we have to go to lots of
             effort to save a register, load it with the desired size,
             adjust the stack pointer, and then restore the modified
             register.  Frankly, I think it is a poor ISR that
             requires more than 32767 words of local temporary
             storage!  */
          if (size > 32767)
            error ("ISR %s requires %d words of local vars, max is 32767",
                   current_function_name (), size);

          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (c4x_isr_reg_used_p (regno))
            {
              if (regno == DP_REGNO)
                {
                  insn = emit_insn (gen_push_dp ());
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (IS_EXT_REGNO (regno))
                    {
                      insn = emit_insn (gen_pushqf
                                        (gen_rtx_REG (QFmode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
            }
        }
      /* We need to clear the repeat mode flag if the ISR is
         going to use a RPTB instruction or uses the RC, RS, or RE
         registers.  */
      if (regs_ever_live[RC_REGNO]
          || regs_ever_live[RS_REGNO]
          || regs_ever_live[RE_REGNO])
        {
          insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* Reload DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
        {
          insn = emit_insn (gen_set_ldp_prologue
                            (gen_rtx_REG (QImode, DP_REGNO),
                             gen_rtx_SYMBOL_REF (QImode, "data_sec")));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                           gen_rtx_REG (QImode, SP_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              dont_push_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used, we don't need to push it.  */
              dont_push_ar3 = 1;
            }
        }
      else
        {
          /* If we use ar3, we need to push it.  */
          dont_push_ar3 = 0;
          if ((size != 0) || (current_function_args_size != 0))
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to increment SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT(size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                             gen_rtx_REG (QImode, R1_REGNO),
                                             GEN_INT(-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT(size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO),
                                        GEN_INT(size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             add the number.  */
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_pushqi
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else if ((! dont_push_ar3) || (regno != AR3_REGNO))
                {
                  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }
    }
}

void
c4x_expand_epilogue (void)
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For __naked__ function build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
        {
          if (! c4x_isr_reg_used_p (regno))
            continue;
          if (regno == DP_REGNO)
            {
              insn = emit_insn (gen_pop_dp ());
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /* We have to use unspec because the compiler will delete insns
                 that are not call-saved.  */
              if (IS_EXT_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }
      if (size)
        {
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_popqi
                            (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn
                (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
                            gen_rtx_MEM (QImode,
                                         gen_rtx_PLUS
                                         (QImode, gen_rtx_REG (QImode,
                                                               AR3_REGNO),
                                          constm1_rtx))));
              RTX_FRAME_RELATED_P (insn) = 1;

              /* We already have the return value and the fp,
                 so we need to add those to the stack.  */
              size += 2;
              jump = 1;
              dont_pop_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used for anything, we don't need to
                 pop it.  */
              dont_pop_ar3 = 1;
            }
        }
      else
        {
          dont_pop_ar3 = 0;     /* If we use ar3, we need to pop it.  */
          if (size || current_function_args_size)
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      /* Now restore the saved registers, putting in the delayed branch
         where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (regno == AR3_REGNO && dont_pop_ar3)
                continue;

              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_popqi_unspec
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
              else
                {
                  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }

      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              /* Restore the old FP.  */
              insn = emit_insn
                (gen_movqi
                 (gen_rtx_REG (QImode, AR3_REGNO),
                  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to decrement SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT(size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                             gen_rtx_REG (QImode, R3_REGNO),
                                             GEN_INT(-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT(size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO),
                                        GEN_INT(size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             subtract the number.  */
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (jump)
        {
          insn = emit_jump_insn (gen_return_indirect_internal
                                 (gen_rtx_REG (QImode, R2_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        {
          insn = emit_jump_insn (gen_return_from_epilogue ());
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}

int
c4x_null_epilogue_p (void)
{
  int regno;

  if (reload_completed
      && ! c4x_naked_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! (optimize < 2)
      && ! get_frame_size ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        if (regs_ever_live[regno] && ! call_used_regs[regno]
            && (regno != AR3_REGNO))
          return 1;
      return 0;
    }
  return 1;
}

int
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
         address.  Undo this silliness.  */
      op1 = XEXP (op1, 1);
    }

  if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
        {
          /* Alias analysis seems to do a better job if we force
             constant addresses to memory after reload.  */
          emit_insn (gen_load_immed_address (op0, op1));
          return 1;
        }
      else
        {
          /* Stick symbol or label address into the constant pool.  */
          op1 = force_const_mem (Pmode, op1);
        }
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
         constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
        op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
        {
          op0 = copy_rtx (op0);
          PUT_MODE (op0, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
        op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
        {
          op1 = copy_rtx (op1);
          PUT_MODE (op1, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}

void
c4x_emit_libcall (rtx libcall, enum rtx_code code,
                  enum machine_mode dmode, enum machine_mode smode,
                  int noperands, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
                                     operands[1], smode);
      equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
      break;

    case 3:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
                                     operands[1], smode, operands[2], smode);
      equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}

void
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
                   enum machine_mode mode, rtx *operands)
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}

void
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
                        enum machine_mode mode, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
                            gen_rtx_LSHIFTRT (HImode,
                                              gen_rtx_MULT (HImode,
                                                            gen_rtx_fmt_e (code, HImode, operands[1]),
                                                            gen_rtx_fmt_e (code, HImode, operands[2])),
                                              GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
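
/* Editor's note (a sketch, not from the original sources): the EQUIV
   expression above records that the libcall computes

       (truncate:QI (lshiftrt:HI (mult:HI (ext op1) (ext op2)) 32))

   i.e. the high 32 bits of the widened 64-bit product, where the
   extension is sign or zero extend depending on CODE.  */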

int
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
{
  rtx base = NULL_RTX;          /* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;          /* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;          /* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
         allow SP here---push_operand should recognize an operand
         being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
        return 0;

    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
        return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (mode != QImode && mode != QFmode)
          return 0;

        if (! REG_P (op0)
            || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
          return 0;
        base = XEXP (op1, 0);
        if (! REG_P (base))
          return 0;
        if (REGNO (base) != REGNO (op0))
          return 0;
        if (REG_P (XEXP (op1, 1)))
          indx = XEXP (op1, 1);
        else
          disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);
        enum rtx_code code0 = GET_CODE (op0);

        switch (code0)
          {
          case REG:
            if (REG_P (op1))
              {
                base = op0;     /* Base + index.  */
                indx = op1;
                if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
                  {
                    base = op1;
                    indx = op0;
                  }
              }
            else
              {
                base = op0;     /* Base + displacement.  */
                disp = op1;
              }
            break;

          default:
            return 0;
          }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        /* HImode and HFmode direct memory references aren't truly
           offsettable (consider case at end of data page).  We
           probably get better code by loading a pointer and using an
           indirect memory reference.  */
        if (mode == HImode || mode == HFmode)
          return 0;

        if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
          return 0;

        if (GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF)
          return 1;

        if (GET_CODE (op1) == CONST)
          return 1;
        return 0;
      }
      break;

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
        return 1;
      /* These need to be converted to a LO_SUM (...).
         LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
        return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
        return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
        return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
        return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
        return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
        return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
        return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
        return 0;
      if (mode == HImode || mode == HFmode)
        {
          /* The offset displacement must be legitimate.  */
          if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
            return 0;
        }
      else
        {
          if (! IS_DISP8_CONST (INTVAL (disp)))
            return 0;
        }
      /* Can't add an index with a disp.  */
      if (indx)
        return 0;
    }
  return 1;
}
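
/* Editor's illustrative summary (a sketch, not from the original
   sources) of addresses the routine above accepts, in TI assembler
   syntax: *AR0 (register indirect), *+AR0(5) (base plus 8-bit
   displacement), *+AR0(IR1) (base plus index), and auto-modified
   forms such as *AR0++ and *AR0++(IR0).  Direct LO_SUM (DP-relative)
   addressing is rejected for HImode and HFmode references, since
   those aren't truly offsettable.  */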

rtx
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
      if (mode == HImode || mode == HFmode)
        {
          /* We need to force the address into
             a register so that it is offsettable.  */
          rtx addr_reg = gen_reg_rtx (Pmode);
          emit_move_insn (addr_reg, orig);
          return addr_reg;
        }
      else
        {
          rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

          if (! TARGET_SMALL)
            emit_insn (gen_set_ldp (dp_reg, orig));

          return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
        }
    }

  return NULL_RTX;
}

/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.
   This is used in cse and loop optimization to determine
   if it is worthwhile storing a common address into a register.
   Unfortunately, the C4x address cost depends on other operands.  */

static int
c4x_address_cost (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
        rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
          return TARGET_SMALL ? 3 : 4;

        if (GET_CODE (op1) == CONST)
          {
            rtx offset = const0_rtx;

            op1 = eliminate_constant_term (op1, &offset);

            /* ??? These costs need rethinking...  */
            if (GET_CODE (op1) == LABEL_REF)
              return 3;

            if (GET_CODE (op1) != SYMBOL_REF)
              return 4;

            if (INTVAL (offset) == 0)
              return 3;

            return 4;
          }
        fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
        register rtx op0 = XEXP (addr, 0);
        register rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op0) != REG)
          break;

        switch (GET_CODE (op1))
          {
          default:
            break;

          case REG:
            /* This cost for REG+REG must be greater than the cost
               for REG if we want autoincrement addressing modes.  */
            return 2;

          case CONST_INT:
            /* The following tries to improve GIV combination
               in strength reduce but appears not to help.  */
            if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
              return 1;

            if (IS_DISP1_CONST (INTVAL (op1)))
              return 1;

            if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
              return 2;

            return 3;
          }
      }
    default:
      break;
    }

  return 4;
}

rtx
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  if (mode == CC_NOOVmode
      && (code == LE || code == GE || code == LT || code == GT))
    return NULL_RTX;

  cc_reg = gen_rtx_REG (mode, ST_REGNO);
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
                          gen_rtx_COMPARE (mode, x, y)));
  return cc_reg;
}

char *
c4x_output_cbranch (const char *form, rtx seq)
{
  int delayed = 0;
  int annultrue = 0;
  int annulfalse = 0;
  rtx delay;
  char *cp;
  static char str[100];

  if (final_sequence)
    {
      delay = XVECEXP (final_sequence, 0, 1);
      delayed = ! INSN_ANNULLED_BRANCH_P (seq);
      annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
      annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
    }
  strcpy (str, form);
  cp = &str [strlen (str)];
  if (delayed)
    {
      *cp++ = '%';
      *cp++ = '#';
    }
  if (annultrue)
    {
      *cp++ = 'a';
      *cp++ = 't';
    }
  if (annulfalse)
    {
      *cp++ = 'a';
      *cp++ = 'f';
    }
  *cp++ = '\t';
  *cp++ = '%';
  *cp++ = 'l';
  *cp++ = '1';
  *cp = 0;
  return str;
}

void
c4x_print_operand (FILE *file, rtx op, int letter)
{
  rtx op1;
  enum rtx_code code;

  switch (letter)
    {
    case '#':                   /* Delayed.  */
      if (final_sequence)
        fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':                   /* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
        fprintf (file, "@");
      break;

    case 'H':                   /* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':                   /* Reversed condition.  */
      code = reverse_condition (code);
      break;

    case 'L':                   /* Log 2 of constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':                   /* Ones complement of small constant.  */
      if (code != CONST_INT)
        fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
      return;

    case 'K':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL
          && code == MEM
          && GET_CODE (XEXP (op, 0)) == LO_SUM
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
          && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
        {
          op1 = XEXP (XEXP (op, 0), 1);
          if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
            {
              fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
              output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
              fprintf (file, "\n");
            }
        }
      return;

    case 'M':                   /* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL        /* Only used in asm statements.  */
          && code == MEM
          && (GET_CODE (XEXP (op, 0)) == CONST
              || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
        {
          fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
          output_address (XEXP (op, 0));
          fprintf (file, "\n\t");
        }
      return;

    case 'O':                   /* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
        break;
      else if (code == MEM)
        output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
        fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
        fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':                   /* Call.  */
      break;

    case 'U':                   /* Call/callu.  */
      if (code != SYMBOL_REF)
        fprintf (file, "u");
      return;

    default:
      break;
    }

  switch (code)
    {
    case REG:
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && ! TARGET_TI)
        fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
        fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
        char str[64];

        real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
                         sizeof (str), 0, 1);
        fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
      break;

    case NE:
      fprintf (file, "ne");
      break;

    case EQ:
      fprintf (file, "eq");
      break;

    case GE:
      fprintf (file, "ge");
      break;

    case GT:
      fprintf (file, "gt");
      break;

    case LE:
      fprintf (file, "le");
      break;

    case LT:
      fprintf (file, "lt");
      break;

    case GEU:
      fprintf (file, "hs");
      break;

    case GTU:
      fprintf (file, "hi");
      break;

    case LEU:
      fprintf (file, "ls");
      break;

    case LTU:
      fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    case CODE_LABEL:
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}

void
c4x_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
        rtx op0 = XEXP (XEXP (addr, 1), 0);
        rtx op1 = XEXP (XEXP (addr, 1), 1);

        if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
          fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
          fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
          fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
                   reg_names[REGNO (op0)], -INTVAL (op1));
        else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
          fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
                   reg_names[REGNO (op1)]);
        else
          fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:                  /* Indirect with displacement.  */
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0))
          {
            if (REG_P (op1))
              {
                if (IS_INDEX_REG (op0))
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op1)],
                             reg_names[REGNO (op0)]);   /* Index + base.  */
                  }
                else
                  {
                    fprintf (file, "*+%s(%s)",
                             reg_names[REGNO (op0)],
                             reg_names[REGNO (op1)]);   /* Base + index.  */
                  }
              }
            else if (INTVAL (op1) < 0)
              {
                fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         -INTVAL (op1));        /* Base - displacement.  */
              }
            else
              {
                fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
                         reg_names[REGNO (op0)],
                         INTVAL (op1));         /* Base + displacement.  */
              }
          }
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
          c4x_print_operand_address (file, op1);
        else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}

/* Return nonzero if the floating point operand will fit
   in the immediate field.  */

static int
c4x_immed_float_p (rtx op)
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
      convval[1] = 0;
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;                   /* 0.0  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;                   /* Precision doesn't fit.  */
  return (exponent <= 7)        /* Positive exp.  */
    && (exponent >= -7);        /* Negative exp.  */
}
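
/* Editor's illustrative note (a sketch, not from the original sources):
   under the test above, a value fits the short immediate float field
   only if its exponent lies in [-7, 7] and the low mantissa bits are
   all zero.  For example, 1.5 would pass, while 1024.0 (exponent 10,
   too large) or 1.1 (non-terminating binary mantissa) would not.  */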
2185 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2186 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2188 None of the last four instructions from the bottom of the block can
2189 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2190 BcondAT or RETIcondD.
2192 This routine scans the four previous insns for a jump insn, and if
2193 one is found, returns 1 so that we insert a nop instruction.
2194 This simple-minded strategy may add a nop when one is not
2195 required, say when there is a JUMP_INSN near the end of the
2196 block that doesn't get converted into a delayed branch.
2198 Note that we cannot have a call insn, since we don't generate
2199 repeat loops with calls in them (although I suppose we could,
2200 there's no benefit).
2202 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2205 c4x_rptb_nop_p (rtx insn)
2207 rtx start_label;
2208 int i;
2210 /* Extract the start label from the jump pattern (rptb_end). */
2211 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2213 /* If there is a label at the end of the loop we must insert
2214 a NOP. */
2215 do {
2216 insn = previous_insn (insn);
2217 } while (GET_CODE (insn) == NOTE
2218 || GET_CODE (insn) == USE
2219 || GET_CODE (insn) == CLOBBER);
2220 if (GET_CODE (insn) == CODE_LABEL)
2221 return 1;
2223 for (i = 0; i < 4; i++)
2225 /* Search back for prev non-note and non-label insn. */
2226 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2227 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2229 if (insn == start_label)
2230 return i == 0;
2232 insn = previous_insn (insn);
2235 /* If we have a jump instruction we should insert a NOP. If we
2236 hit the repeat block top we should only insert a NOP if the loop
2237 is empty. */
2238 if (GET_CODE (insn) == JUMP_INSN)
2239 return 1;
2240 insn = previous_insn (insn);
2242 return 0;
2246 /* The C4x looping instruction needs to be emitted at the top of the
2247 loop. Emitting the true RTL for a looping instruction at the top of
2248 the loop can cause problems with flow analysis. So instead, a dummy
2249 doloop insn is emitted at the end of the loop. This routine checks
2250 for the presence of this doloop insn and then searches back to the
2251 top of the loop, where it inserts the true looping insn (provided
2252 there are no instructions in the loop which would cause problems).
2253 Any additional labels can be emitted at this point. In addition, if
2254 the desired loop count register was not allocated, this routine does
2255 nothing.
2257 Before we can create a repeat block looping instruction we have to
2258 verify that no jump inside the loop targets a label outside it and
2259 that no jump outside the loop targets a label inside it. This can
2260 happen after the basic block reordering pass. The C4x cpu cannot handle this. */
2262 static int
2263 c4x_label_ref_used_p (rtx x, rtx code_label)
2265 enum rtx_code code;
2266 int i, j;
2267 const char *fmt;
2269 if (x == 0)
2270 return 0;
2272 code = GET_CODE (x);
2273 if (code == LABEL_REF)
2274 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2276 fmt = GET_RTX_FORMAT (code);
2277 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2279 if (fmt[i] == 'e')
2281 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2282 return 1;
2284 else if (fmt[i] == 'E')
2285 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2286 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2287 return 1;
2289 return 0;
2293 static int
2294 c4x_rptb_valid_p (rtx insn, rtx start_label)
2296 rtx end = insn;
2297 rtx start;
2298 rtx tmp;
2300 /* Find the start label. */
2301 for (; insn; insn = PREV_INSN (insn))
2302 if (insn == start_label)
2303 break;
2305 /* If the start label was not found then we cannot use a rptb or
2306 rpts. It was probably moved by the basic block reordering pass. */
2307 if (! insn)
2308 return 0;
2310 start = insn;
2311 /* If any jump inside this block targets a label outside it, we must fail. */
2312 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2314 if (GET_CODE (insn) == CODE_LABEL)
2316 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2317 if (GET_CODE (tmp) == JUMP_INSN
2318 && c4x_label_ref_used_p (tmp, insn))
2319 return 0;
2322 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2324 if (GET_CODE (insn) == CODE_LABEL)
2326 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2327 if (GET_CODE (tmp) == JUMP_INSN
2328 && c4x_label_ref_used_p (tmp, insn))
2329 return 0;
2332 /* If any jump outside this block targets a label inside it, we must fail. */
2333 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2335 if (GET_CODE (insn) == CODE_LABEL)
2337 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2338 if (GET_CODE (tmp) == JUMP_INSN
2339 && c4x_label_ref_used_p (tmp, insn))
2340 return 0;
2341 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2342 if (GET_CODE (tmp) == JUMP_INSN
2343 && c4x_label_ref_used_p (tmp, insn))
2344 return 0;
2348 /* All checks OK. */
2349 return 1;
2353 void
2354 c4x_rptb_insert (rtx insn)
2356 rtx end_label;
2357 rtx start_label;
2358 rtx new_start_label;
2359 rtx count_reg;
2361 /* If the count register has not been allocated to RC, say because
2362 there is a movmem pattern in the loop, then do not insert a
2363 RPTB instruction. Instead we emit a decrement and branch
2364 at the end of the loop. */
2365 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2366 if (REGNO (count_reg) != RC_REGNO)
2367 return;
2369 /* Extract the start label from the jump pattern (rptb_end). */
2370 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2372 if (! c4x_rptb_valid_p (insn, start_label))
2374 /* We cannot use the rptb insn. Replace it so reorg can use
2375 the delay slots of the jump insn. */
2376 emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
2377 emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
2378 emit_insn_before (gen_bge (start_label), insn);
2379 LABEL_NUSES (start_label)++;
2380 delete_insn (insn);
2381 return;
2384 end_label = gen_label_rtx ();
2385 LABEL_NUSES (end_label)++;
2386 emit_label_after (end_label, insn);
2388 new_start_label = gen_label_rtx ();
2389 LABEL_NUSES (new_start_label)++;
2391 for (; insn; insn = PREV_INSN (insn))
2393 if (insn == start_label)
2394 break;
2395 if (GET_CODE (insn) == JUMP_INSN &&
2396 JUMP_LABEL (insn) == start_label)
2397 redirect_jump (insn, new_start_label, 0);
2399 if (! insn)
2400 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2402 emit_label_after (new_start_label, insn);
2404 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2405 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2406 else
2407 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2408 if (LABEL_NUSES (start_label) == 0)
2409 delete_insn (start_label);
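/* Illustrative aside (sketch, not original code): in C terms the
   fallback above degrades the hardware repeat block into a counted
   software loop.  RC is assumed to be preloaded with the trip count
   minus one, which is what makes the emitted addqi3/cmpqi/bge
   sequence equivalent to the do-while below.  */
#if 0
static void
doloop_fallback (int rc)
{
  do
    {
      /* ... loop body ... */
    }
  while (--rc >= 0);   /* addqi3 -1; cmpqi 0; bge start_label.  */
}
#endif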
2413 /* We need to use direct addressing for large constants and addresses
2414 that cannot fit within an instruction. We must check for these
2415 after the final jump optimization pass, since this may
2416 introduce a local_move insn for a SYMBOL_REF. This pass
2417 must come before delayed branch slot filling since it can generate
2418 additional instructions.
2420 This function also fixes up RPTB style loops that didn't get RC
2421 allocated as the loop counter. */
2423 static void
2424 c4x_reorg (void)
2426 rtx insn;
2428 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2430 /* Only consider real insns. */
2431 if (INSN_P (insn))
2433 int insn_code_number;
2434 rtx old;
2436 insn_code_number = recog_memoized (insn);
2438 if (insn_code_number < 0)
2439 continue;
2441 /* Insert the RTX for RPTB at the top of the loop
2442 and a label at the end of the loop. */
2443 if (insn_code_number == CODE_FOR_rptb_end)
2444 c4x_rptb_insert(insn);
2446 /* We need to split the insn here. Otherwise the calls to
2447 force_const_mem will not work for load_immed_address. */
2448 old = insn;
2450 /* Don't split the insn if it has been deleted. */
2451 if (! INSN_DELETED_P (old))
2452 insn = try_split (PATTERN(old), old, 1);
2454 /* When not optimizing, the old insn will still be left around
2455 with only the 'deleted' bit set. Transform it into a note
2456 to avoid confusing subsequent processing. */
2457 if (INSN_DELETED_P (old))
2459 PUT_CODE (old, NOTE);
2460 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2461 NOTE_SOURCE_FILE (old) = 0;
2468 static int
2469 c4x_a_register (rtx op)
2471 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2475 static int
2476 c4x_x_register (rtx op)
2478 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2482 static int
2483 c4x_immed_int_constant (rtx op)
2485 if (GET_CODE (op) != CONST_INT)
2486 return 0;
2488 return GET_MODE (op) == VOIDmode
2489 || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2490 || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
2494 static int
2495 c4x_immed_float_constant (rtx op)
2497 if (GET_CODE (op) != CONST_DOUBLE)
2498 return 0;
2500 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2501 present this only means that a MEM rtx has been generated. It does
2502 not mean the rtx is really in memory. */
2504 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2509 c4x_shiftable_constant (rtx op)
2511 int i;
2512 int mask;
2513 int val = INTVAL (op);
2515 for (i = 0; i < 16; i++)
2517 if (val & (1 << i))
2518 break;
2520 mask = ((0xffff >> i) << 16) | 0xffff;
2521 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2522 : (val >> i) & mask))
2523 return i;
2524 return -1;
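/* Illustrative aside (standalone sketch of the test above, assuming
   IS_INT16_CONST accepts [-32768, 32767] and that >> on a negative
   int is an arithmetic shift, which is what the mask logic effects):
   a constant is "shiftable" when stripping its trailing zero bits
   leaves a signed 16-bit value, so it can be loaded as LDI + LSH.  */
#if 0
static int
shiftable (int val)
{
  int i = 0;

  while (i < 16 && ! (val & (1 << i)))
    i++;
  if ((val >> i) >= -32768 && (val >> i) <= 32767)
    return i;                   /* Shift count for the LSH.  */
  return -1;
}
/* shiftable (0x12340000) == 16: the constant can be materialized as
   LDI 0x1234 followed by LSH 16.  */
#endif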
2529 c4x_H_constant (rtx op)
2531 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2536 c4x_I_constant (rtx op)
2538 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2543 c4x_J_constant (rtx op)
2545 if (TARGET_C3X)
2546 return 0;
2547 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2551 static int
2552 c4x_K_constant (rtx op)
2554 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2555 return 0;
2556 return IS_INT5_CONST (INTVAL (op));
2561 c4x_L_constant (rtx op)
2563 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2567 static int
2568 c4x_N_constant (rtx op)
2570 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2574 static int
2575 c4x_O_constant (rtx op)
2577 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2581 /* The constraints do not have to check the register class,
2582 except when needed to discriminate between the constraints.
2583 The operand has been checked by the predicates to be valid. */
2585 /* ARx + 9-bit signed const or IRn
2586 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2587 We don't include the pre/post inc/dec forms here since
2588 they are handled by the <> constraints. */
2591 c4x_Q_constraint (rtx op)
2593 enum machine_mode mode = GET_MODE (op);
2595 if (GET_CODE (op) != MEM)
2596 return 0;
2597 op = XEXP (op, 0);
2598 switch (GET_CODE (op))
2600 case REG:
2601 return 1;
2603 case PLUS:
2605 rtx op0 = XEXP (op, 0);
2606 rtx op1 = XEXP (op, 1);
2608 if (! REG_P (op0))
2609 return 0;
2611 if (REG_P (op1))
2612 return 1;
2614 if (GET_CODE (op1) != CONST_INT)
2615 return 0;
2617 /* HImode and HFmode must be offsettable. */
2618 if (mode == HImode || mode == HFmode)
2619 return IS_DISP8_OFF_CONST (INTVAL (op1));
2621 return IS_DISP8_CONST (INTVAL (op1));
2623 break;
2625 default:
2626 break;
2628 return 0;
2632 /* ARx + 5-bit unsigned const
2633 *ARx, *+ARx(n) for n < 32. */
2636 c4x_R_constraint (rtx op)
2638 enum machine_mode mode = GET_MODE (op);
2640 if (TARGET_C3X)
2641 return 0;
2642 if (GET_CODE (op) != MEM)
2643 return 0;
2644 op = XEXP (op, 0);
2645 switch (GET_CODE (op))
2647 case REG:
2648 return 1;
2650 case PLUS:
2652 rtx op0 = XEXP (op, 0);
2653 rtx op1 = XEXP (op, 1);
2655 if (! REG_P (op0))
2656 return 0;
2658 if (GET_CODE (op1) != CONST_INT)
2659 return 0;
2661 /* HImode and HFmode must be offsettable. */
2662 if (mode == HImode || mode == HFmode)
2663 return IS_UINT5_CONST (INTVAL (op1) + 1);
2665 return IS_UINT5_CONST (INTVAL (op1));
2667 break;
2669 default:
2670 break;
2672 return 0;
2676 static int
2677 c4x_R_indirect (rtx op)
2679 enum machine_mode mode = GET_MODE (op);
2681 if (TARGET_C3X || GET_CODE (op) != MEM)
2682 return 0;
2684 op = XEXP (op, 0);
2685 switch (GET_CODE (op))
2687 case REG:
2688 return IS_ADDR_OR_PSEUDO_REG (op);
2690 case PLUS:
2692 rtx op0 = XEXP (op, 0);
2693 rtx op1 = XEXP (op, 1);
2695 /* HImode and HFmode must be offsettable. */
2696 if (mode == HImode || mode == HFmode)
2697 return IS_ADDR_OR_PSEUDO_REG (op0)
2698 && GET_CODE (op1) == CONST_INT
2699 && IS_UINT5_CONST (INTVAL (op1) + 1);
2701 return REG_P (op0)
2702 && IS_ADDR_OR_PSEUDO_REG (op0)
2703 && GET_CODE (op1) == CONST_INT
2704 && IS_UINT5_CONST (INTVAL (op1));
2706 break;
2708 default:
2709 break;
2711 return 0;
2715 /* ARx + 1-bit unsigned const or IRn
2716 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2717 We don't include the pre/post inc/dec forms here since
2718 they are handled by the <> constraints. */
2721 c4x_S_constraint (rtx op)
2723 enum machine_mode mode = GET_MODE (op);
2724 if (GET_CODE (op) != MEM)
2725 return 0;
2726 op = XEXP (op, 0);
2727 switch (GET_CODE (op))
2729 case REG:
2730 return 1;
2732 case PRE_MODIFY:
2733 case POST_MODIFY:
2735 rtx op0 = XEXP (op, 0);
2736 rtx op1 = XEXP (op, 1);
2738 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2739 || (op0 != XEXP (op1, 0)))
2740 return 0;
2742 op0 = XEXP (op1, 0);
2743 op1 = XEXP (op1, 1);
2744 return REG_P (op0) && REG_P (op1);
2745 /* Pre or post_modify with a displacement of 0 or 1
2746 should not be generated. */
2748 break;
2750 case PLUS:
2752 rtx op0 = XEXP (op, 0);
2753 rtx op1 = XEXP (op, 1);
2755 if (!REG_P (op0))
2756 return 0;
2758 if (REG_P (op1))
2759 return 1;
2761 if (GET_CODE (op1) != CONST_INT)
2762 return 0;
2764 /* HImode and HFmode must be offsettable. */
2765 if (mode == HImode || mode == HFmode)
2766 return IS_DISP1_OFF_CONST (INTVAL (op1));
2768 return IS_DISP1_CONST (INTVAL (op1));
2770 break;
2772 default:
2773 break;
2775 return 0;
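/* Illustrative aside (sketch; the ranges are inferred from the
   constraint comments above, not from the IS_*_CONST macros
   themselves): the displacement checks used by the Q, R and S
   constraints reduce to simple range tests.  */
#if 0
static int disp8_ok (int n) { return n > -256 && n < 256; }  /* Q.  */
static int uint5_ok (int n) { return n >= 0 && n < 32; }     /* R.  */
static int disp1_ok (int n) { return n >= -1 && n <= 1; }    /* S.  */
#endif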
2779 static int
2780 c4x_S_indirect (rtx op)
2782 enum machine_mode mode = GET_MODE (op);
2783 if (GET_CODE (op) != MEM)
2784 return 0;
2786 op = XEXP (op, 0);
2787 switch (GET_CODE (op))
2789 case PRE_DEC:
2790 case POST_DEC:
2791 if (mode != QImode && mode != QFmode)
2792 return 0;
2793 case PRE_INC:
2794 case POST_INC:
2795 op = XEXP (op, 0);
2797 case REG:
2798 return IS_ADDR_OR_PSEUDO_REG (op);
2800 case PRE_MODIFY:
2801 case POST_MODIFY:
2803 rtx op0 = XEXP (op, 0);
2804 rtx op1 = XEXP (op, 1);
2806 if (mode != QImode && mode != QFmode)
2807 return 0;
2809 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2810 || (op0 != XEXP (op1, 0)))
2811 return 0;
2813 op0 = XEXP (op1, 0);
2814 op1 = XEXP (op1, 1);
2815 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2816 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2817 /* Pre or post_modify with a displacement of 0 or 1
2818 should not be generated. */
2821 case PLUS:
2823 rtx op0 = XEXP (op, 0);
2824 rtx op1 = XEXP (op, 1);
2826 if (REG_P (op0))
2828 /* HImode and HFmode must be offsettable. */
2829 if (mode == HImode || mode == HFmode)
2830 return IS_ADDR_OR_PSEUDO_REG (op0)
2831 && GET_CODE (op1) == CONST_INT
2832 && IS_DISP1_OFF_CONST (INTVAL (op1));
2834 if (REG_P (op1))
2835 return (IS_INDEX_OR_PSEUDO_REG (op1)
2836 && IS_ADDR_OR_PSEUDO_REG (op0))
2837 || (IS_ADDR_OR_PSEUDO_REG (op1)
2838 && IS_INDEX_OR_PSEUDO_REG (op0));
2840 return IS_ADDR_OR_PSEUDO_REG (op0)
2841 && GET_CODE (op1) == CONST_INT
2842 && IS_DISP1_CONST (INTVAL (op1));
2845 break;
2847 default:
2848 break;
2850 return 0;
2854 /* Direct memory operand. */
2857 c4x_T_constraint (rtx op)
2859 if (GET_CODE (op) != MEM)
2860 return 0;
2861 op = XEXP (op, 0);
2863 if (GET_CODE (op) != LO_SUM)
2865 /* Allow call operands. */
2866 return GET_CODE (op) == SYMBOL_REF
2867 && GET_MODE (op) == Pmode
2868 && SYMBOL_REF_FUNCTION_P (op);
2871 /* HImode and HFmode are not offsettable. */
2872 if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2873 return 0;
2875 if ((GET_CODE (XEXP (op, 0)) == REG)
2876 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2877 return c4x_U_constraint (XEXP (op, 1));
2879 return 0;
2883 /* Symbolic operand. */
2886 c4x_U_constraint (rtx op)
2888 /* Don't allow direct addressing to an arbitrary constant. */
2889 return GET_CODE (op) == CONST
2890 || GET_CODE (op) == SYMBOL_REF
2891 || GET_CODE (op) == LABEL_REF;
2896 c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2898 if (GET_CODE (op) == MEM)
2900 enum rtx_code code = GET_CODE (XEXP (op, 0));
2902 if (code == PRE_INC
2903 || code == PRE_DEC
2904 || code == POST_INC
2905 || code == POST_DEC
2906 || code == PRE_MODIFY
2907 || code == POST_MODIFY)
2909 return 1;
2911 return 0;
2915 /* Match any operand. */
2918 any_operand (register rtx op ATTRIBUTE_UNUSED,
2919 enum machine_mode mode ATTRIBUTE_UNUSED)
2921 return 1;
2925 /* Nonzero if OP is a floating point value with value 0.0. */
2928 fp_zero_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2930 REAL_VALUE_TYPE r;
2932 if (GET_CODE (op) != CONST_DOUBLE)
2933 return 0;
2934 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2935 return REAL_VALUES_EQUAL (r, dconst0);
2940 const_operand (register rtx op, register enum machine_mode mode)
2942 switch (mode)
2944 case QFmode:
2945 case HFmode:
2946 if (GET_CODE (op) != CONST_DOUBLE
2947 || GET_MODE (op) != mode
2948 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2949 return 0;
2951 return c4x_immed_float_p (op);
2953 #if Pmode != QImode
2954 case Pmode:
2955 #endif
2956 case QImode:
2957 if (GET_CODE (op) != CONST_INT
2958 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
2959 || GET_MODE_CLASS (mode) != MODE_INT)
2960 return 0;
2962 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
2964 case HImode:
2965 return 0;
2967 default:
2968 return 0;
2974 stik_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2976 return c4x_K_constant (op);
2981 not_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2983 return c4x_N_constant (op);
2988 reg_operand (rtx op, enum machine_mode mode)
2990 if (GET_CODE (op) == SUBREG
2991 && GET_MODE (op) == QFmode)
2992 return 0;
2993 return register_operand (op, mode);
2998 mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3000 /* Allow (subreg:HF (reg:HI)) that may be generated for a union of an
3001 int and a long double. */
3002 if (GET_CODE (op) == SUBREG
3003 && (GET_MODE (op) == QFmode)
3004 && (GET_MODE (SUBREG_REG (op)) == QImode
3005 || GET_MODE (SUBREG_REG (op)) == HImode))
3006 return 1;
3007 return 0;
3012 reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3014 if (REG_P (op) || CONSTANT_P (op))
3015 return 1;
3016 return 0;
3021 not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3023 if (REG_P (op) || CONSTANT_P (op))
3024 return 1;
3025 if (GET_CODE (op) != MEM)
3026 return 0;
3027 op = XEXP (op, 0);
3028 switch (GET_CODE (op))
3030 case REG:
3031 return 1;
3033 case PLUS:
3035 rtx op0 = XEXP (op, 0);
3036 rtx op1 = XEXP (op, 1);
3038 if (! REG_P (op0))
3039 return 0;
3041 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3042 return 1;
3045 case LO_SUM:
3047 rtx op0 = XEXP (op, 0);
3049 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3050 return 1;
3052 break;
3054 case CONST:
3055 case SYMBOL_REF:
3056 case LABEL_REF:
3057 return 1;
3059 default:
3060 break;
3062 return 0;
3067 not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3069 if (REG_P (op) && REGNO (op) == RC_REGNO)
3070 return 0;
3071 return 1;
3075 /* Extended precision register R0-R1. */
3078 r0r1_reg_operand (rtx op, enum machine_mode mode)
3080 if (! reg_operand (op, mode))
3081 return 0;
3082 if (GET_CODE (op) == SUBREG)
3083 op = SUBREG_REG (op);
3084 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3088 /* Extended precision register R2-R3. */
3091 r2r3_reg_operand (rtx op, enum machine_mode mode)
3093 if (! reg_operand (op, mode))
3094 return 0;
3095 if (GET_CODE (op) == SUBREG)
3096 op = SUBREG_REG (op);
3097 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3101 /* Low extended precision register R0-R7. */
3104 ext_low_reg_operand (rtx op, enum machine_mode mode)
3106 if (! reg_operand (op, mode))
3107 return 0;
3108 if (GET_CODE (op) == SUBREG)
3109 op = SUBREG_REG (op);
3110 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3114 /* Extended precision register. */
3117 ext_reg_operand (rtx op, enum machine_mode mode)
3119 if (! reg_operand (op, mode))
3120 return 0;
3121 if (GET_CODE (op) == SUBREG)
3122 op = SUBREG_REG (op);
3123 if (! REG_P (op))
3124 return 0;
3125 return IS_EXT_OR_PSEUDO_REG (op);
3129 /* Standard precision register. */
3132 std_reg_operand (rtx op, enum machine_mode mode)
3134 if (! reg_operand (op, mode))
3135 return 0;
3136 if (GET_CODE (op) == SUBREG)
3137 op = SUBREG_REG (op);
3138 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3141 /* Standard precision or normal register. */
3144 std_or_reg_operand (rtx op, enum machine_mode mode)
3146 if (reload_in_progress)
3147 return std_reg_operand (op, mode);
3148 return reg_operand (op, mode);
3151 /* Address register. */
3154 addr_reg_operand (rtx op, enum machine_mode mode)
3156 if (! reg_operand (op, mode))
3157 return 0;
3158 return c4x_a_register (op);
3162 /* Index register. */
3165 index_reg_operand (rtx op, enum machine_mode mode)
3167 if (! reg_operand (op, mode))
3168 return 0;
3169 if (GET_CODE (op) == SUBREG)
3170 op = SUBREG_REG (op);
3171 return c4x_x_register (op);
3175 /* DP register. */
3178 dp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3180 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3184 /* SP register. */
3187 sp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3189 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3193 /* ST register. */
3196 st_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3198 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3202 /* RC register. */
3205 rc_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3207 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3212 call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3214 return (REG_P (op) || symbolic_address_operand (op, mode));
3218 /* Symbolic address operand. */
3221 symbolic_address_operand (register rtx op,
3222 enum machine_mode mode ATTRIBUTE_UNUSED)
3224 switch (GET_CODE (op))
3226 case CONST:
3227 case SYMBOL_REF:
3228 case LABEL_REF:
3229 return 1;
3230 default:
3231 return 0;
3236 /* Check dst operand of a move instruction. */
3239 dst_operand (rtx op, enum machine_mode mode)
3241 if (GET_CODE (op) == SUBREG
3242 && mixed_subreg_operand (op, mode))
3243 return 0;
3245 if (REG_P (op))
3246 return reg_operand (op, mode);
3248 return nonimmediate_operand (op, mode);
3252 /* Check src operand of two operand arithmetic instructions. */
3255 src_operand (rtx op, enum machine_mode mode)
3257 if (GET_CODE (op) == SUBREG
3258 && mixed_subreg_operand (op, mode))
3259 return 0;
3261 if (REG_P (op))
3262 return reg_operand (op, mode);
3264 if (mode == VOIDmode)
3265 mode = GET_MODE (op);
3267 if (GET_CODE (op) == CONST_INT)
3268 return (mode == QImode || mode == Pmode || mode == HImode)
3269 && c4x_I_constant (op);
3271 /* We don't like CONST_DOUBLE integers. */
3272 if (GET_CODE (op) == CONST_DOUBLE)
3273 return c4x_H_constant (op);
3275 /* Disallow symbolic addresses. Only the predicate
3276 symbolic_address_operand will match these. */
3277 if (GET_CODE (op) == SYMBOL_REF
3278 || GET_CODE (op) == LABEL_REF
3279 || GET_CODE (op) == CONST)
3280 return 0;
3282 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3283 access to symbolic addresses. These operands will get forced
3284 into a register and the movqi expander will generate a
3285 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3286 if (GET_CODE (op) == MEM
3287 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3288 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3289 || GET_CODE (XEXP (op, 0)) == CONST)))
3290 return !TARGET_EXPOSE_LDP &&
3291 ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3293 return general_operand (op, mode);
3298 src_hi_operand (rtx op, enum machine_mode mode)
3300 if (c4x_O_constant (op))
3301 return 1;
3302 return src_operand (op, mode);
3306 /* Check src operand of two operand logical instructions. */
3309 lsrc_operand (rtx op, enum machine_mode mode)
3311 if (mode == VOIDmode)
3312 mode = GET_MODE (op);
3314 if (mode != QImode && mode != Pmode)
3315 fatal_insn ("mode not QImode", op);
3317 if (GET_CODE (op) == CONST_INT)
3318 return c4x_L_constant (op) || c4x_J_constant (op);
3320 return src_operand (op, mode);
3324 /* Check src operand of two operand tricky instructions. */
3327 tsrc_operand (rtx op, enum machine_mode mode)
3329 if (mode == VOIDmode)
3330 mode = GET_MODE (op);
3332 if (mode != QImode && mode != Pmode)
3333 fatal_insn ("mode not QImode", op);
3335 if (GET_CODE (op) == CONST_INT)
3336 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3338 return src_operand (op, mode);
3342 /* Check src operand of two operand non immediate instructions. */
3345 nonimmediate_src_operand (rtx op, enum machine_mode mode)
3347 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3348 return 0;
3350 return src_operand (op, mode);
3354 /* Check logical src operand of two operand non immediate instructions. */
3357 nonimmediate_lsrc_operand (rtx op, enum machine_mode mode)
3359 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3360 return 0;
3362 return lsrc_operand (op, mode);
3367 reg_or_const_operand (rtx op, enum machine_mode mode)
3369 return reg_operand (op, mode) || const_operand (op, mode);
3373 /* Check for indirect operands allowable in parallel instruction. */
3376 par_ind_operand (rtx op, enum machine_mode mode)
3378 if (mode != VOIDmode && mode != GET_MODE (op))
3379 return 0;
3381 return c4x_S_indirect (op);
3385 /* Check for operands allowable in parallel instruction. */
3388 parallel_operand (rtx op, enum machine_mode mode)
3390 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3394 static void
3395 c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
3397 *base = 0;
3398 *incdec = 0;
3399 *index = 0;
3400 *disp = 0;
3402 if (GET_CODE (op) != MEM)
3403 fatal_insn ("invalid indirect memory address", op);
3405 op = XEXP (op, 0);
3406 switch (GET_CODE (op))
3408 case PRE_DEC:
3409 *base = REGNO (XEXP (op, 0));
3410 *incdec = 1;
3411 *disp = -1;
3412 return;
3414 case POST_DEC:
3415 *base = REGNO (XEXP (op, 0));
3416 *incdec = 1;
3417 *disp = 0;
3418 return;
3420 case PRE_INC:
3421 *base = REGNO (XEXP (op, 0));
3422 *incdec = 1;
3423 *disp = 1;
3424 return;
3426 case POST_INC:
3427 *base = REGNO (XEXP (op, 0));
3428 *incdec = 1;
3429 *disp = 0;
3430 return;
3432 case POST_MODIFY:
3433 *base = REGNO (XEXP (op, 0));
3434 if (REG_P (XEXP (XEXP (op, 1), 1)))
3436 *index = REGNO (XEXP (XEXP (op, 1), 1));
3437 *disp = 0; /* ??? */
3439 else
3440 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3441 *incdec = 1;
3442 return;
3444 case PRE_MODIFY:
3445 *base = REGNO (XEXP (op, 0));
3446 if (REG_P (XEXP (XEXP (op, 1), 1)))
3448 *index = REGNO (XEXP (XEXP (op, 1), 1));
3449 *disp = 1; /* ??? */
3451 else
3452 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3453 *incdec = 1;
3455 return;
3457 case REG:
3458 *base = REGNO (op);
3459 return;
3461 case PLUS:
3463 rtx op0 = XEXP (op, 0);
3464 rtx op1 = XEXP (op, 1);
3466 if (c4x_a_register (op0))
3468 if (c4x_x_register (op1))
3470 *base = REGNO (op0);
3471 *index = REGNO (op1);
3472 return;
3474 else if ((GET_CODE (op1) == CONST_INT
3475 && IS_DISP1_CONST (INTVAL (op1))))
3477 *base = REGNO (op0);
3478 *disp = INTVAL (op1);
3479 return;
3482 else if (c4x_x_register (op0) && c4x_a_register (op1))
3484 *base = REGNO (op1);
3485 *index = REGNO (op0);
3486 return;
3489 /* Fall through. */
3491 default:
3492 fatal_insn ("invalid indirect (S) memory address", op);
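/* Worked examples of the parse above (illustrative; BASE and INDEX
   hold the REGNOs of the registers named):

     *AR0++       POST_INC             base=AR0  incdec=1  disp=0
     *--AR1       PRE_DEC              base=AR1  incdec=1  disp=-1
     *+AR2(3)     PLUS reg,const       base=AR2  incdec=0  disp=3
     *+AR3(IR0)   PLUS reg,reg         base=AR3  index=IR0 disp=0
     *++AR4(IR1)  PRE_MODIFY reg,reg   base=AR4  index=IR1 disp=1  */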
3498 c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3500 int base0;
3501 int base1;
3502 int incdec0;
3503 int incdec1;
3504 int index0;
3505 int index1;
3506 int disp0;
3507 int disp1;
3509 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3510 return 1;
3512 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3513 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3515 if (store0 && store1)
3517 /* If we have two stores in parallel to the same address, then
3518 the C4x only executes one of the stores. This is unlikely to
3519 cause problems except when writing to a hardware device such
3520 as a FIFO since the second write will be lost. The user
3521 should flag the hardware location as being volatile so that
3522 we don't do this optimization. While it is unlikely that we
3523 have an aliased address if both locations are not marked
3524 volatile, it is probably safer to flag a potential conflict
3525 if either location is volatile. */
3526 if (! flag_argument_noalias)
3528 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3529 return 1;
3533 /* If we have a parallel load and a store to the same address, the load
3534 is performed first, so there is no conflict. Similarly, there is
3535 no conflict if we have parallel loads from the same address. */
3537 /* Cannot use auto increment or auto decrement twice for the same
3538 base register. */
3539 if (base0 == base1 && incdec0 && incdec1)
3540 return 1;
3542 /* It might be too confusing for GCC if we use a base register
3543 with a side effect and a memory reference using the same register
3544 in parallel. */
3545 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3546 return 1;
3548 /* We cannot optimize the case where op0 and op1 refer to the same
3549 address. */
3550 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3551 return 1;
3553 /* No conflict. */
3554 return 0;
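/* Illustrative aside (simplified standalone sketch of the rules
   above; the flag_argument_noalias refinement is omitted and the
   fields mirror what c4x_S_address_parse extracts).  */
#if 0
struct parsed_addr { int base, incdec, index, disp, vol; };

static int
conflicts (struct parsed_addr a, struct parsed_addr b,
           int store0, int store1)
{
  if (a.vol && b.vol)
    return 1;                              /* Both volatile.  */
  if (store0 && store1 && (a.vol || b.vol))
    return 1;                              /* Parallel stores.  */
  if (a.base == b.base && (a.incdec || b.incdec))
    return 1;                              /* Side effect on shared base.  */
  if (a.base == b.base && a.disp == b.disp && a.index == b.index)
    return 1;                              /* Same address.  */
  return 0;                                /* No conflict.  */
}
#endif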
3558 /* Check for while loop inside a decrement and branch loop. */
3561 c4x_label_conflict (rtx insn, rtx jump, rtx db)
3563 while (insn)
3565 if (GET_CODE (insn) == CODE_LABEL)
3567 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3568 return 1;
3569 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3570 return 0;
3572 insn = PREV_INSN (insn);
3574 return 1;
3578 /* Validate combination of operands for parallel load/store instructions. */
3581 valid_parallel_load_store (rtx *operands,
3582 enum machine_mode mode ATTRIBUTE_UNUSED)
3584 rtx op0 = operands[0];
3585 rtx op1 = operands[1];
3586 rtx op2 = operands[2];
3587 rtx op3 = operands[3];
3589 if (GET_CODE (op0) == SUBREG)
3590 op0 = SUBREG_REG (op0);
3591 if (GET_CODE (op1) == SUBREG)
3592 op1 = SUBREG_REG (op1);
3593 if (GET_CODE (op2) == SUBREG)
3594 op2 = SUBREG_REG (op2);
3595 if (GET_CODE (op3) == SUBREG)
3596 op3 = SUBREG_REG (op3);
3598 /* The patterns should only allow ext_low_reg_operand() or
3599 par_ind_operand() operands. Thus of the 4 operands, only 2
3600 should be REGs and the other 2 should be MEMs. */
3602 /* This test prevents the multipack pass from using this pattern if
3603 op0 is used as an index or base register in op2 or op3, since
3604 this combination will require reloading. */
3605 if (GET_CODE (op0) == REG
3606 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3607 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3608 return 0;
3610 /* LDI||LDI. */
3611 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3612 return (REGNO (op0) != REGNO (op2))
3613 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3614 && ! c4x_address_conflict (op1, op3, 0, 0);
3616 /* STI||STI. */
3617 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3618 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3619 && ! c4x_address_conflict (op0, op2, 1, 1);
3621 /* LDI||STI. */
3622 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3623 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3624 && ! c4x_address_conflict (op1, op2, 0, 1);
3626 /* STI||LDI. */
3627 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3628 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3629 && ! c4x_address_conflict (op0, op3, 1, 0);
3631 return 0;
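/* For example (illustrative sketch of the cases above), the LDI||LDI
   pairing accepts

     (set (reg R0) (mem *AR0))  with  (set (reg R1) (mem *AR1))

   provided R0 != R1 and the two addresses cannot conflict, while an
   LDI||STI pairing such as R0 = *AR0 with *AR1 = R1 passes the store
   flags (0, 1) to the conflict check.  */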
3636 valid_parallel_operands_4 (rtx *operands,
3637 enum machine_mode mode ATTRIBUTE_UNUSED)
3639 rtx op0 = operands[0];
3640 rtx op2 = operands[2];
3642 if (GET_CODE (op0) == SUBREG)
3643 op0 = SUBREG_REG (op0);
3644 if (GET_CODE (op2) == SUBREG)
3645 op2 = SUBREG_REG (op2);
3647 /* This test prevents the multipack pass from using this pattern if
3648 op0 is used as an index or base register in op2, since this combination
3649 will require reloading. */
3650 if (GET_CODE (op0) == REG
3651 && GET_CODE (op2) == MEM
3652 && reg_mentioned_p (op0, XEXP (op2, 0)))
3653 return 0;
3655 return 1;
3660 valid_parallel_operands_5 (rtx *operands,
3661 enum machine_mode mode ATTRIBUTE_UNUSED)
3663 int regs = 0;
3664 rtx op0 = operands[0];
3665 rtx op1 = operands[1];
3666 rtx op2 = operands[2];
3667 rtx op3 = operands[3];
3669 if (GET_CODE (op0) == SUBREG)
3670 op0 = SUBREG_REG (op0);
3671 if (GET_CODE (op1) == SUBREG)
3672 op1 = SUBREG_REG (op1);
3673 if (GET_CODE (op2) == SUBREG)
3674 op2 = SUBREG_REG (op2);
3676 /* The patterns should only allow ext_low_reg_operand() or
3677 par_ind_operand() operands. Operands 1 and 2 may be commutative
3678 but only one of them can be a register. */
3679 if (GET_CODE (op1) == REG)
3680 regs++;
3681 if (GET_CODE (op2) == REG)
3682 regs++;
3684 if (regs != 1)
3685 return 0;
3687 /* This test prevents the multipack pass from using this pattern if
3688 op0 is used as an index or base register in op3, since this combination
3689 will require reloading. */
3690 if (GET_CODE (op0) == REG
3691 && GET_CODE (op3) == MEM
3692 && reg_mentioned_p (op0, XEXP (op3, 0)))
3693 return 0;
3695 return 1;
3700 valid_parallel_operands_6 (rtx *operands,
3701 enum machine_mode mode ATTRIBUTE_UNUSED)
3703 int regs = 0;
3704 rtx op0 = operands[0];
3705 rtx op1 = operands[1];
3706 rtx op2 = operands[2];
3707 rtx op4 = operands[4];
3708 rtx op5 = operands[5];
3710 if (GET_CODE (op1) == SUBREG)
3711 op1 = SUBREG_REG (op1);
3712 if (GET_CODE (op2) == SUBREG)
3713 op2 = SUBREG_REG (op2);
3714 if (GET_CODE (op4) == SUBREG)
3715 op4 = SUBREG_REG (op4);
3716 if (GET_CODE (op5) == SUBREG)
3717 op5 = SUBREG_REG (op5);
3719 /* The patterns should only allow ext_low_reg_operand() or
3720 par_ind_operand() operands. Thus of the 4 input operands, only 2
3721 should be REGs and the other 2 should be MEMs. */
3723 if (GET_CODE (op1) == REG)
3724 regs++;
3725 if (GET_CODE (op2) == REG)
3726 regs++;
3727 if (GET_CODE (op4) == REG)
3728 regs++;
3729 if (GET_CODE (op5) == REG)
3730 regs++;
3732 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3733 Perhaps we should count the MEMs as well? */
3734 if (regs != 2)
3735 return 0;
3737 /* This test prevents the multipack pass from using this pattern if
3738 op0 is used as an index or base register in op4 or op5, since
3739 this combination will require reloading. */
3740 if (GET_CODE (op0) == REG
3741 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3742 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3743 return 0;
3745 return 1;
3749 /* Validate combination of src operands. Note that the operands have
3750 been screened by the src_operand predicate. We just have to check
3751 that the combination of operands is valid. If FORCE is set, ensure
3752 that the destination regno is valid if we have a 2 operand insn. */
3754 static int
3755 c4x_valid_operands (enum rtx_code code, rtx *operands,
3756 enum machine_mode mode ATTRIBUTE_UNUSED,
3757 int force)
3759 rtx op0;
3760 rtx op1;
3761 rtx op2;
3762 enum rtx_code code1;
3763 enum rtx_code code2;
3766 /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
3767 if (code == IF_THEN_ELSE)
3768 return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);
3770 if (code == COMPARE)
3772 op1 = operands[0];
3773 op2 = operands[1];
3775 else
3777 op1 = operands[1];
3778 op2 = operands[2];
3781 op0 = operands[0];
3783 if (GET_CODE (op0) == SUBREG)
3784 op0 = SUBREG_REG (op0);
3785 if (GET_CODE (op1) == SUBREG)
3786 op1 = SUBREG_REG (op1);
3787 if (GET_CODE (op2) == SUBREG)
3788 op2 = SUBREG_REG (op2);
3790 code1 = GET_CODE (op1);
3791 code2 = GET_CODE (op2);
3794 if (code1 == REG && code2 == REG)
3795 return 1;
3797 if (code1 == MEM && code2 == MEM)
3799 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3800 return 1;
3801 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3804 /* We cannot handle two MEMs or two CONSTS, etc. */
3805 if (code1 == code2)
3806 return 0;
3808 if (code1 == REG)
3810 switch (code2)
3812 case CONST_INT:
3813 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3814 return 1;
3815 break;
3817 case CONST_DOUBLE:
3818 if (! c4x_H_constant (op2))
3819 return 0;
3820 break;
3822 /* Any valid memory operand screened by src_operand is OK. */
3823 case MEM:
3824 break;
3826 default:
3827 fatal_insn ("c4x_valid_operands: Internal error", op2);
3828 break;
3831 if (GET_CODE (op0) == SCRATCH)
3832 return 1;
3834 if (!REG_P (op0))
3835 return 0;
3837 /* Check that we have a valid destination register for a two operand
3838 instruction. */
3839 return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
3843 /* Check non-commutative operators. */
3844 if (code == ASHIFTRT || code == LSHIFTRT
3845 || code == ASHIFT || code == COMPARE)
3846 return code2 == REG
3847 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3850 /* Assume MINUS is commutative since the subtract patterns
3851 also support the reverse subtract instructions. Since op1
3852 is not a register, and op2 is a register, op1 can only
3853 be a restricted memory operand for a shift instruction. */
3854 if (code2 == REG)
3856 switch (code1)
3858 case CONST_INT:
3859 break;
3861 case CONST_DOUBLE:
3862 if (! c4x_H_constant (op1))
3863 return 0;
3864 break;
3866 /* Any valid memory operand screened by src_operand is OK. */
3867 case MEM:
3868 break;
3870 default:
3871 abort ();
3872 break;
3875 if (GET_CODE (op0) == SCRATCH)
3876 return 1;
3878 if (!REG_P (op0))
3879 return 0;
3881 /* Check that we have a valid destination register for a two operand
3882 instruction. */
3883 return ! force || REGNO (op1) == REGNO (op0);
3886 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3887 return 1;
3889 return 0;
3893 int valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3896 /* If we are not optimizing then we have to let anything go and let
3897 reload fix things up. instantiate_decl in function.c can produce
3898 invalid insns by changing the offset of a memory operand from a
3899 valid one into an invalid one, when the second operand is also a
3900 memory operand. The alternative is not to allow two memory
3901 operands for an insn when not optimizing. The problem only rarely
3902 occurs, for example with the C-torture program DFcmp.c. */
3904 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
3909 legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3911 /* Compare only has 2 operands. */
3912 if (code == COMPARE)
3914 /* During RTL generation, force constants into pseudos so that
3915 they can get hoisted out of loops. This will tie up an extra
3916 register but can save an extra cycle. Only do this if loop
3917 optimization is enabled. (We cannot pull this trick for add and
3918 sub instructions since the flow pass won't find
3919 autoincrements etc.) This allows us to generate compare
3920 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
3921 of LDI *AR0++, R0; CMPI 42, R0.
3923 Note that expand_binops will try to load an expensive constant
3924 into a register if it is used within a loop. Unfortunately,
3925 the cost mechanism doesn't allow us to look at the other
3926 operand to decide whether the constant is expensive. */
3928 if (! reload_in_progress
3929 && TARGET_HOIST
3930 && optimize > 0
3931 && GET_CODE (operands[1]) == CONST_INT
3932 && rtx_cost (operands[1], code) > 1)
3933 operands[1] = force_reg (mode, operands[1]);
3935 if (! reload_in_progress
3936 && ! c4x_valid_operands (code, operands, mode, 0))
3937 operands[0] = force_reg (mode, operands[0]);
3938 return 1;
3941 /* We cannot do this for ADDI/SUBI insns since we would
3942 prevent the flow pass from finding autoincrement addressing
3943 opportunities. */
3944 if (! reload_in_progress
3945 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
3946 && TARGET_HOIST
3947 && optimize > 1
3948 && GET_CODE (operands[2]) == CONST_INT
3949 && rtx_cost (operands[2], code) > 1)
3950 operands[2] = force_reg (mode, operands[2]);
3952 /* We can get better code on a C30 if we force constant shift counts
3953 into a register. This way they can get hoisted out of loops,
3954 tying up a register but saving an instruction. The downside is
3955 that they may get allocated to an address or index register, and
3956 thus we will get a pipeline conflict if there is a nearby
3957 indirect address using an address register.
3959 Note that expand_binops will not try to load an expensive constant
3960 into a register if it is used within a loop for a shift insn. */
3962 if (! reload_in_progress
3963 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
3965 /* If the operand combination is invalid, we force operand1 into a
3966 register, preventing reload from having to do this at a
3967 later stage. */
3968 operands[1] = force_reg (mode, operands[1]);
3969 if (TARGET_FORCE)
3971 emit_move_insn (operands[0], operands[1]);
3972 operands[1] = copy_rtx (operands[0]);
3974 else
3976 /* Just in case... */
3977 if (! c4x_valid_operands (code, operands, mode, 0))
3978 operands[2] = force_reg (mode, operands[2]);
3982 /* Right shifts require a negative shift count, but GCC expects
3983 a positive count, so we emit a NEG. */
3984 if ((code == ASHIFTRT || code == LSHIFTRT)
3985 && (GET_CODE (operands[2]) != CONST_INT))
3986 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
3989 /* When the shift count is 32 or more the result is
3990 implementation dependent. We truncate the count to
3991 fit in 5 bits so that we do not emit invalid code when
3992 optimizing, such as trying to generate lhu2 with 20021124-1.c. */
3993 if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
3994 && (GET_CODE (operands[2]) == CONST_INT))
3995 && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
3996 operands[2]
3997 = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));
3999 return 1;
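/* Illustrative aside (standalone sketch of the clamp above;
   GET_MODE_BITSIZE (QImode) is 32 on this word-addressed target).  */
#if 0
static int
clamp_shift_count (int count, int bitsize)
{
  return count > bitsize - 1 ? count & (bitsize - 1) : count;
}
/* clamp_shift_count (33, 32) == 1; counts 0..31 pass unchanged.  */
#endif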
4003 /* The following predicates are used for instruction scheduling. */
4006 group1_reg_operand (rtx op, enum machine_mode mode)
4008 if (mode != VOIDmode && mode != GET_MODE (op))
4009 return 0;
4010 if (GET_CODE (op) == SUBREG)
4011 op = SUBREG_REG (op);
4012 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4017 group1_mem_operand (rtx op, enum machine_mode mode)
4019 if (mode != VOIDmode && mode != GET_MODE (op))
4020 return 0;
4022 if (GET_CODE (op) == MEM)
4024 op = XEXP (op, 0);
4025 if (GET_CODE (op) == PLUS)
4027 rtx op0 = XEXP (op, 0);
4028 rtx op1 = XEXP (op, 1);
4030 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4031 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4032 return 1;
4034 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4035 return 1;
4038 return 0;
4042 /* Return true if OP is any one of the address registers. */
4045 arx_reg_operand (rtx op, enum machine_mode mode)
4047 if (mode != VOIDmode && mode != GET_MODE (op))
4048 return 0;
4049 if (GET_CODE (op) == SUBREG)
4050 op = SUBREG_REG (op);
4051 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4055 static int
4056 c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
4058 if (mode != VOIDmode && mode != GET_MODE (op))
4059 return 0;
4060 if (GET_CODE (op) == SUBREG)
4061 op = SUBREG_REG (op);
4062 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4066 static int
4067 c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
4069 if (mode != VOIDmode && mode != GET_MODE (op))
4070 return 0;
4072 if (GET_CODE (op) == MEM)
4074 op = XEXP (op, 0);
4075 switch (GET_CODE (op))
4077 case PRE_DEC:
4078 case POST_DEC:
4079 case PRE_INC:
4080 case POST_INC:
4081 op = XEXP (op, 0);
4083 case REG:
4084 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4086 case PRE_MODIFY:
4087 case POST_MODIFY:
4088 if (REG_P (XEXP (op, 0)) && (! reload_completed
4089 || (REGNO (XEXP (op, 0)) == regno)))
4090 return 1;
4091 if (REG_P (XEXP (XEXP (op, 1), 1))
4092 && (! reload_completed
4093 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4094 return 1;
4095 break;
4097 case PLUS:
4099 rtx op0 = XEXP (op, 0);
4100 rtx op1 = XEXP (op, 1);
4102 if ((REG_P (op0) && (! reload_completed
4103 || (REGNO (op0) == regno)))
4104 || (REG_P (op1) && (! reload_completed
4105 || (REGNO (op1) == regno))))
4106 return 1;
4108 break;
4110 default:
4111 break;
4114 return 0;
4119 ar0_reg_operand (rtx op, enum machine_mode mode)
4121 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4126 ar0_mem_operand (rtx op, enum machine_mode mode)
4128 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4133 ar1_reg_operand (rtx op, enum machine_mode mode)
4135 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4140 ar1_mem_operand (rtx op, enum machine_mode mode)
4142 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4147 ar2_reg_operand (rtx op, enum machine_mode mode)
4149 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4154 ar2_mem_operand (rtx op, enum machine_mode mode)
4156 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4161 ar3_reg_operand (rtx op, enum machine_mode mode)
4163 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4168 ar3_mem_operand (rtx op, enum machine_mode mode)
4170 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4175 ar4_reg_operand (rtx op, enum machine_mode mode)
4177 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4182 ar4_mem_operand (rtx op, enum machine_mode mode)
4184 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4189 ar5_reg_operand (rtx op, enum machine_mode mode)
4191 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4196 ar5_mem_operand (rtx op, enum machine_mode mode)
4198 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4203 ar6_reg_operand (rtx op, enum machine_mode mode)
4205 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4210 ar6_mem_operand (rtx op, enum machine_mode mode)
4212 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4217 ar7_reg_operand (rtx op, enum machine_mode mode)
4219 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4224 ar7_mem_operand (rtx op, enum machine_mode mode)
4226 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4231 ir0_reg_operand (rtx op, enum machine_mode mode)
4233 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4238 ir0_mem_operand (rtx op, enum machine_mode mode)
4240 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4245 ir1_reg_operand (rtx op, enum machine_mode mode)
4247 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4252 ir1_mem_operand (rtx op, enum machine_mode mode)
4254 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4258 /* This is similar to operand_subword but allows autoincrement
4259 addressing. */
4262 c4x_operand_subword (rtx op, int i, int validate_address,
4263 enum machine_mode mode)
4265 if (mode != HImode && mode != HFmode)
4266 fatal_insn ("c4x_operand_subword: invalid mode", op);
4268 if (mode == HFmode && REG_P (op))
4269 fatal_insn ("c4x_operand_subword: invalid operand", op);
4271 if (GET_CODE (op) == MEM)
4273 enum rtx_code code = GET_CODE (XEXP (op, 0));
4274 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4275 enum machine_mode submode;
4277 submode = mode;
4278 if (mode == HImode)
4279 submode = QImode;
4280 else if (mode == HFmode)
4281 submode = QFmode;
4283 switch (code)
4285 case POST_INC:
4286 case PRE_INC:
4287 return gen_rtx_MEM (submode, XEXP (op, 0));
4289 case POST_DEC:
4290 case PRE_DEC:
4291 case PRE_MODIFY:
4292 case POST_MODIFY:
4293 /* We could handle these with some difficulty.
4294 e.g., *p-- => *(p-=2); *(p+1). */
4295 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4297 case SYMBOL_REF:
4298 case LABEL_REF:
4299 case CONST:
4300 case CONST_INT:
4301 fatal_insn ("c4x_operand_subword: invalid address", op);
4303 /* Even though offsettable_address_p considers (MEM
4304 (LO_SUM)) to be offsettable, it is not safe if the
4305 address is at the end of the data page since we also have
4306 to fix up the associated HIGH part. In this case where
4307 we are trying to split a HImode or HFmode memory
4308 reference, we would have to emit another insn to reload a
4309 new HIGH value. It's easier to disable LO_SUM memory references
4310 in HImode or HFmode and we probably get better code. */
4311 case LO_SUM:
4312 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4314 default:
4315 break;
4319 return operand_subword (op, i, validate_address, mode);
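/* Illustrative aside (sketch): on this word-addressed machine,
   subword I of an offsettable two-word (HImode/HFmode) reference
   simply lives at ADDRESS + I, which is why only plain and
   inc/dec-style addresses survive the checks above; the remaining
   side effects would need rewriting, e.g. *p-- => *(p-=2); *(p+1).  */
#if 0
static unsigned long
subword_address (unsigned long word_addr, int i)   /* I is 0 or 1.  */
{
  return word_addr + i;        /* Offsets count words, not bytes.  */
}
#endif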
4322 struct name_list
4324 struct name_list *next;
4325 const char *name;
4328 static struct name_list *global_head;
4329 static struct name_list *extern_head;
4332 /* Add NAME to the list of global symbols and remove it from the
4333 external list if present. */
4335 void
4336 c4x_global_label (const char *name)
4338 struct name_list *p, *last;
4340 /* Do not insert duplicate names, so linearly search through list of
4341 existing names. */
4342 p = global_head;
4343 while (p)
4345 if (strcmp (p->name, name) == 0)
4346 return;
4347 p = p->next;
4349 p = (struct name_list *) xmalloc (sizeof *p);
4350 p->next = global_head;
4351 p->name = name;
4352 global_head = p;
4354 /* Remove this name from ref list if present. */
4355 last = NULL;
4356 p = extern_head;
4357 while (p)
4359 if (strcmp (p->name, name) == 0)
4361 if (last)
4362 last->next = p->next;
4363 else
4364 extern_head = p->next;
4365 break;
4367 last = p;
4368 p = p->next;
4373 /* Add NAME to list of external symbols. */
4375 void
4376 c4x_external_ref (const char *name)
4378 struct name_list *p;
4380 /* Do not insert duplicate names. */
4381 p = extern_head;
4382 while (p)
4384 if (strcmp (p->name, name) == 0)
4385 return;
4386 p = p->next;
4389 /* Do not insert ref if global found. */
4390 p = global_head;
4391 while (p)
4393 if (strcmp (p->name, name) == 0)
4394 return;
4395 p = p->next;
4397 p = (struct name_list *) xmalloc (sizeof *p);
4398 p->next = extern_head;
4399 p->name = name;
4400 extern_head = p;
4403 /* We need to have a data section we can identify so that we can set
4404 the DP register back to a data pointer in the small memory model.
4405 This is only required for ISRs if we are paranoid that someone
4406 may have quietly changed this register. */
4407 static void
4408 c4x_file_start (void)
4410 default_file_start ();
4411 fprintf (asm_out_file, "\t.version\t%d\n", c4x_cpu_version);
4412 fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
4416 static void
4417 c4x_file_end (void)
4419 struct name_list *p;
4421 /* Output all external names that are not global. */
4422 p = extern_head;
4423 while (p)
4425 fprintf (asm_out_file, "\t.ref\t");
4426 assemble_name (asm_out_file, p->name);
4427 fprintf (asm_out_file, "\n");
4428 p = p->next;
4430 fprintf (asm_out_file, "\t.end\n");
4434 static void
4435 c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
4437 while (list != NULL_TREE
4438 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4439 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4440 list = TREE_CHAIN (list);
4441 if (list)
4442 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4443 *attributes);
4447 static void
4448 c4x_insert_attributes (tree decl, tree *attributes)
4450 switch (TREE_CODE (decl))
4452 case FUNCTION_DECL:
4453 c4x_check_attribute ("section", code_tree, decl, attributes);
4454 c4x_check_attribute ("const", pure_tree, decl, attributes);
4455 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4456 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4457 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4458 break;
4460 case VAR_DECL:
4461 c4x_check_attribute ("section", data_tree, decl, attributes);
4462 break;
4464 default:
4465 break;
4469 /* Table of valid machine attributes. */
4470 const struct attribute_spec c4x_attribute_table[] =
4472 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4473 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4474 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4475 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4476 { NULL, 0, 0, false, false, false, NULL }
4479 /* Handle an attribute requiring a FUNCTION_TYPE;
4480 arguments as in struct attribute_spec.handler. */
4481 static tree
4482 c4x_handle_fntype_attribute (tree *node, tree name,
4483 tree args ATTRIBUTE_UNUSED,
4484 int flags ATTRIBUTE_UNUSED,
4485 bool *no_add_attrs)
4487 if (TREE_CODE (*node) != FUNCTION_TYPE)
4489 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4490 IDENTIFIER_POINTER (name));
4491 *no_add_attrs = true;
4494 return NULL_TREE;
4498 /* !!! FIXME to emit RPTS correctly. */
4501 c4x_rptb_rpts_p (rtx insn, rtx op)
4503 /* The next insn should be our label marking where the
4504 repeat block starts. */
4505 insn = NEXT_INSN (insn);
4506 if (GET_CODE (insn) != CODE_LABEL)
4508 /* Some insns may have been shifted between the RPTB insn
4509 and the top label... They were probably destined to
4510 be moved out of the loop. For now, let's leave them
4511 where they are and abort when TARGET_DEBUG is set. We should
4512 probably move these insns before the repeat block insn. */
4513 if (TARGET_DEBUG)
4514 fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
4515 insn);
4516 return 0;
4519 /* Skip any notes. */
4520 insn = next_nonnote_insn (insn);
4522 /* This should be our first insn in the loop. */
4523 if (! INSN_P (insn))
4524 return 0;
4526 /* Skip any notes. */
4527 insn = next_nonnote_insn (insn);
4529 if (! INSN_P (insn))
4530 return 0;
4532 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4533 return 0;
4535 if (TARGET_RPTS)
4536 return 1;
4538 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4542 /* Check if register r11 is used as the destination of an insn. */
4544 static int
4545 c4x_r11_set_p(rtx x)
4547 rtx set;
4548 int i, j;
4549 const char *fmt;
4551 if (x == 0)
4552 return 0;
4554 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4555 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4557 if (INSN_P (x) && (set = single_set (x)))
4558 x = SET_DEST (set);
4560 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4561 return 1;
4563 fmt = GET_RTX_FORMAT (GET_CODE (x));
4564 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4566 if (fmt[i] == 'e')
4568 if (c4x_r11_set_p (XEXP (x, i)))
4569 return 1;
4571 else if (fmt[i] == 'E')
4572 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4573 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4574 return 1;
4576 return 0;
4580 /* The c4x sometimes has a problem when the insn before the laj insn
4581 sets the r11 register. Check for this situation. */
4584 c4x_check_laj_p (rtx insn)
4586 insn = prev_nonnote_insn (insn);
4588 /* If this is the start of the function no nop is needed. */
4589 if (insn == 0)
4590 return 0;
4592 /* If the previous insn is a code label we have to insert a nop. This
4593 could be a jump or table jump. We can find the normal jumps by
4594 scanning the function but this will not find table jumps. */
4595 if (GET_CODE (insn) == CODE_LABEL)
4596 return 1;
4598 /* If the previous insn sets register r11 we have to insert a nop. */
4599 if (c4x_r11_set_p (insn))
4600 return 1;
4602 /* No nop needed. */
4603 return 0;
4607 /* Adjust the cost of a scheduling dependency. Return the new cost of
4608 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4609 A set of an address register followed by a use incurs a 2 cycle
4610 stall (reduced to a single cycle on the c40 using LDA), while
4611 a read of an address register followed by a use incurs a single cycle stall. */
4613 #define SET_USE_COST 3
4614 #define SETLDA_USE_COST 2
4615 #define READ_USE_COST 2
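/* The per-register tests below all take the worst case over the same
   three relations; a compact equivalent for a single address register
   (illustrative only) is:  */
#if 0
static int
arx_dep_cost (int set_then_use, int setlda_then_use, int read_then_use)
{
  int max = 0;

  if (set_then_use && SET_USE_COST > max)
    max = SET_USE_COST;
  if (setlda_then_use && SETLDA_USE_COST > max)
    max = SETLDA_USE_COST;
  if (read_then_use && READ_USE_COST > max)
    max = READ_USE_COST;
  return max;
}
#endif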
4617 static int
4618 c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4620 /* Don't worry about this until we know what registers have been
4621 assigned. */
4622 if (flag_schedule_insns == 0 && ! reload_completed)
4623 return 0;
  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
         cycles later.  */
      if (TARGET_C3X)
        {
          if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;
        }
      else
        {
          /* This could be significantly optimized.  We should look
             to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
             insn uses ar0-ar7.  We then test if the same register
             is used.  The tricky bit is that some operands will
             use several registers...  */
          if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

          if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
        }
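
      /* MAX now holds the worst-case penalty found over all matching
         set/use pairs above; zero means no address-register hazard
         applies and the cost from the md file stands.  */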
      if (max)
        cost = max;

      /* For other data dependencies, the default cost specified in the
         md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
         cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
         cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
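
/* Note: this function is the target's scheduling-cost hook
   (registered, presumably, as TARGET_SCHED_ADJUST_COST in the target
   vector defined earlier in this file), so the scheduler consults it
   for every dependence edge it prices.  */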
void
c4x_init_builtins (void)
{
  tree endlink = void_list_node;

  lang_hooks.builtin_function ("fast_ftoi",
                               build_function_type
                               (integer_type_node,
                                tree_cons (NULL_TREE, double_type_node,
                                           endlink)),
                               C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
  lang_hooks.builtin_function ("ansi_ftoi",
                               build_function_type
                               (integer_type_node,
                                tree_cons (NULL_TREE, double_type_node,
                                           endlink)),
                               C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
                               NULL_TREE);
  if (TARGET_C3X)
    lang_hooks.builtin_function ("fast_imult",
                                 build_function_type
                                 (integer_type_node,
                                  tree_cons (NULL_TREE, integer_type_node,
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
                                 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
                                 NULL_TREE);
  else
    {
      lang_hooks.builtin_function ("toieee",
                                   build_function_type
                                   (double_type_node,
                                    tree_cons (NULL_TREE, double_type_node,
                                               endlink)),
                                   C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
                                   NULL_TREE);
      lang_hooks.builtin_function ("frieee",
                                   build_function_type
                                   (double_type_node,
                                    tree_cons (NULL_TREE, double_type_node,
                                               endlink)),
                                   C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
                                   NULL_TREE);
      lang_hooks.builtin_function ("fast_invf",
                                   build_function_type
                                   (double_type_node,
                                    tree_cons (NULL_TREE, double_type_node,
                                               endlink)),
                                   C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
                                   NULL_TREE);
    }
}
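
/* Illustrative usage (hypothetical user code, not part of this file):
   the builtins are called like ordinary functions, e.g.

        int   n = fast_ftoi (3.7);
        int   m = ansi_ftoi (3.7);
        float r = fast_invf (x);

   fast_imult is registered only when compiling for the c3x; toieee,
   frieee and fast_invf only when compiling for the c4x.  */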
static rtx
c4x_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;
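
      /* The two cases above differ only in the pattern emitted:
         fast_ftoi maps directly onto the hardware FIX instruction,
         whereas ansi_ftoi goes through fix_truncqfqi2, which
         presumably adds the fix-up code needed for ISO C
         truncate-toward-zero semantics.  */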
    case C4X_BUILTIN_MPYI:
      if (! TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (register_operand (r0, QFmode))
        {
          r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
          emit_move_insn (r1, r0);
          r0 = r1;
        }
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      if (TARGET_C3X)
        break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  return NULL_RTX;
}
static void
c4x_init_libfuncs (void)
{
  set_optab_libfunc (smul_optab, QImode, "__mulqi3");
  set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
  set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
  set_optab_libfunc (smod_optab, QImode, "__modqi3");
  set_optab_libfunc (umod_optab, QImode, "__umodqi3");
  set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
  set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
  set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
  set_optab_libfunc (smul_optab, HImode, "__mulhi3");
  set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
  set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
  set_optab_libfunc (smod_optab, HImode, "__modhi3");
  set_optab_libfunc (umod_optab, HImode, "__umodhi3");
  set_optab_libfunc (ffs_optab,  QImode, "__ffs");
  smulhi3_libfunc           = init_one_libfunc ("__smulhi3_high");
  umulhi3_libfunc           = init_one_libfunc ("__umulhi3_high");
  fix_truncqfhi2_libfunc    = init_one_libfunc ("__fix_truncqfhi2");
  fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
  fix_trunchfhi2_libfunc    = init_one_libfunc ("__fix_trunchfhi2");
  fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
  floathiqf2_libfunc        = init_one_libfunc ("__floathiqf2");
  floatunshiqf2_libfunc     = init_one_libfunc ("__ufloathiqf2");
  floathihf2_libfunc        = init_one_libfunc ("__floathihf2");
  floatunshihf2_libfunc     = init_one_libfunc ("__ufloathihf2");
}
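
/* As an illustration: with the entries above, a QImode (32 bits on
   this target) signed division that has no inline expansion becomes a
   call to __divqi3, while the HImode (64-bit) high-part multiplies are
   reached through the smulhi3_libfunc/umulhi3_libfunc handles declared
   at the top of this file.  */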
static void
c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
                       tree decl ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}

static void
c4x_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}

#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  HOST_WIDE_INT val;

  switch (code)
    {
      /* Some small integers are effectively free for the C40.  We should
         also consider if we are using the small memory model.  With
         the big memory model we require an extra insn for a constant
         loaded from memory.  */

    case CONST_INT:
      val = INTVAL (x);
      if (c4x_J_constant (x))
        *total = 0;
      else if (! TARGET_C3X
               && outer_code == AND
               && (val == 255 || val == 65535))
        *total = 0;
      else if (! TARGET_C3X
               && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
               && (val == 16 || val == 24))
        *total = 0;
      else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
        *total = 3;
      else if (LOGICAL_CODE_P (outer_code)
               ? c4x_L_constant (x) : c4x_I_constant (x))
        *total = 2;
      else
        *total = 4;
      return true;
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 4;
      return true;

    case CONST_DOUBLE:
      if (c4x_H_constant (x))
        *total = 2;
      else if (GET_MODE (x) == QFmode)
        *total = 4;
      else
        *total = 8;
      return true;

      /* ??? Note that we return true, rather than false so that rtx_cost
         doesn't include the constant costs.  Otherwise expand_mult will
         think that it is cheaper to synthesize a multiply rather than to
         use a multiply instruction.  I think this is because the algorithm
         synth_mult doesn't take into account the loading of the operands,
         whereas the calculation of mult_cost does.  */
    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              || TARGET_MPYI ? 1 : 14);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              ? 15 : 50);
      return true;

    default:
      return false;
    }
}
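
/* Worked example (illustrative): for (mult:QI (reg) (reg)) without a
   hardware integer multiply (! TARGET_MPYI), the MULT case reports
   COSTS_N_INSNS (14), steering expand_mult toward a shift/add
   synthesis; with TARGET_MPYI, or in a float mode, the single-insn
   cost keeps the multiply instruction.  */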
/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  */

static void
c4x_external_libcall (rtx fun)
{
  /* This is only needed to keep asm30 happy for ___divqf3 etc.  */
  c4x_external_ref (XSTR (fun, 0));
}

/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, AR0_REGNO);
}
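
/* In other words, a caller expecting an aggregate return value passes
   the address of the return slot in AR0, and since INCOMING is ignored
   above, the callee looks for it in the same register.  */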