1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option) any later version.
15 GCC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
28 #include "coretypes.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 #include "conditions.h"
52 #include "target-def.h"
56 rtx fix_truncqfhi2_libfunc
;
57 rtx fixuns_truncqfhi2_libfunc
;
58 rtx fix_trunchfhi2_libfunc
;
59 rtx fixuns_trunchfhi2_libfunc
;
60 rtx floathiqf2_libfunc
;
61 rtx floatunshiqf2_libfunc
;
62 rtx floathihf2_libfunc
;
63 rtx floatunshihf2_libfunc
;
65 static int c4x_leaf_function
;
67 static const char *const float_reg_names
[] = FLOAT_REGISTER_NAMES
;
69 /* Array of the smallest class containing reg number REGNO, indexed by
70 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
71 registers are available and set the class to NO_REGS for registers
72 that the target switches say are unavailable. */
74 enum reg_class c4x_regclass_map
[FIRST_PSEUDO_REGISTER
] =
76 /* Reg Modes Saved. */
77 R0R1_REGS
, /* R0 QI, QF, HF No. */
78 R0R1_REGS
, /* R1 QI, QF, HF No. */
79 R2R3_REGS
, /* R2 QI, QF, HF No. */
80 R2R3_REGS
, /* R3 QI, QF, HF No. */
81 EXT_LOW_REGS
, /* R4 QI, QF, HF QI. */
82 EXT_LOW_REGS
, /* R5 QI, QF, HF QI. */
83 EXT_LOW_REGS
, /* R6 QI, QF, HF QF. */
84 EXT_LOW_REGS
, /* R7 QI, QF, HF QF. */
85 ADDR_REGS
, /* AR0 QI No. */
86 ADDR_REGS
, /* AR1 QI No. */
87 ADDR_REGS
, /* AR2 QI No. */
88 ADDR_REGS
, /* AR3 QI QI. */
89 ADDR_REGS
, /* AR4 QI QI. */
90 ADDR_REGS
, /* AR5 QI QI. */
91 ADDR_REGS
, /* AR6 QI QI. */
92 ADDR_REGS
, /* AR7 QI QI. */
93 DP_REG
, /* DP QI No. */
94 INDEX_REGS
, /* IR0 QI No. */
95 INDEX_REGS
, /* IR1 QI No. */
96 BK_REG
, /* BK QI QI. */
97 SP_REG
, /* SP QI No. */
98 ST_REG
, /* ST CC No. */
99 NO_REGS
, /* DIE/IE No. */
100 NO_REGS
, /* IIE/IF No. */
101 NO_REGS
, /* IIF/IOF No. */
102 INT_REGS
, /* RS QI No. */
103 INT_REGS
, /* RE QI No. */
104 RC_REG
, /* RC QI No. */
105 EXT_REGS
, /* R8 QI, QF, HF QI. */
106 EXT_REGS
, /* R9 QI, QF, HF No. */
107 EXT_REGS
, /* R10 QI, QF, HF No. */
108 EXT_REGS
, /* R11 QI, QF, HF No. */
111 enum machine_mode c4x_caller_save_map
[FIRST_PSEUDO_REGISTER
] =
113 /* Reg Modes Saved. */
114 HFmode
, /* R0 QI, QF, HF No. */
115 HFmode
, /* R1 QI, QF, HF No. */
116 HFmode
, /* R2 QI, QF, HF No. */
117 HFmode
, /* R3 QI, QF, HF No. */
118 QFmode
, /* R4 QI, QF, HF QI. */
119 QFmode
, /* R5 QI, QF, HF QI. */
120 QImode
, /* R6 QI, QF, HF QF. */
121 QImode
, /* R7 QI, QF, HF QF. */
122 QImode
, /* AR0 QI No. */
123 QImode
, /* AR1 QI No. */
124 QImode
, /* AR2 QI No. */
125 QImode
, /* AR3 QI QI. */
126 QImode
, /* AR4 QI QI. */
127 QImode
, /* AR5 QI QI. */
128 QImode
, /* AR6 QI QI. */
129 QImode
, /* AR7 QI QI. */
130 VOIDmode
, /* DP QI No. */
131 QImode
, /* IR0 QI No. */
132 QImode
, /* IR1 QI No. */
133 QImode
, /* BK QI QI. */
134 VOIDmode
, /* SP QI No. */
135 VOIDmode
, /* ST CC No. */
136 VOIDmode
, /* DIE/IE No. */
137 VOIDmode
, /* IIE/IF No. */
138 VOIDmode
, /* IIF/IOF No. */
139 QImode
, /* RS QI No. */
140 QImode
, /* RE QI No. */
141 VOIDmode
, /* RC QI No. */
142 QFmode
, /* R8 QI, QF, HF QI. */
143 HFmode
, /* R9 QI, QF, HF No. */
144 HFmode
, /* R10 QI, QF, HF No. */
145 HFmode
, /* R11 QI, QF, HF No. */
149 /* Test and compare insns in c4x.md store the information needed to
150 generate branch and scc insns here. */
155 const char *c4x_rpts_cycles_string
;
156 int c4x_rpts_cycles
= 0; /* Max. cycles for RPTS. */
157 const char *c4x_cpu_version_string
;
158 int c4x_cpu_version
= 40; /* CPU version C30/31/32/33/40/44. */
160 /* Pragma definitions. */
162 tree code_tree
= NULL_TREE
;
163 tree data_tree
= NULL_TREE
;
164 tree pure_tree
= NULL_TREE
;
165 tree noreturn_tree
= NULL_TREE
;
166 tree interrupt_tree
= NULL_TREE
;
167 tree naked_tree
= NULL_TREE
;
169 /* Forward declarations */
170 static int c4x_isr_reg_used_p
PARAMS ((unsigned int));
171 static int c4x_leaf_function_p
PARAMS ((void));
172 static int c4x_naked_function_p
PARAMS ((void));
173 static int c4x_immed_float_p
PARAMS ((rtx
));
174 static int c4x_a_register
PARAMS ((rtx
));
175 static int c4x_x_register
PARAMS ((rtx
));
176 static int c4x_immed_int_constant
PARAMS ((rtx
));
177 static int c4x_immed_float_constant
PARAMS ((rtx
));
178 static int c4x_K_constant
PARAMS ((rtx
));
179 static int c4x_N_constant
PARAMS ((rtx
));
180 static int c4x_O_constant
PARAMS ((rtx
));
181 static int c4x_R_indirect
PARAMS ((rtx
));
182 static int c4x_S_indirect
PARAMS ((rtx
));
183 static void c4x_S_address_parse
PARAMS ((rtx
, int *, int *, int *, int *));
184 static int c4x_valid_operands
PARAMS ((enum rtx_code
, rtx
*,
185 enum machine_mode
, int));
186 static int c4x_arn_reg_operand
PARAMS ((rtx
, enum machine_mode
, unsigned int));
187 static int c4x_arn_mem_operand
PARAMS ((rtx
, enum machine_mode
, unsigned int));
188 static void c4x_file_start
PARAMS ((void));
189 static void c4x_file_end
PARAMS ((void));
190 static void c4x_check_attribute
PARAMS ((const char *, tree
, tree
, tree
*));
191 static int c4x_r11_set_p
PARAMS ((rtx
));
192 static int c4x_rptb_valid_p
PARAMS ((rtx
, rtx
));
193 static void c4x_reorg
PARAMS ((void));
194 static int c4x_label_ref_used_p
PARAMS ((rtx
, rtx
));
195 static tree c4x_handle_fntype_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
196 const struct attribute_spec c4x_attribute_table
[];
197 static void c4x_insert_attributes
PARAMS ((tree
, tree
*));
198 static void c4x_asm_named_section
PARAMS ((const char *, unsigned int));
199 static int c4x_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
200 static void c4x_globalize_label
PARAMS ((FILE *, const char *));
201 static bool c4x_rtx_costs
PARAMS ((rtx
, int, int, int *));
202 static int c4x_address_cost
PARAMS ((rtx
));
/* Initialize the GCC target structure.  */
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg
244 struct gcc_target targetm
= TARGET_INITIALIZER
;
246 /* Override command line options.
247 Called once after all options have been parsed.
248 Mostly we process the processor
249 type and sometimes adjust other TARGET_ options. */
252 c4x_override_options ()
254 if (c4x_rpts_cycles_string
)
255 c4x_rpts_cycles
= atoi (c4x_rpts_cycles_string
);
260 c4x_cpu_version
= 30;
262 c4x_cpu_version
= 31;
264 c4x_cpu_version
= 32;
266 c4x_cpu_version
= 33;
268 c4x_cpu_version
= 40;
270 c4x_cpu_version
= 44;
272 c4x_cpu_version
= 40;
274 /* -mcpu=xx overrides -m40 etc. */
275 if (c4x_cpu_version_string
)
277 const char *p
= c4x_cpu_version_string
;
279 /* Also allow -mcpu=c30 etc. */
280 if (*p
== 'c' || *p
== 'C')
282 c4x_cpu_version
= atoi (p
);
285 target_flags
&= ~(C30_FLAG
| C31_FLAG
| C32_FLAG
| C33_FLAG
|
286 C40_FLAG
| C44_FLAG
);
288 switch (c4x_cpu_version
)
290 case 30: target_flags
|= C30_FLAG
; break;
291 case 31: target_flags
|= C31_FLAG
; break;
292 case 32: target_flags
|= C32_FLAG
; break;
293 case 33: target_flags
|= C33_FLAG
; break;
294 case 40: target_flags
|= C40_FLAG
; break;
295 case 44: target_flags
|= C44_FLAG
; break;
297 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version
);
298 c4x_cpu_version
= 40;
299 target_flags
|= C40_FLAG
;
302 if (TARGET_C30
|| TARGET_C31
|| TARGET_C32
|| TARGET_C33
)
303 target_flags
|= C3X_FLAG
;
305 target_flags
&= ~C3X_FLAG
;
307 /* Convert foo / 8.0 into foo * 0.125, etc. */
308 set_fast_math_flags (1);
310 /* We should phase out the following at some stage.
311 This provides compatibility with the old -mno-aliases option. */
312 if (! TARGET_ALIASES
&& ! flag_argument_noalias
)
313 flag_argument_noalias
= 1;
315 /* We're C4X floating point, not IEEE floating point. */
316 memset (real_format_for_mode
, 0, sizeof real_format_for_mode
);
317 real_format_for_mode
[QFmode
- QFmode
] = &c4x_single_format
;
318 real_format_for_mode
[HFmode
- QFmode
] = &c4x_extended_format
;
322 /* This is called before c4x_override_options. */
325 c4x_optimization_options (level
, size
)
326 int level ATTRIBUTE_UNUSED
;
327 int size ATTRIBUTE_UNUSED
;
329 /* Scheduling before register allocation can screw up global
330 register allocation, especially for functions that use MPY||ADD
331 instructions. The benefit we gain we get by scheduling before
332 register allocation is probably marginal anyhow. */
333 flag_schedule_insns
= 0;
337 /* Write an ASCII string. */
339 #define C4X_ASCII_LIMIT 40
342 c4x_output_ascii (stream
, ptr
, len
)
347 char sbuf
[C4X_ASCII_LIMIT
+ 1];
348 int s
, l
, special
, first
= 1, onlys
;
351 fprintf (stream
, "\t.byte\t");
353 for (s
= l
= 0; len
> 0; --len
, ++ptr
)
357 /* Escape " and \ with a \". */
358 special
= *ptr
== '\"' || *ptr
== '\\';
360 /* If printable - add to buff. */
361 if ((! TARGET_TI
|| ! special
) && *ptr
>= 0x20 && *ptr
< 0x7f)
366 if (s
< C4X_ASCII_LIMIT
- 1)
381 fprintf (stream
, "\"%s\"", sbuf
);
383 if (TARGET_TI
&& l
>= 80 && len
> 1)
385 fprintf (stream
, "\n\t.byte\t");
403 fprintf (stream
, "%d", *ptr
);
405 if (TARGET_TI
&& l
>= 80 && len
> 1)
407 fprintf (stream
, "\n\t.byte\t");
418 fprintf (stream
, "\"%s\"", sbuf
);
421 fputc ('\n', stream
);
426 c4x_hard_regno_mode_ok (regno
, mode
)
428 enum machine_mode mode
;
433 case Pmode
: /* Pointer (24/32 bits). */
435 case QImode
: /* Integer (32 bits). */
436 return IS_INT_REGNO (regno
);
438 case QFmode
: /* Float, Double (32 bits). */
439 case HFmode
: /* Long Double (40 bits). */
440 return IS_EXT_REGNO (regno
);
442 case CCmode
: /* Condition Codes. */
443 case CC_NOOVmode
: /* Condition Codes. */
444 return IS_ST_REGNO (regno
);
446 case HImode
: /* Long Long (64 bits). */
447 /* We need two registers to store long longs. Note that
448 it is much easier to constrain the first register
449 to start on an even boundary. */
450 return IS_INT_REGNO (regno
)
451 && IS_INT_REGNO (regno
+ 1)
455 return 0; /* We don't support these modes. */
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (regno1, regno2)
     unsigned int regno1;
     unsigned int regno2;
{
  /* We can not copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}
483 /* The TI C3x C compiler register argument runtime model uses 6 registers,
484 AR2, R2, R3, RC, RS, RE.
486 The first two floating point arguments (float, double, long double)
487 that are found scanning from left to right are assigned to R2 and R3.
489 The remaining integer (char, short, int, long) or pointer arguments
490 are assigned to the remaining registers in the order AR2, R2, R3,
491 RC, RS, RE when scanning left to right, except for the last named
492 argument prior to an ellipsis denoting variable number of
493 arguments. We don't have to worry about the latter condition since
494 function.c treats the last named argument as anonymous (unnamed).
496 All arguments that cannot be passed in registers are pushed onto
497 the stack in reverse order (right to left). GCC handles that for us.
499 c4x_init_cumulative_args() is called at the start, so we can parse
500 the args to see how many floating point arguments and how many
501 integer (or pointer) arguments there are. c4x_function_arg() is
502 then called (sometimes repeatedly) for each argument (parsed left
503 to right) to obtain the register to pass the argument in, or zero
504 if the argument is to be passed on the stack. Once the compiler is
505 happy, c4x_function_arg_advance() is called.
507 Don't use R0 to pass arguments in, we use 0 to indicate a stack
510 static const int c4x_int_reglist
[3][6] =
512 {AR2_REGNO
, R2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
},
513 {AR2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0},
514 {AR2_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0, 0}
517 static const int c4x_fp_reglist
[2] = {R2_REGNO
, R3_REGNO
};
520 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
521 function whose data type is FNTYPE.
522 For a library call, FNTYPE is 0. */
525 c4x_init_cumulative_args (cum
, fntype
, libname
)
526 CUMULATIVE_ARGS
*cum
; /* Argument info to initialize. */
527 tree fntype
; /* Tree ptr for function decl. */
528 rtx libname
; /* SYMBOL_REF of library name or 0. */
530 tree param
, next_param
;
532 cum
->floats
= cum
->ints
= 0;
539 fprintf (stderr
, "\nc4x_init_cumulative_args (");
542 tree ret_type
= TREE_TYPE (fntype
);
544 fprintf (stderr
, "fntype code = %s, ret code = %s",
545 tree_code_name
[(int) TREE_CODE (fntype
)],
546 tree_code_name
[(int) TREE_CODE (ret_type
)]);
549 fprintf (stderr
, "no fntype");
552 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
555 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
557 for (param
= fntype
? TYPE_ARG_TYPES (fntype
) : 0;
558 param
; param
= next_param
)
562 next_param
= TREE_CHAIN (param
);
564 type
= TREE_VALUE (param
);
565 if (type
&& type
!= void_type_node
)
567 enum machine_mode mode
;
569 /* If the last arg doesn't have void type then we have
570 variable arguments. */
574 if ((mode
= TYPE_MODE (type
)))
576 if (! MUST_PASS_IN_STACK (mode
, type
))
578 /* Look for float, double, or long double argument. */
579 if (mode
== QFmode
|| mode
== HFmode
)
581 /* Look for integer, enumeral, boolean, char, or pointer
583 else if (mode
== QImode
|| mode
== Pmode
)
592 fprintf (stderr
, "%s%s, args = %d)\n",
593 cum
->prototype
? ", prototype" : "",
594 cum
->var
? ", variable args" : "",
599 /* Update the data in CUM to advance over an argument
600 of mode MODE and data type TYPE.
601 (TYPE is null for libcalls where that information may not be available.) */
604 c4x_function_arg_advance (cum
, mode
, type
, named
)
605 CUMULATIVE_ARGS
*cum
; /* Current arg information. */
606 enum machine_mode mode
; /* Current arg mode. */
607 tree type
; /* Type of the arg or 0 if lib support. */
608 int named
; /* Whether or not the argument was named. */
611 fprintf (stderr
, "c4x_function_adv(mode=%s, named=%d)\n\n",
612 GET_MODE_NAME (mode
), named
);
616 && ! MUST_PASS_IN_STACK (mode
, type
))
618 /* Look for float, double, or long double argument. */
619 if (mode
== QFmode
|| mode
== HFmode
)
621 /* Look for integer, enumeral, boolean, char, or pointer argument. */
622 else if (mode
== QImode
|| mode
== Pmode
)
625 else if (! TARGET_MEMPARM
&& ! type
)
627 /* Handle libcall arguments. */
628 if (mode
== QFmode
|| mode
== HFmode
)
630 else if (mode
== QImode
|| mode
== Pmode
)
637 /* Define where to put the arguments to a function. Value is zero to
638 push the argument on the stack, or a hard register in which to
641 MODE is the argument's machine mode.
642 TYPE is the data type of the argument (as a tree).
643 This is null for libcalls where that information may
645 CUM is a variable of type CUMULATIVE_ARGS which gives info about
646 the preceding args and about the function being called.
647 NAMED is nonzero if this argument is a named parameter
648 (otherwise it is an extra parameter matching an ellipsis). */
651 c4x_function_arg (cum
, mode
, type
, named
)
652 CUMULATIVE_ARGS
*cum
; /* Current arg information. */
653 enum machine_mode mode
; /* Current arg mode. */
654 tree type
; /* Type of the arg or 0 if lib support. */
655 int named
; /* != 0 for normal args, == 0 for ... args. */
657 int reg
= 0; /* Default to passing argument on stack. */
661 /* We can handle at most 2 floats in R2, R3. */
662 cum
->maxfloats
= (cum
->floats
> 2) ? 2 : cum
->floats
;
664 /* We can handle at most 6 integers minus number of floats passed
666 cum
->maxints
= (cum
->ints
> 6 - cum
->maxfloats
) ?
667 6 - cum
->maxfloats
: cum
->ints
;
669 /* If there is no prototype, assume all the arguments are integers. */
670 if (! cum
->prototype
)
673 cum
->ints
= cum
->floats
= 0;
677 /* This marks the last argument. We don't need to pass this through
679 if (type
== void_type_node
)
685 && ! MUST_PASS_IN_STACK (mode
, type
))
687 /* Look for float, double, or long double argument. */
688 if (mode
== QFmode
|| mode
== HFmode
)
690 if (cum
->floats
< cum
->maxfloats
)
691 reg
= c4x_fp_reglist
[cum
->floats
];
693 /* Look for integer, enumeral, boolean, char, or pointer argument. */
694 else if (mode
== QImode
|| mode
== Pmode
)
696 if (cum
->ints
< cum
->maxints
)
697 reg
= c4x_int_reglist
[cum
->maxfloats
][cum
->ints
];
700 else if (! TARGET_MEMPARM
&& ! type
)
702 /* We could use a different argument calling model for libcalls,
703 since we're only calling functions in libgcc. Thus we could
704 pass arguments for long longs in registers rather than on the
705 stack. In the meantime, use the odd TI format. We make the
706 assumption that we won't have more than two floating point
707 args, six integer args, and that all the arguments are of the
709 if (mode
== QFmode
|| mode
== HFmode
)
710 reg
= c4x_fp_reglist
[cum
->floats
];
711 else if (mode
== QImode
|| mode
== Pmode
)
712 reg
= c4x_int_reglist
[0][cum
->ints
];
717 fprintf (stderr
, "c4x_function_arg(mode=%s, named=%d",
718 GET_MODE_NAME (mode
), named
);
720 fprintf (stderr
, ", reg=%s", reg_names
[reg
]);
722 fprintf (stderr
, ", stack");
723 fprintf (stderr
, ")\n");
726 return gen_rtx_REG (mode
, reg
);
731 /* C[34]x arguments grow in weird ways (downwards) that the standard
732 varargs stuff can't handle.. */
734 c4x_va_arg (valist
, type
)
739 t
= build (PREDECREMENT_EXPR
, TREE_TYPE (valist
), valist
,
740 build_int_2 (int_size_in_bytes (type
), 0));
741 TREE_SIDE_EFFECTS (t
) = 1;
743 return expand_expr (t
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
748 c4x_isr_reg_used_p (regno
)
751 /* Don't save/restore FP or ST, we handle them separately. */
752 if (regno
== FRAME_POINTER_REGNUM
753 || IS_ST_REGNO (regno
))
756 /* We could be a little smarter abut saving/restoring DP.
757 We'll only save if for the big memory model or if
758 we're paranoid. ;-) */
759 if (IS_DP_REGNO (regno
))
760 return ! TARGET_SMALL
|| TARGET_PARANOID
;
762 /* Only save/restore regs in leaf function that are used. */
763 if (c4x_leaf_function
)
764 return regs_ever_live
[regno
] && fixed_regs
[regno
] == 0;
766 /* Only save/restore regs that are used by the ISR and regs
767 that are likely to be used by functions the ISR calls
768 if they are not fixed. */
769 return IS_EXT_REGNO (regno
)
770 || ((regs_ever_live
[regno
] || call_used_regs
[regno
])
771 && fixed_regs
[regno
] == 0);
776 c4x_leaf_function_p ()
778 /* A leaf function makes no calls, so we only need
779 to save/restore the registers we actually use.
780 For the global variable leaf_function to be set, we need
781 to define LEAF_REGISTERS and all that it entails.
782 Let's check ourselves... */
784 if (lookup_attribute ("leaf_pretend",
785 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
788 /* Use the leaf_pretend attribute at your own risk. This is a hack
789 to speed up ISRs that call a function infrequently where the
790 overhead of saving and restoring the additional registers is not
791 warranted. You must save and restore the additional registers
792 required by the called function. Caveat emptor. Here's enough
795 if (leaf_function_p ())
803 c4x_naked_function_p ()
807 type
= TREE_TYPE (current_function_decl
);
808 return lookup_attribute ("naked", TYPE_ATTRIBUTES (type
)) != NULL
;
813 c4x_interrupt_function_p ()
815 if (lookup_attribute ("interrupt",
816 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
819 /* Look for TI style c_intnn. */
820 return current_function_name
[0] == 'c'
821 && current_function_name
[1] == '_'
822 && current_function_name
[2] == 'i'
823 && current_function_name
[3] == 'n'
824 && current_function_name
[4] == 't'
825 && ISDIGIT (current_function_name
[5])
826 && ISDIGIT (current_function_name
[6]);
830 c4x_expand_prologue ()
833 int size
= get_frame_size ();
836 /* In functions where ar3 is not used but frame pointers are still
837 specified, frame pointers are not adjusted (if >= -O2) and this
838 is used so it won't needlessly push the frame pointer. */
841 /* For __naked__ function don't build a prologue. */
842 if (c4x_naked_function_p ())
847 /* For __interrupt__ function build specific prologue. */
848 if (c4x_interrupt_function_p ())
850 c4x_leaf_function
= c4x_leaf_function_p ();
852 insn
= emit_insn (gen_push_st ());
853 RTX_FRAME_RELATED_P (insn
) = 1;
856 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
857 RTX_FRAME_RELATED_P (insn
) = 1;
858 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
859 gen_rtx_REG (QImode
, SP_REGNO
)));
860 RTX_FRAME_RELATED_P (insn
) = 1;
861 /* We require that an ISR uses fewer than 32768 words of
862 local variables, otherwise we have to go to lots of
863 effort to save a register, load it with the desired size,
864 adjust the stack pointer, and then restore the modified
865 register. Frankly, I think it is a poor ISR that
866 requires more than 32767 words of local temporary
869 error ("ISR %s requires %d words of local vars, max is 32767",
870 current_function_name
, size
);
872 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
873 gen_rtx_REG (QImode
, SP_REGNO
),
875 RTX_FRAME_RELATED_P (insn
) = 1;
877 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
879 if (c4x_isr_reg_used_p (regno
))
881 if (regno
== DP_REGNO
)
883 insn
= emit_insn (gen_push_dp ());
884 RTX_FRAME_RELATED_P (insn
) = 1;
888 insn
= emit_insn (gen_pushqi (gen_rtx_REG (QImode
, regno
)));
889 RTX_FRAME_RELATED_P (insn
) = 1;
890 if (IS_EXT_REGNO (regno
))
892 insn
= emit_insn (gen_pushqf
893 (gen_rtx_REG (QFmode
, regno
)));
894 RTX_FRAME_RELATED_P (insn
) = 1;
899 /* We need to clear the repeat mode flag if the ISR is
900 going to use a RPTB instruction or uses the RC, RS, or RE
902 if (regs_ever_live
[RC_REGNO
]
903 || regs_ever_live
[RS_REGNO
]
904 || regs_ever_live
[RE_REGNO
])
906 insn
= emit_insn (gen_andn_st (GEN_INT(~0x100)));
907 RTX_FRAME_RELATED_P (insn
) = 1;
910 /* Reload DP reg if we are paranoid about some turkey
911 violating small memory model rules. */
912 if (TARGET_SMALL
&& TARGET_PARANOID
)
914 insn
= emit_insn (gen_set_ldp_prologue
915 (gen_rtx_REG (QImode
, DP_REGNO
),
916 gen_rtx_SYMBOL_REF (QImode
, "data_sec")));
917 RTX_FRAME_RELATED_P (insn
) = 1;
922 if (frame_pointer_needed
)
925 || (current_function_args_size
!= 0)
928 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
929 RTX_FRAME_RELATED_P (insn
) = 1;
930 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
931 gen_rtx_REG (QImode
, SP_REGNO
)));
932 RTX_FRAME_RELATED_P (insn
) = 1;
937 /* Since ar3 is not used, we don't need to push it. */
943 /* If we use ar3, we need to push it. */
945 if ((size
!= 0) || (current_function_args_size
!= 0))
947 /* If we are omitting the frame pointer, we still have
948 to make space for it so the offsets are correct
949 unless we don't use anything on the stack at all. */
956 /* Local vars are too big, it will take multiple operations
960 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
961 GEN_INT(size
>> 16)));
962 RTX_FRAME_RELATED_P (insn
) = 1;
963 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
964 gen_rtx_REG (QImode
, R1_REGNO
),
966 RTX_FRAME_RELATED_P (insn
) = 1;
970 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
971 GEN_INT(size
& ~0xffff)));
972 RTX_FRAME_RELATED_P (insn
) = 1;
974 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
975 gen_rtx_REG (QImode
, R1_REGNO
),
976 GEN_INT(size
& 0xffff)));
977 RTX_FRAME_RELATED_P (insn
) = 1;
978 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
979 gen_rtx_REG (QImode
, SP_REGNO
),
980 gen_rtx_REG (QImode
, R1_REGNO
)));
981 RTX_FRAME_RELATED_P (insn
) = 1;
985 /* Local vars take up less than 32767 words, so we can directly
987 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
988 gen_rtx_REG (QImode
, SP_REGNO
),
990 RTX_FRAME_RELATED_P (insn
) = 1;
993 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
995 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
997 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
999 if (TARGET_PRESERVE_FLOAT
)
1001 insn
= emit_insn (gen_pushqi
1002 (gen_rtx_REG (QImode
, regno
)));
1003 RTX_FRAME_RELATED_P (insn
) = 1;
1005 insn
= emit_insn (gen_pushqf (gen_rtx_REG (QFmode
, regno
)));
1006 RTX_FRAME_RELATED_P (insn
) = 1;
1008 else if ((! dont_push_ar3
) || (regno
!= AR3_REGNO
))
1010 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, regno
)));
1011 RTX_FRAME_RELATED_P (insn
) = 1;
1020 c4x_expand_epilogue()
1026 int size
= get_frame_size ();
1028 /* For __naked__ function build no epilogue. */
1029 if (c4x_naked_function_p ())
1031 insn
= emit_jump_insn (gen_return_from_epilogue ());
1032 RTX_FRAME_RELATED_P (insn
) = 1;
1036 /* For __interrupt__ function build specific epilogue. */
1037 if (c4x_interrupt_function_p ())
1039 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; --regno
)
1041 if (! c4x_isr_reg_used_p (regno
))
1043 if (regno
== DP_REGNO
)
1045 insn
= emit_insn (gen_pop_dp ());
1046 RTX_FRAME_RELATED_P (insn
) = 1;
1050 /* We have to use unspec because the compiler will delete insns
1051 that are not call-saved. */
1052 if (IS_EXT_REGNO (regno
))
1054 insn
= emit_insn (gen_popqf_unspec
1055 (gen_rtx_REG (QFmode
, regno
)));
1056 RTX_FRAME_RELATED_P (insn
) = 1;
1058 insn
= emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode
, regno
)));
1059 RTX_FRAME_RELATED_P (insn
) = 1;
1064 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1065 gen_rtx_REG (QImode
, SP_REGNO
),
1067 RTX_FRAME_RELATED_P (insn
) = 1;
1068 insn
= emit_insn (gen_popqi
1069 (gen_rtx_REG (QImode
, AR3_REGNO
)));
1070 RTX_FRAME_RELATED_P (insn
) = 1;
1072 insn
= emit_insn (gen_pop_st ());
1073 RTX_FRAME_RELATED_P (insn
) = 1;
1074 insn
= emit_jump_insn (gen_return_from_interrupt_epilogue ());
1075 RTX_FRAME_RELATED_P (insn
) = 1;
1079 if (frame_pointer_needed
)
1082 || (current_function_args_size
!= 0)
1086 (gen_movqi (gen_rtx_REG (QImode
, R2_REGNO
),
1087 gen_rtx_MEM (QImode
,
1089 (QImode
, gen_rtx_REG (QImode
,
1092 RTX_FRAME_RELATED_P (insn
) = 1;
1094 /* We already have the return value and the fp,
1095 so we need to add those to the stack. */
1102 /* Since ar3 is not used for anything, we don't need to
1109 dont_pop_ar3
= 0; /* If we use ar3, we need to pop it. */
1110 if (size
|| current_function_args_size
)
1112 /* If we are ommitting the frame pointer, we still have
1113 to make space for it so the offsets are correct
1114 unless we don't use anything on the stack at all. */
1119 /* Now restore the saved registers, putting in the delayed branch
1121 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1123 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1125 if (regno
== AR3_REGNO
&& dont_pop_ar3
)
1128 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
1130 insn
= emit_insn (gen_popqf_unspec
1131 (gen_rtx_REG (QFmode
, regno
)));
1132 RTX_FRAME_RELATED_P (insn
) = 1;
1133 if (TARGET_PRESERVE_FLOAT
)
1135 insn
= emit_insn (gen_popqi_unspec
1136 (gen_rtx_REG (QImode
, regno
)));
1137 RTX_FRAME_RELATED_P (insn
) = 1;
1142 insn
= emit_insn (gen_popqi (gen_rtx_REG (QImode
, regno
)));
1143 RTX_FRAME_RELATED_P (insn
) = 1;
1148 if (frame_pointer_needed
)
1151 || (current_function_args_size
!= 0)
1154 /* Restore the old FP. */
1157 (gen_rtx_REG (QImode
, AR3_REGNO
),
1158 gen_rtx_MEM (QImode
, gen_rtx_REG (QImode
, AR3_REGNO
))));
1160 RTX_FRAME_RELATED_P (insn
) = 1;
1166 /* Local vars are too big, it will take multiple operations
1170 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1171 GEN_INT(size
>> 16)));
1172 RTX_FRAME_RELATED_P (insn
) = 1;
1173 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1174 gen_rtx_REG (QImode
, R3_REGNO
),
1176 RTX_FRAME_RELATED_P (insn
) = 1;
1180 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1181 GEN_INT(size
& ~0xffff)));
1182 RTX_FRAME_RELATED_P (insn
) = 1;
1184 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1185 gen_rtx_REG (QImode
, R3_REGNO
),
1186 GEN_INT(size
& 0xffff)));
1187 RTX_FRAME_RELATED_P (insn
) = 1;
1188 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1189 gen_rtx_REG (QImode
, SP_REGNO
),
1190 gen_rtx_REG (QImode
, R3_REGNO
)));
1191 RTX_FRAME_RELATED_P (insn
) = 1;
1195 /* Local vars take up less than 32768 words, so we can directly
1196 subtract the number. */
1197 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1198 gen_rtx_REG (QImode
, SP_REGNO
),
1200 RTX_FRAME_RELATED_P (insn
) = 1;
1205 insn
= emit_jump_insn (gen_return_indirect_internal
1206 (gen_rtx_REG (QImode
, R2_REGNO
)));
1207 RTX_FRAME_RELATED_P (insn
) = 1;
1211 insn
= emit_jump_insn (gen_return_from_epilogue ());
1212 RTX_FRAME_RELATED_P (insn
) = 1;
1219 c4x_null_epilogue_p ()
1223 if (reload_completed
1224 && ! c4x_naked_function_p ()
1225 && ! c4x_interrupt_function_p ()
1226 && ! current_function_calls_alloca
1227 && ! current_function_args_size
1229 && ! get_frame_size ())
1231 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1232 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
]
1233 && (regno
!= AR3_REGNO
))
1242 c4x_emit_move_sequence (operands
, mode
)
1244 enum machine_mode mode
;
1246 rtx op0
= operands
[0];
1247 rtx op1
= operands
[1];
1249 if (! reload_in_progress
1252 && ! (stik_const_operand (op1
, mode
) && ! push_operand (op0
, mode
)))
1253 op1
= force_reg (mode
, op1
);
1255 if (GET_CODE (op1
) == LO_SUM
1256 && GET_MODE (op1
) == Pmode
1257 && dp_reg_operand (XEXP (op1
, 0), mode
))
1259 /* expand_increment will sometimes create a LO_SUM immediate
1261 op1
= XEXP (op1
, 1);
1263 else if (symbolic_address_operand (op1
, mode
))
1265 if (TARGET_LOAD_ADDRESS
)
1267 /* Alias analysis seems to do a better job if we force
1268 constant addresses to memory after reload. */
1269 emit_insn (gen_load_immed_address (op0
, op1
));
1274 /* Stick symbol or label address into the constant pool. */
1275 op1
= force_const_mem (Pmode
, op1
);
1278 else if (mode
== HFmode
&& CONSTANT_P (op1
) && ! LEGITIMATE_CONSTANT_P (op1
))
1280 /* We could be a lot smarter about loading some of these
1282 op1
= force_const_mem (mode
, op1
);
1285 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1286 and emit associated (HIGH (SYMREF)) if large memory model.
1287 c4x_legitimize_address could be used to do this,
1288 perhaps by calling validize_address. */
1289 if (TARGET_EXPOSE_LDP
1290 && ! (reload_in_progress
|| reload_completed
)
1291 && GET_CODE (op1
) == MEM
1292 && symbolic_address_operand (XEXP (op1
, 0), Pmode
))
1294 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1296 emit_insn (gen_set_ldp (dp_reg
, XEXP (op1
, 0)));
1297 op1
= change_address (op1
, mode
,
1298 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op1
, 0)));
1301 if (TARGET_EXPOSE_LDP
1302 && ! (reload_in_progress
|| reload_completed
)
1303 && GET_CODE (op0
) == MEM
1304 && symbolic_address_operand (XEXP (op0
, 0), Pmode
))
1306 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1308 emit_insn (gen_set_ldp (dp_reg
, XEXP (op0
, 0)));
1309 op0
= change_address (op0
, mode
,
1310 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op0
, 0)));
1313 if (GET_CODE (op0
) == SUBREG
1314 && mixed_subreg_operand (op0
, mode
))
1316 /* We should only generate these mixed mode patterns
1317 during RTL generation. If we need do it later on
1318 then we'll have to emit patterns that won't clobber CC. */
1319 if (reload_in_progress
|| reload_completed
)
1321 if (GET_MODE (SUBREG_REG (op0
)) == QImode
)
1322 op0
= SUBREG_REG (op0
);
1323 else if (GET_MODE (SUBREG_REG (op0
)) == HImode
)
1325 op0
= copy_rtx (op0
);
1326 PUT_MODE (op0
, QImode
);
1332 emit_insn (gen_storeqf_int_clobber (op0
, op1
));
1338 if (GET_CODE (op1
) == SUBREG
1339 && mixed_subreg_operand (op1
, mode
))
1341 /* We should only generate these mixed mode patterns
1342 during RTL generation. If we need do it later on
1343 then we'll have to emit patterns that won't clobber CC. */
1344 if (reload_in_progress
|| reload_completed
)
1346 if (GET_MODE (SUBREG_REG (op1
)) == QImode
)
1347 op1
= SUBREG_REG (op1
);
1348 else if (GET_MODE (SUBREG_REG (op1
)) == HImode
)
1350 op1
= copy_rtx (op1
);
1351 PUT_MODE (op1
, QImode
);
1357 emit_insn (gen_loadqf_int_clobber (op0
, op1
));
1364 && reg_operand (op0
, mode
)
1365 && const_int_operand (op1
, mode
)
1366 && ! IS_INT16_CONST (INTVAL (op1
))
1367 && ! IS_HIGH_CONST (INTVAL (op1
)))
1369 emit_insn (gen_loadqi_big_constant (op0
, op1
));
1374 && reg_operand (op0
, mode
)
1375 && const_int_operand (op1
, mode
))
1377 emit_insn (gen_loadhi_big_constant (op0
, op1
));
1381 /* Adjust operands in case we have modified them. */
1385 /* Emit normal pattern. */
1391 c4x_emit_libcall (libcall
, code
, dmode
, smode
, noperands
, operands
)
1394 enum machine_mode dmode
;
1395 enum machine_mode smode
;
1407 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 1,
1408 operands
[1], smode
);
1409 equiv
= gen_rtx (code
, dmode
, operands
[1]);
1413 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 2,
1414 operands
[1], smode
, operands
[2], smode
);
1415 equiv
= gen_rtx (code
, dmode
, operands
[1], operands
[2]);
1422 insns
= get_insns ();
1424 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1429 c4x_emit_libcall3 (libcall
, code
, mode
, operands
)
1432 enum machine_mode mode
;
1435 c4x_emit_libcall (libcall
, code
, mode
, mode
, 3, operands
);
1440 c4x_emit_libcall_mulhi (libcall
, code
, mode
, operands
)
1443 enum machine_mode mode
;
1451 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, mode
, 2,
1452 operands
[1], mode
, operands
[2], mode
);
1453 equiv
= gen_rtx_TRUNCATE (mode
,
1454 gen_rtx_LSHIFTRT (HImode
,
1455 gen_rtx_MULT (HImode
,
1456 gen_rtx (code
, HImode
, operands
[1]),
1457 gen_rtx (code
, HImode
, operands
[2])),
1459 insns
= get_insns ();
1461 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1466 c4x_check_legit_addr (mode
, addr
, strict
)
1467 enum machine_mode mode
;
1471 rtx base
= NULL_RTX
; /* Base register (AR0-AR7). */
1472 rtx indx
= NULL_RTX
; /* Index register (IR0,IR1). */
1473 rtx disp
= NULL_RTX
; /* Displacement. */
1476 code
= GET_CODE (addr
);
1479 /* Register indirect with auto increment/decrement. We don't
1480 allow SP here---push_operand should recognize an operand
1481 being pushed on the stack. */
1486 if (mode
!= QImode
&& mode
!= QFmode
)
1490 base
= XEXP (addr
, 0);
1498 rtx op0
= XEXP (addr
, 0);
1499 rtx op1
= XEXP (addr
, 1);
1501 if (mode
!= QImode
&& mode
!= QFmode
)
1505 || (GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
))
1507 base
= XEXP (op1
, 0);
1510 if (REG_P (XEXP (op1
, 1)))
1511 indx
= XEXP (op1
, 1);
1513 disp
= XEXP (op1
, 1);
1517 /* Register indirect. */
1522 /* Register indirect with displacement or index. */
1525 rtx op0
= XEXP (addr
, 0);
1526 rtx op1
= XEXP (addr
, 1);
1527 enum rtx_code code0
= GET_CODE (op0
);
1534 base
= op0
; /* Base + index. */
1536 if (IS_INDEX_REG (base
) || IS_ADDR_REG (indx
))
1544 base
= op0
; /* Base + displacement. */
1555 /* Direct addressing with DP register. */
1558 rtx op0
= XEXP (addr
, 0);
1559 rtx op1
= XEXP (addr
, 1);
1561 /* HImode and HFmode direct memory references aren't truly
1562 offsettable (consider case at end of data page). We
1563 probably get better code by loading a pointer and using an
1564 indirect memory reference. */
1565 if (mode
== HImode
|| mode
== HFmode
)
1568 if (!REG_P (op0
) || REGNO (op0
) != DP_REGNO
)
1571 if ((GET_CODE (op1
) == SYMBOL_REF
|| GET_CODE (op1
) == LABEL_REF
))
1574 if (GET_CODE (op1
) == CONST
)
1580 /* Direct addressing with some work for the assembler... */
1582 /* Direct addressing. */
1585 if (! TARGET_EXPOSE_LDP
&& ! strict
&& mode
!= HFmode
&& mode
!= HImode
)
1587 /* These need to be converted to a LO_SUM (...).
1588 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1591 /* Do not allow direct memory access to absolute addresses.
1592 This is more pain than it's worth, especially for the
1593 small memory model where we can't guarantee that
1594 this address is within the data page---we don't want
1595 to modify the DP register in the small memory model,
1596 even temporarily, since an interrupt can sneak in.... */
1600 /* Indirect indirect addressing. */
1605 fatal_insn ("using CONST_DOUBLE for address", addr
);
1611 /* Validate the base register. */
1614 /* Check that the address is offsettable for HImode and HFmode. */
1615 if (indx
&& (mode
== HImode
|| mode
== HFmode
))
1618 /* Handle DP based stuff. */
1619 if (REGNO (base
) == DP_REGNO
)
1621 if (strict
&& ! REGNO_OK_FOR_BASE_P (REGNO (base
)))
1623 else if (! strict
&& ! IS_ADDR_OR_PSEUDO_REG (base
))
1627 /* Now validate the index register. */
1630 if (GET_CODE (indx
) != REG
)
1632 if (strict
&& ! REGNO_OK_FOR_INDEX_P (REGNO (indx
)))
1634 else if (! strict
&& ! IS_INDEX_OR_PSEUDO_REG (indx
))
1638 /* Validate displacement. */
1641 if (GET_CODE (disp
) != CONST_INT
)
1643 if (mode
== HImode
|| mode
== HFmode
)
1645 /* The offset displacement must be legitimate. */
1646 if (! IS_DISP8_OFF_CONST (INTVAL (disp
)))
1651 if (! IS_DISP8_CONST (INTVAL (disp
)))
1654 /* Can't add an index with a disp. */
1663 c4x_legitimize_address (orig
, mode
)
1664 rtx orig ATTRIBUTE_UNUSED
;
1665 enum machine_mode mode ATTRIBUTE_UNUSED
;
1667 if (GET_CODE (orig
) == SYMBOL_REF
1668 || GET_CODE (orig
) == LABEL_REF
)
1670 if (mode
== HImode
|| mode
== HFmode
)
1672 /* We need to force the address into
1673 a register so that it is offsettable. */
1674 rtx addr_reg
= gen_reg_rtx (Pmode
);
1675 emit_move_insn (addr_reg
, orig
);
1680 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1683 emit_insn (gen_set_ldp (dp_reg
, orig
));
1685 return gen_rtx_LO_SUM (Pmode
, dp_reg
, orig
);
1693 /* Provide the costs of an addressing mode that contains ADDR.
1694 If ADDR is not a valid address, its cost is irrelevant.
1695 This is used in cse and loop optimisation to determine
1696 if it is worthwhile storing a common address into a register.
1697 Unfortunately, the C4x address cost depends on other operands. */
1700 c4x_address_cost (addr
)
1703 switch (GET_CODE (addr
))
1714 /* These shouldn't be directly generated. */
1722 rtx op1
= XEXP (addr
, 1);
1724 if (GET_CODE (op1
) == LABEL_REF
|| GET_CODE (op1
) == SYMBOL_REF
)
1725 return TARGET_SMALL
? 3 : 4;
1727 if (GET_CODE (op1
) == CONST
)
1729 rtx offset
= const0_rtx
;
1731 op1
= eliminate_constant_term (op1
, &offset
);
1733 /* ??? These costs need rethinking... */
1734 if (GET_CODE (op1
) == LABEL_REF
)
1737 if (GET_CODE (op1
) != SYMBOL_REF
)
1740 if (INTVAL (offset
) == 0)
1745 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr
);
1751 register rtx op0
= XEXP (addr
, 0);
1752 register rtx op1
= XEXP (addr
, 1);
1754 if (GET_CODE (op0
) != REG
)
1757 switch (GET_CODE (op1
))
1763 /* This cost for REG+REG must be greater than the cost
1764 for REG if we want autoincrement addressing modes. */
1768 /* The following tries to improve GIV combination
1769 in strength reduce but appears not to help. */
1770 if (TARGET_DEVEL
&& IS_UINT5_CONST (INTVAL (op1
)))
1773 if (IS_DISP1_CONST (INTVAL (op1
)))
1776 if (! TARGET_C3X
&& IS_UINT5_CONST (INTVAL (op1
)))
1791 c4x_gen_compare_reg (code
, x
, y
)
1795 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
1798 if (mode
== CC_NOOVmode
1799 && (code
== LE
|| code
== GE
|| code
== LT
|| code
== GT
))
1802 cc_reg
= gen_rtx_REG (mode
, ST_REGNO
);
1803 emit_insn (gen_rtx_SET (VOIDmode
, cc_reg
,
1804 gen_rtx_COMPARE (mode
, x
, y
)));
1809 c4x_output_cbranch (form
, seq
)
1818 static char str
[100];
1822 delay
= XVECEXP (final_sequence
, 0, 1);
1823 delayed
= ! INSN_ANNULLED_BRANCH_P (seq
);
1824 annultrue
= INSN_ANNULLED_BRANCH_P (seq
) && ! INSN_FROM_TARGET_P (delay
);
1825 annulfalse
= INSN_ANNULLED_BRANCH_P (seq
) && INSN_FROM_TARGET_P (delay
);
1828 cp
= &str
[strlen (str
)];
1853 c4x_print_operand (file
, op
, letter
)
1854 FILE *file
; /* File to write to. */
1855 rtx op
; /* Operand to print. */
1856 int letter
; /* %<letter> or 0. */
1863 case '#': /* Delayed. */
1865 fprintf (file
, "d");
1869 code
= GET_CODE (op
);
1872 case 'A': /* Direct address. */
1873 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== CONST
)
1874 fprintf (file
, "@");
1877 case 'H': /* Sethi. */
1878 output_addr_const (file
, op
);
1881 case 'I': /* Reversed condition. */
1882 code
= reverse_condition (code
);
1885 case 'L': /* Log 2 of constant. */
1886 if (code
!= CONST_INT
)
1887 fatal_insn ("c4x_print_operand: %%L inconsistency", op
);
1888 fprintf (file
, "%d", exact_log2 (INTVAL (op
)));
1891 case 'N': /* Ones complement of small constant. */
1892 if (code
!= CONST_INT
)
1893 fatal_insn ("c4x_print_operand: %%N inconsistency", op
);
1894 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~INTVAL (op
));
1897 case 'K': /* Generate ldp(k) if direct address. */
1900 && GET_CODE (XEXP (op
, 0)) == LO_SUM
1901 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == REG
1902 && REGNO (XEXP (XEXP (op
, 0), 0)) == DP_REGNO
)
1904 op1
= XEXP (XEXP (op
, 0), 1);
1905 if (GET_CODE(op1
) == CONST_INT
|| GET_CODE(op1
) == SYMBOL_REF
)
1907 fprintf (file
, "\t%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1908 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1909 fprintf (file
, "\n");
1914 case 'M': /* Generate ldp(k) if direct address. */
1915 if (! TARGET_SMALL
/* Only used in asm statements. */
1917 && (GET_CODE (XEXP (op
, 0)) == CONST
1918 || GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
))
1920 fprintf (file
, "%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1921 output_address (XEXP (op
, 0));
1922 fprintf (file
, "\n\t");
1926 case 'O': /* Offset address. */
1927 if (code
== MEM
&& c4x_autoinc_operand (op
, Pmode
))
1929 else if (code
== MEM
)
1930 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1931 else if (code
== REG
)
1932 fprintf (file
, "%s", reg_names
[REGNO (op
) + 1]);
1934 fatal_insn ("c4x_print_operand: %%O inconsistency", op
);
1937 case 'C': /* Call. */
1940 case 'U': /* Call/callu. */
1941 if (code
!= SYMBOL_REF
)
1942 fprintf (file
, "u");
1952 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
1954 fprintf (file
, "%s", float_reg_names
[REGNO (op
)]);
1956 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
1960 output_address (XEXP (op
, 0));
1967 real_to_decimal (str
, CONST_DOUBLE_REAL_VALUE (op
),
1968 sizeof (str
), 0, 1);
1969 fprintf (file
, "%s", str
);
1974 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (op
));
1978 fprintf (file
, "ne");
1982 fprintf (file
, "eq");
1986 fprintf (file
, "ge");
1990 fprintf (file
, "gt");
1994 fprintf (file
, "le");
1998 fprintf (file
, "lt");
2002 fprintf (file
, "hs");
2006 fprintf (file
, "hi");
2010 fprintf (file
, "ls");
2014 fprintf (file
, "lo");
2018 output_addr_const (file
, op
);
2022 output_addr_const (file
, XEXP (op
, 0));
2029 fatal_insn ("c4x_print_operand: Bad operand case", op
);
2036 c4x_print_operand_address (file
, addr
)
2040 switch (GET_CODE (addr
))
2043 fprintf (file
, "*%s", reg_names
[REGNO (addr
)]);
2047 fprintf (file
, "*--%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2051 fprintf (file
, "*%s++", reg_names
[REGNO (XEXP (addr
, 0))]);
2056 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2057 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2059 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2060 fprintf (file
, "*%s++(%s)", reg_names
[REGNO (op0
)],
2061 reg_names
[REGNO (op1
)]);
2062 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2063 fprintf (file
, "*%s++(" HOST_WIDE_INT_PRINT_DEC
")",
2064 reg_names
[REGNO (op0
)], INTVAL (op1
));
2065 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2066 fprintf (file
, "*%s--(" HOST_WIDE_INT_PRINT_DEC
")",
2067 reg_names
[REGNO (op0
)], -INTVAL (op1
));
2068 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2069 fprintf (file
, "*%s--(%s)", reg_names
[REGNO (op0
)],
2070 reg_names
[REGNO (op1
)]);
2072 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr
);
2078 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2079 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2081 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2082 fprintf (file
, "*++%s(%s)", reg_names
[REGNO (op0
)],
2083 reg_names
[REGNO (op1
)]);
2084 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2085 fprintf (file
, "*++%s(" HOST_WIDE_INT_PRINT_DEC
")",
2086 reg_names
[REGNO (op0
)], INTVAL (op1
));
2087 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2088 fprintf (file
, "*--%s(" HOST_WIDE_INT_PRINT_DEC
")",
2089 reg_names
[REGNO (op0
)], -INTVAL (op1
));
2090 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2091 fprintf (file
, "*--%s(%s)", reg_names
[REGNO (op0
)],
2092 reg_names
[REGNO (op1
)]);
2094 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr
);
2099 fprintf (file
, "*++%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2103 fprintf (file
, "*%s--", reg_names
[REGNO (XEXP (addr
, 0))]);
2106 case PLUS
: /* Indirect with displacement. */
2108 rtx op0
= XEXP (addr
, 0);
2109 rtx op1
= XEXP (addr
, 1);
2115 if (IS_INDEX_REG (op0
))
2117 fprintf (file
, "*+%s(%s)",
2118 reg_names
[REGNO (op1
)],
2119 reg_names
[REGNO (op0
)]); /* Index + base. */
2123 fprintf (file
, "*+%s(%s)",
2124 reg_names
[REGNO (op0
)],
2125 reg_names
[REGNO (op1
)]); /* Base + index. */
2128 else if (INTVAL (op1
) < 0)
2130 fprintf (file
, "*-%s(" HOST_WIDE_INT_PRINT_DEC
")",
2131 reg_names
[REGNO (op0
)],
2132 -INTVAL (op1
)); /* Base - displacement. */
2136 fprintf (file
, "*+%s(" HOST_WIDE_INT_PRINT_DEC
")",
2137 reg_names
[REGNO (op0
)],
2138 INTVAL (op1
)); /* Base + displacement. */
2142 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2148 rtx op0
= XEXP (addr
, 0);
2149 rtx op1
= XEXP (addr
, 1);
2151 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
2152 c4x_print_operand_address (file
, op1
);
2154 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2161 fprintf (file
, "@");
2162 output_addr_const (file
, addr
);
2165 /* We shouldn't access CONST_INT addresses. */
2169 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2175 /* Return nonzero if the floating point operand will fit
2176 in the immediate field. */
2179 c4x_immed_float_p (op
)
2186 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
2187 if (GET_MODE (op
) == HFmode
)
2188 REAL_VALUE_TO_TARGET_DOUBLE (r
, convval
);
2191 REAL_VALUE_TO_TARGET_SINGLE (r
, convval
[0]);
2195 /* Sign extend exponent. */
2196 exponent
= (((convval
[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2197 if (exponent
== -128)
2199 if ((convval
[0] & 0x00000fff) != 0 || convval
[1] != 0)
2200 return 0; /* Precision doesn't fit. */
2201 return (exponent
<= 7) /* Positive exp. */
2202 && (exponent
>= -7); /* Negative exp. */
2206 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2207 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2209 None of the last four instructions from the bottom of the block can
2210 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2211 BcondAT or RETIcondD.
2213 This routine scans the four previous insns for a jump insn, and if
2214 one is found, returns 1 so that we bung in a nop instruction.
2215 This simple minded strategy will add a nop, when it may not
2216 be required. Say when there is a JUMP_INSN near the end of the
2217 block that doesn't get converted into a delayed branch.
2219 Note that we cannot have a call insn, since we don't generate
2220 repeat loops with calls in them (although I suppose we could, but
2221 there's no benefit.)
2223 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2226 c4x_rptb_nop_p (insn
)
2232 /* Extract the start label from the jump pattern (rptb_end). */
2233 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2235 /* If there is a label at the end of the loop we must insert
2238 insn
= previous_insn (insn
);
2239 } while (GET_CODE (insn
) == NOTE
2240 || GET_CODE (insn
) == USE
2241 || GET_CODE (insn
) == CLOBBER
);
2242 if (GET_CODE (insn
) == CODE_LABEL
)
2245 for (i
= 0; i
< 4; i
++)
2247 /* Search back for prev non-note and non-label insn. */
2248 while (GET_CODE (insn
) == NOTE
|| GET_CODE (insn
) == CODE_LABEL
2249 || GET_CODE (insn
) == USE
|| GET_CODE (insn
) == CLOBBER
)
2251 if (insn
== start_label
)
2254 insn
= previous_insn (insn
);
2257 /* If we have a jump instruction we should insert a NOP. If we
2258 hit repeat block top we should only insert a NOP if the loop
2260 if (GET_CODE (insn
) == JUMP_INSN
)
2262 insn
= previous_insn (insn
);
2268 /* The C4x looping instruction needs to be emitted at the top of the
2269 loop. Emitting the true RTL for a looping instruction at the top of
2270 the loop can cause problems with flow analysis. So instead, a dummy
2271 doloop insn is emitted at the end of the loop. This routine checks
2272 for the presence of this doloop insn and then searches back to the
2273 top of the loop, where it inserts the true looping insn (provided
2274 there are no instructions in the loop which would cause problems).
2275 Any additional labels can be emitted at this point. In addition, if
2276 the desired loop count register was not allocated, this routine does
2279 Before we can create a repeat block looping instruction we have to
2280 verify that there are no jumps outside the loop and no jumps outside
2281 the loop go into this loop. This can happen in the basic blocks reorder
2282 pass. The C4x cpu can not handle this. */
2285 c4x_label_ref_used_p (x
, code_label
)
2295 code
= GET_CODE (x
);
2296 if (code
== LABEL_REF
)
2297 return INSN_UID (XEXP (x
,0)) == INSN_UID (code_label
);
2299 fmt
= GET_RTX_FORMAT (code
);
2300 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2304 if (c4x_label_ref_used_p (XEXP (x
, i
), code_label
))
2307 else if (fmt
[i
] == 'E')
2308 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2309 if (c4x_label_ref_used_p (XVECEXP (x
, i
, j
), code_label
))
2317 c4x_rptb_valid_p (insn
, start_label
)
2318 rtx insn
, start_label
;
2324 /* Find the start label. */
2325 for (; insn
; insn
= PREV_INSN (insn
))
2326 if (insn
== start_label
)
2329 /* Note found then we can not use a rptb or rpts. The label was
2330 probably moved by the basic block reorder pass. */
2335 /* If any jump jumps inside this block then we must fail. */
2336 for (insn
= PREV_INSN (start
); insn
; insn
= PREV_INSN (insn
))
2338 if (GET_CODE (insn
) == CODE_LABEL
)
2340 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2341 if (GET_CODE (tmp
) == JUMP_INSN
2342 && c4x_label_ref_used_p (tmp
, insn
))
2346 for (insn
= NEXT_INSN (end
); insn
; insn
= NEXT_INSN (insn
))
2348 if (GET_CODE (insn
) == CODE_LABEL
)
2350 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2351 if (GET_CODE (tmp
) == JUMP_INSN
2352 && c4x_label_ref_used_p (tmp
, insn
))
2356 /* If any jump jumps outside this block then we must fail. */
2357 for (insn
= NEXT_INSN (start
); insn
!= end
; insn
= NEXT_INSN (insn
))
2359 if (GET_CODE (insn
) == CODE_LABEL
)
2361 for (tmp
= NEXT_INSN (end
); tmp
; tmp
= NEXT_INSN(tmp
))
2362 if (GET_CODE (tmp
) == JUMP_INSN
2363 && c4x_label_ref_used_p (tmp
, insn
))
2365 for (tmp
= PREV_INSN (start
); tmp
; tmp
= PREV_INSN(tmp
))
2366 if (GET_CODE (tmp
) == JUMP_INSN
2367 && c4x_label_ref_used_p (tmp
, insn
))
2372 /* All checks OK. */
2378 c4x_rptb_insert (insn
)
2383 rtx new_start_label
;
2386 /* If the count register has not been allocated to RC, say if
2387 there is a movstr pattern in the loop, then do not insert a
2388 RPTB instruction. Instead we emit a decrement and branch
2389 at the end of the loop. */
2390 count_reg
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 0), 0);
2391 if (REGNO (count_reg
) != RC_REGNO
)
2394 /* Extract the start label from the jump pattern (rptb_end). */
2395 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2397 if (! c4x_rptb_valid_p (insn
, start_label
))
2399 /* We can not use the rptb insn. Replace it so reorg can use
2400 the delay slots of the jump insn. */
2401 emit_insn_before (gen_addqi3 (count_reg
, count_reg
, GEN_INT (-1)), insn
);
2402 emit_insn_before (gen_cmpqi (count_reg
, GEN_INT (0)), insn
);
2403 emit_insn_before (gen_bge (start_label
), insn
);
2404 LABEL_NUSES (start_label
)++;
2409 end_label
= gen_label_rtx ();
2410 LABEL_NUSES (end_label
)++;
2411 emit_label_after (end_label
, insn
);
2413 new_start_label
= gen_label_rtx ();
2414 LABEL_NUSES (new_start_label
)++;
2416 for (; insn
; insn
= PREV_INSN (insn
))
2418 if (insn
== start_label
)
2420 if (GET_CODE (insn
) == JUMP_INSN
&&
2421 JUMP_LABEL (insn
) == start_label
)
2422 redirect_jump (insn
, new_start_label
, 0);
2425 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label
);
2427 emit_label_after (new_start_label
, insn
);
2429 if (TARGET_RPTS
&& c4x_rptb_rpts_p (PREV_INSN (insn
), 0))
2430 emit_insn_after (gen_rpts_top (new_start_label
, end_label
), insn
);
2432 emit_insn_after (gen_rptb_top (new_start_label
, end_label
), insn
);
2433 if (LABEL_NUSES (start_label
) == 0)
2434 delete_insn (start_label
);
2438 /* We need to use direct addressing for large constants and addresses
2439 that cannot fit within an instruction. We must check for these
2440 after after the final jump optimisation pass, since this may
2441 introduce a local_move insn for a SYMBOL_REF. This pass
2442 must come before delayed branch slot filling since it can generate
2443 additional instructions.
2445 This function also fixes up RTPB style loops that didn't get RC
2446 allocated as the loop counter. */
2453 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2455 /* Look for insn. */
2458 int insn_code_number
;
2461 insn_code_number
= recog_memoized (insn
);
2463 if (insn_code_number
< 0)
2466 /* Insert the RTX for RPTB at the top of the loop
2467 and a label at the end of the loop. */
2468 if (insn_code_number
== CODE_FOR_rptb_end
)
2469 c4x_rptb_insert(insn
);
2471 /* We need to split the insn here. Otherwise the calls to
2472 force_const_mem will not work for load_immed_address. */
2475 /* Don't split the insn if it has been deleted. */
2476 if (! INSN_DELETED_P (old
))
2477 insn
= try_split (PATTERN(old
), old
, 1);
2479 /* When not optimizing, the old insn will be still left around
2480 with only the 'deleted' bit set. Transform it into a note
2481 to avoid confusion of subsequent processing. */
2482 if (INSN_DELETED_P (old
))
2484 PUT_CODE (old
, NOTE
);
2485 NOTE_LINE_NUMBER (old
) = NOTE_INSN_DELETED
;
2486 NOTE_SOURCE_FILE (old
) = 0;
2497 return REG_P (op
) && IS_ADDR_OR_PSEUDO_REG (op
);
2505 return REG_P (op
) && IS_INDEX_OR_PSEUDO_REG (op
);
2510 c4x_immed_int_constant (op
)
2513 if (GET_CODE (op
) != CONST_INT
)
2516 return GET_MODE (op
) == VOIDmode
2517 || GET_MODE_CLASS (op
) == MODE_INT
2518 || GET_MODE_CLASS (op
) == MODE_PARTIAL_INT
;
2523 c4x_immed_float_constant (op
)
2526 if (GET_CODE (op
) != CONST_DOUBLE
)
2529 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2530 present this only means that a MEM rtx has been generated. It does
2531 not mean the rtx is really in memory. */
2533 return GET_MODE (op
) == QFmode
|| GET_MODE (op
) == HFmode
;
2538 c4x_shiftable_constant (op
)
2543 int val
= INTVAL (op
);
2545 for (i
= 0; i
< 16; i
++)
2550 mask
= ((0xffff >> i
) << 16) | 0xffff;
2551 if (IS_INT16_CONST (val
& (1 << 31) ? (val
>> i
) | ~mask
2552 : (val
>> i
) & mask
))
2562 return c4x_immed_float_constant (op
) && c4x_immed_float_p (op
);
2570 return c4x_immed_int_constant (op
) && IS_INT16_CONST (INTVAL (op
));
2580 return c4x_immed_int_constant (op
) && IS_INT8_CONST (INTVAL (op
));
2588 if (TARGET_C3X
|| ! c4x_immed_int_constant (op
))
2590 return IS_INT5_CONST (INTVAL (op
));
2598 return c4x_immed_int_constant (op
) && IS_UINT16_CONST (INTVAL (op
));
2606 return c4x_immed_int_constant (op
) && IS_NOT_UINT16_CONST (INTVAL (op
));
2614 return c4x_immed_int_constant (op
) && IS_HIGH_CONST (INTVAL (op
));
2618 /* The constraints do not have to check the register class,
2619 except when needed to discriminate between the constraints.
2620 The operand has been checked by the predicates to be valid. */
2622 /* ARx + 9-bit signed const or IRn
2623 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2624 We don't include the pre/post inc/dec forms here since
2625 they are handled by the <> constraints. */
2628 c4x_Q_constraint (op
)
2631 enum machine_mode mode
= GET_MODE (op
);
2633 if (GET_CODE (op
) != MEM
)
2636 switch (GET_CODE (op
))
2643 rtx op0
= XEXP (op
, 0);
2644 rtx op1
= XEXP (op
, 1);
2652 if (GET_CODE (op1
) != CONST_INT
)
2655 /* HImode and HFmode must be offsettable. */
2656 if (mode
== HImode
|| mode
== HFmode
)
2657 return IS_DISP8_OFF_CONST (INTVAL (op1
));
2659 return IS_DISP8_CONST (INTVAL (op1
));
2670 /* ARx + 5-bit unsigned const
2671 *ARx, *+ARx(n) for n < 32. */
2674 c4x_R_constraint (op
)
2677 enum machine_mode mode
= GET_MODE (op
);
2681 if (GET_CODE (op
) != MEM
)
2684 switch (GET_CODE (op
))
2691 rtx op0
= XEXP (op
, 0);
2692 rtx op1
= XEXP (op
, 1);
2697 if (GET_CODE (op1
) != CONST_INT
)
2700 /* HImode and HFmode must be offsettable. */
2701 if (mode
== HImode
|| mode
== HFmode
)
2702 return IS_UINT5_CONST (INTVAL (op1
) + 1);
2704 return IS_UINT5_CONST (INTVAL (op1
));
2719 enum machine_mode mode
= GET_MODE (op
);
2721 if (TARGET_C3X
|| GET_CODE (op
) != MEM
)
2725 switch (GET_CODE (op
))
2728 return IS_ADDR_OR_PSEUDO_REG (op
);
2732 rtx op0
= XEXP (op
, 0);
2733 rtx op1
= XEXP (op
, 1);
2735 /* HImode and HFmode must be offsettable. */
2736 if (mode
== HImode
|| mode
== HFmode
)
2737 return IS_ADDR_OR_PSEUDO_REG (op0
)
2738 && GET_CODE (op1
) == CONST_INT
2739 && IS_UINT5_CONST (INTVAL (op1
) + 1);
2742 && IS_ADDR_OR_PSEUDO_REG (op0
)
2743 && GET_CODE (op1
) == CONST_INT
2744 && IS_UINT5_CONST (INTVAL (op1
));
2755 /* ARx + 1-bit unsigned const or IRn
2756 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2757 We don't include the pre/post inc/dec forms here since
2758 they are handled by the <> constraints. */
2761 c4x_S_constraint (op
)
2764 enum machine_mode mode
= GET_MODE (op
);
2765 if (GET_CODE (op
) != MEM
)
2768 switch (GET_CODE (op
))
2776 rtx op0
= XEXP (op
, 0);
2777 rtx op1
= XEXP (op
, 1);
2779 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2780 || (op0
!= XEXP (op1
, 0)))
2783 op0
= XEXP (op1
, 0);
2784 op1
= XEXP (op1
, 1);
2785 return REG_P (op0
) && REG_P (op1
);
2786 /* Pre or post_modify with a displacement of 0 or 1
2787 should not be generated. */
2793 rtx op0
= XEXP (op
, 0);
2794 rtx op1
= XEXP (op
, 1);
2802 if (GET_CODE (op1
) != CONST_INT
)
2805 /* HImode and HFmode must be offsettable. */
2806 if (mode
== HImode
|| mode
== HFmode
)
2807 return IS_DISP1_OFF_CONST (INTVAL (op1
));
2809 return IS_DISP1_CONST (INTVAL (op1
));
2824 enum machine_mode mode
= GET_MODE (op
);
2825 if (GET_CODE (op
) != MEM
)
2829 switch (GET_CODE (op
))
2833 if (mode
!= QImode
&& mode
!= QFmode
)
2840 return IS_ADDR_OR_PSEUDO_REG (op
);
2845 rtx op0
= XEXP (op
, 0);
2846 rtx op1
= XEXP (op
, 1);
2848 if (mode
!= QImode
&& mode
!= QFmode
)
2851 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2852 || (op0
!= XEXP (op1
, 0)))
2855 op0
= XEXP (op1
, 0);
2856 op1
= XEXP (op1
, 1);
2857 return REG_P (op0
) && IS_ADDR_OR_PSEUDO_REG (op0
)
2858 && REG_P (op1
) && IS_INDEX_OR_PSEUDO_REG (op1
);
2859 /* Pre or post_modify with a displacement of 0 or 1
2860 should not be generated. */
2865 rtx op0
= XEXP (op
, 0);
2866 rtx op1
= XEXP (op
, 1);
2870 /* HImode and HFmode must be offsettable. */
2871 if (mode
== HImode
|| mode
== HFmode
)
2872 return IS_ADDR_OR_PSEUDO_REG (op0
)
2873 && GET_CODE (op1
) == CONST_INT
2874 && IS_DISP1_OFF_CONST (INTVAL (op1
));
2877 return (IS_INDEX_OR_PSEUDO_REG (op1
)
2878 && IS_ADDR_OR_PSEUDO_REG (op0
))
2879 || (IS_ADDR_OR_PSEUDO_REG (op1
)
2880 && IS_INDEX_OR_PSEUDO_REG (op0
));
2882 return IS_ADDR_OR_PSEUDO_REG (op0
)
2883 && GET_CODE (op1
) == CONST_INT
2884 && IS_DISP1_CONST (INTVAL (op1
));
2896 /* Direct memory operand. */
2899 c4x_T_constraint (op
)
2902 if (GET_CODE (op
) != MEM
)
2906 if (GET_CODE (op
) != LO_SUM
)
2908 /* Allow call operands. */
2909 return GET_CODE (op
) == SYMBOL_REF
2910 && GET_MODE (op
) == Pmode
2911 && SYMBOL_REF_FUNCTION_P (op
);
2914 /* HImode and HFmode are not offsettable. */
2915 if (GET_MODE (op
) == HImode
|| GET_CODE (op
) == HFmode
)
2918 if ((GET_CODE (XEXP (op
, 0)) == REG
)
2919 && (REGNO (XEXP (op
, 0)) == DP_REGNO
))
2920 return c4x_U_constraint (XEXP (op
, 1));
2926 /* Symbolic operand. */
2929 c4x_U_constraint (op
)
2932 /* Don't allow direct addressing to an arbitrary constant. */
2933 return GET_CODE (op
) == CONST
2934 || GET_CODE (op
) == SYMBOL_REF
2935 || GET_CODE (op
) == LABEL_REF
;
2940 c4x_autoinc_operand (op
, mode
)
2942 enum machine_mode mode ATTRIBUTE_UNUSED
;
2944 if (GET_CODE (op
) == MEM
)
2946 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
2952 || code
== PRE_MODIFY
2953 || code
== POST_MODIFY
2961 /* Match any operand. */
2964 any_operand (op
, mode
)
2965 register rtx op ATTRIBUTE_UNUSED
;
2966 enum machine_mode mode ATTRIBUTE_UNUSED
;
2972 /* Nonzero if OP is a floating point value with value 0.0. */
2975 fp_zero_operand (op
, mode
)
2977 enum machine_mode mode ATTRIBUTE_UNUSED
;
2981 if (GET_CODE (op
) != CONST_DOUBLE
)
2983 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
2984 return REAL_VALUES_EQUAL (r
, dconst0
);
2989 const_operand (op
, mode
)
2991 register enum machine_mode mode
;
2997 if (GET_CODE (op
) != CONST_DOUBLE
2998 || GET_MODE (op
) != mode
2999 || GET_MODE_CLASS (mode
) != MODE_FLOAT
)
3002 return c4x_immed_float_p (op
);
3008 if (GET_CODE (op
) == CONSTANT_P_RTX
)
3011 if (GET_CODE (op
) != CONST_INT
3012 || (GET_MODE (op
) != VOIDmode
&& GET_MODE (op
) != mode
)
3013 || GET_MODE_CLASS (mode
) != MODE_INT
)
3016 return IS_HIGH_CONST (INTVAL (op
)) || IS_INT16_CONST (INTVAL (op
));
3028 stik_const_operand (op
, mode
)
3030 enum machine_mode mode ATTRIBUTE_UNUSED
;
3032 return c4x_K_constant (op
);
3037 not_const_operand (op
, mode
)
3039 enum machine_mode mode ATTRIBUTE_UNUSED
;
3041 return c4x_N_constant (op
);
3046 reg_operand (op
, mode
)
3048 enum machine_mode mode
;
3050 if (GET_CODE (op
) == SUBREG
3051 && GET_MODE (op
) == QFmode
)
3053 return register_operand (op
, mode
);
3058 mixed_subreg_operand (op
, mode
)
3060 enum machine_mode mode ATTRIBUTE_UNUSED
;
3062 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3063 int and a long double. */
3064 if (GET_CODE (op
) == SUBREG
3065 && (GET_MODE (op
) == QFmode
)
3066 && (GET_MODE (SUBREG_REG (op
)) == QImode
3067 || GET_MODE (SUBREG_REG (op
)) == HImode
))
3074 reg_imm_operand (op
, mode
)
3076 enum machine_mode mode ATTRIBUTE_UNUSED
;
3078 if (REG_P (op
) || CONSTANT_P (op
))
3085 not_modify_reg (op
, mode
)
3087 enum machine_mode mode ATTRIBUTE_UNUSED
;
3089 if (REG_P (op
) || CONSTANT_P (op
))
3091 if (GET_CODE (op
) != MEM
)
3094 switch (GET_CODE (op
))
3101 rtx op0
= XEXP (op
, 0);
3102 rtx op1
= XEXP (op
, 1);
3107 if (REG_P (op1
) || GET_CODE (op1
) == CONST_INT
)
3113 rtx op0
= XEXP (op
, 0);
3115 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
3133 not_rc_reg (op
, mode
)
3135 enum machine_mode mode ATTRIBUTE_UNUSED
;
3137 if (REG_P (op
) && REGNO (op
) == RC_REGNO
)
3143 /* Extended precision register R0-R1. */
3146 r0r1_reg_operand (op
, mode
)
3148 enum machine_mode mode
;
3150 if (! reg_operand (op
, mode
))
3152 if (GET_CODE (op
) == SUBREG
)
3153 op
= SUBREG_REG (op
);
3154 return REG_P (op
) && IS_R0R1_OR_PSEUDO_REG (op
);
3158 /* Extended precision register R2-R3. */
3161 r2r3_reg_operand (op
, mode
)
3163 enum machine_mode mode
;
3165 if (! reg_operand (op
, mode
))
3167 if (GET_CODE (op
) == SUBREG
)
3168 op
= SUBREG_REG (op
);
3169 return REG_P (op
) && IS_R2R3_OR_PSEUDO_REG (op
);
3173 /* Low extended precision register R0-R7. */
3176 ext_low_reg_operand (op
, mode
)
3178 enum machine_mode mode
;
3180 if (! reg_operand (op
, mode
))
3182 if (GET_CODE (op
) == SUBREG
)
3183 op
= SUBREG_REG (op
);
3184 return REG_P (op
) && IS_EXT_LOW_OR_PSEUDO_REG (op
);
3188 /* Extended precision register. */
3191 ext_reg_operand (op
, mode
)
3193 enum machine_mode mode
;
3195 if (! reg_operand (op
, mode
))
3197 if (GET_CODE (op
) == SUBREG
)
3198 op
= SUBREG_REG (op
);
3201 return IS_EXT_OR_PSEUDO_REG (op
);
3205 /* Standard precision register. */
3208 std_reg_operand (op
, mode
)
3210 enum machine_mode mode
;
3212 if (! reg_operand (op
, mode
))
3214 if (GET_CODE (op
) == SUBREG
)
3215 op
= SUBREG_REG (op
);
3216 return REG_P (op
) && IS_STD_OR_PSEUDO_REG (op
);
3219 /* Standard precision or normal register. */
3222 std_or_reg_operand (op
, mode
)
3224 enum machine_mode mode
;
3226 if (reload_in_progress
)
3227 return std_reg_operand (op
, mode
);
3228 return reg_operand (op
, mode
);
3231 /* Address register. */
3234 addr_reg_operand (op
, mode
)
3236 enum machine_mode mode
;
3238 if (! reg_operand (op
, mode
))
3240 return c4x_a_register (op
);
3244 /* Index register. */
3247 index_reg_operand (op
, mode
)
3249 enum machine_mode mode
;
3251 if (! reg_operand (op
, mode
))
3253 if (GET_CODE (op
) == SUBREG
)
3254 op
= SUBREG_REG (op
);
3255 return c4x_x_register (op
);
3262 dp_reg_operand (op
, mode
)
3264 enum machine_mode mode ATTRIBUTE_UNUSED
;
3266 return REG_P (op
) && IS_DP_OR_PSEUDO_REG (op
);
3273 sp_reg_operand (op
, mode
)
3275 enum machine_mode mode ATTRIBUTE_UNUSED
;
3277 return REG_P (op
) && IS_SP_OR_PSEUDO_REG (op
);
3284 st_reg_operand (op
, mode
)
3286 enum machine_mode mode ATTRIBUTE_UNUSED
;
3288 return REG_P (op
) && IS_ST_OR_PSEUDO_REG (op
);
3295 rc_reg_operand (op
, mode
)
3297 enum machine_mode mode ATTRIBUTE_UNUSED
;
3299 return REG_P (op
) && IS_RC_OR_PSEUDO_REG (op
);
3304 call_address_operand (op
, mode
)
3306 enum machine_mode mode ATTRIBUTE_UNUSED
;
3308 return (REG_P (op
) || symbolic_address_operand (op
, mode
));
3312 /* Symbolic address operand. */
3315 symbolic_address_operand (op
, mode
)
3317 enum machine_mode mode ATTRIBUTE_UNUSED
;
3319 switch (GET_CODE (op
))
3331 /* Check dst operand of a move instruction. */
3334 dst_operand (op
, mode
)
3336 enum machine_mode mode
;
3338 if (GET_CODE (op
) == SUBREG
3339 && mixed_subreg_operand (op
, mode
))
3343 return reg_operand (op
, mode
);
3345 return nonimmediate_operand (op
, mode
);
3349 /* Check src operand of two operand arithmetic instructions. */
3352 src_operand (op
, mode
)
3354 enum machine_mode mode
;
3356 if (GET_CODE (op
) == SUBREG
3357 && mixed_subreg_operand (op
, mode
))
3361 return reg_operand (op
, mode
);
3363 if (mode
== VOIDmode
)
3364 mode
= GET_MODE (op
);
3366 if (GET_CODE (op
) == CONST_INT
)
3367 return (mode
== QImode
|| mode
== Pmode
|| mode
== HImode
)
3368 && c4x_I_constant (op
);
3370 /* We don't like CONST_DOUBLE integers. */
3371 if (GET_CODE (op
) == CONST_DOUBLE
)
3372 return c4x_H_constant (op
);
3374 /* Disallow symbolic addresses. Only the predicate
3375 symbolic_address_operand will match these. */
3376 if (GET_CODE (op
) == SYMBOL_REF
3377 || GET_CODE (op
) == LABEL_REF
3378 || GET_CODE (op
) == CONST
)
3381 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3382 access to symbolic addresses. These operands will get forced
3383 into a register and the movqi expander will generate a
3384 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3385 if (GET_CODE (op
) == MEM
3386 && ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
3387 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
3388 || GET_CODE (XEXP (op
, 0)) == CONST
)))
3389 return ! TARGET_LOAD_DIRECT_MEMS
&& GET_MODE (op
) == mode
;
3391 return general_operand (op
, mode
);
3396 src_hi_operand (op
, mode
)
3398 enum machine_mode mode
;
3400 if (c4x_O_constant (op
))
3402 return src_operand (op
, mode
);
3406 /* Check src operand of two operand logical instructions. */
3409 lsrc_operand (op
, mode
)
3411 enum machine_mode mode
;
3413 if (mode
== VOIDmode
)
3414 mode
= GET_MODE (op
);
3416 if (mode
!= QImode
&& mode
!= Pmode
)
3417 fatal_insn ("mode not QImode", op
);
3419 if (GET_CODE (op
) == CONST_INT
)
3420 return c4x_L_constant (op
) || c4x_J_constant (op
);
3422 return src_operand (op
, mode
);
3426 /* Check src operand of two operand tricky instructions. */
3429 tsrc_operand (op
, mode
)
3431 enum machine_mode mode
;
3433 if (mode
== VOIDmode
)
3434 mode
= GET_MODE (op
);
3436 if (mode
!= QImode
&& mode
!= Pmode
)
3437 fatal_insn ("mode not QImode", op
);
3439 if (GET_CODE (op
) == CONST_INT
)
3440 return c4x_L_constant (op
) || c4x_N_constant (op
) || c4x_J_constant (op
);
3442 return src_operand (op
, mode
);
3446 /* Check src operand of two operand non immedidate instructions. */
3449 nonimmediate_src_operand (op
, mode
)
3451 enum machine_mode mode
;
3453 if (GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
3456 return src_operand (op
, mode
);
3460 /* Check logical src operand of two operand non immedidate instructions. */
3463 nonimmediate_lsrc_operand (op
, mode
)
3465 enum machine_mode mode
;
3467 if (GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
3470 return lsrc_operand (op
, mode
);
3475 reg_or_const_operand (op
, mode
)
3477 enum machine_mode mode
;
3479 return reg_operand (op
, mode
) || const_operand (op
, mode
);
3483 /* Check for indirect operands allowable in parallel instruction. */
3486 par_ind_operand (op
, mode
)
3488 enum machine_mode mode
;
3490 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3493 return c4x_S_indirect (op
);
3497 /* Check for operands allowable in parallel instruction. */
3500 parallel_operand (op
, mode
)
3502 enum machine_mode mode
;
3504 return ext_low_reg_operand (op
, mode
) || par_ind_operand (op
, mode
);
3509 c4x_S_address_parse (op
, base
, incdec
, index
, disp
)
3521 if (GET_CODE (op
) != MEM
)
3522 fatal_insn ("invalid indirect memory address", op
);
3525 switch (GET_CODE (op
))
3528 *base
= REGNO (XEXP (op
, 0));
3534 *base
= REGNO (XEXP (op
, 0));
3540 *base
= REGNO (XEXP (op
, 0));
3546 *base
= REGNO (XEXP (op
, 0));
3552 *base
= REGNO (XEXP (op
, 0));
3553 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3555 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3556 *disp
= 0; /* ??? */
3559 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3564 *base
= REGNO (XEXP (op
, 0));
3565 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3567 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3568 *disp
= 1; /* ??? */
3571 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3582 rtx op0
= XEXP (op
, 0);
3583 rtx op1
= XEXP (op
, 1);
3585 if (c4x_a_register (op0
))
3587 if (c4x_x_register (op1
))
3589 *base
= REGNO (op0
);
3590 *index
= REGNO (op1
);
3593 else if ((GET_CODE (op1
) == CONST_INT
3594 && IS_DISP1_CONST (INTVAL (op1
))))
3596 *base
= REGNO (op0
);
3597 *disp
= INTVAL (op1
);
3601 else if (c4x_x_register (op0
) && c4x_a_register (op1
))
3603 *base
= REGNO (op1
);
3604 *index
= REGNO (op0
);
3611 fatal_insn ("invalid indirect (S) memory address", op
);
3617 c4x_address_conflict (op0
, op1
, store0
, store1
)
3632 if (MEM_VOLATILE_P (op0
) && MEM_VOLATILE_P (op1
))
3635 c4x_S_address_parse (op0
, &base0
, &incdec0
, &index0
, &disp0
);
3636 c4x_S_address_parse (op1
, &base1
, &incdec1
, &index1
, &disp1
);
3638 if (store0
&& store1
)
3640 /* If we have two stores in parallel to the same address, then
3641 the C4x only executes one of the stores. This is unlikely to
3642 cause problems except when writing to a hardware device such
3643 as a FIFO since the second write will be lost. The user
3644 should flag the hardware location as being volatile so that
3645 we don't do this optimisation. While it is unlikely that we
3646 have an aliased address if both locations are not marked
3647 volatile, it is probably safer to flag a potential conflict
3648 if either location is volatile. */
3649 if (! flag_argument_noalias
)
3651 if (MEM_VOLATILE_P (op0
) || MEM_VOLATILE_P (op1
))
3656 /* If have a parallel load and a store to the same address, the load
3657 is performed first, so there is no conflict. Similarly, there is
3658 no conflict if have parallel loads from the same address. */
3660 /* Cannot use auto increment or auto decrement twice for same
3662 if (base0
== base1
&& incdec0
&& incdec0
)
3665 /* It might be too confusing for GCC if we have use a base register
3666 with a side effect and a memory reference using the same register
3668 if (! TARGET_DEVEL
&& base0
== base1
&& (incdec0
|| incdec1
))
3671 /* We can not optimize the case where op1 and op2 refer to the same
3673 if (base0
== base1
&& disp0
== disp1
&& index0
== index1
)
3681 /* Check for while loop inside a decrement and branch loop. */
3684 c4x_label_conflict (insn
, jump
, db
)
3691 if (GET_CODE (insn
) == CODE_LABEL
)
3693 if (CODE_LABEL_NUMBER (jump
) == CODE_LABEL_NUMBER (insn
))
3695 if (CODE_LABEL_NUMBER (db
) == CODE_LABEL_NUMBER (insn
))
3698 insn
= PREV_INSN (insn
);
3704 /* Validate combination of operands for parallel load/store instructions. */
3707 valid_parallel_load_store (operands
, mode
)
3709 enum machine_mode mode ATTRIBUTE_UNUSED
;
3711 rtx op0
= operands
[0];
3712 rtx op1
= operands
[1];
3713 rtx op2
= operands
[2];
3714 rtx op3
= operands
[3];
3716 if (GET_CODE (op0
) == SUBREG
)
3717 op0
= SUBREG_REG (op0
);
3718 if (GET_CODE (op1
) == SUBREG
)
3719 op1
= SUBREG_REG (op1
);
3720 if (GET_CODE (op2
) == SUBREG
)
3721 op2
= SUBREG_REG (op2
);
3722 if (GET_CODE (op3
) == SUBREG
)
3723 op3
= SUBREG_REG (op3
);
3725 /* The patterns should only allow ext_low_reg_operand() or
3726 par_ind_operand() operands. Thus of the 4 operands, only 2
3727 should be REGs and the other 2 should be MEMs. */
3729 /* This test prevents the multipack pass from using this pattern if
3730 op0 is used as an index or base register in op2 or op3, since
3731 this combination will require reloading. */
3732 if (GET_CODE (op0
) == REG
3733 && ((GET_CODE (op2
) == MEM
&& reg_mentioned_p (op0
, XEXP (op2
, 0)))
3734 || (GET_CODE (op3
) == MEM
&& reg_mentioned_p (op0
, XEXP (op3
, 0)))))
3738 if (GET_CODE (op0
) == REG
&& GET_CODE (op2
) == REG
)
3739 return (REGNO (op0
) != REGNO (op2
))
3740 && GET_CODE (op1
) == MEM
&& GET_CODE (op3
) == MEM
3741 && ! c4x_address_conflict (op1
, op3
, 0, 0);
3744 if (GET_CODE (op1
) == REG
&& GET_CODE (op3
) == REG
)
3745 return GET_CODE (op0
) == MEM
&& GET_CODE (op2
) == MEM
3746 && ! c4x_address_conflict (op0
, op2
, 1, 1);
3749 if (GET_CODE (op0
) == REG
&& GET_CODE (op3
) == REG
)
3750 return GET_CODE (op1
) == MEM
&& GET_CODE (op2
) == MEM
3751 && ! c4x_address_conflict (op1
, op2
, 0, 1);
3754 if (GET_CODE (op1
) == REG
&& GET_CODE (op2
) == REG
)
3755 return GET_CODE (op0
) == MEM
&& GET_CODE (op3
) == MEM
3756 && ! c4x_address_conflict (op0
, op3
, 1, 0);
3763 valid_parallel_operands_4 (operands
, mode
)
3765 enum machine_mode mode ATTRIBUTE_UNUSED
;
3767 rtx op0
= operands
[0];
3768 rtx op2
= operands
[2];
3770 if (GET_CODE (op0
) == SUBREG
)
3771 op0
= SUBREG_REG (op0
);
3772 if (GET_CODE (op2
) == SUBREG
)
3773 op2
= SUBREG_REG (op2
);
3775 /* This test prevents the multipack pass from using this pattern if
3776 op0 is used as an index or base register in op2, since this combination
3777 will require reloading. */
3778 if (GET_CODE (op0
) == REG
3779 && GET_CODE (op2
) == MEM
3780 && reg_mentioned_p (op0
, XEXP (op2
, 0)))
3788 valid_parallel_operands_5 (operands
, mode
)
3790 enum machine_mode mode ATTRIBUTE_UNUSED
;
3793 rtx op0
= operands
[0];
3794 rtx op1
= operands
[1];
3795 rtx op2
= operands
[2];
3796 rtx op3
= operands
[3];
3798 if (GET_CODE (op0
) == SUBREG
)
3799 op0
= SUBREG_REG (op0
);
3800 if (GET_CODE (op1
) == SUBREG
)
3801 op1
= SUBREG_REG (op1
);
3802 if (GET_CODE (op2
) == SUBREG
)
3803 op2
= SUBREG_REG (op2
);
3805 /* The patterns should only allow ext_low_reg_operand() or
3806 par_ind_operand() operands. Operands 1 and 2 may be commutative
3807 but only one of them can be a register. */
3808 if (GET_CODE (op1
) == REG
)
3810 if (GET_CODE (op2
) == REG
)
3816 /* This test prevents the multipack pass from using this pattern if
3817 op0 is used as an index or base register in op3, since this combination
3818 will require reloading. */
3819 if (GET_CODE (op0
) == REG
3820 && GET_CODE (op3
) == MEM
3821 && reg_mentioned_p (op0
, XEXP (op3
, 0)))
3829 valid_parallel_operands_6 (operands
, mode
)
3831 enum machine_mode mode ATTRIBUTE_UNUSED
;
3834 rtx op0
= operands
[0];
3835 rtx op1
= operands
[1];
3836 rtx op2
= operands
[2];
3837 rtx op4
= operands
[4];
3838 rtx op5
= operands
[5];
3840 if (GET_CODE (op1
) == SUBREG
)
3841 op1
= SUBREG_REG (op1
);
3842 if (GET_CODE (op2
) == SUBREG
)
3843 op2
= SUBREG_REG (op2
);
3844 if (GET_CODE (op4
) == SUBREG
)
3845 op4
= SUBREG_REG (op4
);
3846 if (GET_CODE (op5
) == SUBREG
)
3847 op5
= SUBREG_REG (op5
);
3849 /* The patterns should only allow ext_low_reg_operand() or
3850 par_ind_operand() operands. Thus of the 4 input operands, only 2
3851 should be REGs and the other 2 should be MEMs. */
3853 if (GET_CODE (op1
) == REG
)
3855 if (GET_CODE (op2
) == REG
)
3857 if (GET_CODE (op4
) == REG
)
3859 if (GET_CODE (op5
) == REG
)
3862 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3863 Perhaps we should count the MEMs as well? */
3867 /* This test prevents the multipack pass from using this pattern if
3868 op0 is used as an index or base register in op4 or op5, since
3869 this combination will require reloading. */
3870 if (GET_CODE (op0
) == REG
3871 && ((GET_CODE (op4
) == MEM
&& reg_mentioned_p (op0
, XEXP (op4
, 0)))
3872 || (GET_CODE (op5
) == MEM
&& reg_mentioned_p (op0
, XEXP (op5
, 0)))))
3879 /* Validate combination of src operands. Note that the operands have
3880 been screened by the src_operand predicate. We just have to check
3881 that the combination of operands is valid. If FORCE is set, ensure
3882 that the destination regno is valid if we have a 2 operand insn. */
3885 c4x_valid_operands (code
, operands
, mode
, force
)
3888 enum machine_mode mode ATTRIBUTE_UNUSED
;
3893 enum rtx_code code1
;
3894 enum rtx_code code2
;
3896 if (code
== COMPARE
)
3907 if (GET_CODE (op1
) == SUBREG
)
3908 op1
= SUBREG_REG (op1
);
3909 if (GET_CODE (op2
) == SUBREG
)
3910 op2
= SUBREG_REG (op2
);
3912 code1
= GET_CODE (op1
);
3913 code2
= GET_CODE (op2
);
3915 if (code1
== REG
&& code2
== REG
)
3918 if (code1
== MEM
&& code2
== MEM
)
3920 if (c4x_S_indirect (op1
) && c4x_S_indirect (op2
))
3922 return c4x_R_indirect (op1
) && c4x_R_indirect (op2
);
3933 if (c4x_J_constant (op2
) && c4x_R_indirect (op1
))
3938 if (! c4x_H_constant (op2
))
3942 /* Any valid memory operand screened by src_operand is OK. */
3945 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3946 into a stack slot memory address comprising a PLUS and a
3952 fatal_insn ("c4x_valid_operands: Internal error", op2
);
3956 /* Check that we have a valid destination register for a two operand
3958 return ! force
|| code
== COMPARE
|| REGNO (op1
) == REGNO (operands
[0]);
3961 /* We assume MINUS is commutative since the subtract patterns
3962 also support the reverse subtract instructions. Since op1
3963 is not a register, and op2 is a register, op1 can only
3964 be a restricted memory operand for a shift instruction. */
3965 if (code
== ASHIFTRT
|| code
== LSHIFTRT
3966 || code
== ASHIFT
|| code
== COMPARE
)
3968 && (c4x_S_indirect (op1
) || c4x_R_indirect (op1
));
3973 if (c4x_J_constant (op1
) && c4x_R_indirect (op2
))
3978 if (! c4x_H_constant (op1
))
3982 /* Any valid memory operand screened by src_operand is OK. */
3990 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3991 into a stack slot memory address comprising a PLUS and a
4001 /* Check that we have a valid destination register for a two operand
4003 return ! force
|| REGNO (op1
) == REGNO (operands
[0]);
4007 int valid_operands (code
, operands
, mode
)
4010 enum machine_mode mode
;
4013 /* If we are not optimizing then we have to let anything go and let
4014 reload fix things up. instantiate_decl in function.c can produce
4015 invalid insns by changing the offset of a memory operand from a
4016 valid one into an invalid one, when the second operand is also a
4017 memory operand. The alternative is not to allow two memory
4018 operands for an insn when not optimizing. The problem only rarely
4019 occurs, for example with the C-torture program DFcmp.c. */
4021 return ! optimize
|| c4x_valid_operands (code
, operands
, mode
, 0);
4026 legitimize_operands (code
, operands
, mode
)
4029 enum machine_mode mode
;
4031 /* Compare only has 2 operands. */
4032 if (code
== COMPARE
)
4034 /* During RTL generation, force constants into pseudos so that
4035 they can get hoisted out of loops. This will tie up an extra
4036 register but can save an extra cycle. Only do this if loop
4037 optimisation enabled. (We cannot pull this trick for add and
4038 sub instructions since the flow pass won't find
4039 autoincrements etc.) This allows us to generate compare
4040 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4041 of LDI *AR0++, R0; CMPI 42, R0.
4043 Note that expand_binops will try to load an expensive constant
4044 into a register if it is used within a loop. Unfortunately,
4045 the cost mechanism doesn't allow us to look at the other
4046 operand to decide whether the constant is expensive. */
4048 if (! reload_in_progress
4051 && GET_CODE (operands
[1]) == CONST_INT
4052 && preserve_subexpressions_p ()
4053 && rtx_cost (operands
[1], code
) > 1)
4054 operands
[1] = force_reg (mode
, operands
[1]);
4056 if (! reload_in_progress
4057 && ! c4x_valid_operands (code
, operands
, mode
, 0))
4058 operands
[0] = force_reg (mode
, operands
[0]);
4062 /* We cannot do this for ADDI/SUBI insns since we will
4063 defeat the flow pass from finding autoincrement addressing
4065 if (! reload_in_progress
4066 && ! ((code
== PLUS
|| code
== MINUS
) && mode
== Pmode
)
4069 && GET_CODE (operands
[2]) == CONST_INT
4070 && preserve_subexpressions_p ()
4071 && rtx_cost (operands
[2], code
) > 1)
4072 operands
[2] = force_reg (mode
, operands
[2]);
4074 /* We can get better code on a C30 if we force constant shift counts
4075 into a register. This way they can get hoisted out of loops,
4076 tying up a register, but saving an instruction. The downside is
4077 that they may get allocated to an address or index register, and
4078 thus we will get a pipeline conflict if there is a nearby
4079 indirect address using an address register.
4081 Note that expand_binops will not try to load an expensive constant
4082 into a register if it is used within a loop for a shift insn. */
4084 if (! reload_in_progress
4085 && ! c4x_valid_operands (code
, operands
, mode
, TARGET_FORCE
))
4087 /* If the operand combination is invalid, we force operand1 into a
4088 register, preventing reload from having doing to do this at a
4090 operands
[1] = force_reg (mode
, operands
[1]);
4093 emit_move_insn (operands
[0], operands
[1]);
4094 operands
[1] = copy_rtx (operands
[0]);
4098 /* Just in case... */
4099 if (! c4x_valid_operands (code
, operands
, mode
, 0))
4100 operands
[2] = force_reg (mode
, operands
[2]);
4104 /* Right shifts require a negative shift count, but GCC expects
4105 a positive count, so we emit a NEG. */
4106 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
4107 && (GET_CODE (operands
[2]) != CONST_INT
))
4108 operands
[2] = gen_rtx_NEG (mode
, negate_rtx (mode
, operands
[2]));
4114 /* The following predicates are used for instruction scheduling. */
4117 group1_reg_operand (op
, mode
)
4119 enum machine_mode mode
;
4121 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4123 if (GET_CODE (op
) == SUBREG
)
4124 op
= SUBREG_REG (op
);
4125 return REG_P (op
) && (! reload_completed
|| IS_GROUP1_REG (op
));
4130 group1_mem_operand (op
, mode
)
4132 enum machine_mode mode
;
4134 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4137 if (GET_CODE (op
) == MEM
)
4140 if (GET_CODE (op
) == PLUS
)
4142 rtx op0
= XEXP (op
, 0);
4143 rtx op1
= XEXP (op
, 1);
4145 if ((REG_P (op0
) && (! reload_completed
|| IS_GROUP1_REG (op0
)))
4146 || (REG_P (op1
) && (! reload_completed
|| IS_GROUP1_REG (op1
))))
4149 else if ((REG_P (op
)) && (! reload_completed
|| IS_GROUP1_REG (op
)))
4157 /* Return true if any one of the address registers. */
4160 arx_reg_operand (op
, mode
)
4162 enum machine_mode mode
;
4164 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4166 if (GET_CODE (op
) == SUBREG
)
4167 op
= SUBREG_REG (op
);
4168 return REG_P (op
) && (! reload_completed
|| IS_ADDR_REG (op
));
4173 c4x_arn_reg_operand (op
, mode
, regno
)
4175 enum machine_mode mode
;
4178 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4180 if (GET_CODE (op
) == SUBREG
)
4181 op
= SUBREG_REG (op
);
4182 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
4187 c4x_arn_mem_operand (op
, mode
, regno
)
4189 enum machine_mode mode
;
4192 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4195 if (GET_CODE (op
) == MEM
)
4198 switch (GET_CODE (op
))
4207 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
4211 if (REG_P (XEXP (op
, 0)) && (! reload_completed
4212 || (REGNO (XEXP (op
, 0)) == regno
)))
4214 if (REG_P (XEXP (XEXP (op
, 1), 1))
4215 && (! reload_completed
4216 || (REGNO (XEXP (XEXP (op
, 1), 1)) == regno
)))
4222 rtx op0
= XEXP (op
, 0);
4223 rtx op1
= XEXP (op
, 1);
4225 if ((REG_P (op0
) && (! reload_completed
4226 || (REGNO (op0
) == regno
)))
4227 || (REG_P (op1
) && (! reload_completed
4228 || (REGNO (op1
) == regno
))))
4242 ar0_reg_operand (op
, mode
)
4244 enum machine_mode mode
;
4246 return c4x_arn_reg_operand (op
, mode
, AR0_REGNO
);
4251 ar0_mem_operand (op
, mode
)
4253 enum machine_mode mode
;
4255 return c4x_arn_mem_operand (op
, mode
, AR0_REGNO
);
4260 ar1_reg_operand (op
, mode
)
4262 enum machine_mode mode
;
4264 return c4x_arn_reg_operand (op
, mode
, AR1_REGNO
);
4269 ar1_mem_operand (op
, mode
)
4271 enum machine_mode mode
;
4273 return c4x_arn_mem_operand (op
, mode
, AR1_REGNO
);
4278 ar2_reg_operand (op
, mode
)
4280 enum machine_mode mode
;
4282 return c4x_arn_reg_operand (op
, mode
, AR2_REGNO
);
4287 ar2_mem_operand (op
, mode
)
4289 enum machine_mode mode
;
4291 return c4x_arn_mem_operand (op
, mode
, AR2_REGNO
);
4296 ar3_reg_operand (op
, mode
)
4298 enum machine_mode mode
;
4300 return c4x_arn_reg_operand (op
, mode
, AR3_REGNO
);
4305 ar3_mem_operand (op
, mode
)
4307 enum machine_mode mode
;
4309 return c4x_arn_mem_operand (op
, mode
, AR3_REGNO
);
4314 ar4_reg_operand (op
, mode
)
4316 enum machine_mode mode
;
4318 return c4x_arn_reg_operand (op
, mode
, AR4_REGNO
);
4323 ar4_mem_operand (op
, mode
)
4325 enum machine_mode mode
;
4327 return c4x_arn_mem_operand (op
, mode
, AR4_REGNO
);
4332 ar5_reg_operand (op
, mode
)
4334 enum machine_mode mode
;
4336 return c4x_arn_reg_operand (op
, mode
, AR5_REGNO
);
4341 ar5_mem_operand (op
, mode
)
4343 enum machine_mode mode
;
4345 return c4x_arn_mem_operand (op
, mode
, AR5_REGNO
);
4350 ar6_reg_operand (op
, mode
)
4352 enum machine_mode mode
;
4354 return c4x_arn_reg_operand (op
, mode
, AR6_REGNO
);
4359 ar6_mem_operand (op
, mode
)
4361 enum machine_mode mode
;
4363 return c4x_arn_mem_operand (op
, mode
, AR6_REGNO
);
4368 ar7_reg_operand (op
, mode
)
4370 enum machine_mode mode
;
4372 return c4x_arn_reg_operand (op
, mode
, AR7_REGNO
);
4377 ar7_mem_operand (op
, mode
)
4379 enum machine_mode mode
;
4381 return c4x_arn_mem_operand (op
, mode
, AR7_REGNO
);
4386 ir0_reg_operand (op
, mode
)
4388 enum machine_mode mode
;
4390 return c4x_arn_reg_operand (op
, mode
, IR0_REGNO
);
4395 ir0_mem_operand (op
, mode
)
4397 enum machine_mode mode
;
4399 return c4x_arn_mem_operand (op
, mode
, IR0_REGNO
);
4404 ir1_reg_operand (op
, mode
)
4406 enum machine_mode mode
;
4408 return c4x_arn_reg_operand (op
, mode
, IR1_REGNO
);
4413 ir1_mem_operand (op
, mode
)
4415 enum machine_mode mode
;
4417 return c4x_arn_mem_operand (op
, mode
, IR1_REGNO
);
4421 /* This is similar to operand_subword but allows autoincrement
4425 c4x_operand_subword (op
, i
, validate_address
, mode
)
4428 int validate_address
;
4429 enum machine_mode mode
;
4431 if (mode
!= HImode
&& mode
!= HFmode
)
4432 fatal_insn ("c4x_operand_subword: invalid mode", op
);
4434 if (mode
== HFmode
&& REG_P (op
))
4435 fatal_insn ("c4x_operand_subword: invalid operand", op
);
4437 if (GET_CODE (op
) == MEM
)
4439 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
4440 enum machine_mode mode
= GET_MODE (XEXP (op
, 0));
4441 enum machine_mode submode
;
4446 else if (mode
== HFmode
)
4453 return gen_rtx_MEM (submode
, XEXP (op
, 0));
4459 /* We could handle these with some difficulty.
4460 e.g., *p-- => *(p-=2); *(p+1). */
4461 fatal_insn ("c4x_operand_subword: invalid autoincrement", op
);
4467 fatal_insn ("c4x_operand_subword: invalid address", op
);
4469 /* Even though offsettable_address_p considers (MEM
4470 (LO_SUM)) to be offsettable, it is not safe if the
4471 address is at the end of the data page since we also have
4472 to fix up the associated high PART. In this case where
4473 we are trying to split a HImode or HFmode memory
4474 reference, we would have to emit another insn to reload a
4475 new HIGH value. It's easier to disable LO_SUM memory references
4476 in HImode or HFmode and we probably get better code. */
4478 fatal_insn ("c4x_operand_subword: address not offsettable", op
);
4485 return operand_subword (op
, i
, validate_address
, mode
);
4490 struct name_list
*next
;
4494 static struct name_list
*global_head
;
4495 static struct name_list
*extern_head
;
4498 /* Add NAME to list of global symbols and remove from external list if
4499 present on external list. */
4502 c4x_global_label (name
)
4505 struct name_list
*p
, *last
;
4507 /* Do not insert duplicate names, so linearly search through list of
4512 if (strcmp (p
->name
, name
) == 0)
4516 p
= (struct name_list
*) xmalloc (sizeof *p
);
4517 p
->next
= global_head
;
4521 /* Remove this name from ref list if present. */
4526 if (strcmp (p
->name
, name
) == 0)
4529 last
->next
= p
->next
;
4531 extern_head
= p
->next
;
4540 /* Add NAME to list of external symbols. */
4543 c4x_external_ref (name
)
4546 struct name_list
*p
;
4548 /* Do not insert duplicate names. */
4552 if (strcmp (p
->name
, name
) == 0)
4557 /* Do not insert ref if global found. */
4561 if (strcmp (p
->name
, name
) == 0)
4565 p
= (struct name_list
*) xmalloc (sizeof *p
);
4566 p
->next
= extern_head
;
4571 /* We need to have a data section we can identify so that we can set
4572 the DP register back to a data pointer in the small memory model.
4573 This is only required for ISRs if we are paranoid that someone
4574 may have quietly changed this register on the sly. */
4579 if (TARGET_C30
) dspversion
= 30;
4580 if (TARGET_C31
) dspversion
= 31;
4581 if (TARGET_C32
) dspversion
= 32;
4582 if (TARGET_C33
) dspversion
= 33;
4583 if (TARGET_C40
) dspversion
= 40;
4584 if (TARGET_C44
) dspversion
= 44;
4586 default_file_start ();
4587 fprintf (asm_out_file
, "\t.version\t%d\n", dspversion
);
4588 fputs ("\n\t.data\ndata_sec:\n", asm_out_file
);
4595 struct name_list
*p
;
4597 /* Output all external names that are not global. */
4601 fprintf (asm_out_file
, "\t.ref\t");
4602 assemble_name (asm_out_file
, p
->name
);
4603 fprintf (asm_out_file
, "\n");
4606 fprintf (asm_out_file
, "\t.end\n");
4611 c4x_check_attribute (attrib
, list
, decl
, attributes
)
4613 tree list
, decl
, *attributes
;
4615 while (list
!= NULL_TREE
4616 && IDENTIFIER_POINTER (TREE_PURPOSE (list
))
4617 != IDENTIFIER_POINTER (DECL_NAME (decl
)))
4618 list
= TREE_CHAIN (list
);
4620 *attributes
= tree_cons (get_identifier (attrib
), TREE_VALUE (list
),
4626 c4x_insert_attributes (decl
, attributes
)
4627 tree decl
, *attributes
;
4629 switch (TREE_CODE (decl
))
4632 c4x_check_attribute ("section", code_tree
, decl
, attributes
);
4633 c4x_check_attribute ("const", pure_tree
, decl
, attributes
);
4634 c4x_check_attribute ("noreturn", noreturn_tree
, decl
, attributes
);
4635 c4x_check_attribute ("interrupt", interrupt_tree
, decl
, attributes
);
4636 c4x_check_attribute ("naked", naked_tree
, decl
, attributes
);
4640 c4x_check_attribute ("section", data_tree
, decl
, attributes
);
4648 /* Table of valid machine attributes. */
4649 const struct attribute_spec c4x_attribute_table
[] =
4651 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4652 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4653 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4654 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4655 { NULL
, 0, 0, false, false, false, NULL
}
4658 /* Handle an attribute requiring a FUNCTION_TYPE;
4659 arguments as in struct attribute_spec.handler. */
4661 c4x_handle_fntype_attribute (node
, name
, args
, flags
, no_add_attrs
)
4664 tree args ATTRIBUTE_UNUSED
;
4665 int flags ATTRIBUTE_UNUSED
;
4668 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4670 warning ("`%s' attribute only applies to functions",
4671 IDENTIFIER_POINTER (name
));
4672 *no_add_attrs
= true;
4679 /* !!! FIXME to emit RPTS correctly. */
4682 c4x_rptb_rpts_p (insn
, op
)
4685 /* The next insn should be our label marking where the
4686 repeat block starts. */
4687 insn
= NEXT_INSN (insn
);
4688 if (GET_CODE (insn
) != CODE_LABEL
)
4690 /* Some insns may have been shifted between the RPTB insn
4691 and the top label... They were probably destined to
4692 be moved out of the loop. For now, let's leave them
4693 where they are and print a warning. We should
4694 probably move these insns before the repeat block insn. */
4696 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4701 /* Skip any notes. */
4702 insn
= next_nonnote_insn (insn
);
4704 /* This should be our first insn in the loop. */
4705 if (! INSN_P (insn
))
4708 /* Skip any notes. */
4709 insn
= next_nonnote_insn (insn
);
4711 if (! INSN_P (insn
))
4714 if (recog_memoized (insn
) != CODE_FOR_rptb_end
)
4720 return (GET_CODE (op
) == CONST_INT
) && TARGET_RPTS_CYCLES (INTVAL (op
));
4724 /* Check if register r11 is used as the destination of an insn. */
4737 if (INSN_P (x
) && GET_CODE (PATTERN (x
)) == SEQUENCE
)
4738 x
= XVECEXP (PATTERN (x
), 0, XVECLEN (PATTERN (x
), 0) - 1);
4740 if (INSN_P (x
) && (set
= single_set (x
)))
4743 if (GET_CODE (x
) == REG
&& REGNO (x
) == R11_REGNO
)
4746 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4747 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4751 if (c4x_r11_set_p (XEXP (x
, i
)))
4754 else if (fmt
[i
] == 'E')
4755 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4756 if (c4x_r11_set_p (XVECEXP (x
, i
, j
)))
4763 /* The c4x sometimes has a problem when the insn before the laj insn
4764 sets the r11 register. Check for this situation. */
4767 c4x_check_laj_p (insn
)
4770 insn
= prev_nonnote_insn (insn
);
4772 /* If this is the start of the function no nop is needed. */
4776 /* If the previous insn is a code label we have to insert a nop. This
4777 could be a jump or table jump. We can find the normal jumps by
4778 scanning the function but this will not find table jumps. */
4779 if (GET_CODE (insn
) == CODE_LABEL
)
4782 /* If the previous insn sets register r11 we have to insert a nop. */
4783 if (c4x_r11_set_p (insn
))
4786 /* No nop needed. */
4791 /* Adjust the cost of a scheduling dependency. Return the new cost of
4792 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4793 A set of an address register followed by a use occurs a 2 cycle
4794 stall (reduced to a single cycle on the c40 using LDA), while
4795 a read of an address register followed by a use occurs a single cycle. */
4797 #define SET_USE_COST 3
4798 #define SETLDA_USE_COST 2
4799 #define READ_USE_COST 2
4802 c4x_adjust_cost (insn
, link
, dep_insn
, cost
)
4808 /* Don't worry about this until we know what registers have been
4810 if (flag_schedule_insns
== 0 && ! reload_completed
)
4813 /* How do we handle dependencies where a read followed by another
4814 read causes a pipeline stall? For example, a read of ar0 followed
4815 by the use of ar0 for a memory reference. It looks like we
4816 need to extend the scheduler to handle this case. */
4818 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4819 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4820 so only deal with insns we know about. */
4821 if (recog_memoized (dep_insn
) < 0)
4824 if (REG_NOTE_KIND (link
) == 0)
4828 /* Data dependency; DEP_INSN writes a register that INSN reads some
4832 if (get_attr_setgroup1 (dep_insn
) && get_attr_usegroup1 (insn
))
4833 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4834 if (get_attr_readarx (dep_insn
) && get_attr_usegroup1 (insn
))
4835 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4839 /* This could be significantly optimized. We should look
4840 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4841 insn uses ar0-ar7. We then test if the same register
4842 is used. The tricky bit is that some operands will
4843 use several registers... */
4844 if (get_attr_setar0 (dep_insn
) && get_attr_usear0 (insn
))
4845 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4846 if (get_attr_setlda_ar0 (dep_insn
) && get_attr_usear0 (insn
))
4847 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4848 if (get_attr_readar0 (dep_insn
) && get_attr_usear0 (insn
))
4849 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4851 if (get_attr_setar1 (dep_insn
) && get_attr_usear1 (insn
))
4852 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4853 if (get_attr_setlda_ar1 (dep_insn
) && get_attr_usear1 (insn
))
4854 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4855 if (get_attr_readar1 (dep_insn
) && get_attr_usear1 (insn
))
4856 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4858 if (get_attr_setar2 (dep_insn
) && get_attr_usear2 (insn
))
4859 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4860 if (get_attr_setlda_ar2 (dep_insn
) && get_attr_usear2 (insn
))
4861 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4862 if (get_attr_readar2 (dep_insn
) && get_attr_usear2 (insn
))
4863 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4865 if (get_attr_setar3 (dep_insn
) && get_attr_usear3 (insn
))
4866 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4867 if (get_attr_setlda_ar3 (dep_insn
) && get_attr_usear3 (insn
))
4868 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4869 if (get_attr_readar3 (dep_insn
) && get_attr_usear3 (insn
))
4870 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4872 if (get_attr_setar4 (dep_insn
) && get_attr_usear4 (insn
))
4873 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4874 if (get_attr_setlda_ar4 (dep_insn
) && get_attr_usear4 (insn
))
4875 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4876 if (get_attr_readar4 (dep_insn
) && get_attr_usear4 (insn
))
4877 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4879 if (get_attr_setar5 (dep_insn
) && get_attr_usear5 (insn
))
4880 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4881 if (get_attr_setlda_ar5 (dep_insn
) && get_attr_usear5 (insn
))
4882 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4883 if (get_attr_readar5 (dep_insn
) && get_attr_usear5 (insn
))
4884 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4886 if (get_attr_setar6 (dep_insn
) && get_attr_usear6 (insn
))
4887 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4888 if (get_attr_setlda_ar6 (dep_insn
) && get_attr_usear6 (insn
))
4889 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4890 if (get_attr_readar6 (dep_insn
) && get_attr_usear6 (insn
))
4891 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4893 if (get_attr_setar7 (dep_insn
) && get_attr_usear7 (insn
))
4894 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4895 if (get_attr_setlda_ar7 (dep_insn
) && get_attr_usear7 (insn
))
4896 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4897 if (get_attr_readar7 (dep_insn
) && get_attr_usear7 (insn
))
4898 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4900 if (get_attr_setir0 (dep_insn
) && get_attr_useir0 (insn
))
4901 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4902 if (get_attr_setlda_ir0 (dep_insn
) && get_attr_useir0 (insn
))
4903 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4905 if (get_attr_setir1 (dep_insn
) && get_attr_useir1 (insn
))
4906 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4907 if (get_attr_setlda_ir1 (dep_insn
) && get_attr_useir1 (insn
))
4908 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4914 /* For other data dependencies, the default cost specified in the
4918 else if (REG_NOTE_KIND (link
) == REG_DEP_ANTI
)
4920 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4923 /* For c4x anti dependencies, the cost is 0. */
4926 else if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
)
4928 /* Output dependency; DEP_INSN writes a register that INSN writes some
4931 /* For c4x output dependencies, the cost is 0. */
4939 c4x_init_builtins ()
4941 tree endlink
= void_list_node
;
4943 builtin_function ("fast_ftoi",
4946 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4947 C4X_BUILTIN_FIX
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4948 builtin_function ("ansi_ftoi",
4951 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4952 C4X_BUILTIN_FIX_ANSI
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4954 builtin_function ("fast_imult",
4957 tree_cons (NULL_TREE
, integer_type_node
,
4958 tree_cons (NULL_TREE
,
4959 integer_type_node
, endlink
))),
4960 C4X_BUILTIN_MPYI
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4963 builtin_function ("toieee",
4966 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4967 C4X_BUILTIN_TOIEEE
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4968 builtin_function ("frieee",
4971 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4972 C4X_BUILTIN_FRIEEE
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4973 builtin_function ("fast_invf",
4976 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4977 C4X_BUILTIN_RCPF
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4983 c4x_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
4986 rtx subtarget ATTRIBUTE_UNUSED
;
4987 enum machine_mode mode ATTRIBUTE_UNUSED
;
4988 int ignore ATTRIBUTE_UNUSED
;
4990 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4991 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4992 tree arglist
= TREE_OPERAND (exp
, 1);
4998 case C4X_BUILTIN_FIX
:
4999 arg0
= TREE_VALUE (arglist
);
5000 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5001 r0
= protect_from_queue (r0
, 0);
5002 if (! target
|| ! register_operand (target
, QImode
))
5003 target
= gen_reg_rtx (QImode
);
5004 emit_insn (gen_fixqfqi_clobber (target
, r0
));
5007 case C4X_BUILTIN_FIX_ANSI
:
5008 arg0
= TREE_VALUE (arglist
);
5009 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5010 r0
= protect_from_queue (r0
, 0);
5011 if (! target
|| ! register_operand (target
, QImode
))
5012 target
= gen_reg_rtx (QImode
);
5013 emit_insn (gen_fix_truncqfqi2 (target
, r0
));
5016 case C4X_BUILTIN_MPYI
:
5019 arg0
= TREE_VALUE (arglist
);
5020 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5021 r0
= expand_expr (arg0
, NULL_RTX
, QImode
, 0);
5022 r1
= expand_expr (arg1
, NULL_RTX
, QImode
, 0);
5023 r0
= protect_from_queue (r0
, 0);
5024 r1
= protect_from_queue (r1
, 0);
5025 if (! target
|| ! register_operand (target
, QImode
))
5026 target
= gen_reg_rtx (QImode
);
5027 emit_insn (gen_mulqi3_24_clobber (target
, r0
, r1
));
5030 case C4X_BUILTIN_TOIEEE
:
5033 arg0
= TREE_VALUE (arglist
);
5034 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5035 r0
= protect_from_queue (r0
, 0);
5036 if (! target
|| ! register_operand (target
, QFmode
))
5037 target
= gen_reg_rtx (QFmode
);
5038 emit_insn (gen_toieee (target
, r0
));
5041 case C4X_BUILTIN_FRIEEE
:
5044 arg0
= TREE_VALUE (arglist
);
5045 if (TREE_CODE (arg0
) == VAR_DECL
|| TREE_CODE (arg0
) == PARM_DECL
)
5046 put_var_into_stack (arg0
, /*rescan=*/true);
5047 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5048 r0
= protect_from_queue (r0
, 0);
5049 if (register_operand (r0
, QFmode
))
5051 r1
= assign_stack_local (QFmode
, GET_MODE_SIZE (QFmode
), 0);
5052 emit_move_insn (r1
, r0
);
5055 if (! target
|| ! register_operand (target
, QFmode
))
5056 target
= gen_reg_rtx (QFmode
);
5057 emit_insn (gen_frieee (target
, r0
));
5060 case C4X_BUILTIN_RCPF
:
5063 arg0
= TREE_VALUE (arglist
);
5064 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5065 r0
= protect_from_queue (r0
, 0);
5066 if (! target
|| ! register_operand (target
, QFmode
))
5067 target
= gen_reg_rtx (QFmode
);
5068 emit_insn (gen_rcpfqf_clobber (target
, r0
));
5075 c4x_asm_named_section (name
, flags
)
5077 unsigned int flags ATTRIBUTE_UNUSED
;
5079 fprintf (asm_out_file
, "\t.sect\t\"%s\"\n", name
);
/* Target hook for TARGET_ASM_GLOBALIZE_LABEL: emit the generic
   globalization directive for NAME on STREAM, then record the symbol
   via c4x_global_label.  */
static void
c4x_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
/* Predicates on rtx codes used by the cost computation below:
   SHIFT_CODE_P matches any shift, LOGICAL_CODE_P any bitwise logical
   operation.  Arguments are parenthesized against precedence bugs.  */
#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
5096 /* Compute a (partial) cost for rtx X. Return true if the complete
5097 cost has been computed, and false if subexpressions should be
5098 scanned. In either case, *TOTAL contains the cost result. */
5101 c4x_rtx_costs (x
, code
, outer_code
, total
)
5103 int code
, outer_code
;
5110 /* Some small integers are effectively free for the C40. We should
5111 also consider if we are using the small memory model. With
5112 the big memory model we require an extra insn for a constant
5113 loaded from memory. */
5117 if (c4x_J_constant (x
))
5119 else if (! TARGET_C3X
5120 && outer_code
== AND
5121 && (val
== 255 || val
== 65535))
5123 else if (! TARGET_C3X
5124 && (outer_code
== ASHIFTRT
|| outer_code
== LSHIFTRT
)
5125 && (val
== 16 || val
== 24))
5127 else if (TARGET_C3X
&& SHIFT_CODE_P (outer_code
))
5129 else if (LOGICAL_CODE_P (outer_code
)
5130 ? c4x_L_constant (x
) : c4x_I_constant (x
))
5143 if (c4x_H_constant (x
))
5145 else if (GET_MODE (x
) == QFmode
)
5151 /* ??? Note that we return true, rather than false so that rtx_cost
5152 doesn't include the constant costs. Otherwise expand_mult will
5153 think that it is cheaper to synthesize a multiply rather than to
5154 use a multiply instruction. I think this is because the algorithm
5155 synth_mult doesn't take into account the loading of the operands,
5156 whereas the calculation of mult_cost does. */
5165 *total
= COSTS_N_INSNS (1);
5169 *total
= COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
5170 || TARGET_MPYI
? 1 : 14);
5177 *total
= COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT