1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
15 GCC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
28 #include "coretypes.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 #include "conditions.h"
53 #include "target-def.h"
/* RTX handles for out-of-line conversion routines between the C4x
   floating modes (QFmode/HFmode) and HImode integers.  Presumably
   initialized elsewhere in this file (initialization not visible in
   this chunk) -- TODO confirm where they are set.  */
57 rtx fix_truncqfhi2_libfunc
;
58 rtx fixuns_truncqfhi2_libfunc
;
59 rtx fix_trunchfhi2_libfunc
;
60 rtx fixuns_trunchfhi2_libfunc
;
61 rtx floathiqf2_libfunc
;
62 rtx floatunshiqf2_libfunc
;
63 rtx floathihf2_libfunc
;
64 rtx floatunshihf2_libfunc
;
/* Nonzero when the current function was judged a leaf function;
   set in the prologue code from c4x_leaf_function_p () and consulted
   by c4x_isr_reg_used_p.  */
66 static int c4x_leaf_function
;
/* Printable names for the float registers, taken from the target
   macro FLOAT_REGISTER_NAMES.  */
68 static const char *const float_reg_names
[] = FLOAT_REGISTER_NAMES
;
70 /* Array of the smallest class containing reg number REGNO, indexed by
71 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
72 registers are available and set the class to NO_REGS for registers
73 that the target switches say are unavailable. */
75 enum reg_class c4x_regclass_map
[FIRST_PSEUDO_REGISTER
] =
77 /* Reg Modes Saved. */
78 R0R1_REGS
, /* R0 QI, QF, HF No. */
79 R0R1_REGS
, /* R1 QI, QF, HF No. */
80 R2R3_REGS
, /* R2 QI, QF, HF No. */
81 R2R3_REGS
, /* R3 QI, QF, HF No. */
82 EXT_LOW_REGS
, /* R4 QI, QF, HF QI. */
83 EXT_LOW_REGS
, /* R5 QI, QF, HF QI. */
84 EXT_LOW_REGS
, /* R6 QI, QF, HF QF. */
85 EXT_LOW_REGS
, /* R7 QI, QF, HF QF. */
86 ADDR_REGS
, /* AR0 QI No. */
87 ADDR_REGS
, /* AR1 QI No. */
88 ADDR_REGS
, /* AR2 QI No. */
89 ADDR_REGS
, /* AR3 QI QI. */
90 ADDR_REGS
, /* AR4 QI QI. */
91 ADDR_REGS
, /* AR5 QI QI. */
92 ADDR_REGS
, /* AR6 QI QI. */
93 ADDR_REGS
, /* AR7 QI QI. */
94 DP_REG
, /* DP QI No. */
95 INDEX_REGS
, /* IR0 QI No. */
96 INDEX_REGS
, /* IR1 QI No. */
97 BK_REG
, /* BK QI QI. */
98 SP_REG
, /* SP QI No. */
99 ST_REG
, /* ST CC No. */
100 NO_REGS
, /* DIE/IE No. */
101 NO_REGS
, /* IIE/IF No. */
102 NO_REGS
, /* IIF/IOF No. */
103 INT_REGS
, /* RS QI No. */
104 INT_REGS
, /* RE QI No. */
105 RC_REG
, /* RC QI No. */
106 EXT_REGS
, /* R8 QI, QF, HF QI. */
107 EXT_REGS
, /* R9 QI, QF, HF No. */
108 EXT_REGS
, /* R10 QI, QF, HF No. */
109 EXT_REGS
, /* R11 QI, QF, HF No. */
112 enum machine_mode c4x_caller_save_map
[FIRST_PSEUDO_REGISTER
] =
114 /* Reg Modes Saved. */
115 HFmode
, /* R0 QI, QF, HF No. */
116 HFmode
, /* R1 QI, QF, HF No. */
117 HFmode
, /* R2 QI, QF, HF No. */
118 HFmode
, /* R3 QI, QF, HF No. */
119 QFmode
, /* R4 QI, QF, HF QI. */
120 QFmode
, /* R5 QI, QF, HF QI. */
121 QImode
, /* R6 QI, QF, HF QF. */
122 QImode
, /* R7 QI, QF, HF QF. */
123 QImode
, /* AR0 QI No. */
124 QImode
, /* AR1 QI No. */
125 QImode
, /* AR2 QI No. */
126 QImode
, /* AR3 QI QI. */
127 QImode
, /* AR4 QI QI. */
128 QImode
, /* AR5 QI QI. */
129 QImode
, /* AR6 QI QI. */
130 QImode
, /* AR7 QI QI. */
131 VOIDmode
, /* DP QI No. */
132 QImode
, /* IR0 QI No. */
133 QImode
, /* IR1 QI No. */
134 QImode
, /* BK QI QI. */
135 VOIDmode
, /* SP QI No. */
136 VOIDmode
, /* ST CC No. */
137 VOIDmode
, /* DIE/IE No. */
138 VOIDmode
, /* IIE/IF No. */
139 VOIDmode
, /* IIF/IOF No. */
140 QImode
, /* RS QI No. */
141 QImode
, /* RE QI No. */
142 VOIDmode
, /* RC QI No. */
143 QFmode
, /* R8 QI, QF, HF QI. */
144 HFmode
, /* R9 QI, QF, HF No. */
145 HFmode
, /* R10 QI, QF, HF No. */
146 HFmode
, /* R11 QI, QF, HF No. */
150 /* Test and compare insns in c4x.md store the information needed to
151 generate branch and scc insns here. */
/* Command-line option state.  The *_string variables hold the raw
   -mrpts-cycles=/-mcpu= arguments; c4x_override_options parses them
   with atoi into the corresponding int variables.  */
156 const char *c4x_rpts_cycles_string
;
157 int c4x_rpts_cycles
= 0; /* Max. cycles for RPTS. */
158 const char *c4x_cpu_version_string
;
159 int c4x_cpu_version
= 40; /* CPU version C30/31/32/33/40/44. */
161 /* Pragma definitions. */
/* Per-pragma attribute lists.  Each tree is a list consulted by
   c4x_check_attribute / c4x_insert_attributes when applying the
   corresponding #pragma to declarations (exact use not visible in
   this chunk -- verify against the pragma handling code).  */
163 tree code_tree
= NULL_TREE
;
164 tree data_tree
= NULL_TREE
;
165 tree pure_tree
= NULL_TREE
;
166 tree noreturn_tree
= NULL_TREE
;
167 tree interrupt_tree
= NULL_TREE
;
168 tree naked_tree
= NULL_TREE
;
170 /* Forward declarations */
171 static int c4x_isr_reg_used_p
PARAMS ((unsigned int));
172 static int c4x_leaf_function_p
PARAMS ((void));
173 static int c4x_naked_function_p
PARAMS ((void));
174 static int c4x_immed_float_p
PARAMS ((rtx
));
175 static int c4x_a_register
PARAMS ((rtx
));
176 static int c4x_x_register
PARAMS ((rtx
));
177 static int c4x_immed_int_constant
PARAMS ((rtx
));
178 static int c4x_immed_float_constant
PARAMS ((rtx
));
179 static int c4x_K_constant
PARAMS ((rtx
));
180 static int c4x_N_constant
PARAMS ((rtx
));
181 static int c4x_O_constant
PARAMS ((rtx
));
182 static int c4x_R_indirect
PARAMS ((rtx
));
183 static int c4x_S_indirect
PARAMS ((rtx
));
184 static void c4x_S_address_parse
PARAMS ((rtx
, int *, int *, int *, int *));
185 static int c4x_valid_operands
PARAMS ((enum rtx_code
, rtx
*,
186 enum machine_mode
, int));
187 static int c4x_arn_reg_operand
PARAMS ((rtx
, enum machine_mode
, unsigned int));
188 static int c4x_arn_mem_operand
PARAMS ((rtx
, enum machine_mode
, unsigned int));
189 static void c4x_check_attribute
PARAMS ((const char *, tree
, tree
, tree
*));
190 static int c4x_r11_set_p
PARAMS ((rtx
));
191 static int c4x_rptb_valid_p
PARAMS ((rtx
, rtx
));
192 static int c4x_label_ref_used_p
PARAMS ((rtx
, rtx
));
193 static tree c4x_handle_fntype_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
194 const struct attribute_spec c4x_attribute_table
[];
195 static void c4x_insert_attributes
PARAMS ((tree
, tree
*));
196 static void c4x_asm_named_section
PARAMS ((const char *, unsigned int));
197 static int c4x_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
198 static void c4x_encode_section_info
PARAMS ((tree
, int));
199 static void c4x_globalize_label
PARAMS ((FILE *, const char *));
200 static bool c4x_rtx_costs
PARAMS ((rtx
, int, int, int *));
201 static int c4x_address_cost
PARAMS ((rtx
));
203 /* Initialize the GCC target structure. */
/* Initialize the GCC target structure: each TARGET_* macro is
   redefined to a c4x-specific value or callback before targetm is
   instantiated from TARGET_INITIALIZER below.  */
/* The C4x assembler uses .word for byte-sized (32-bit) data and has
   no separate 16/32-bit directives, hence the NULL HI/SI ops.  */
204 #undef TARGET_ASM_BYTE_OP
205 #define TARGET_ASM_BYTE_OP "\t.word\t"
206 #undef TARGET_ASM_ALIGNED_HI_OP
207 #define TARGET_ASM_ALIGNED_HI_OP NULL
208 #undef TARGET_ASM_ALIGNED_SI_OP
209 #define TARGET_ASM_ALIGNED_SI_OP NULL
211 #undef TARGET_ATTRIBUTE_TABLE
212 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
214 #undef TARGET_INSERT_ATTRIBUTES
215 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
217 #undef TARGET_INIT_BUILTINS
218 #define TARGET_INIT_BUILTINS c4x_init_builtins
220 #undef TARGET_EXPAND_BUILTIN
221 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
223 #undef TARGET_SCHED_ADJUST_COST
224 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
226 #undef TARGET_ENCODE_SECTION_INFO
227 #define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info
229 #undef TARGET_ASM_GLOBALIZE_LABEL
230 #define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
232 #undef TARGET_RTX_COSTS
233 #define TARGET_RTX_COSTS c4x_rtx_costs
234 #undef TARGET_ADDRESS_COST
235 #define TARGET_ADDRESS_COST c4x_address_cost
/* The one definition of the target hook vector for this backend.  */
237 struct gcc_target targetm
= TARGET_INITIALIZER
;
239 /* Override command line options.
240 Called once after all options have been parsed.
241 Mostly we process the processor
242 type and sometimes adjust other TARGET_ options. */
245 c4x_override_options ()
247 if (c4x_rpts_cycles_string
)
248 c4x_rpts_cycles
= atoi (c4x_rpts_cycles_string
);
253 c4x_cpu_version
= 30;
255 c4x_cpu_version
= 31;
257 c4x_cpu_version
= 32;
259 c4x_cpu_version
= 33;
261 c4x_cpu_version
= 40;
263 c4x_cpu_version
= 44;
265 c4x_cpu_version
= 40;
267 /* -mcpu=xx overrides -m40 etc. */
268 if (c4x_cpu_version_string
)
270 const char *p
= c4x_cpu_version_string
;
272 /* Also allow -mcpu=c30 etc. */
273 if (*p
== 'c' || *p
== 'C')
275 c4x_cpu_version
= atoi (p
);
278 target_flags
&= ~(C30_FLAG
| C31_FLAG
| C32_FLAG
| C33_FLAG
|
279 C40_FLAG
| C44_FLAG
);
281 switch (c4x_cpu_version
)
283 case 30: target_flags
|= C30_FLAG
; break;
284 case 31: target_flags
|= C31_FLAG
; break;
285 case 32: target_flags
|= C32_FLAG
; break;
286 case 33: target_flags
|= C33_FLAG
; break;
287 case 40: target_flags
|= C40_FLAG
; break;
288 case 44: target_flags
|= C44_FLAG
; break;
290 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version
);
291 c4x_cpu_version
= 40;
292 target_flags
|= C40_FLAG
;
295 if (TARGET_C30
|| TARGET_C31
|| TARGET_C32
|| TARGET_C33
)
296 target_flags
|= C3X_FLAG
;
298 target_flags
&= ~C3X_FLAG
;
300 /* Convert foo / 8.0 into foo * 0.125, etc. */
301 set_fast_math_flags (1);
303 /* We should phase out the following at some stage.
304 This provides compatibility with the old -mno-aliases option. */
305 if (! TARGET_ALIASES
&& ! flag_argument_noalias
)
306 flag_argument_noalias
= 1;
308 /* We're C4X floating point, not IEEE floating point. */
309 memset (real_format_for_mode
, 0, sizeof real_format_for_mode
);
310 real_format_for_mode
[QFmode
- QFmode
] = &c4x_single_format
;
311 real_format_for_mode
[HFmode
- QFmode
] = &c4x_extended_format
;
315 /* This is called before c4x_override_options. */
318 c4x_optimization_options (level
, size
)
319 int level ATTRIBUTE_UNUSED
;
320 int size ATTRIBUTE_UNUSED
;
322 /* Scheduling before register allocation can screw up global
323 register allocation, especially for functions that use MPY||ADD
324 instructions. The benefit we gain we get by scheduling before
325 register allocation is probably marginal anyhow. */
326 flag_schedule_insns
= 0;
330 /* Write an ASCII string. */
332 #define C4X_ASCII_LIMIT 40
335 c4x_output_ascii (stream
, ptr
, len
)
340 char sbuf
[C4X_ASCII_LIMIT
+ 1];
341 int s
, l
, special
, first
= 1, onlys
;
344 fprintf (stream
, "\t.byte\t");
346 for (s
= l
= 0; len
> 0; --len
, ++ptr
)
350 /* Escape " and \ with a \". */
351 special
= *ptr
== '\"' || *ptr
== '\\';
353 /* If printable - add to buff. */
354 if ((! TARGET_TI
|| ! special
) && *ptr
>= 0x20 && *ptr
< 0x7f)
359 if (s
< C4X_ASCII_LIMIT
- 1)
374 fprintf (stream
, "\"%s\"", sbuf
);
376 if (TARGET_TI
&& l
>= 80 && len
> 1)
378 fprintf (stream
, "\n\t.byte\t");
396 fprintf (stream
, "%d", *ptr
);
398 if (TARGET_TI
&& l
>= 80 && len
> 1)
400 fprintf (stream
, "\n\t.byte\t");
411 fprintf (stream
, "\"%s\"", sbuf
);
414 fputc ('\n', stream
);
419 c4x_hard_regno_mode_ok (regno
, mode
)
421 enum machine_mode mode
;
426 case Pmode
: /* Pointer (24/32 bits). */
428 case QImode
: /* Integer (32 bits). */
429 return IS_INT_REGNO (regno
);
431 case QFmode
: /* Float, Double (32 bits). */
432 case HFmode
: /* Long Double (40 bits). */
433 return IS_EXT_REGNO (regno
);
435 case CCmode
: /* Condition Codes. */
436 case CC_NOOVmode
: /* Condition Codes. */
437 return IS_ST_REGNO (regno
);
439 case HImode
: /* Long Long (64 bits). */
440 /* We need two registers to store long longs. Note that
441 it is much easier to constrain the first register
442 to start on an even boundary. */
443 return IS_INT_REGNO (regno
)
444 && IS_INT_REGNO (regno
+ 1)
448 return 0; /* We don't support these modes. */
454 /* Return nonzero if REGNO1 can be renamed to REGNO2. */
456 c4x_hard_regno_rename_ok (regno1
, regno2
)
460 /* We can not copy call saved registers from mode QI into QF or from
462 if (IS_FLOAT_CALL_SAVED_REGNO (regno1
) && IS_INT_CALL_SAVED_REGNO (regno2
))
464 if (IS_INT_CALL_SAVED_REGNO (regno1
) && IS_FLOAT_CALL_SAVED_REGNO (regno2
))
466 /* We cannot copy from an extended (40 bit) register to a standard
467 (32 bit) register because we only set the condition codes for
468 extended registers. */
469 if (IS_EXT_REGNO (regno1
) && ! IS_EXT_REGNO (regno2
))
471 if (IS_EXT_REGNO (regno2
) && ! IS_EXT_REGNO (regno1
))
476 /* The TI C3x C compiler register argument runtime model uses 6 registers,
477 AR2, R2, R3, RC, RS, RE.
479 The first two floating point arguments (float, double, long double)
480 that are found scanning from left to right are assigned to R2 and R3.
482 The remaining integer (char, short, int, long) or pointer arguments
483 are assigned to the remaining registers in the order AR2, R2, R3,
484 RC, RS, RE when scanning left to right, except for the last named
485 argument prior to an ellipsis denoting variable number of
486 arguments. We don't have to worry about the latter condition since
487 function.c treats the last named argument as anonymous (unnamed).
489 All arguments that cannot be passed in registers are pushed onto
490 the stack in reverse order (right to left). GCC handles that for us.
492 c4x_init_cumulative_args() is called at the start, so we can parse
493 the args to see how many floating point arguments and how many
494 integer (or pointer) arguments there are. c4x_function_arg() is
495 then called (sometimes repeatedly) for each argument (parsed left
496 to right) to obtain the register to pass the argument in, or zero
497 if the argument is to be passed on the stack. Once the compiler is
498 happy, c4x_function_arg_advance() is called.
500 Don't use R0 to pass arguments in, we use 0 to indicate a stack
503 static const int c4x_int_reglist
[3][6] =
505 {AR2_REGNO
, R2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
},
506 {AR2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0},
507 {AR2_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0, 0}
/* Registers used for the first two floating-point arguments (R2, R3),
   per the TI C3x calling convention described above.  Indexed by the
   number of float args already assigned.  */
510 static const int c4x_fp_reglist
[2] = {R2_REGNO
, R3_REGNO
};
513 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
514 function whose data type is FNTYPE.
515 For a library call, FNTYPE is 0. */
518 c4x_init_cumulative_args (cum
, fntype
, libname
)
519 CUMULATIVE_ARGS
*cum
; /* Argument info to initialize. */
520 tree fntype
; /* Tree ptr for function decl. */
521 rtx libname
; /* SYMBOL_REF of library name or 0. */
523 tree param
, next_param
;
525 cum
->floats
= cum
->ints
= 0;
532 fprintf (stderr
, "\nc4x_init_cumulative_args (");
535 tree ret_type
= TREE_TYPE (fntype
);
537 fprintf (stderr
, "fntype code = %s, ret code = %s",
538 tree_code_name
[(int) TREE_CODE (fntype
)],
539 tree_code_name
[(int) TREE_CODE (ret_type
)]);
542 fprintf (stderr
, "no fntype");
545 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
548 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
550 for (param
= fntype
? TYPE_ARG_TYPES (fntype
) : 0;
551 param
; param
= next_param
)
555 next_param
= TREE_CHAIN (param
);
557 type
= TREE_VALUE (param
);
558 if (type
&& type
!= void_type_node
)
560 enum machine_mode mode
;
562 /* If the last arg doesn't have void type then we have
563 variable arguments. */
567 if ((mode
= TYPE_MODE (type
)))
569 if (! MUST_PASS_IN_STACK (mode
, type
))
571 /* Look for float, double, or long double argument. */
572 if (mode
== QFmode
|| mode
== HFmode
)
574 /* Look for integer, enumeral, boolean, char, or pointer
576 else if (mode
== QImode
|| mode
== Pmode
)
585 fprintf (stderr
, "%s%s, args = %d)\n",
586 cum
->prototype
? ", prototype" : "",
587 cum
->var
? ", variable args" : "",
592 /* Update the data in CUM to advance over an argument
593 of mode MODE and data type TYPE.
594 (TYPE is null for libcalls where that information may not be available.) */
597 c4x_function_arg_advance (cum
, mode
, type
, named
)
598 CUMULATIVE_ARGS
*cum
; /* Current arg information. */
599 enum machine_mode mode
; /* Current arg mode. */
600 tree type
; /* Type of the arg or 0 if lib support. */
601 int named
; /* Whether or not the argument was named. */
604 fprintf (stderr
, "c4x_function_adv(mode=%s, named=%d)\n\n",
605 GET_MODE_NAME (mode
), named
);
609 && ! MUST_PASS_IN_STACK (mode
, type
))
611 /* Look for float, double, or long double argument. */
612 if (mode
== QFmode
|| mode
== HFmode
)
614 /* Look for integer, enumeral, boolean, char, or pointer argument. */
615 else if (mode
== QImode
|| mode
== Pmode
)
618 else if (! TARGET_MEMPARM
&& ! type
)
620 /* Handle libcall arguments. */
621 if (mode
== QFmode
|| mode
== HFmode
)
623 else if (mode
== QImode
|| mode
== Pmode
)
630 /* Define where to put the arguments to a function. Value is zero to
631 push the argument on the stack, or a hard register in which to
634 MODE is the argument's machine mode.
635 TYPE is the data type of the argument (as a tree).
636 This is null for libcalls where that information may
638 CUM is a variable of type CUMULATIVE_ARGS which gives info about
639 the preceding args and about the function being called.
640 NAMED is nonzero if this argument is a named parameter
641 (otherwise it is an extra parameter matching an ellipsis). */
644 c4x_function_arg (cum
, mode
, type
, named
)
645 CUMULATIVE_ARGS
*cum
; /* Current arg information. */
646 enum machine_mode mode
; /* Current arg mode. */
647 tree type
; /* Type of the arg or 0 if lib support. */
648 int named
; /* != 0 for normal args, == 0 for ... args. */
650 int reg
= 0; /* Default to passing argument on stack. */
654 /* We can handle at most 2 floats in R2, R3. */
655 cum
->maxfloats
= (cum
->floats
> 2) ? 2 : cum
->floats
;
657 /* We can handle at most 6 integers minus number of floats passed
659 cum
->maxints
= (cum
->ints
> 6 - cum
->maxfloats
) ?
660 6 - cum
->maxfloats
: cum
->ints
;
662 /* If there is no prototype, assume all the arguments are integers. */
663 if (! cum
->prototype
)
666 cum
->ints
= cum
->floats
= 0;
670 /* This marks the last argument. We don't need to pass this through
672 if (type
== void_type_node
)
678 && ! MUST_PASS_IN_STACK (mode
, type
))
680 /* Look for float, double, or long double argument. */
681 if (mode
== QFmode
|| mode
== HFmode
)
683 if (cum
->floats
< cum
->maxfloats
)
684 reg
= c4x_fp_reglist
[cum
->floats
];
686 /* Look for integer, enumeral, boolean, char, or pointer argument. */
687 else if (mode
== QImode
|| mode
== Pmode
)
689 if (cum
->ints
< cum
->maxints
)
690 reg
= c4x_int_reglist
[cum
->maxfloats
][cum
->ints
];
693 else if (! TARGET_MEMPARM
&& ! type
)
695 /* We could use a different argument calling model for libcalls,
696 since we're only calling functions in libgcc. Thus we could
697 pass arguments for long longs in registers rather than on the
698 stack. In the meantime, use the odd TI format. We make the
699 assumption that we won't have more than two floating point
700 args, six integer args, and that all the arguments are of the
702 if (mode
== QFmode
|| mode
== HFmode
)
703 reg
= c4x_fp_reglist
[cum
->floats
];
704 else if (mode
== QImode
|| mode
== Pmode
)
705 reg
= c4x_int_reglist
[0][cum
->ints
];
710 fprintf (stderr
, "c4x_function_arg(mode=%s, named=%d",
711 GET_MODE_NAME (mode
), named
);
713 fprintf (stderr
, ", reg=%s", reg_names
[reg
]);
715 fprintf (stderr
, ", stack");
716 fprintf (stderr
, ")\n");
719 return gen_rtx_REG (mode
, reg
);
724 /* C[34]x arguments grow in weird ways (downwards) that the standard
725 varargs stuff can't handle. */
727 c4x_va_arg (valist
, type
)
732 t
= build (PREDECREMENT_EXPR
, TREE_TYPE (valist
), valist
,
733 build_int_2 (int_size_in_bytes (type
), 0));
734 TREE_SIDE_EFFECTS (t
) = 1;
736 return expand_expr (t
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
741 c4x_isr_reg_used_p (regno
)
744 /* Don't save/restore FP or ST, we handle them separately. */
745 if (regno
== FRAME_POINTER_REGNUM
746 || IS_ST_REGNO (regno
))
749 /* We could be a little smarter about saving/restoring DP.
750 We'll only save if for the big memory model or if
751 we're paranoid. ;-) */
752 if (IS_DP_REGNO (regno
))
753 return ! TARGET_SMALL
|| TARGET_PARANOID
;
755 /* Only save/restore regs in leaf function that are used. */
756 if (c4x_leaf_function
)
757 return regs_ever_live
[regno
] && fixed_regs
[regno
] == 0;
759 /* Only save/restore regs that are used by the ISR and regs
760 that are likely to be used by functions the ISR calls
761 if they are not fixed. */
762 return IS_EXT_REGNO (regno
)
763 || ((regs_ever_live
[regno
] || call_used_regs
[regno
])
764 && fixed_regs
[regno
] == 0);
769 c4x_leaf_function_p ()
771 /* A leaf function makes no calls, so we only need
772 to save/restore the registers we actually use.
773 For the global variable leaf_function to be set, we need
774 to define LEAF_REGISTERS and all that it entails.
775 Let's check ourselves... */
777 if (lookup_attribute ("leaf_pretend",
778 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
781 /* Use the leaf_pretend attribute at your own risk. This is a hack
782 to speed up ISRs that call a function infrequently where the
783 overhead of saving and restoring the additional registers is not
784 warranted. You must save and restore the additional registers
785 required by the called function. Caveat emptor. Here's enough
788 if (leaf_function_p ())
796 c4x_naked_function_p ()
800 type
= TREE_TYPE (current_function_decl
);
801 return lookup_attribute ("naked", TYPE_ATTRIBUTES (type
)) != NULL
;
806 c4x_interrupt_function_p ()
808 if (lookup_attribute ("interrupt",
809 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
812 /* Look for TI style c_intnn. */
813 return current_function_name
[0] == 'c'
814 && current_function_name
[1] == '_'
815 && current_function_name
[2] == 'i'
816 && current_function_name
[3] == 'n'
817 && current_function_name
[4] == 't'
818 && ISDIGIT (current_function_name
[5])
819 && ISDIGIT (current_function_name
[6]);
823 c4x_expand_prologue ()
826 int size
= get_frame_size ();
829 /* In functions where ar3 is not used but frame pointers are still
830 specified, frame pointers are not adjusted (if >= -O2) and this
831 is used so it won't needlessly push the frame pointer. */
834 /* For __naked__ function don't build a prologue. */
835 if (c4x_naked_function_p ())
840 /* For __interrupt__ function build specific prologue. */
841 if (c4x_interrupt_function_p ())
843 c4x_leaf_function
= c4x_leaf_function_p ();
845 insn
= emit_insn (gen_push_st ());
846 RTX_FRAME_RELATED_P (insn
) = 1;
849 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
850 RTX_FRAME_RELATED_P (insn
) = 1;
851 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
852 gen_rtx_REG (QImode
, SP_REGNO
)));
853 RTX_FRAME_RELATED_P (insn
) = 1;
854 /* We require that an ISR uses fewer than 32768 words of
855 local variables, otherwise we have to go to lots of
856 effort to save a register, load it with the desired size,
857 adjust the stack pointer, and then restore the modified
858 register. Frankly, I think it is a poor ISR that
859 requires more than 32767 words of local temporary
862 error ("ISR %s requires %d words of local vars, max is 32767",
863 current_function_name
, size
);
865 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
866 gen_rtx_REG (QImode
, SP_REGNO
),
868 RTX_FRAME_RELATED_P (insn
) = 1;
870 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
872 if (c4x_isr_reg_used_p (regno
))
874 if (regno
== DP_REGNO
)
876 insn
= emit_insn (gen_push_dp ());
877 RTX_FRAME_RELATED_P (insn
) = 1;
881 insn
= emit_insn (gen_pushqi (gen_rtx_REG (QImode
, regno
)));
882 RTX_FRAME_RELATED_P (insn
) = 1;
883 if (IS_EXT_REGNO (regno
))
885 insn
= emit_insn (gen_pushqf
886 (gen_rtx_REG (QFmode
, regno
)));
887 RTX_FRAME_RELATED_P (insn
) = 1;
892 /* We need to clear the repeat mode flag if the ISR is
893 going to use a RPTB instruction or uses the RC, RS, or RE
895 if (regs_ever_live
[RC_REGNO
]
896 || regs_ever_live
[RS_REGNO
]
897 || regs_ever_live
[RE_REGNO
])
899 insn
= emit_insn (gen_andn_st (GEN_INT(~0x100)));
900 RTX_FRAME_RELATED_P (insn
) = 1;
903 /* Reload DP reg if we are paranoid about some turkey
904 violating small memory model rules. */
905 if (TARGET_SMALL
&& TARGET_PARANOID
)
907 insn
= emit_insn (gen_set_ldp_prologue
908 (gen_rtx_REG (QImode
, DP_REGNO
),
909 gen_rtx_SYMBOL_REF (QImode
, "data_sec")));
910 RTX_FRAME_RELATED_P (insn
) = 1;
915 if (frame_pointer_needed
)
918 || (current_function_args_size
!= 0)
921 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
922 RTX_FRAME_RELATED_P (insn
) = 1;
923 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
924 gen_rtx_REG (QImode
, SP_REGNO
)));
925 RTX_FRAME_RELATED_P (insn
) = 1;
930 /* Since ar3 is not used, we don't need to push it. */
936 /* If we use ar3, we need to push it. */
938 if ((size
!= 0) || (current_function_args_size
!= 0))
940 /* If we are omitting the frame pointer, we still have
941 to make space for it so the offsets are correct
942 unless we don't use anything on the stack at all. */
949 /* Local vars are too big, it will take multiple operations
953 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
954 GEN_INT(size
>> 16)));
955 RTX_FRAME_RELATED_P (insn
) = 1;
956 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
957 gen_rtx_REG (QImode
, R1_REGNO
),
959 RTX_FRAME_RELATED_P (insn
) = 1;
963 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
964 GEN_INT(size
& ~0xffff)));
965 RTX_FRAME_RELATED_P (insn
) = 1;
967 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
968 gen_rtx_REG (QImode
, R1_REGNO
),
969 GEN_INT(size
& 0xffff)));
970 RTX_FRAME_RELATED_P (insn
) = 1;
971 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
972 gen_rtx_REG (QImode
, SP_REGNO
),
973 gen_rtx_REG (QImode
, R1_REGNO
)));
974 RTX_FRAME_RELATED_P (insn
) = 1;
978 /* Local vars take up less than 32767 words, so we can directly
980 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
981 gen_rtx_REG (QImode
, SP_REGNO
),
983 RTX_FRAME_RELATED_P (insn
) = 1;
986 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
988 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
990 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
992 if (TARGET_PRESERVE_FLOAT
)
994 insn
= emit_insn (gen_pushqi
995 (gen_rtx_REG (QImode
, regno
)));
996 RTX_FRAME_RELATED_P (insn
) = 1;
998 insn
= emit_insn (gen_pushqf (gen_rtx_REG (QFmode
, regno
)));
999 RTX_FRAME_RELATED_P (insn
) = 1;
1001 else if ((! dont_push_ar3
) || (regno
!= AR3_REGNO
))
1003 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, regno
)));
1004 RTX_FRAME_RELATED_P (insn
) = 1;
1013 c4x_expand_epilogue()
1019 int size
= get_frame_size ();
1021 /* For __naked__ function build no epilogue. */
1022 if (c4x_naked_function_p ())
1024 insn
= emit_jump_insn (gen_return_from_epilogue ());
1025 RTX_FRAME_RELATED_P (insn
) = 1;
1029 /* For __interrupt__ function build specific epilogue. */
1030 if (c4x_interrupt_function_p ())
1032 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; --regno
)
1034 if (! c4x_isr_reg_used_p (regno
))
1036 if (regno
== DP_REGNO
)
1038 insn
= emit_insn (gen_pop_dp ());
1039 RTX_FRAME_RELATED_P (insn
) = 1;
1043 /* We have to use unspec because the compiler will delete insns
1044 that are not call-saved. */
1045 if (IS_EXT_REGNO (regno
))
1047 insn
= emit_insn (gen_popqf_unspec
1048 (gen_rtx_REG (QFmode
, regno
)));
1049 RTX_FRAME_RELATED_P (insn
) = 1;
1051 insn
= emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode
, regno
)));
1052 RTX_FRAME_RELATED_P (insn
) = 1;
1057 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1058 gen_rtx_REG (QImode
, SP_REGNO
),
1060 RTX_FRAME_RELATED_P (insn
) = 1;
1061 insn
= emit_insn (gen_popqi
1062 (gen_rtx_REG (QImode
, AR3_REGNO
)));
1063 RTX_FRAME_RELATED_P (insn
) = 1;
1065 insn
= emit_insn (gen_pop_st ());
1066 RTX_FRAME_RELATED_P (insn
) = 1;
1067 insn
= emit_jump_insn (gen_return_from_interrupt_epilogue ());
1068 RTX_FRAME_RELATED_P (insn
) = 1;
1072 if (frame_pointer_needed
)
1075 || (current_function_args_size
!= 0)
1079 (gen_movqi (gen_rtx_REG (QImode
, R2_REGNO
),
1080 gen_rtx_MEM (QImode
,
1082 (QImode
, gen_rtx_REG (QImode
,
1085 RTX_FRAME_RELATED_P (insn
) = 1;
1087 /* We already have the return value and the fp,
1088 so we need to add those to the stack. */
1095 /* Since ar3 is not used for anything, we don't need to
1102 dont_pop_ar3
= 0; /* If we use ar3, we need to pop it. */
1103 if (size
|| current_function_args_size
)
1105 /* If we are omitting the frame pointer, we still have
1106 to make space for it so the offsets are correct
1107 unless we don't use anything on the stack at all. */
1112 /* Now restore the saved registers, putting in the delayed branch
1114 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1116 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1118 if (regno
== AR3_REGNO
&& dont_pop_ar3
)
1121 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
1123 insn
= emit_insn (gen_popqf_unspec
1124 (gen_rtx_REG (QFmode
, regno
)));
1125 RTX_FRAME_RELATED_P (insn
) = 1;
1126 if (TARGET_PRESERVE_FLOAT
)
1128 insn
= emit_insn (gen_popqi_unspec
1129 (gen_rtx_REG (QImode
, regno
)));
1130 RTX_FRAME_RELATED_P (insn
) = 1;
1135 insn
= emit_insn (gen_popqi (gen_rtx_REG (QImode
, regno
)));
1136 RTX_FRAME_RELATED_P (insn
) = 1;
1141 if (frame_pointer_needed
)
1144 || (current_function_args_size
!= 0)
1147 /* Restore the old FP. */
1150 (gen_rtx_REG (QImode
, AR3_REGNO
),
1151 gen_rtx_MEM (QImode
, gen_rtx_REG (QImode
, AR3_REGNO
))));
1153 RTX_FRAME_RELATED_P (insn
) = 1;
1159 /* Local vars are too big, it will take multiple operations
1163 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1164 GEN_INT(size
>> 16)));
1165 RTX_FRAME_RELATED_P (insn
) = 1;
1166 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1167 gen_rtx_REG (QImode
, R3_REGNO
),
1169 RTX_FRAME_RELATED_P (insn
) = 1;
1173 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1174 GEN_INT(size
& ~0xffff)));
1175 RTX_FRAME_RELATED_P (insn
) = 1;
1177 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1178 gen_rtx_REG (QImode
, R3_REGNO
),
1179 GEN_INT(size
& 0xffff)));
1180 RTX_FRAME_RELATED_P (insn
) = 1;
1181 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1182 gen_rtx_REG (QImode
, SP_REGNO
),
1183 gen_rtx_REG (QImode
, R3_REGNO
)));
1184 RTX_FRAME_RELATED_P (insn
) = 1;
1188 /* Local vars take up less than 32768 words, so we can directly
1189 subtract the number. */
1190 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1191 gen_rtx_REG (QImode
, SP_REGNO
),
1193 RTX_FRAME_RELATED_P (insn
) = 1;
1198 insn
= emit_jump_insn (gen_return_indirect_internal
1199 (gen_rtx_REG (QImode
, R2_REGNO
)));
1200 RTX_FRAME_RELATED_P (insn
) = 1;
1204 insn
= emit_jump_insn (gen_return_from_epilogue ());
1205 RTX_FRAME_RELATED_P (insn
) = 1;
1212 c4x_null_epilogue_p ()
1216 if (reload_completed
1217 && ! c4x_naked_function_p ()
1218 && ! c4x_interrupt_function_p ()
1219 && ! current_function_calls_alloca
1220 && ! current_function_args_size
1222 && ! get_frame_size ())
1224 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1225 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
]
1226 && (regno
!= AR3_REGNO
))
1235 c4x_emit_move_sequence (operands
, mode
)
1237 enum machine_mode mode
;
1239 rtx op0
= operands
[0];
1240 rtx op1
= operands
[1];
1242 if (! reload_in_progress
1245 && ! (stik_const_operand (op1
, mode
) && ! push_operand (op0
, mode
)))
1246 op1
= force_reg (mode
, op1
);
1248 if (GET_CODE (op1
) == LO_SUM
1249 && GET_MODE (op1
) == Pmode
1250 && dp_reg_operand (XEXP (op1
, 0), mode
))
1252 /* expand_increment will sometimes create a LO_SUM immediate
1254 op1
= XEXP (op1
, 1);
1256 else if (symbolic_address_operand (op1
, mode
))
1258 if (TARGET_LOAD_ADDRESS
)
1260 /* Alias analysis seems to do a better job if we force
1261 constant addresses to memory after reload. */
1262 emit_insn (gen_load_immed_address (op0
, op1
));
1267 /* Stick symbol or label address into the constant pool. */
1268 op1
= force_const_mem (Pmode
, op1
);
1271 else if (mode
== HFmode
&& CONSTANT_P (op1
) && ! LEGITIMATE_CONSTANT_P (op1
))
1273 /* We could be a lot smarter about loading some of these
1275 op1
= force_const_mem (mode
, op1
);
1278 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1279 and emit associated (HIGH (SYMREF)) if large memory model.
1280 c4x_legitimize_address could be used to do this,
1281 perhaps by calling validize_address. */
1282 if (TARGET_EXPOSE_LDP
1283 && ! (reload_in_progress
|| reload_completed
)
1284 && GET_CODE (op1
) == MEM
1285 && symbolic_address_operand (XEXP (op1
, 0), Pmode
))
1287 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1289 emit_insn (gen_set_ldp (dp_reg
, XEXP (op1
, 0)));
1290 op1
= change_address (op1
, mode
,
1291 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op1
, 0)));
1294 if (TARGET_EXPOSE_LDP
1295 && ! (reload_in_progress
|| reload_completed
)
1296 && GET_CODE (op0
) == MEM
1297 && symbolic_address_operand (XEXP (op0
, 0), Pmode
))
1299 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1301 emit_insn (gen_set_ldp (dp_reg
, XEXP (op0
, 0)));
1302 op0
= change_address (op0
, mode
,
1303 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op0
, 0)));
1306 if (GET_CODE (op0
) == SUBREG
1307 && mixed_subreg_operand (op0
, mode
))
1309 /* We should only generate these mixed mode patterns
1310 during RTL generation. If we need do it later on
1311 then we'll have to emit patterns that won't clobber CC. */
1312 if (reload_in_progress
|| reload_completed
)
1314 if (GET_MODE (SUBREG_REG (op0
)) == QImode
)
1315 op0
= SUBREG_REG (op0
);
1316 else if (GET_MODE (SUBREG_REG (op0
)) == HImode
)
1318 op0
= copy_rtx (op0
);
1319 PUT_MODE (op0
, QImode
);
1325 emit_insn (gen_storeqf_int_clobber (op0
, op1
));
1331 if (GET_CODE (op1
) == SUBREG
1332 && mixed_subreg_operand (op1
, mode
))
1334 /* We should only generate these mixed mode patterns
1335 during RTL generation. If we need do it later on
1336 then we'll have to emit patterns that won't clobber CC. */
1337 if (reload_in_progress
|| reload_completed
)
1339 if (GET_MODE (SUBREG_REG (op1
)) == QImode
)
1340 op1
= SUBREG_REG (op1
);
1341 else if (GET_MODE (SUBREG_REG (op1
)) == HImode
)
1343 op1
= copy_rtx (op1
);
1344 PUT_MODE (op1
, QImode
);
1350 emit_insn (gen_loadqf_int_clobber (op0
, op1
));
1357 && reg_operand (op0
, mode
)
1358 && const_int_operand (op1
, mode
)
1359 && ! IS_INT16_CONST (INTVAL (op1
))
1360 && ! IS_HIGH_CONST (INTVAL (op1
)))
1362 emit_insn (gen_loadqi_big_constant (op0
, op1
));
1367 && reg_operand (op0
, mode
)
1368 && const_int_operand (op1
, mode
))
1370 emit_insn (gen_loadhi_big_constant (op0
, op1
));
1374 /* Adjust operands in case we have modified them. */
1378 /* Emit normal pattern. */
1384 c4x_emit_libcall (libcall
, code
, dmode
, smode
, noperands
, operands
)
1387 enum machine_mode dmode
;
1388 enum machine_mode smode
;
1400 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 1,
1401 operands
[1], smode
);
1402 equiv
= gen_rtx (code
, dmode
, operands
[1]);
1406 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 2,
1407 operands
[1], smode
, operands
[2], smode
);
1408 equiv
= gen_rtx (code
, dmode
, operands
[1], operands
[2]);
1415 insns
= get_insns ();
1417 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1422 c4x_emit_libcall3 (libcall
, code
, mode
, operands
)
1425 enum machine_mode mode
;
1428 c4x_emit_libcall (libcall
, code
, mode
, mode
, 3, operands
);
1433 c4x_emit_libcall_mulhi (libcall
, code
, mode
, operands
)
1436 enum machine_mode mode
;
1444 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, mode
, 2,
1445 operands
[1], mode
, operands
[2], mode
);
1446 equiv
= gen_rtx_TRUNCATE (mode
,
1447 gen_rtx_LSHIFTRT (HImode
,
1448 gen_rtx_MULT (HImode
,
1449 gen_rtx (code
, HImode
, operands
[1]),
1450 gen_rtx (code
, HImode
, operands
[2])),
1452 insns
= get_insns ();
1454 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1458 /* Set the SYMBOL_REF_FLAG for a function decl. However, wo do not
1459 yet use this info. */
1462 c4x_encode_section_info (decl
, first
)
1464 int first ATTRIBUTE_UNUSED
;
1466 if (TREE_CODE (decl
) == FUNCTION_DECL
)
1467 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
1472 c4x_check_legit_addr (mode
, addr
, strict
)
1473 enum machine_mode mode
;
1477 rtx base
= NULL_RTX
; /* Base register (AR0-AR7). */
1478 rtx indx
= NULL_RTX
; /* Index register (IR0,IR1). */
1479 rtx disp
= NULL_RTX
; /* Displacement. */
1482 code
= GET_CODE (addr
);
1485 /* Register indirect with auto increment/decrement. We don't
1486 allow SP here---push_operand should recognize an operand
1487 being pushed on the stack. */
1492 if (mode
!= QImode
&& mode
!= QFmode
)
1496 base
= XEXP (addr
, 0);
1504 rtx op0
= XEXP (addr
, 0);
1505 rtx op1
= XEXP (addr
, 1);
1507 if (mode
!= QImode
&& mode
!= QFmode
)
1511 || (GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
))
1513 base
= XEXP (op1
, 0);
1516 if (REG_P (XEXP (op1
, 1)))
1517 indx
= XEXP (op1
, 1);
1519 disp
= XEXP (op1
, 1);
1523 /* Register indirect. */
1528 /* Register indirect with displacement or index. */
1531 rtx op0
= XEXP (addr
, 0);
1532 rtx op1
= XEXP (addr
, 1);
1533 enum rtx_code code0
= GET_CODE (op0
);
1540 base
= op0
; /* Base + index. */
1542 if (IS_INDEX_REG (base
) || IS_ADDR_REG (indx
))
1550 base
= op0
; /* Base + displacement. */
1561 /* Direct addressing with DP register. */
1564 rtx op0
= XEXP (addr
, 0);
1565 rtx op1
= XEXP (addr
, 1);
1567 /* HImode and HFmode direct memory references aren't truly
1568 offsettable (consider case at end of data page). We
1569 probably get better code by loading a pointer and using an
1570 indirect memory reference. */
1571 if (mode
== HImode
|| mode
== HFmode
)
1574 if (!REG_P (op0
) || REGNO (op0
) != DP_REGNO
)
1577 if ((GET_CODE (op1
) == SYMBOL_REF
|| GET_CODE (op1
) == LABEL_REF
))
1580 if (GET_CODE (op1
) == CONST
)
1586 /* Direct addressing with some work for the assembler... */
1588 /* Direct addressing. */
1591 if (! TARGET_EXPOSE_LDP
&& ! strict
&& mode
!= HFmode
&& mode
!= HImode
)
1593 /* These need to be converted to a LO_SUM (...).
1594 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1597 /* Do not allow direct memory access to absolute addresses.
1598 This is more pain than it's worth, especially for the
1599 small memory model where we can't guarantee that
1600 this address is within the data page---we don't want
1601 to modify the DP register in the small memory model,
1602 even temporarily, since an interrupt can sneak in.... */
1606 /* Indirect indirect addressing. */
1611 fatal_insn ("using CONST_DOUBLE for address", addr
);
1617 /* Validate the base register. */
1620 /* Check that the address is offsettable for HImode and HFmode. */
1621 if (indx
&& (mode
== HImode
|| mode
== HFmode
))
1624 /* Handle DP based stuff. */
1625 if (REGNO (base
) == DP_REGNO
)
1627 if (strict
&& ! REGNO_OK_FOR_BASE_P (REGNO (base
)))
1629 else if (! strict
&& ! IS_ADDR_OR_PSEUDO_REG (base
))
1633 /* Now validate the index register. */
1636 if (GET_CODE (indx
) != REG
)
1638 if (strict
&& ! REGNO_OK_FOR_INDEX_P (REGNO (indx
)))
1640 else if (! strict
&& ! IS_INDEX_OR_PSEUDO_REG (indx
))
1644 /* Validate displacement. */
1647 if (GET_CODE (disp
) != CONST_INT
)
1649 if (mode
== HImode
|| mode
== HFmode
)
1651 /* The offset displacement must be legitimate. */
1652 if (! IS_DISP8_OFF_CONST (INTVAL (disp
)))
1657 if (! IS_DISP8_CONST (INTVAL (disp
)))
1660 /* Can't add an index with a disp. */
1669 c4x_legitimize_address (orig
, mode
)
1670 rtx orig ATTRIBUTE_UNUSED
;
1671 enum machine_mode mode ATTRIBUTE_UNUSED
;
1673 if (GET_CODE (orig
) == SYMBOL_REF
1674 || GET_CODE (orig
) == LABEL_REF
)
1676 if (mode
== HImode
|| mode
== HFmode
)
1678 /* We need to force the address into
1679 a register so that it is offsettable. */
1680 rtx addr_reg
= gen_reg_rtx (Pmode
);
1681 emit_move_insn (addr_reg
, orig
);
1686 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1689 emit_insn (gen_set_ldp (dp_reg
, orig
));
1691 return gen_rtx_LO_SUM (Pmode
, dp_reg
, orig
);
1699 /* Provide the costs of an addressing mode that contains ADDR.
1700 If ADDR is not a valid address, its cost is irrelevant.
1701 This is used in cse and loop optimisation to determine
1702 if it is worthwhile storing a common address into a register.
1703 Unfortunately, the C4x address cost depends on other operands. */
1706 c4x_address_cost (addr
)
1709 switch (GET_CODE (addr
))
1720 /* These shouldn't be directly generated. */
1728 rtx op1
= XEXP (addr
, 1);
1730 if (GET_CODE (op1
) == LABEL_REF
|| GET_CODE (op1
) == SYMBOL_REF
)
1731 return TARGET_SMALL
? 3 : 4;
1733 if (GET_CODE (op1
) == CONST
)
1735 rtx offset
= const0_rtx
;
1737 op1
= eliminate_constant_term (op1
, &offset
);
1739 /* ??? These costs need rethinking... */
1740 if (GET_CODE (op1
) == LABEL_REF
)
1743 if (GET_CODE (op1
) != SYMBOL_REF
)
1746 if (INTVAL (offset
) == 0)
1751 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr
);
1757 register rtx op0
= XEXP (addr
, 0);
1758 register rtx op1
= XEXP (addr
, 1);
1760 if (GET_CODE (op0
) != REG
)
1763 switch (GET_CODE (op1
))
1769 /* This cost for REG+REG must be greater than the cost
1770 for REG if we want autoincrement addressing modes. */
1774 /* The following tries to improve GIV combination
1775 in strength reduce but appears not to help. */
1776 if (TARGET_DEVEL
&& IS_UINT5_CONST (INTVAL (op1
)))
1779 if (IS_DISP1_CONST (INTVAL (op1
)))
1782 if (! TARGET_C3X
&& IS_UINT5_CONST (INTVAL (op1
)))
1797 c4x_gen_compare_reg (code
, x
, y
)
1801 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
1804 if (mode
== CC_NOOVmode
1805 && (code
== LE
|| code
== GE
|| code
== LT
|| code
== GT
))
1808 cc_reg
= gen_rtx_REG (mode
, ST_REGNO
);
1809 emit_insn (gen_rtx_SET (VOIDmode
, cc_reg
,
1810 gen_rtx_COMPARE (mode
, x
, y
)));
1815 c4x_output_cbranch (form
, seq
)
1824 static char str
[100];
1828 delay
= XVECEXP (final_sequence
, 0, 1);
1829 delayed
= ! INSN_ANNULLED_BRANCH_P (seq
);
1830 annultrue
= INSN_ANNULLED_BRANCH_P (seq
) && ! INSN_FROM_TARGET_P (delay
);
1831 annulfalse
= INSN_ANNULLED_BRANCH_P (seq
) && INSN_FROM_TARGET_P (delay
);
1834 cp
= &str
[strlen (str
)];
1859 c4x_print_operand (file
, op
, letter
)
1860 FILE *file
; /* File to write to. */
1861 rtx op
; /* Operand to print. */
1862 int letter
; /* %<letter> or 0. */
1869 case '#': /* Delayed. */
1871 fprintf (file
, "d");
1875 code
= GET_CODE (op
);
1878 case 'A': /* Direct address. */
1879 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== CONST
)
1880 fprintf (file
, "@");
1883 case 'H': /* Sethi. */
1884 output_addr_const (file
, op
);
1887 case 'I': /* Reversed condition. */
1888 code
= reverse_condition (code
);
1891 case 'L': /* Log 2 of constant. */
1892 if (code
!= CONST_INT
)
1893 fatal_insn ("c4x_print_operand: %%L inconsistency", op
);
1894 fprintf (file
, "%d", exact_log2 (INTVAL (op
)));
1897 case 'N': /* Ones complement of small constant. */
1898 if (code
!= CONST_INT
)
1899 fatal_insn ("c4x_print_operand: %%N inconsistency", op
);
1900 fprintf (file
, "%d", ~INTVAL (op
));
1903 case 'K': /* Generate ldp(k) if direct address. */
1906 && GET_CODE (XEXP (op
, 0)) == LO_SUM
1907 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == REG
1908 && REGNO (XEXP (XEXP (op
, 0), 0)) == DP_REGNO
)
1910 op1
= XEXP (XEXP (op
, 0), 1);
1911 if (GET_CODE(op1
) == CONST_INT
|| GET_CODE(op1
) == SYMBOL_REF
)
1913 fprintf (file
, "\t%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1914 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1915 fprintf (file
, "\n");
1920 case 'M': /* Generate ldp(k) if direct address. */
1921 if (! TARGET_SMALL
/* Only used in asm statements. */
1923 && (GET_CODE (XEXP (op
, 0)) == CONST
1924 || GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
))
1926 fprintf (file
, "%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1927 output_address (XEXP (op
, 0));
1928 fprintf (file
, "\n\t");
1932 case 'O': /* Offset address. */
1933 if (code
== MEM
&& c4x_autoinc_operand (op
, Pmode
))
1935 else if (code
== MEM
)
1936 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1937 else if (code
== REG
)
1938 fprintf (file
, "%s", reg_names
[REGNO (op
) + 1]);
1940 fatal_insn ("c4x_print_operand: %%O inconsistency", op
);
1943 case 'C': /* Call. */
1946 case 'U': /* Call/callu. */
1947 if (code
!= SYMBOL_REF
)
1948 fprintf (file
, "u");
1958 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
1960 fprintf (file
, "%s", float_reg_names
[REGNO (op
)]);
1962 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
1966 output_address (XEXP (op
, 0));
1973 real_to_decimal (str
, CONST_DOUBLE_REAL_VALUE (op
),
1974 sizeof (str
), 0, 1);
1975 fprintf (file
, "%s", str
);
1980 fprintf (file
, "%d", INTVAL (op
));
1984 fprintf (file
, "ne");
1988 fprintf (file
, "eq");
1992 fprintf (file
, "ge");
1996 fprintf (file
, "gt");
2000 fprintf (file
, "le");
2004 fprintf (file
, "lt");
2008 fprintf (file
, "hs");
2012 fprintf (file
, "hi");
2016 fprintf (file
, "ls");
2020 fprintf (file
, "lo");
2024 output_addr_const (file
, op
);
2028 output_addr_const (file
, XEXP (op
, 0));
2035 fatal_insn ("c4x_print_operand: Bad operand case", op
);
2042 c4x_print_operand_address (file
, addr
)
2046 switch (GET_CODE (addr
))
2049 fprintf (file
, "*%s", reg_names
[REGNO (addr
)]);
2053 fprintf (file
, "*--%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2057 fprintf (file
, "*%s++", reg_names
[REGNO (XEXP (addr
, 0))]);
2062 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2063 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2065 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2066 fprintf (file
, "*%s++(%s)", reg_names
[REGNO (op0
)],
2067 reg_names
[REGNO (op1
)]);
2068 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2069 fprintf (file
, "*%s++(%d)", reg_names
[REGNO (op0
)],
2071 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2072 fprintf (file
, "*%s--(%d)", reg_names
[REGNO (op0
)],
2074 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2075 fprintf (file
, "*%s--(%s)", reg_names
[REGNO (op0
)],
2076 reg_names
[REGNO (op1
)]);
2078 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr
);
2084 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2085 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2087 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2088 fprintf (file
, "*++%s(%s)", reg_names
[REGNO (op0
)],
2089 reg_names
[REGNO (op1
)]);
2090 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2091 fprintf (file
, "*++%s(%d)", reg_names
[REGNO (op0
)],
2093 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2094 fprintf (file
, "*--%s(%d)", reg_names
[REGNO (op0
)],
2096 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2097 fprintf (file
, "*--%s(%s)", reg_names
[REGNO (op0
)],
2098 reg_names
[REGNO (op1
)]);
2100 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr
);
2105 fprintf (file
, "*++%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2109 fprintf (file
, "*%s--", reg_names
[REGNO (XEXP (addr
, 0))]);
2112 case PLUS
: /* Indirect with displacement. */
2114 rtx op0
= XEXP (addr
, 0);
2115 rtx op1
= XEXP (addr
, 1);
2121 if (IS_INDEX_REG (op0
))
2123 fprintf (file
, "*+%s(%s)",
2124 reg_names
[REGNO (op1
)],
2125 reg_names
[REGNO (op0
)]); /* Index + base. */
2129 fprintf (file
, "*+%s(%s)",
2130 reg_names
[REGNO (op0
)],
2131 reg_names
[REGNO (op1
)]); /* Base + index. */
2134 else if (INTVAL (op1
) < 0)
2136 fprintf (file
, "*-%s(%d)",
2137 reg_names
[REGNO (op0
)],
2138 -INTVAL (op1
)); /* Base - displacement. */
2142 fprintf (file
, "*+%s(%d)",
2143 reg_names
[REGNO (op0
)],
2144 INTVAL (op1
)); /* Base + displacement. */
2148 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2154 rtx op0
= XEXP (addr
, 0);
2155 rtx op1
= XEXP (addr
, 1);
2157 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
2158 c4x_print_operand_address (file
, op1
);
2160 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2167 fprintf (file
, "@");
2168 output_addr_const (file
, addr
);
2171 /* We shouldn't access CONST_INT addresses. */
2175 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2181 /* Return nonzero if the floating point operand will fit
2182 in the immediate field. */
2185 c4x_immed_float_p (op
)
2192 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
2193 if (GET_MODE (op
) == HFmode
)
2194 REAL_VALUE_TO_TARGET_DOUBLE (r
, convval
);
2197 REAL_VALUE_TO_TARGET_SINGLE (r
, convval
[0]);
2201 /* Sign extend exponent. */
2202 exponent
= (((convval
[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2203 if (exponent
== -128)
2205 if ((convval
[0] & 0x00000fff) != 0 || convval
[1] != 0)
2206 return 0; /* Precision doesn't fit. */
2207 return (exponent
<= 7) /* Positive exp. */
2208 && (exponent
>= -7); /* Negative exp. */
2212 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
2213 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2215 None of the last four instructions from the bottom of the block can
2216 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2217 BcondAT or RETIcondD.
2219 This routine scans the four previous insns for a jump insn, and if
2220 one is found, returns 1 so that we bung in a nop instruction.
2221 This simple minded strategy will add a nop, when it may not
2222 be required. Say when there is a JUMP_INSN near the end of the
2223 block that doesn't get converted into a delayed branch.
2225 Note that we cannot have a call insn, since we don't generate
2226 repeat loops with calls in them (although I suppose we could, but
2227 there's no benefit.)
2229 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2232 c4x_rptb_nop_p (insn
)
2238 /* Extract the start label from the jump pattern (rptb_end). */
2239 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2241 /* If there is a label at the end of the loop we must insert
2244 insn
= previous_insn (insn
);
2245 } while (GET_CODE (insn
) == NOTE
2246 || GET_CODE (insn
) == USE
2247 || GET_CODE (insn
) == CLOBBER
);
2248 if (GET_CODE (insn
) == CODE_LABEL
)
2251 for (i
= 0; i
< 4; i
++)
2253 /* Search back for prev non-note and non-label insn. */
2254 while (GET_CODE (insn
) == NOTE
|| GET_CODE (insn
) == CODE_LABEL
2255 || GET_CODE (insn
) == USE
|| GET_CODE (insn
) == CLOBBER
)
2257 if (insn
== start_label
)
2260 insn
= previous_insn (insn
);
2263 /* If we have a jump instruction we should insert a NOP. If we
2264 hit repeat block top we should only insert a NOP if the loop
2266 if (GET_CODE (insn
) == JUMP_INSN
)
2268 insn
= previous_insn (insn
);
2274 /* The C4x looping instruction needs to be emitted at the top of the
2275 loop. Emitting the true RTL for a looping instruction at the top of
2276 the loop can cause problems with flow analysis. So instead, a dummy
2277 doloop insn is emitted at the end of the loop. This routine checks
2278 for the presence of this doloop insn and then searches back to the
2279 top of the loop, where it inserts the true looping insn (provided
2280 there are no instructions in the loop which would cause problems).
2281 Any additional labels can be emitted at this point. In addition, if
2282 the desired loop count register was not allocated, this routine does
2285 Before we can create a repeat block looping instruction we have to
2286 verify that there are no jumps outside the loop and no jumps outside
2287 the loop go into this loop. This can happen in the basic blocks reorder
2288 pass. The C4x cpu can not handle this. */
2291 c4x_label_ref_used_p (x
, code_label
)
2301 code
= GET_CODE (x
);
2302 if (code
== LABEL_REF
)
2303 return INSN_UID (XEXP (x
,0)) == INSN_UID (code_label
);
2305 fmt
= GET_RTX_FORMAT (code
);
2306 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2310 if (c4x_label_ref_used_p (XEXP (x
, i
), code_label
))
2313 else if (fmt
[i
] == 'E')
2314 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2315 if (c4x_label_ref_used_p (XVECEXP (x
, i
, j
), code_label
))
2323 c4x_rptb_valid_p (insn
, start_label
)
2324 rtx insn
, start_label
;
2330 /* Find the start label. */
2331 for (; insn
; insn
= PREV_INSN (insn
))
2332 if (insn
== start_label
)
2335 /* Note found then we can not use a rptb or rpts. The label was
2336 probably moved by the basic block reorder pass. */
2341 /* If any jump jumps inside this block then we must fail. */
2342 for (insn
= PREV_INSN (start
); insn
; insn
= PREV_INSN (insn
))
2344 if (GET_CODE (insn
) == CODE_LABEL
)
2346 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2347 if (GET_CODE (tmp
) == JUMP_INSN
2348 && c4x_label_ref_used_p (tmp
, insn
))
2352 for (insn
= NEXT_INSN (end
); insn
; insn
= NEXT_INSN (insn
))
2354 if (GET_CODE (insn
) == CODE_LABEL
)
2356 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2357 if (GET_CODE (tmp
) == JUMP_INSN
2358 && c4x_label_ref_used_p (tmp
, insn
))
2362 /* If any jump jumps outside this block then we must fail. */
2363 for (insn
= NEXT_INSN (start
); insn
!= end
; insn
= NEXT_INSN (insn
))
2365 if (GET_CODE (insn
) == CODE_LABEL
)
2367 for (tmp
= NEXT_INSN (end
); tmp
; tmp
= NEXT_INSN(tmp
))
2368 if (GET_CODE (tmp
) == JUMP_INSN
2369 && c4x_label_ref_used_p (tmp
, insn
))
2371 for (tmp
= PREV_INSN (start
); tmp
; tmp
= PREV_INSN(tmp
))
2372 if (GET_CODE (tmp
) == JUMP_INSN
2373 && c4x_label_ref_used_p (tmp
, insn
))
2378 /* All checks OK. */
2384 c4x_rptb_insert (insn
)
2389 rtx new_start_label
;
2392 /* If the count register has not been allocated to RC, say if
2393 there is a movstr pattern in the loop, then do not insert a
2394 RPTB instruction. Instead we emit a decrement and branch
2395 at the end of the loop. */
2396 count_reg
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 0), 0);
2397 if (REGNO (count_reg
) != RC_REGNO
)
2400 /* Extract the start label from the jump pattern (rptb_end). */
2401 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2403 if (! c4x_rptb_valid_p (insn
, start_label
))
2405 /* We can not use the rptb insn. Replace it so reorg can use
2406 the delay slots of the jump insn. */
2407 emit_insn_before (gen_addqi3 (count_reg
, count_reg
, GEN_INT (-1)), insn
);
2408 emit_insn_before (gen_cmpqi (count_reg
, GEN_INT (0)), insn
);
2409 emit_insn_before (gen_bge (start_label
), insn
);
2410 LABEL_NUSES (start_label
)++;
2415 end_label
= gen_label_rtx ();
2416 LABEL_NUSES (end_label
)++;
2417 emit_label_after (end_label
, insn
);
2419 new_start_label
= gen_label_rtx ();
2420 LABEL_NUSES (new_start_label
)++;
2422 for (; insn
; insn
= PREV_INSN (insn
))
2424 if (insn
== start_label
)
2426 if (GET_CODE (insn
) == JUMP_INSN
&&
2427 JUMP_LABEL (insn
) == start_label
)
2428 redirect_jump (insn
, new_start_label
, 0);
2431 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label
);
2433 emit_label_after (new_start_label
, insn
);
2435 if (TARGET_RPTS
&& c4x_rptb_rpts_p (PREV_INSN (insn
), 0))
2436 emit_insn_after (gen_rpts_top (new_start_label
, end_label
), insn
);
2438 emit_insn_after (gen_rptb_top (new_start_label
, end_label
), insn
);
2439 if (LABEL_NUSES (start_label
) == 0)
2440 delete_insn (start_label
);
2444 /* This function is a C4x special called immediately before delayed
2445 branch scheduling. We fix up RPTB style loops that didn't get RC
2446 allocated as the loop counter. */
2449 c4x_process_after_reload (first
)
2454 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2456 /* Look for insn. */
2459 int insn_code_number
;
2462 insn_code_number
= recog_memoized (insn
);
2464 if (insn_code_number
< 0)
2467 /* Insert the RTX for RPTB at the top of the loop
2468 and a label at the end of the loop. */
2469 if (insn_code_number
== CODE_FOR_rptb_end
)
2470 c4x_rptb_insert(insn
);
2472 /* We need to split the insn here. Otherwise the calls to
2473 force_const_mem will not work for load_immed_address. */
2476 /* Don't split the insn if it has been deleted. */
2477 if (! INSN_DELETED_P (old
))
2478 insn
= try_split (PATTERN(old
), old
, 1);
2480 /* When not optimizing, the old insn will be still left around
2481 with only the 'deleted' bit set. Transform it into a note
2482 to avoid confusion of subsequent processing. */
2483 if (INSN_DELETED_P (old
))
2485 PUT_CODE (old
, NOTE
);
2486 NOTE_LINE_NUMBER (old
) = NOTE_INSN_DELETED
;
2487 NOTE_SOURCE_FILE (old
) = 0;
2498 return REG_P (op
) && IS_ADDR_OR_PSEUDO_REG (op
);
2506 return REG_P (op
) && IS_INDEX_OR_PSEUDO_REG (op
);
2511 c4x_immed_int_constant (op
)
2514 if (GET_CODE (op
) != CONST_INT
)
2517 return GET_MODE (op
) == VOIDmode
2518 || GET_MODE_CLASS (op
) == MODE_INT
2519 || GET_MODE_CLASS (op
) == MODE_PARTIAL_INT
;
2524 c4x_immed_float_constant (op
)
2527 if (GET_CODE (op
) != CONST_DOUBLE
)
2530 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2531 present this only means that a MEM rtx has been generated. It does
2532 not mean the rtx is really in memory. */
2534 return GET_MODE (op
) == QFmode
|| GET_MODE (op
) == HFmode
;
2539 c4x_shiftable_constant (op
)
2544 int val
= INTVAL (op
);
2546 for (i
= 0; i
< 16; i
++)
2551 mask
= ((0xffff >> i
) << 16) | 0xffff;
2552 if (IS_INT16_CONST (val
& (1 << 31) ? (val
>> i
) | ~mask
2553 : (val
>> i
) & mask
))
2563 return c4x_immed_float_constant (op
) && c4x_immed_float_p (op
);
2571 return c4x_immed_int_constant (op
) && IS_INT16_CONST (INTVAL (op
));
2581 return c4x_immed_int_constant (op
) && IS_INT8_CONST (INTVAL (op
));
2589 if (TARGET_C3X
|| ! c4x_immed_int_constant (op
))
2591 return IS_INT5_CONST (INTVAL (op
));
2599 return c4x_immed_int_constant (op
) && IS_UINT16_CONST (INTVAL (op
));
2607 return c4x_immed_int_constant (op
) && IS_NOT_UINT16_CONST (INTVAL (op
));
2615 return c4x_immed_int_constant (op
) && IS_HIGH_CONST (INTVAL (op
));
2619 /* The constraints do not have to check the register class,
2620 except when needed to discriminate between the constraints.
2621 The operand has been checked by the predicates to be valid. */
2623 /* ARx + 9-bit signed const or IRn
2624 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2625 We don't include the pre/post inc/dec forms here since
2626 they are handled by the <> constraints. */
2629 c4x_Q_constraint (op
)
2632 enum machine_mode mode
= GET_MODE (op
);
2634 if (GET_CODE (op
) != MEM
)
2637 switch (GET_CODE (op
))
2644 rtx op0
= XEXP (op
, 0);
2645 rtx op1
= XEXP (op
, 1);
2653 if (GET_CODE (op1
) != CONST_INT
)
2656 /* HImode and HFmode must be offsettable. */
2657 if (mode
== HImode
|| mode
== HFmode
)
2658 return IS_DISP8_OFF_CONST (INTVAL (op1
));
2660 return IS_DISP8_CONST (INTVAL (op1
));
2671 /* ARx + 5-bit unsigned const
2672 *ARx, *+ARx(n) for n < 32. */
2675 c4x_R_constraint (op
)
2678 enum machine_mode mode
= GET_MODE (op
);
2682 if (GET_CODE (op
) != MEM
)
2685 switch (GET_CODE (op
))
2692 rtx op0
= XEXP (op
, 0);
2693 rtx op1
= XEXP (op
, 1);
2698 if (GET_CODE (op1
) != CONST_INT
)
2701 /* HImode and HFmode must be offsettable. */
2702 if (mode
== HImode
|| mode
== HFmode
)
2703 return IS_UINT5_CONST (INTVAL (op1
) + 1);
2705 return IS_UINT5_CONST (INTVAL (op1
));
2720 enum machine_mode mode
= GET_MODE (op
);
2722 if (TARGET_C3X
|| GET_CODE (op
) != MEM
)
2726 switch (GET_CODE (op
))
2729 return IS_ADDR_OR_PSEUDO_REG (op
);
2733 rtx op0
= XEXP (op
, 0);
2734 rtx op1
= XEXP (op
, 1);
2736 /* HImode and HFmode must be offsettable. */
2737 if (mode
== HImode
|| mode
== HFmode
)
2738 return IS_ADDR_OR_PSEUDO_REG (op0
)
2739 && GET_CODE (op1
) == CONST_INT
2740 && IS_UINT5_CONST (INTVAL (op1
) + 1);
2743 && IS_ADDR_OR_PSEUDO_REG (op0
)
2744 && GET_CODE (op1
) == CONST_INT
2745 && IS_UINT5_CONST (INTVAL (op1
));
2756 /* ARx + 1-bit unsigned const or IRn
2757 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2758 We don't include the pre/post inc/dec forms here since
2759 they are handled by the <> constraints. */
2762 c4x_S_constraint (op
)
2765 enum machine_mode mode
= GET_MODE (op
);
2766 if (GET_CODE (op
) != MEM
)
2769 switch (GET_CODE (op
))
2777 rtx op0
= XEXP (op
, 0);
2778 rtx op1
= XEXP (op
, 1);
2780 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2781 || (op0
!= XEXP (op1
, 0)))
2784 op0
= XEXP (op1
, 0);
2785 op1
= XEXP (op1
, 1);
2786 return REG_P (op0
) && REG_P (op1
);
2787 /* Pre or post_modify with a displacement of 0 or 1
2788 should not be generated. */
2794 rtx op0
= XEXP (op
, 0);
2795 rtx op1
= XEXP (op
, 1);
2803 if (GET_CODE (op1
) != CONST_INT
)
2806 /* HImode and HFmode must be offsettable. */
2807 if (mode
== HImode
|| mode
== HFmode
)
2808 return IS_DISP1_OFF_CONST (INTVAL (op1
));
2810 return IS_DISP1_CONST (INTVAL (op1
));
2825 enum machine_mode mode
= GET_MODE (op
);
2826 if (GET_CODE (op
) != MEM
)
2830 switch (GET_CODE (op
))
2834 if (mode
!= QImode
&& mode
!= QFmode
)
2841 return IS_ADDR_OR_PSEUDO_REG (op
);
2846 rtx op0
= XEXP (op
, 0);
2847 rtx op1
= XEXP (op
, 1);
2849 if (mode
!= QImode
&& mode
!= QFmode
)
2852 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2853 || (op0
!= XEXP (op1
, 0)))
2856 op0
= XEXP (op1
, 0);
2857 op1
= XEXP (op1
, 1);
2858 return REG_P (op0
) && IS_ADDR_OR_PSEUDO_REG (op0
)
2859 && REG_P (op1
) && IS_INDEX_OR_PSEUDO_REG (op1
);
2860 /* Pre or post_modify with a displacement of 0 or 1
2861 should not be generated. */
2866 rtx op0
= XEXP (op
, 0);
2867 rtx op1
= XEXP (op
, 1);
2871 /* HImode and HFmode must be offsettable. */
2872 if (mode
== HImode
|| mode
== HFmode
)
2873 return IS_ADDR_OR_PSEUDO_REG (op0
)
2874 && GET_CODE (op1
) == CONST_INT
2875 && IS_DISP1_OFF_CONST (INTVAL (op1
));
2878 return (IS_INDEX_OR_PSEUDO_REG (op1
)
2879 && IS_ADDR_OR_PSEUDO_REG (op0
))
2880 || (IS_ADDR_OR_PSEUDO_REG (op1
)
2881 && IS_INDEX_OR_PSEUDO_REG (op0
));
2883 return IS_ADDR_OR_PSEUDO_REG (op0
)
2884 && GET_CODE (op1
) == CONST_INT
2885 && IS_DISP1_CONST (INTVAL (op1
));
2897 /* Direct memory operand. */
2900 c4x_T_constraint (op
)
2903 if (GET_CODE (op
) != MEM
)
2907 if (GET_CODE (op
) != LO_SUM
)
2909 /* Allow call operands. */
2910 return GET_CODE (op
) == SYMBOL_REF
2911 && GET_MODE (op
) == Pmode
2912 && SYMBOL_REF_FLAG (op
);
2915 /* HImode and HFmode are not offsettable. */
2916 if (GET_MODE (op
) == HImode
|| GET_CODE (op
) == HFmode
)
2919 if ((GET_CODE (XEXP (op
, 0)) == REG
)
2920 && (REGNO (XEXP (op
, 0)) == DP_REGNO
))
2921 return c4x_U_constraint (XEXP (op
, 1));
2927 /* Symbolic operand. */
2930 c4x_U_constraint (op
)
2933 /* Don't allow direct addressing to an arbitrary constant. */
2934 return GET_CODE (op
) == CONST
2935 || GET_CODE (op
) == SYMBOL_REF
2936 || GET_CODE (op
) == LABEL_REF
;
2941 c4x_autoinc_operand (op
, mode
)
2943 enum machine_mode mode ATTRIBUTE_UNUSED
;
2945 if (GET_CODE (op
) == MEM
)
2947 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
2953 || code
== PRE_MODIFY
2954 || code
== POST_MODIFY
2962 /* Match any operand. */
2965 any_operand (op
, mode
)
2966 register rtx op ATTRIBUTE_UNUSED
;
2967 enum machine_mode mode ATTRIBUTE_UNUSED
;
2973 /* Nonzero if OP is a floating point value with value 0.0. */
2976 fp_zero_operand (op
, mode
)
2978 enum machine_mode mode ATTRIBUTE_UNUSED
;
2982 if (GET_CODE (op
) != CONST_DOUBLE
)
2984 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
2985 return REAL_VALUES_EQUAL (r
, dconst0
);
2990 const_operand (op
, mode
)
2992 register enum machine_mode mode
;
2998 if (GET_CODE (op
) != CONST_DOUBLE
2999 || GET_MODE (op
) != mode
3000 || GET_MODE_CLASS (mode
) != MODE_FLOAT
)
3003 return c4x_immed_float_p (op
);
3009 if (GET_CODE (op
) == CONSTANT_P_RTX
)
3012 if (GET_CODE (op
) != CONST_INT
3013 || (GET_MODE (op
) != VOIDmode
&& GET_MODE (op
) != mode
)
3014 || GET_MODE_CLASS (mode
) != MODE_INT
)
3017 return IS_HIGH_CONST (INTVAL (op
)) || IS_INT16_CONST (INTVAL (op
));
3029 stik_const_operand (op
, mode
)
3031 enum machine_mode mode ATTRIBUTE_UNUSED
;
3033 return c4x_K_constant (op
);
3038 not_const_operand (op
, mode
)
3040 enum machine_mode mode ATTRIBUTE_UNUSED
;
3042 return c4x_N_constant (op
);
3047 reg_operand (op
, mode
)
3049 enum machine_mode mode
;
3051 if (GET_CODE (op
) == SUBREG
3052 && GET_MODE (op
) == QFmode
)
3054 return register_operand (op
, mode
);
3059 mixed_subreg_operand (op
, mode
)
3061 enum machine_mode mode ATTRIBUTE_UNUSED
;
3063 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3064 int and a long double. */
3065 if (GET_CODE (op
) == SUBREG
3066 && (GET_MODE (op
) == QFmode
)
3067 && (GET_MODE (SUBREG_REG (op
)) == QImode
3068 || GET_MODE (SUBREG_REG (op
)) == HImode
))
3075 reg_imm_operand (op
, mode
)
3077 enum machine_mode mode ATTRIBUTE_UNUSED
;
3079 if (REG_P (op
) || CONSTANT_P (op
))
3086 not_modify_reg (op
, mode
)
3088 enum machine_mode mode ATTRIBUTE_UNUSED
;
3090 if (REG_P (op
) || CONSTANT_P (op
))
3092 if (GET_CODE (op
) != MEM
)
3095 switch (GET_CODE (op
))
3102 rtx op0
= XEXP (op
, 0);
3103 rtx op1
= XEXP (op
, 1);
3108 if (REG_P (op1
) || GET_CODE (op1
) == CONST_INT
)
3114 rtx op0
= XEXP (op
, 0);
3116 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
3134 not_rc_reg (op
, mode
)
3136 enum machine_mode mode ATTRIBUTE_UNUSED
;
3138 if (REG_P (op
) && REGNO (op
) == RC_REGNO
)
3144 /* Extended precision register R0-R1. */
3147 r0r1_reg_operand (op
, mode
)
3149 enum machine_mode mode
;
3151 if (! reg_operand (op
, mode
))
3153 if (GET_CODE (op
) == SUBREG
)
3154 op
= SUBREG_REG (op
);
3155 return REG_P (op
) && IS_R0R1_OR_PSEUDO_REG (op
);
3159 /* Extended precision register R2-R3. */
3162 r2r3_reg_operand (op
, mode
)
3164 enum machine_mode mode
;
3166 if (! reg_operand (op
, mode
))
3168 if (GET_CODE (op
) == SUBREG
)
3169 op
= SUBREG_REG (op
);
3170 return REG_P (op
) && IS_R2R3_OR_PSEUDO_REG (op
);
3174 /* Low extended precision register R0-R7. */
3177 ext_low_reg_operand (op
, mode
)
3179 enum machine_mode mode
;
3181 if (! reg_operand (op
, mode
))
3183 if (GET_CODE (op
) == SUBREG
)
3184 op
= SUBREG_REG (op
);
3185 return REG_P (op
) && IS_EXT_LOW_OR_PSEUDO_REG (op
);
3189 /* Extended precision register. */
3192 ext_reg_operand (op
, mode
)
3194 enum machine_mode mode
;
3196 if (! reg_operand (op
, mode
))
3198 if (GET_CODE (op
) == SUBREG
)
3199 op
= SUBREG_REG (op
);
3202 return IS_EXT_OR_PSEUDO_REG (op
);
3206 /* Standard precision register. */
3209 std_reg_operand (op
, mode
)
3211 enum machine_mode mode
;
3213 if (! reg_operand (op
, mode
))
3215 if (GET_CODE (op
) == SUBREG
)
3216 op
= SUBREG_REG (op
);
3217 return REG_P (op
) && IS_STD_OR_PSEUDO_REG (op
);
3220 /* Standard precision or normal register. */
3223 std_or_reg_operand (op
, mode
)
3225 enum machine_mode mode
;
3227 if (reload_in_progress
)
3228 return std_reg_operand (op
, mode
);
3229 return reg_operand (op
, mode
);
3232 /* Address register. */
3235 addr_reg_operand (op
, mode
)
3237 enum machine_mode mode
;
3239 if (! reg_operand (op
, mode
))
3241 return c4x_a_register (op
);
3245 /* Index register. */
3248 index_reg_operand (op
, mode
)
3250 enum machine_mode mode
;
3252 if (! reg_operand (op
, mode
))
3254 if (GET_CODE (op
) == SUBREG
)
3255 op
= SUBREG_REG (op
);
3256 return c4x_x_register (op
);
3263 dp_reg_operand (op
, mode
)
3265 enum machine_mode mode ATTRIBUTE_UNUSED
;
3267 return REG_P (op
) && IS_DP_OR_PSEUDO_REG (op
);
3274 sp_reg_operand (op
, mode
)
3276 enum machine_mode mode ATTRIBUTE_UNUSED
;
3278 return REG_P (op
) && IS_SP_OR_PSEUDO_REG (op
);
3285 st_reg_operand (op
, mode
)
3287 enum machine_mode mode ATTRIBUTE_UNUSED
;
3289 return REG_P (op
) && IS_ST_OR_PSEUDO_REG (op
);
3296 rc_reg_operand (op
, mode
)
3298 enum machine_mode mode ATTRIBUTE_UNUSED
;
3300 return REG_P (op
) && IS_RC_OR_PSEUDO_REG (op
);
3305 call_address_operand (op
, mode
)
3307 enum machine_mode mode ATTRIBUTE_UNUSED
;
3309 return (REG_P (op
) || symbolic_address_operand (op
, mode
));
3313 /* Symbolic address operand. */
3316 symbolic_address_operand (op
, mode
)
3318 enum machine_mode mode ATTRIBUTE_UNUSED
;
3320 switch (GET_CODE (op
))
3332 /* Check dst operand of a move instruction. */
3335 dst_operand (op
, mode
)
3337 enum machine_mode mode
;
3339 if (GET_CODE (op
) == SUBREG
3340 && mixed_subreg_operand (op
, mode
))
3344 return reg_operand (op
, mode
);
3346 return nonimmediate_operand (op
, mode
);
3350 /* Check src operand of two operand arithmetic instructions. */
3353 src_operand (op
, mode
)
3355 enum machine_mode mode
;
3357 if (GET_CODE (op
) == SUBREG
3358 && mixed_subreg_operand (op
, mode
))
3362 return reg_operand (op
, mode
);
3364 if (mode
== VOIDmode
)
3365 mode
= GET_MODE (op
);
3367 if (GET_CODE (op
) == CONST_INT
)
3368 return (mode
== QImode
|| mode
== Pmode
|| mode
== HImode
)
3369 && c4x_I_constant (op
);
3371 /* We don't like CONST_DOUBLE integers. */
3372 if (GET_CODE (op
) == CONST_DOUBLE
)
3373 return c4x_H_constant (op
);
3375 /* Disallow symbolic addresses. Only the predicate
3376 symbolic_address_operand will match these. */
3377 if (GET_CODE (op
) == SYMBOL_REF
3378 || GET_CODE (op
) == LABEL_REF
3379 || GET_CODE (op
) == CONST
)
3382 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3383 access to symbolic addresses. These operands will get forced
3384 into a register and the movqi expander will generate a
3385 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3386 if (GET_CODE (op
) == MEM
3387 && ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
3388 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
3389 || GET_CODE (XEXP (op
, 0)) == CONST
)))
3390 return ! TARGET_LOAD_DIRECT_MEMS
&& GET_MODE (op
) == mode
;
3392 return general_operand (op
, mode
);
3397 src_hi_operand (op
, mode
)
3399 enum machine_mode mode
;
3401 if (c4x_O_constant (op
))
3403 return src_operand (op
, mode
);
3407 /* Check src operand of two operand logical instructions. */
3410 lsrc_operand (op
, mode
)
3412 enum machine_mode mode
;
3414 if (mode
== VOIDmode
)
3415 mode
= GET_MODE (op
);
3417 if (mode
!= QImode
&& mode
!= Pmode
)
3418 fatal_insn ("mode not QImode", op
);
3420 if (GET_CODE (op
) == CONST_INT
)
3421 return c4x_L_constant (op
) || c4x_J_constant (op
);
3423 return src_operand (op
, mode
);
3427 /* Check src operand of two operand tricky instructions. */
3430 tsrc_operand (op
, mode
)
3432 enum machine_mode mode
;
3434 if (mode
== VOIDmode
)
3435 mode
= GET_MODE (op
);
3437 if (mode
!= QImode
&& mode
!= Pmode
)
3438 fatal_insn ("mode not QImode", op
);
3440 if (GET_CODE (op
) == CONST_INT
)
3441 return c4x_L_constant (op
) || c4x_N_constant (op
) || c4x_J_constant (op
);
3443 return src_operand (op
, mode
);
3447 /* Check src operand of two operand non immedidate instructions. */
3450 nonimmediate_src_operand (op
, mode
)
3452 enum machine_mode mode
;
3454 if (GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
3457 return src_operand (op
, mode
);
3461 /* Check logical src operand of two operand non immedidate instructions. */
3464 nonimmediate_lsrc_operand (op
, mode
)
3466 enum machine_mode mode
;
3468 if (GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
3471 return lsrc_operand (op
, mode
);
3476 reg_or_const_operand (op
, mode
)
3478 enum machine_mode mode
;
3480 return reg_operand (op
, mode
) || const_operand (op
, mode
);
3484 /* Check for indirect operands allowable in parallel instruction. */
3487 par_ind_operand (op
, mode
)
3489 enum machine_mode mode
;
3491 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3494 return c4x_S_indirect (op
);
3498 /* Check for operands allowable in parallel instruction. */
3501 parallel_operand (op
, mode
)
3503 enum machine_mode mode
;
3505 return ext_low_reg_operand (op
, mode
) || par_ind_operand (op
, mode
);
3510 c4x_S_address_parse (op
, base
, incdec
, index
, disp
)
3522 if (GET_CODE (op
) != MEM
)
3523 fatal_insn ("invalid indirect memory address", op
);
3526 switch (GET_CODE (op
))
3529 *base
= REGNO (XEXP (op
, 0));
3535 *base
= REGNO (XEXP (op
, 0));
3541 *base
= REGNO (XEXP (op
, 0));
3547 *base
= REGNO (XEXP (op
, 0));
3553 *base
= REGNO (XEXP (op
, 0));
3554 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3556 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3557 *disp
= 0; /* ??? */
3560 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3565 *base
= REGNO (XEXP (op
, 0));
3566 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3568 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3569 *disp
= 1; /* ??? */
3572 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3583 rtx op0
= XEXP (op
, 0);
3584 rtx op1
= XEXP (op
, 1);
3586 if (c4x_a_register (op0
))
3588 if (c4x_x_register (op1
))
3590 *base
= REGNO (op0
);
3591 *index
= REGNO (op1
);
3594 else if ((GET_CODE (op1
) == CONST_INT
3595 && IS_DISP1_CONST (INTVAL (op1
))))
3597 *base
= REGNO (op0
);
3598 *disp
= INTVAL (op1
);
3602 else if (c4x_x_register (op0
) && c4x_a_register (op1
))
3604 *base
= REGNO (op1
);
3605 *index
= REGNO (op0
);
3612 fatal_insn ("invalid indirect (S) memory address", op
);
3618 c4x_address_conflict (op0
, op1
, store0
, store1
)
3633 if (MEM_VOLATILE_P (op0
) && MEM_VOLATILE_P (op1
))
3636 c4x_S_address_parse (op0
, &base0
, &incdec0
, &index0
, &disp0
);
3637 c4x_S_address_parse (op1
, &base1
, &incdec1
, &index1
, &disp1
);
3639 if (store0
&& store1
)
3641 /* If we have two stores in parallel to the same address, then
3642 the C4x only executes one of the stores. This is unlikely to
3643 cause problems except when writing to a hardware device such
3644 as a FIFO since the second write will be lost. The user
3645 should flag the hardware location as being volatile so that
3646 we don't do this optimisation. While it is unlikely that we
3647 have an aliased address if both locations are not marked
3648 volatile, it is probably safer to flag a potential conflict
3649 if either location is volatile. */
3650 if (! flag_argument_noalias
)
3652 if (MEM_VOLATILE_P (op0
) || MEM_VOLATILE_P (op1
))
3657 /* If have a parallel load and a store to the same address, the load
3658 is performed first, so there is no conflict. Similarly, there is
3659 no conflict if have parallel loads from the same address. */
3661 /* Cannot use auto increment or auto decrement twice for same
3663 if (base0
== base1
&& incdec0
&& incdec0
)
3666 /* It might be too confusing for GCC if we have use a base register
3667 with a side effect and a memory reference using the same register
3669 if (! TARGET_DEVEL
&& base0
== base1
&& (incdec0
|| incdec1
))
3672 /* We can not optimize the case where op1 and op2 refer to the same
3674 if (base0
== base1
&& disp0
== disp1
&& index0
== index1
)
3682 /* Check for while loop inside a decrement and branch loop. */
3685 c4x_label_conflict (insn
, jump
, db
)
3692 if (GET_CODE (insn
) == CODE_LABEL
)
3694 if (CODE_LABEL_NUMBER (jump
) == CODE_LABEL_NUMBER (insn
))
3696 if (CODE_LABEL_NUMBER (db
) == CODE_LABEL_NUMBER (insn
))
3699 insn
= PREV_INSN (insn
);
3705 /* Validate combination of operands for parallel load/store instructions. */
3708 valid_parallel_load_store (operands
, mode
)
3710 enum machine_mode mode ATTRIBUTE_UNUSED
;
3712 rtx op0
= operands
[0];
3713 rtx op1
= operands
[1];
3714 rtx op2
= operands
[2];
3715 rtx op3
= operands
[3];
3717 if (GET_CODE (op0
) == SUBREG
)
3718 op0
= SUBREG_REG (op0
);
3719 if (GET_CODE (op1
) == SUBREG
)
3720 op1
= SUBREG_REG (op1
);
3721 if (GET_CODE (op2
) == SUBREG
)
3722 op2
= SUBREG_REG (op2
);
3723 if (GET_CODE (op3
) == SUBREG
)
3724 op3
= SUBREG_REG (op3
);
3726 /* The patterns should only allow ext_low_reg_operand() or
3727 par_ind_operand() operands. Thus of the 4 operands, only 2
3728 should be REGs and the other 2 should be MEMs. */
3730 /* This test prevents the multipack pass from using this pattern if
3731 op0 is used as an index or base register in op2 or op3, since
3732 this combination will require reloading. */
3733 if (GET_CODE (op0
) == REG
3734 && ((GET_CODE (op2
) == MEM
&& reg_mentioned_p (op0
, XEXP (op2
, 0)))
3735 || (GET_CODE (op3
) == MEM
&& reg_mentioned_p (op0
, XEXP (op3
, 0)))))
3739 if (GET_CODE (op0
) == REG
&& GET_CODE (op2
) == REG
)
3740 return (REGNO (op0
) != REGNO (op2
))
3741 && GET_CODE (op1
) == MEM
&& GET_CODE (op3
) == MEM
3742 && ! c4x_address_conflict (op1
, op3
, 0, 0);
3745 if (GET_CODE (op1
) == REG
&& GET_CODE (op3
) == REG
)
3746 return GET_CODE (op0
) == MEM
&& GET_CODE (op2
) == MEM
3747 && ! c4x_address_conflict (op0
, op2
, 1, 1);
3750 if (GET_CODE (op0
) == REG
&& GET_CODE (op3
) == REG
)
3751 return GET_CODE (op1
) == MEM
&& GET_CODE (op2
) == MEM
3752 && ! c4x_address_conflict (op1
, op2
, 0, 1);
3755 if (GET_CODE (op1
) == REG
&& GET_CODE (op2
) == REG
)
3756 return GET_CODE (op0
) == MEM
&& GET_CODE (op3
) == MEM
3757 && ! c4x_address_conflict (op0
, op3
, 1, 0);
3764 valid_parallel_operands_4 (operands
, mode
)
3766 enum machine_mode mode ATTRIBUTE_UNUSED
;
3768 rtx op0
= operands
[0];
3769 rtx op2
= operands
[2];
3771 if (GET_CODE (op0
) == SUBREG
)
3772 op0
= SUBREG_REG (op0
);
3773 if (GET_CODE (op2
) == SUBREG
)
3774 op2
= SUBREG_REG (op2
);
3776 /* This test prevents the multipack pass from using this pattern if
3777 op0 is used as an index or base register in op2, since this combination
3778 will require reloading. */
3779 if (GET_CODE (op0
) == REG
3780 && GET_CODE (op2
) == MEM
3781 && reg_mentioned_p (op0
, XEXP (op2
, 0)))
3789 valid_parallel_operands_5 (operands
, mode
)
3791 enum machine_mode mode ATTRIBUTE_UNUSED
;
3794 rtx op0
= operands
[0];
3795 rtx op1
= operands
[1];
3796 rtx op2
= operands
[2];
3797 rtx op3
= operands
[3];
3799 if (GET_CODE (op0
) == SUBREG
)
3800 op0
= SUBREG_REG (op0
);
3801 if (GET_CODE (op1
) == SUBREG
)
3802 op1
= SUBREG_REG (op1
);
3803 if (GET_CODE (op2
) == SUBREG
)
3804 op2
= SUBREG_REG (op2
);
3806 /* The patterns should only allow ext_low_reg_operand() or
3807 par_ind_operand() operands. Operands 1 and 2 may be commutative
3808 but only one of them can be a register. */
3809 if (GET_CODE (op1
) == REG
)
3811 if (GET_CODE (op2
) == REG
)
3817 /* This test prevents the multipack pass from using this pattern if
3818 op0 is used as an index or base register in op3, since this combination
3819 will require reloading. */
3820 if (GET_CODE (op0
) == REG
3821 && GET_CODE (op3
) == MEM
3822 && reg_mentioned_p (op0
, XEXP (op3
, 0)))
3830 valid_parallel_operands_6 (operands
, mode
)
3832 enum machine_mode mode ATTRIBUTE_UNUSED
;
3835 rtx op0
= operands
[0];
3836 rtx op1
= operands
[1];
3837 rtx op2
= operands
[2];
3838 rtx op4
= operands
[4];
3839 rtx op5
= operands
[5];
3841 if (GET_CODE (op1
) == SUBREG
)
3842 op1
= SUBREG_REG (op1
);
3843 if (GET_CODE (op2
) == SUBREG
)
3844 op2
= SUBREG_REG (op2
);
3845 if (GET_CODE (op4
) == SUBREG
)
3846 op4
= SUBREG_REG (op4
);
3847 if (GET_CODE (op5
) == SUBREG
)
3848 op5
= SUBREG_REG (op5
);
3850 /* The patterns should only allow ext_low_reg_operand() or
3851 par_ind_operand() operands. Thus of the 4 input operands, only 2
3852 should be REGs and the other 2 should be MEMs. */
3854 if (GET_CODE (op1
) == REG
)
3856 if (GET_CODE (op2
) == REG
)
3858 if (GET_CODE (op4
) == REG
)
3860 if (GET_CODE (op5
) == REG
)
3863 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3864 Perhaps we should count the MEMs as well? */
3868 /* This test prevents the multipack pass from using this pattern if
3869 op0 is used as an index or base register in op4 or op5, since
3870 this combination will require reloading. */
3871 if (GET_CODE (op0
) == REG
3872 && ((GET_CODE (op4
) == MEM
&& reg_mentioned_p (op0
, XEXP (op4
, 0)))
3873 || (GET_CODE (op5
) == MEM
&& reg_mentioned_p (op0
, XEXP (op5
, 0)))))
3880 /* Validate combination of src operands. Note that the operands have
3881 been screened by the src_operand predicate. We just have to check
3882 that the combination of operands is valid. If FORCE is set, ensure
3883 that the destination regno is valid if we have a 2 operand insn. */
3886 c4x_valid_operands (code
, operands
, mode
, force
)
3889 enum machine_mode mode ATTRIBUTE_UNUSED
;
3894 enum rtx_code code1
;
3895 enum rtx_code code2
;
3897 if (code
== COMPARE
)
3908 if (GET_CODE (op1
) == SUBREG
)
3909 op1
= SUBREG_REG (op1
);
3910 if (GET_CODE (op2
) == SUBREG
)
3911 op2
= SUBREG_REG (op2
);
3913 code1
= GET_CODE (op1
);
3914 code2
= GET_CODE (op2
);
3916 if (code1
== REG
&& code2
== REG
)
3919 if (code1
== MEM
&& code2
== MEM
)
3921 if (c4x_S_indirect (op1
) && c4x_S_indirect (op2
))
3923 return c4x_R_indirect (op1
) && c4x_R_indirect (op2
);
3934 if (c4x_J_constant (op2
) && c4x_R_indirect (op1
))
3939 if (! c4x_H_constant (op2
))
3943 /* Any valid memory operand screened by src_operand is OK. */
3946 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3947 into a stack slot memory address comprising a PLUS and a
3953 fatal_insn ("c4x_valid_operands: Internal error", op2
);
3957 /* Check that we have a valid destination register for a two operand
3959 return ! force
|| code
== COMPARE
|| REGNO (op1
) == REGNO (operands
[0]);
3962 /* We assume MINUS is commutative since the subtract patterns
3963 also support the reverse subtract instructions. Since op1
3964 is not a register, and op2 is a register, op1 can only
3965 be a restricted memory operand for a shift instruction. */
3966 if (code
== ASHIFTRT
|| code
== LSHIFTRT
3967 || code
== ASHIFT
|| code
== COMPARE
)
3969 && (c4x_S_indirect (op1
) || c4x_R_indirect (op1
));
3974 if (c4x_J_constant (op1
) && c4x_R_indirect (op2
))
3979 if (! c4x_H_constant (op1
))
3983 /* Any valid memory operand screened by src_operand is OK. */
3991 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3992 into a stack slot memory address comprising a PLUS and a
4002 /* Check that we have a valid destination register for a two operand
4004 return ! force
|| REGNO (op1
) == REGNO (operands
[0]);
4008 int valid_operands (code
, operands
, mode
)
4011 enum machine_mode mode
;
4014 /* If we are not optimizing then we have to let anything go and let
4015 reload fix things up. instantiate_decl in function.c can produce
4016 invalid insns by changing the offset of a memory operand from a
4017 valid one into an invalid one, when the second operand is also a
4018 memory operand. The alternative is not to allow two memory
4019 operands for an insn when not optimizing. The problem only rarely
4020 occurs, for example with the C-torture program DFcmp.c. */
4022 return ! optimize
|| c4x_valid_operands (code
, operands
, mode
, 0);
4027 legitimize_operands (code
, operands
, mode
)
4030 enum machine_mode mode
;
4032 /* Compare only has 2 operands. */
4033 if (code
== COMPARE
)
4035 /* During RTL generation, force constants into pseudos so that
4036 they can get hoisted out of loops. This will tie up an extra
4037 register but can save an extra cycle. Only do this if loop
4038 optimisation enabled. (We cannot pull this trick for add and
4039 sub instructions since the flow pass won't find
4040 autoincrements etc.) This allows us to generate compare
4041 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4042 of LDI *AR0++, R0; CMPI 42, R0.
4044 Note that expand_binops will try to load an expensive constant
4045 into a register if it is used within a loop. Unfortunately,
4046 the cost mechanism doesn't allow us to look at the other
4047 operand to decide whether the constant is expensive. */
4049 if (! reload_in_progress
4052 && GET_CODE (operands
[1]) == CONST_INT
4053 && preserve_subexpressions_p ()
4054 && rtx_cost (operands
[1], code
) > 1)
4055 operands
[1] = force_reg (mode
, operands
[1]);
4057 if (! reload_in_progress
4058 && ! c4x_valid_operands (code
, operands
, mode
, 0))
4059 operands
[0] = force_reg (mode
, operands
[0]);
4063 /* We cannot do this for ADDI/SUBI insns since we will
4064 defeat the flow pass from finding autoincrement addressing
4066 if (! reload_in_progress
4067 && ! ((code
== PLUS
|| code
== MINUS
) && mode
== Pmode
)
4070 && GET_CODE (operands
[2]) == CONST_INT
4071 && preserve_subexpressions_p ()
4072 && rtx_cost (operands
[2], code
) > 1)
4073 operands
[2] = force_reg (mode
, operands
[2]);
4075 /* We can get better code on a C30 if we force constant shift counts
4076 into a register. This way they can get hoisted out of loops,
4077 tying up a register, but saving an instruction. The downside is
4078 that they may get allocated to an address or index register, and
4079 thus we will get a pipeline conflict if there is a nearby
4080 indirect address using an address register.
4082 Note that expand_binops will not try to load an expensive constant
4083 into a register if it is used within a loop for a shift insn. */
4085 if (! reload_in_progress
4086 && ! c4x_valid_operands (code
, operands
, mode
, TARGET_FORCE
))
4088 /* If the operand combination is invalid, we force operand1 into a
4089 register, preventing reload from having doing to do this at a
4091 operands
[1] = force_reg (mode
, operands
[1]);
4094 emit_move_insn (operands
[0], operands
[1]);
4095 operands
[1] = copy_rtx (operands
[0]);
4099 /* Just in case... */
4100 if (! c4x_valid_operands (code
, operands
, mode
, 0))
4101 operands
[2] = force_reg (mode
, operands
[2]);
4105 /* Right shifts require a negative shift count, but GCC expects
4106 a positive count, so we emit a NEG. */
4107 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
4108 && (GET_CODE (operands
[2]) != CONST_INT
))
4109 operands
[2] = gen_rtx_NEG (mode
, negate_rtx (mode
, operands
[2]));
4115 /* The following predicates are used for instruction scheduling. */
4118 group1_reg_operand (op
, mode
)
4120 enum machine_mode mode
;
4122 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4124 if (GET_CODE (op
) == SUBREG
)
4125 op
= SUBREG_REG (op
);
4126 return REG_P (op
) && (! reload_completed
|| IS_GROUP1_REG (op
));
4131 group1_mem_operand (op
, mode
)
4133 enum machine_mode mode
;
4135 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4138 if (GET_CODE (op
) == MEM
)
4141 if (GET_CODE (op
) == PLUS
)
4143 rtx op0
= XEXP (op
, 0);
4144 rtx op1
= XEXP (op
, 1);
4146 if ((REG_P (op0
) && (! reload_completed
|| IS_GROUP1_REG (op0
)))
4147 || (REG_P (op1
) && (! reload_completed
|| IS_GROUP1_REG (op1
))))
4150 else if ((REG_P (op
)) && (! reload_completed
|| IS_GROUP1_REG (op
)))
4158 /* Return true if any one of the address registers. */
4161 arx_reg_operand (op
, mode
)
4163 enum machine_mode mode
;
4165 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4167 if (GET_CODE (op
) == SUBREG
)
4168 op
= SUBREG_REG (op
);
4169 return REG_P (op
) && (! reload_completed
|| IS_ADDR_REG (op
));
4174 c4x_arn_reg_operand (op
, mode
, regno
)
4176 enum machine_mode mode
;
4179 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4181 if (GET_CODE (op
) == SUBREG
)
4182 op
= SUBREG_REG (op
);
4183 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
4188 c4x_arn_mem_operand (op
, mode
, regno
)
4190 enum machine_mode mode
;
4193 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4196 if (GET_CODE (op
) == MEM
)
4199 switch (GET_CODE (op
))
4208 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
4212 if (REG_P (XEXP (op
, 0)) && (! reload_completed
4213 || (REGNO (XEXP (op
, 0)) == regno
)))
4215 if (REG_P (XEXP (XEXP (op
, 1), 1))
4216 && (! reload_completed
4217 || (REGNO (XEXP (XEXP (op
, 1), 1)) == regno
)))
4223 rtx op0
= XEXP (op
, 0);
4224 rtx op1
= XEXP (op
, 1);
4226 if ((REG_P (op0
) && (! reload_completed
4227 || (REGNO (op0
) == regno
)))
4228 || (REG_P (op1
) && (! reload_completed
4229 || (REGNO (op1
) == regno
))))
4243 ar0_reg_operand (op
, mode
)
4245 enum machine_mode mode
;
4247 return c4x_arn_reg_operand (op
, mode
, AR0_REGNO
);
4252 ar0_mem_operand (op
, mode
)
4254 enum machine_mode mode
;
4256 return c4x_arn_mem_operand (op
, mode
, AR0_REGNO
);
4261 ar1_reg_operand (op
, mode
)
4263 enum machine_mode mode
;
4265 return c4x_arn_reg_operand (op
, mode
, AR1_REGNO
);
4270 ar1_mem_operand (op
, mode
)
4272 enum machine_mode mode
;
4274 return c4x_arn_mem_operand (op
, mode
, AR1_REGNO
);
4279 ar2_reg_operand (op
, mode
)
4281 enum machine_mode mode
;
4283 return c4x_arn_reg_operand (op
, mode
, AR2_REGNO
);
4288 ar2_mem_operand (op
, mode
)
4290 enum machine_mode mode
;
4292 return c4x_arn_mem_operand (op
, mode
, AR2_REGNO
);
4297 ar3_reg_operand (op
, mode
)
4299 enum machine_mode mode
;
4301 return c4x_arn_reg_operand (op
, mode
, AR3_REGNO
);
4306 ar3_mem_operand (op
, mode
)
4308 enum machine_mode mode
;
4310 return c4x_arn_mem_operand (op
, mode
, AR3_REGNO
);
4315 ar4_reg_operand (op
, mode
)
4317 enum machine_mode mode
;
4319 return c4x_arn_reg_operand (op
, mode
, AR4_REGNO
);
4324 ar4_mem_operand (op
, mode
)
4326 enum machine_mode mode
;
4328 return c4x_arn_mem_operand (op
, mode
, AR4_REGNO
);
4333 ar5_reg_operand (op
, mode
)
4335 enum machine_mode mode
;
4337 return c4x_arn_reg_operand (op
, mode
, AR5_REGNO
);
4342 ar5_mem_operand (op
, mode
)
4344 enum machine_mode mode
;
4346 return c4x_arn_mem_operand (op
, mode
, AR5_REGNO
);
4351 ar6_reg_operand (op
, mode
)
4353 enum machine_mode mode
;
4355 return c4x_arn_reg_operand (op
, mode
, AR6_REGNO
);
4360 ar6_mem_operand (op
, mode
)
4362 enum machine_mode mode
;
4364 return c4x_arn_mem_operand (op
, mode
, AR6_REGNO
);
4369 ar7_reg_operand (op
, mode
)
4371 enum machine_mode mode
;
4373 return c4x_arn_reg_operand (op
, mode
, AR7_REGNO
);
4378 ar7_mem_operand (op
, mode
)
4380 enum machine_mode mode
;
4382 return c4x_arn_mem_operand (op
, mode
, AR7_REGNO
);
4387 ir0_reg_operand (op
, mode
)
4389 enum machine_mode mode
;
4391 return c4x_arn_reg_operand (op
, mode
, IR0_REGNO
);
4396 ir0_mem_operand (op
, mode
)
4398 enum machine_mode mode
;
4400 return c4x_arn_mem_operand (op
, mode
, IR0_REGNO
);
4405 ir1_reg_operand (op
, mode
)
4407 enum machine_mode mode
;
4409 return c4x_arn_reg_operand (op
, mode
, IR1_REGNO
);
4414 ir1_mem_operand (op
, mode
)
4416 enum machine_mode mode
;
4418 return c4x_arn_mem_operand (op
, mode
, IR1_REGNO
);
4422 /* This is similar to operand_subword but allows autoincrement
4426 c4x_operand_subword (op
, i
, validate_address
, mode
)
4429 int validate_address
;
4430 enum machine_mode mode
;
4432 if (mode
!= HImode
&& mode
!= HFmode
)
4433 fatal_insn ("c4x_operand_subword: invalid mode", op
);
4435 if (mode
== HFmode
&& REG_P (op
))
4436 fatal_insn ("c4x_operand_subword: invalid operand", op
);
4438 if (GET_CODE (op
) == MEM
)
4440 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
4441 enum machine_mode mode
= GET_MODE (XEXP (op
, 0));
4442 enum machine_mode submode
;
4447 else if (mode
== HFmode
)
4454 return gen_rtx_MEM (submode
, XEXP (op
, 0));
4460 /* We could handle these with some difficulty.
4461 e.g., *p-- => *(p-=2); *(p+1). */
4462 fatal_insn ("c4x_operand_subword: invalid autoincrement", op
);
4468 fatal_insn ("c4x_operand_subword: invalid address", op
);
4470 /* Even though offsettable_address_p considers (MEM
4471 (LO_SUM)) to be offsettable, it is not safe if the
4472 address is at the end of the data page since we also have
4473 to fix up the associated high PART. In this case where
4474 we are trying to split a HImode or HFmode memory
4475 reference, we would have to emit another insn to reload a
4476 new HIGH value. It's easier to disable LO_SUM memory references
4477 in HImode or HFmode and we probably get better code. */
4479 fatal_insn ("c4x_operand_subword: address not offsettable", op
);
4486 return operand_subword (op
, i
, validate_address
, mode
);
4491 struct name_list
*next
;
4495 static struct name_list
*global_head
;
4496 static struct name_list
*extern_head
;
4499 /* Add NAME to list of global symbols and remove from external list if
4500 present on external list. */
4503 c4x_global_label (name
)
4506 struct name_list
*p
, *last
;
4508 /* Do not insert duplicate names, so linearly search through list of
4513 if (strcmp (p
->name
, name
) == 0)
4517 p
= (struct name_list
*) xmalloc (sizeof *p
);
4518 p
->next
= global_head
;
4522 /* Remove this name from ref list if present. */
4527 if (strcmp (p
->name
, name
) == 0)
4530 last
->next
= p
->next
;
4532 extern_head
= p
->next
;
4541 /* Add NAME to list of external symbols. */
4544 c4x_external_ref (name
)
4547 struct name_list
*p
;
4549 /* Do not insert duplicate names. */
4553 if (strcmp (p
->name
, name
) == 0)
4558 /* Do not insert ref if global found. */
4562 if (strcmp (p
->name
, name
) == 0)
4566 p
= (struct name_list
*) xmalloc (sizeof *p
);
4567 p
->next
= extern_head
;
4577 struct name_list
*p
;
4579 /* Output all external names that are not global. */
4583 fprintf (fp
, "\t.ref\t");
4584 assemble_name (fp
, p
->name
);
4588 fprintf (fp
, "\t.end\n");
4593 c4x_check_attribute (attrib
, list
, decl
, attributes
)
4595 tree list
, decl
, *attributes
;
4597 while (list
!= NULL_TREE
4598 && IDENTIFIER_POINTER (TREE_PURPOSE (list
))
4599 != IDENTIFIER_POINTER (DECL_NAME (decl
)))
4600 list
= TREE_CHAIN (list
);
4602 *attributes
= tree_cons (get_identifier (attrib
), TREE_VALUE (list
),
4608 c4x_insert_attributes (decl
, attributes
)
4609 tree decl
, *attributes
;
4611 switch (TREE_CODE (decl
))
4614 c4x_check_attribute ("section", code_tree
, decl
, attributes
);
4615 c4x_check_attribute ("const", pure_tree
, decl
, attributes
);
4616 c4x_check_attribute ("noreturn", noreturn_tree
, decl
, attributes
);
4617 c4x_check_attribute ("interrupt", interrupt_tree
, decl
, attributes
);
4618 c4x_check_attribute ("naked", naked_tree
, decl
, attributes
);
4622 c4x_check_attribute ("section", data_tree
, decl
, attributes
);
4630 /* Table of valid machine attributes. */
4631 const struct attribute_spec c4x_attribute_table
[] =
4633 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4634 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4635 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4636 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4637 { NULL
, 0, 0, false, false, false, NULL
}
4640 /* Handle an attribute requiring a FUNCTION_TYPE;
4641 arguments as in struct attribute_spec.handler. */
4643 c4x_handle_fntype_attribute (node
, name
, args
, flags
, no_add_attrs
)
4646 tree args ATTRIBUTE_UNUSED
;
4647 int flags ATTRIBUTE_UNUSED
;
4650 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4652 warning ("`%s' attribute only applies to functions",
4653 IDENTIFIER_POINTER (name
));
4654 *no_add_attrs
= true;
4661 /* !!! FIXME to emit RPTS correctly. */
4664 c4x_rptb_rpts_p (insn
, op
)
4667 /* The next insn should be our label marking where the
4668 repeat block starts. */
4669 insn
= NEXT_INSN (insn
);
4670 if (GET_CODE (insn
) != CODE_LABEL
)
4672 /* Some insns may have been shifted between the RPTB insn
4673 and the top label... They were probably destined to
4674 be moved out of the loop. For now, let's leave them
4675 where they are and print a warning. We should
4676 probably move these insns before the repeat block insn. */
4678 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4683 /* Skip any notes. */
4684 insn
= next_nonnote_insn (insn
);
4686 /* This should be our first insn in the loop. */
4687 if (! INSN_P (insn
))
4690 /* Skip any notes. */
4691 insn
= next_nonnote_insn (insn
);
4693 if (! INSN_P (insn
))
4696 if (recog_memoized (insn
) != CODE_FOR_rptb_end
)
4702 return (GET_CODE (op
) == CONST_INT
) && TARGET_RPTS_CYCLES (INTVAL (op
));
4706 /* Check if register r11 is used as the destination of an insn. */
4719 if (INSN_P (x
) && GET_CODE (PATTERN (x
)) == SEQUENCE
)
4720 x
= XVECEXP (PATTERN (x
), 0, XVECLEN (PATTERN (x
), 0) - 1);
4722 if (INSN_P (x
) && (set
= single_set (x
)))
4725 if (GET_CODE (x
) == REG
&& REGNO (x
) == R11_REGNO
)
4728 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4729 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4733 if (c4x_r11_set_p (XEXP (x
, i
)))
4736 else if (fmt
[i
] == 'E')
4737 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4738 if (c4x_r11_set_p (XVECEXP (x
, i
, j
)))
/* NOTE(review): function header, declarations and the return statements
   are missing from this excerpt; the visible logic decides whether a nop
   must be inserted before a laj (link and jump) insn.  */
4745 /* The c4x sometimes has a problem when the insn before the laj insn
4746 sets the r11 register. Check for this situation. */
4749 c4x_check_laj_p (insn
)
/* Look at the real (non-note) insn preceding the laj.  */
4752 insn
= prev_nonnote_insn (insn
);
4754 /* If this is the start of the function no nop is needed. */
4758 /* If the previous insn is a code label we have to insert a nop. This
4759 could be a jump or table jump. We can find the normal jumps by
4760 scanning the function but this will not find table jumps. */
4761 if (GET_CODE (insn
) == CODE_LABEL
)
4764 /* If the previous insn sets register r11 we have to insert a nop. */
4765 if (c4x_r11_set_p (insn
))
4768 /* No nop needed. */
/* NOTE(review): this excerpt of c4x_adjust_cost is incomplete -- the
   function header's declarations, braces, `max` initialization and the
   final return statements are missing; commentary added below is limited
   to what the visible lines show.  */
4773 /* Adjust the cost of a scheduling dependency. Return the new cost of
4774 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4775 A set of an address register followed by a use occurs a 2 cycle
4776 stall (reduced to a single cycle on the c40 using LDA), while
4777 a read of an address register followed by a use occurs a single cycle. */
/* Cycle penalties used below: plain set-then-use, LDA-set-then-use, and
   read-then-use of an address register.  */
4779 #define SET_USE_COST 3
4780 #define SETLDA_USE_COST 2
4781 #define READ_USE_COST 2
4784 c4x_adjust_cost (insn
, link
, dep_insn
, cost
)
4790 /* Don't worry about this until we know what registers have been
/* Only meaningful once scheduling is enabled or registers are assigned.  */
4792 if (flag_schedule_insns
== 0 && ! reload_completed
)
4795 /* How do we handle dependencies where a read followed by another
4796 read causes a pipeline stall? For example, a read of ar0 followed
4797 by the use of ar0 for a memory reference. It looks like we
4798 need to extend the scheduler to handle this case. */
4800 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4801 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4802 so only deal with insns we know about. */
4803 if (recog_memoized (dep_insn
) < 0)
/* REG_NOTE_KIND == 0 denotes a true (read-after-write) data dependency.  */
4806 if (REG_NOTE_KIND (link
) == 0)
4810 /* Data dependency; DEP_INSN writes a register that INSN reads some
/* Group1 covers several address registers at once; readarx is a read of
   any arN register.  `max' accumulates the worst penalty found.  */
4814 if (get_attr_setgroup1 (dep_insn
) && get_attr_usegroup1 (insn
))
4815 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4816 if (get_attr_readarx (dep_insn
) && get_attr_usegroup1 (insn
))
4817 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4821 /* This could be significantly optimized. We should look
4822 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4823 insn uses ar0-ar7. We then test if the same register
4824 is used. The tricky bit is that some operands will
4825 use several registers... */
/* Repetitive per-register checks for ar0..ar7: set/use, LDA-set/use and
   read/use of the same address register each impose a stall.  */
4826 if (get_attr_setar0 (dep_insn
) && get_attr_usear0 (insn
))
4827 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4828 if (get_attr_setlda_ar0 (dep_insn
) && get_attr_usear0 (insn
))
4829 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4830 if (get_attr_readar0 (dep_insn
) && get_attr_usear0 (insn
))
4831 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4833 if (get_attr_setar1 (dep_insn
) && get_attr_usear1 (insn
))
4834 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4835 if (get_attr_setlda_ar1 (dep_insn
) && get_attr_usear1 (insn
))
4836 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4837 if (get_attr_readar1 (dep_insn
) && get_attr_usear1 (insn
))
4838 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4840 if (get_attr_setar2 (dep_insn
) && get_attr_usear2 (insn
))
4841 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4842 if (get_attr_setlda_ar2 (dep_insn
) && get_attr_usear2 (insn
))
4843 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4844 if (get_attr_readar2 (dep_insn
) && get_attr_usear2 (insn
))
4845 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4847 if (get_attr_setar3 (dep_insn
) && get_attr_usear3 (insn
))
4848 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4849 if (get_attr_setlda_ar3 (dep_insn
) && get_attr_usear3 (insn
))
4850 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4851 if (get_attr_readar3 (dep_insn
) && get_attr_usear3 (insn
))
4852 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4854 if (get_attr_setar4 (dep_insn
) && get_attr_usear4 (insn
))
4855 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4856 if (get_attr_setlda_ar4 (dep_insn
) && get_attr_usear4 (insn
))
4857 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4858 if (get_attr_readar4 (dep_insn
) && get_attr_usear4 (insn
))
4859 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4861 if (get_attr_setar5 (dep_insn
) && get_attr_usear5 (insn
))
4862 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4863 if (get_attr_setlda_ar5 (dep_insn
) && get_attr_usear5 (insn
))
4864 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4865 if (get_attr_readar5 (dep_insn
) && get_attr_usear5 (insn
))
4866 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4868 if (get_attr_setar6 (dep_insn
) && get_attr_usear6 (insn
))
4869 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4870 if (get_attr_setlda_ar6 (dep_insn
) && get_attr_usear6 (insn
))
4871 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4872 if (get_attr_readar6 (dep_insn
) && get_attr_usear6 (insn
))
4873 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4875 if (get_attr_setar7 (dep_insn
) && get_attr_usear7 (insn
))
4876 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4877 if (get_attr_setlda_ar7 (dep_insn
) && get_attr_usear7 (insn
))
4878 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4879 if (get_attr_readar7 (dep_insn
) && get_attr_usear7 (insn
))
4880 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
/* Index registers ir0/ir1: only set/use and LDA-set/use checks appear
   here (no read/use variant is visible for these).  */
4882 if (get_attr_setir0 (dep_insn
) && get_attr_useir0 (insn
))
4883 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4884 if (get_attr_setlda_ir0 (dep_insn
) && get_attr_useir0 (insn
))
4885 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4887 if (get_attr_setir1 (dep_insn
) && get_attr_useir1 (insn
))
4888 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4889 if (get_attr_setlda_ir1 (dep_insn
) && get_attr_useir1 (insn
))
4890 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4896 /* For other data dependencies, the default cost specified in the
4900 else if (REG_NOTE_KIND (link
) == REG_DEP_ANTI
)
4902 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4905 /* For c4x anti dependencies, the cost is 0. */
4908 else if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
)
4910 /* Output dependency; DEP_INSN writes a register that INSN writes some
4913 /* For c4x output dependencies, the cost is 0. */
/* Register the C3x/C4x machine-specific builtin functions with the
   front end.  NOTE(review): the return-type lines of each
   build_function_type call are missing from this excerpt; the visible
   argument lists show float builtins taking double and fast_imult taking
   two ints.  */
4921 c4x_init_builtins ()
4923 tree endlink
= void_list_node
;
/* fast_ftoi: fast float->int conversion (C4X_BUILTIN_FIX).  */
4925 builtin_function ("fast_ftoi",
4928 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4929 C4X_BUILTIN_FIX
, BUILT_IN_MD
, NULL
, NULL_TREE
);
/* ansi_ftoi: ANSI-conforming (truncating) float->int conversion.  */
4930 builtin_function ("ansi_ftoi",
4933 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4934 C4X_BUILTIN_FIX_ANSI
, BUILT_IN_MD
, NULL
, NULL_TREE
);
/* fast_imult: fast (24-bit) integer multiply, two int operands.  */
4936 builtin_function ("fast_imult",
4939 tree_cons (NULL_TREE
, integer_type_node
,
4940 tree_cons (NULL_TREE
,
4941 integer_type_node
, endlink
))),
4942 C4X_BUILTIN_MPYI
, BUILT_IN_MD
, NULL
, NULL_TREE
);
/* toieee / frieee: convert between the native c4x float format and
   IEEE format.  */
4945 builtin_function ("toieee",
4948 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4949 C4X_BUILTIN_TOIEEE
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4950 builtin_function ("frieee",
4953 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4954 C4X_BUILTIN_FRIEEE
, BUILT_IN_MD
, NULL
, NULL_TREE
);
/* fast_invf: fast reciprocal approximation (RCPF).  */
4955 builtin_function ("fast_invf",
4958 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4959 C4X_BUILTIN_RCPF
, BUILT_IN_MD
, NULL
, NULL_TREE
);
/* Expand a call EXP to one of the machine-specific builtins registered
   in c4x_init_builtins, emitting RTL and returning the result rtx.
   NOTE(review): the `exp' and `target' parameter declarations, local
   declarations, switch head, `break'/`return target' statements and the
   default case are missing from this excerpt.  */
4965 c4x_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
4968 rtx subtarget ATTRIBUTE_UNUSED
;
4969 enum machine_mode mode ATTRIBUTE_UNUSED
;
4970 int ignore ATTRIBUTE_UNUSED
;
/* Recover the FUNCTION_DECL, its builtin code, and the argument list
   from the CALL_EXPR.  */
4972 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4973 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4974 tree arglist
= TREE_OPERAND (exp
, 1);
/* fast_ftoi: expand the float argument in QFmode and emit the
   register-clobbering fix pattern.  */
4980 case C4X_BUILTIN_FIX
:
4981 arg0
= TREE_VALUE (arglist
);
4982 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
4983 r0
= protect_from_queue (r0
, 0);
4984 if (! target
|| ! register_operand (target
, QImode
))
4985 target
= gen_reg_rtx (QImode
);
4986 emit_insn (gen_fixqfqi_clobber (target
, r0
));
/* ansi_ftoi: same operand handling, but uses the truncating ANSI
   conversion pattern.  */
4989 case C4X_BUILTIN_FIX_ANSI
:
4990 arg0
= TREE_VALUE (arglist
);
4991 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
4992 r0
= protect_from_queue (r0
, 0);
4993 if (! target
|| ! register_operand (target
, QImode
))
4994 target
= gen_reg_rtx (QImode
);
4995 emit_insn (gen_fix_truncqfqi2 (target
, r0
));
/* fast_imult: two QImode integer operands, 24-bit multiply pattern.  */
4998 case C4X_BUILTIN_MPYI
:
5001 arg0
= TREE_VALUE (arglist
);
5002 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5003 r0
= expand_expr (arg0
, NULL_RTX
, QImode
, 0);
5004 r1
= expand_expr (arg1
, NULL_RTX
, QImode
, 0);
5005 r0
= protect_from_queue (r0
, 0);
5006 r1
= protect_from_queue (r1
, 0);
5007 if (! target
|| ! register_operand (target
, QImode
))
5008 target
= gen_reg_rtx (QImode
);
5009 emit_insn (gen_mulqi3_24_clobber (target
, r0
, r1
));
/* toieee: convert native c4x float to IEEE representation.  */
5012 case C4X_BUILTIN_TOIEEE
:
5015 arg0
= TREE_VALUE (arglist
);
5016 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5017 r0
= protect_from_queue (r0
, 0);
5018 if (! target
|| ! register_operand (target
, QFmode
))
5019 target
= gen_reg_rtx (QFmode
);
5020 emit_insn (gen_toieee (target
, r0
));
/* frieee: convert from IEEE representation.  The operand is forced
   through a stack slot when it is in a register -- presumably because
   the frieee pattern needs a memory operand; confirm against c4x.md.  */
5023 case C4X_BUILTIN_FRIEEE
:
5026 arg0
= TREE_VALUE (arglist
);
5027 if (TREE_CODE (arg0
) == VAR_DECL
|| TREE_CODE (arg0
) == PARM_DECL
)
5028 put_var_into_stack (arg0
);
5029 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5030 r0
= protect_from_queue (r0
, 0);
5031 if (register_operand (r0
, QFmode
))
5033 r1
= assign_stack_local (QFmode
, GET_MODE_SIZE (QFmode
), 0);
5034 emit_move_insn (r1
, r0
);
5037 if (! target
|| ! register_operand (target
, QFmode
))
5038 target
= gen_reg_rtx (QFmode
);
5039 emit_insn (gen_frieee (target
, r0
));
/* fast_invf: fast reciprocal, emitted via the clobbering RCPF pattern.  */
5042 case C4X_BUILTIN_RCPF
:
5045 arg0
= TREE_VALUE (arglist
);
5046 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5047 r0
= protect_from_queue (r0
, 0);
5048 if (! target
|| ! register_operand (target
, QFmode
))
5049 target
= gen_reg_rtx (QFmode
);
5050 emit_insn (gen_rcpfqf_clobber (target
, r0
));
/* Output the assembler directive switching to the named section NAME
   using the TI-assembler ".sect" syntax.  FLAGS is ignored.
   NOTE(review): the declaration line for NAME is missing from this
   excerpt.  */
5057 c4x_asm_named_section (name
, flags
)
5059 unsigned int flags ATTRIBUTE_UNUSED
;
5061 fprintf (asm_out_file
, "\t.sect\t\"%s\"\n", name
);
/* Globalize label NAME on STREAM: emit the generic .globl-style
   directive, then record the label via the back end's own
   c4x_global_label bookkeeping.  NOTE(review): the parameter
   declarations are missing from this excerpt.  */
5065 c4x_globalize_label (stream
, name
)
5069 default_globalize_label (stream
, name
);
5070 c4x_global_label (name
);
/* Helper predicates on rtx codes, used by the rtx-cost computation
   below: SHIFT_CODE_P matches any shift, LOGICAL_CODE_P matches any
   bitwise-logical operation.  */
5073 #define SHIFT_CODE_P(C) \
5074 ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
5075 #define LOGICAL_CODE_P(C) \
5076 ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
5078 /* Compute a (partial) cost for rtx X. Return true if the complete
5079 cost has been computed, and false if subexpressions should be
5080 scanned. In either case, *TOTAL contains the cost result. */
5083 c4x_rtx_costs (x
, code
, outer_code
, total
)
5085 int code
, outer_code
;
5092 /* Some small integers are effectively free for the C40. We should
5093 also consider if we are using the small memory model. With
5094 the big memory model we require an extra insn for a constant
5095 loaded from memory. */
5099 if (c4x_J_constant (x
))
5101 else if (! TARGET_C3X
5102 && outer_code
== AND
5103 && (val
== 255 || val
== 65535))
5105 else if (! TARGET_C3X
5106 && (outer_code
== ASHIFTRT
|| outer_code
== LSHIFTRT
)
5107 && (val
== 16 || val
== 24))
5109 else if (TARGET_C3X
&& SHIFT_CODE_P (outer_code
))
5111 else if (LOGICAL_CODE_P (outer_code
)
5112 ? c4x_L_constant (x
) : c4x_I_constant (x
))
5125 if (c4x_H_constant (x
))
5127 else if (GET_MODE (x
) == QFmode
)
5133 /* ??? Note that we return true, rather than false so that rtx_cost
5134 doesn't include the constant costs. Otherwise expand_mult will
5135 think that it is cheaper to synthesize a multiply rather than to
5136 use a multiply instruction. I think this is because the algorithm
5137 synth_mult doesn't take into account the loading of the operands,
5138 whereas the calculation of mult_cost does. */
5147 *total
= COSTS_N_INSNS (1);
5151 *total
= COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
5152 || TARGET_MPYI
? 1 : 14);
5159 *total
= COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT