/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Some output-actions in c4x.md need these.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "recog.h"
#include "ggc.h"
#include "cpplib.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"

rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;

/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                        /* Reg          Modes           Saved.  */
  R0R1_REGS,            /* R0           QI, QF, HF      No.  */
  R0R1_REGS,            /* R1           QI, QF, HF      No.  */
  R2R3_REGS,            /* R2           QI, QF, HF      No.  */
  R2R3_REGS,            /* R3           QI, QF, HF      No.  */
  EXT_LOW_REGS,         /* R4           QI, QF, HF      QI.  */
  EXT_LOW_REGS,         /* R5           QI, QF, HF      QI.  */
  EXT_LOW_REGS,         /* R6           QI, QF, HF      QF.  */
  EXT_LOW_REGS,         /* R7           QI, QF, HF      QF.  */
  ADDR_REGS,            /* AR0          QI              No.  */
  ADDR_REGS,            /* AR1          QI              No.  */
  ADDR_REGS,            /* AR2          QI              No.  */
  ADDR_REGS,            /* AR3          QI              QI.  */
  ADDR_REGS,            /* AR4          QI              QI.  */
  ADDR_REGS,            /* AR5          QI              QI.  */
  ADDR_REGS,            /* AR6          QI              QI.  */
  ADDR_REGS,            /* AR7          QI              QI.  */
  DP_REG,               /* DP           QI              No.  */
  INDEX_REGS,           /* IR0          QI              No.  */
  INDEX_REGS,           /* IR1          QI              No.  */
  BK_REG,               /* BK           QI              QI.  */
  SP_REG,               /* SP           QI              No.  */
  ST_REG,               /* ST           CC              No.  */
  NO_REGS,              /* DIE/IE                       No.  */
  NO_REGS,              /* IIE/IF                       No.  */
  NO_REGS,              /* IIF/IOF                      No.  */
  INT_REGS,             /* RS           QI              No.  */
  INT_REGS,             /* RE           QI              No.  */
  RC_REG,               /* RC           QI              No.  */
  EXT_REGS,             /* R8           QI, QF, HF      QI.  */
  EXT_REGS,             /* R9           QI, QF, HF      No.  */
  EXT_REGS,             /* R10          QI, QF, HF      No.  */
  EXT_REGS,             /* R11          QI, QF, HF      No.  */
};

enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                        /* Reg          Modes           Saved.  */
  HFmode,               /* R0           QI, QF, HF      No.  */
  HFmode,               /* R1           QI, QF, HF      No.  */
  HFmode,               /* R2           QI, QF, HF      No.  */
  HFmode,               /* R3           QI, QF, HF      No.  */
  QFmode,               /* R4           QI, QF, HF      QI.  */
  QFmode,               /* R5           QI, QF, HF      QI.  */
  QImode,               /* R6           QI, QF, HF      QF.  */
  QImode,               /* R7           QI, QF, HF      QF.  */
  QImode,               /* AR0          QI              No.  */
  QImode,               /* AR1          QI              No.  */
  QImode,               /* AR2          QI              No.  */
  QImode,               /* AR3          QI              QI.  */
  QImode,               /* AR4          QI              QI.  */
  QImode,               /* AR5          QI              QI.  */
  QImode,               /* AR6          QI              QI.  */
  QImode,               /* AR7          QI              QI.  */
  VOIDmode,             /* DP           QI              No.  */
  QImode,               /* IR0          QI              No.  */
  QImode,               /* IR1          QI              No.  */
  QImode,               /* BK           QI              QI.  */
  VOIDmode,             /* SP           QI              No.  */
  VOIDmode,             /* ST           CC              No.  */
  VOIDmode,             /* DIE/IE                       No.  */
  VOIDmode,             /* IIE/IF                       No.  */
  VOIDmode,             /* IIF/IOF                      No.  */
  QImode,               /* RS           QI              No.  */
  QImode,               /* RE           QI              No.  */
  VOIDmode,             /* RC           QI              No.  */
  QFmode,               /* R8           QI, QF, HF      QI.  */
  HFmode,               /* R9           QI, QF, HF      No.  */
  HFmode,               /* R10          QI, QF, HF      No.  */
  HFmode,               /* R11          QI, QF, HF      No.  */
};

/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

int c4x_cpu_version = 40;	/* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;

/* Forward declarations.  */
static bool c4x_handle_option (size_t, const char *, int);
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_R_indirect (rtx);
static void c4x_S_address_parse (rtx, int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int, tree);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
static void c4x_external_libcall (rtx);
static rtx c4x_struct_value_rtx (tree, int);
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall

/* Play safe, not the fastest code.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
				     | MASK_PARALLEL_MPY | MASK_RPTB)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION c4x_handle_option

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr

struct gcc_target targetm = TARGET_INITIALIZER;

/* Implement TARGET_HANDLE_OPTION.  */

static bool
c4x_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_m30: c4x_cpu_version = 30; return true;
    case OPT_m31: c4x_cpu_version = 31; return true;
    case OPT_m32: c4x_cpu_version = 32; return true;
    case OPT_m33: c4x_cpu_version = 33; return true;
    case OPT_m40: c4x_cpu_version = 40; return true;
    case OPT_m44: c4x_cpu_version = 44; return true;

    case OPT_mcpu_:
      /* Accept an optional leading 'c' or 'C' in the CPU name.  */
      if (arg[0] == 'c' || arg[0] == 'C')
	arg++;
      value = atoi (arg);
      switch (value)
	{
	case 30: case 31: case 32: case 33: case 40: case 44:
	  c4x_cpu_version = value;
	  return true;
	}
      return false;

    default:
      return true;
    }
}

/* Override command line options.
   Called once after all options have been parsed.
   Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
c4x_override_options (void)
{
  /* Convert foo / 8.0 into foo * 0.125, etc.  */
  set_fast_math_flags (1);

  /* We should phase out the following at some stage.
     This provides compatibility with the old -mno-aliases option.  */
  if (! TARGET_ALIASES && ! flag_argument_noalias)
    flag_argument_noalias = 1;

  if (! TARGET_C3X)
    target_flags |= MASK_MPYI | MASK_DB;

  if (optimize < 2)
    target_flags &= ~(MASK_RPTB | MASK_PARALLEL);

  if (! TARGET_PARALLEL)
    target_flags &= ~MASK_PARALLEL_MPY;
}

/* This is called before c4x_override_options.  */

void
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
			  int size ATTRIBUTE_UNUSED)
{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we gain by scheduling before register
     allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
}

/* Write an ASCII string.  */

#define C4X_ASCII_LIMIT 40

void
c4x_output_ascii (FILE *stream, const char *ptr, int len)
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

  if (len)
    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buff.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
	{
	  if (TARGET_TI && special)
	    sbuf[s++] = '\\';
	  sbuf[s++] = *ptr;
	  if (s < C4X_ASCII_LIMIT - 1)
	    continue;
	  onlys = 1;
	}
      if (s)
	{
	  if (first)
	    first = 0;
	  else
	    {
	      fputc (',', stream);
	      l++;
	    }

	  sbuf[s] = 0;
	  fprintf (stream, "\"%s\"", sbuf);
	  l += s + 2;
	  if (TARGET_TI && l >= 80 && len > 1)
	    {
	      fprintf (stream, "\n\t.byte\t");
	      first = 1;
	      l = 6;
	    }
	  s = 0;
	}
      if (onlys)
	continue;

      if (first)
	first = 0;
      else
	{
	  fputc (',', stream);
	  l++;
	}

      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
	{
	  fprintf (stream, "\n\t.byte\t");
	  first = 1;
	  l = 6;
	}
    }
  if (s)
    {
      if (! first)
	fputc (',', stream);
      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
    }
  fputc ('\n', stream);
}

int
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (mode)
    {
#if Pmode != QImode
    case Pmode:			/* Pointer (24/32 bits).  */
#endif
    case QImode:		/* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:		/* Float, Double (32 bits).  */
    case HFmode:		/* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:		/* Condition Codes.  */
    case CC_NOOVmode:		/* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:		/* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
	 it is much easier to constrain the first register
	 to start on an even boundary.  */
      return IS_INT_REGNO (regno)
	&& IS_INT_REGNO (regno + 1)
	&& ! (regno & 1);

    default:
      return 0;			/* We don't support these modes.  */
    }
}

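/* Worked example (added for illustration): under the mode checks above,
   an HImode (64-bit) value may live in the register pair R0:R1, since
   both are integer registers and R0 is even, but not in a pair starting
   at an odd register such as R1:R2.  */
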
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */
int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}

/* The TI C3x C compiler register argument runtime model uses 6 registers,
   AR2, R2, R3, RC, RS, RE.

   The first two floating point arguments (float, double, long double)
   that are found scanning from left to right are assigned to R2 and R3.

   The remaining integer (char, short, int, long) or pointer arguments
   are assigned to the remaining registers in the order AR2, R2, R3,
   RC, RS, RE when scanning left to right, except for the last named
   argument prior to an ellipsis denoting variable number of
   arguments.  We don't have to worry about the latter condition since
   function.c treats the last named argument as anonymous (unnamed).

   All arguments that cannot be passed in registers are pushed onto
   the stack in reverse order (right to left).  GCC handles that for us.

   c4x_init_cumulative_args () is called at the start, so we can parse
   the args to see how many floating point arguments and how many
   integer (or pointer) arguments there are.  c4x_function_arg () is
   then called (sometimes repeatedly) for each argument (parsed left
   to right) to obtain the register to pass the argument in, or zero
   if the argument is to be passed on the stack.  Once the compiler is
   happy, c4x_function_arg_advance () is called.

   Don't use R0 to pass arguments in, we use 0 to indicate a stack
   argument.  A worked example follows the register tables below.  */

static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};

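/* Worked example (added for illustration; the authoritative mapping is
   computed by c4x_function_arg () below).  Given the prototyped function

       void f (int a, float b, int c, double d, int e);

   the first two floating point arguments B and D are assigned to R2 and
   R3, so c4x_int_reglist[2] governs the integer arguments: A is passed
   in AR2, C in RC, and E in RS.  */
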
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);

	  fprintf (stderr, "fntype code = %s, ret code = %s",
		   tree_code_name[(int) TREE_CODE (fntype)],
		   tree_code_name[(int) TREE_CODE (ret_type)]);
	}
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
	{
	  enum machine_mode mode;

	  /* If the last arg doesn't have void type then we have
	     variable arguments.  */
	  if (! next_param)
	    cum->var = 1;

	  if ((mode = TYPE_MODE (type)))
	    {
	      if (! targetm.calls.must_pass_in_stack (mode, type))
		{
		  /* Look for float, double, or long double argument.  */
		  if (mode == QFmode || mode == HFmode)
		    cum->floats++;
		  /* Look for integer, enumeral, boolean, char, or pointer
		     argument.  */
		  else if (mode == QImode || mode == Pmode)
		    cum->ints++;
		}
	    }
	  cum->args++;
	}
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
	     cum->prototype ? ", prototype" : "",
	     cum->var ? ", variable args" : "",
	     cum->args);
}

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			  tree type, int named)
{
  if (TARGET_DEBUG)
    fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
	     GET_MODE_NAME (mode), named);
  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
	cum->floats++;
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
	cum->ints++;
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* Handle libcall arguments.  */
      if (mode == QFmode || mode == HFmode)
	cum->floats++;
      else if (mode == QImode || mode == Pmode)
	cum->ints++;
    }
}

/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

rtx
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
		  tree type, int named)
{
  int reg = 0;			/* Default to passing argument on stack.  */

  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
	 in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
	6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
	cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
	{
	  if (cum->floats < cum->maxfloats)
	    reg = c4x_fp_reglist[cum->floats];
	}
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
	{
	  if (cum->ints < cum->maxints)
	    reg = c4x_int_reglist[cum->maxfloats][cum->ints];
	}
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
	 since we're only calling functions in libgcc.  Thus we could
	 pass arguments for long longs in registers rather than on the
	 stack.  In the meantime, use the odd TI format.  We make the
	 assumption that we won't have more than two floating point
	 args, six integer args, and that all the arguments are of the
	 same mode.  */
      if (mode == QFmode || mode == HFmode)
	reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
	reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
	       GET_MODE_NAME (mode), named);
      if (reg)
	fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
	fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  return NULL_RTX;
}

/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle.  */

static tree
c4x_gimplify_va_arg_expr (tree valist, tree type,
			  tree *pre_p ATTRIBUTE_UNUSED,
			  tree *post_p ATTRIBUTE_UNUSED)
{
  tree t;
  bool indirect;

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  t = build2 (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
	      build_int_cst (NULL_TREE, int_size_in_bytes (type)));
  t = fold_convert (build_pointer_type (type), t);
  t = build_va_arg_indirect_ref (t);

  if (indirect)
    t = build_va_arg_indirect_ref (t);

  return t;
}

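/* Illustrative note (added): because the address computation above uses
   PREDECREMENT_EXPR, each va_arg fetch first steps the argument pointer
   down by the argument size and then dereferences it, so successive
   va_arg calls walk downwards through the argument block.  */
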
static int
c4x_isr_reg_used_p (unsigned int regno)
{
  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))
    return 0;

  /* We could be a little smarter about saving/restoring DP.
     We'll only save it for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* Only save/restore regs in leaf function that are used.  */
  if (c4x_leaf_function)
    return df_regs_ever_live_p (regno) && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((df_regs_ever_live_p (regno) || call_used_regs[regno])
	&& fixed_regs[regno] == 0);
}

static int
c4x_leaf_function_p (void)
{
  /* A leaf function makes no calls, so we only need
     to save/restore the registers we actually use.
     For the global variable leaf_function to be set, we need
     to define LEAF_REGISTERS and all that it entails.
     Let's check ourselves....  */

  if (lookup_attribute ("leaf_pretend",
			TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Use the leaf_pretend attribute at your own risk.  This is a hack
     to speed up ISRs that call a function infrequently where the
     overhead of saving and restoring the additional registers is not
     warranted.  You must save and restore the additional registers
     required by the called function.  Caveat emptor.  Here's enough
     rope to hang yourself.  */

  if (leaf_function_p ())
    return 1;

  return 0;
}

static int
c4x_naked_function_p (void)
{
  tree type;

  type = TREE_TYPE (current_function_decl);
  return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
}

int
c4x_interrupt_function_p (void)
{
  const char *cfun_name;

  if (lookup_attribute ("interrupt",
			TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Look for TI style c_intnn, e.g. c_int01.  */
  cfun_name = current_function_name ();
  return cfun_name[0] == 'c'
    && cfun_name[1] == '_'
    && cfun_name[2] == 'i'
    && cfun_name[3] == 'n'
    && cfun_name[4] == 't'
    && ISDIGIT (cfun_name[5])
    && ISDIGIT (cfun_name[6]);
}

void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __naked__ function don't build a prologue.  */
  if (c4x_naked_function_p ())
    return;

  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
	{
	  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
				       gen_rtx_REG (QImode, SP_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  /* We require that an ISR uses fewer than 32768 words of
	     local variables, otherwise we have to go to lots of
	     effort to save a register, load it with the desired size,
	     adjust the stack pointer, and then restore the modified
	     register.  Frankly, I think it is a poor ISR that
	     requires more than 32767 words of local temporary
	     storage!  */
	  if (size > 32767)
	    error ("ISR %s requires %d words of local vars, max is 32767",
		   current_function_name (), size);

	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (c4x_isr_reg_used_p (regno))
	    {
	      if (regno == DP_REGNO)
		{
		  insn = emit_insn (gen_push_dp ());
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else
		{
		  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  if (IS_EXT_REGNO (regno))
		    {
		      insn = emit_insn (gen_pushqf
					(gen_rtx_REG (QFmode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	    }
	}
      /* We need to clear the repeat mode flag if the ISR is
	 going to use a RPTB instruction or uses the RC, RS, or RE
	 registers.  */
      if (df_regs_ever_live_p (RC_REGNO)
	  || df_regs_ever_live_p (RS_REGNO)
	  || df_regs_ever_live_p (RE_REGNO))
	{
	  insn = emit_insn (gen_andn_st (GEN_INT (~0x100)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Reload DP reg if we are paranoid about some turkey
	 violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
	{
	  insn = emit_insn (gen_set_ldp_prologue
			    (gen_rtx_REG (QImode, DP_REGNO),
			     gen_rtx_SYMBOL_REF (QImode, "data_sec")));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
					   gen_rtx_REG (QImode, SP_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      dont_push_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used, we don't need to push it.  */
	      dont_push_ar3 = 1;
	    }
	}
      else
	{
	  /* If we use ar3, we need to push it.  */
	  dont_push_ar3 = 0;
	  if ((size != 0) || (current_function_args_size != 0))
	    {
	      /* If we are omitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to increment SP.  */
	  if (TARGET_C3X)
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT (size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
					     gen_rtx_REG (QImode, R1_REGNO),
					     GEN_INT (-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT (size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
					gen_rtx_REG (QImode, R1_REGNO),
					GEN_INT (size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R1_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32767 words, so we can directly
	     add the number.  */
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
	    {
	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_pushqi
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else if ((! dont_push_ar3) || (regno != AR3_REGNO))
		{
		  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}
    }
}

void
c4x_expand_epilogue (void)
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For __naked__ function build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
	{
	  if (! c4x_isr_reg_used_p (regno))
	    continue;
	  if (regno == DP_REGNO)
	    {
	      insn = emit_insn (gen_pop_dp ());
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      /* We have to use unspec because the compiler will delete insns
		 that are not call-saved.  */
	      if (IS_EXT_REGNO (regno))
		{
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}
      if (size)
	{
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_popqi
			    (gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      insn = emit_insn
		(gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
			    gen_rtx_MEM (QImode,
					 gen_rtx_PLUS
					 (QImode, gen_rtx_REG (QImode,
							       AR3_REGNO),
					  constm1_rtx))));
	      RTX_FRAME_RELATED_P (insn) = 1;

	      /* We already have the return value and the fp,
		 so we need to add those to the stack.  */
	      size += 2;
	      jump = 1;
	      dont_pop_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used for anything, we don't need to
		 pop it.  */
	      dont_pop_ar3 = 1;
	    }
	}
      else
	{
	  dont_pop_ar3 = 0;	/* If we use ar3, we need to pop it.  */
	  if (size || current_function_args_size)
	    {
	      /* If we are omitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      /* Now restore the saved registers, putting in the delayed branch
	 where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
	{
	  if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
	    {
	      if (regno == AR3_REGNO && dont_pop_ar3)
		continue;

	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_popqi_unspec
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	      else
		{
		  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}

      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Restore the old FP.  */
	      insn = emit_insn
		(gen_movqi
		 (gen_rtx_REG (QImode, AR3_REGNO),
		  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to decrement SP.  */
	  if (TARGET_C3X)
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT (size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
					     gen_rtx_REG (QImode, R3_REGNO),
					     GEN_INT (-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT (size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
					gen_rtx_REG (QImode, R3_REGNO),
					GEN_INT (size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32768 words, so we can directly
	     subtract the number.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      if (jump)
	{
	  insn = emit_jump_insn (gen_return_indirect_internal
				 (gen_rtx_REG (QImode, R2_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  insn = emit_jump_insn (gen_return_from_epilogue ());
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}

int
c4x_null_epilogue_p (void)
{
  int regno;

  if (reload_completed
      && ! c4x_naked_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! (optimize < 2)
      && ! get_frame_size ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
	if (df_regs_ever_live_p (regno) && ! call_used_regs[regno]
	    && (regno != AR3_REGNO))
	  return 0;
      return 1;
    }
  return 0;
}

void
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
	 address.  Undo this silliness.  */
      op1 = XEXP (op1, 1);
    }

  if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
	{
	  /* Alias analysis seems to do a better job if we force
	     constant addresses to memory after reload.  */
	  emit_insn (gen_load_immed_address (op0, op1));
	  return;
	}
      else
	{
	  /* Stick symbol or label address into the constant pool.  */
	  op1 = force_const_mem (Pmode, op1);
	}
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
	 constants.  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

      emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

      emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
	op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
	{
	  op0 = copy_rtx (op0);
	  PUT_MODE (op0, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
	abort ();
      return;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
	op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
	{
	  op1 = copy_rtx (op1);
	  PUT_MODE (op1, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
	abort ();
      return;
    }

  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  emit_insn (gen_rtx_SET (VOIDmode, op0, op1));
}

void
c4x_emit_libcall (rtx libcall, enum rtx_code code,
		  enum machine_mode dmode, enum machine_mode smode,
		  int noperands, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
				     operands[1], smode);
      equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
      break;

    case 3:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
				     operands[1], smode, operands[2], smode);
      equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}

void
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
		   enum machine_mode mode, rtx *operands)
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}

void
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
			enum machine_mode mode, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
				 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
			    gen_rtx_LSHIFTRT (HImode,
					      gen_rtx_MULT (HImode,
							    gen_rtx_fmt_e (code, HImode, operands[1]),
							    gen_rtx_fmt_e (code, HImode, operands[2])),
					      GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}

int
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
{
  rtx base = NULL_RTX;		/* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;		/* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;		/* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
	 allow SP here---push_operand should recognize an operand
	 being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
	return 0;

    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
	return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	if (! REG_P (op0)
	    || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
	  return 0;
	base = XEXP (op1, 0);
	if (! REG_P (base))
	  return 0;
	if (REGNO (base) != REGNO (op0))
	  return 0;
	if (REG_P (XEXP (op1, 1)))
	  indx = XEXP (op1, 1);
	else
	  disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);
	enum rtx_code code0 = GET_CODE (op0);

	switch (code0)
	  {
	  case REG:
	    if (REG_P (op1))
	      {
		base = op0;	/* Base + index.  */
		indx = op1;
		if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
		  {
		    base = op1;
		    indx = op0;
		  }
	      }
	    else
	      {
		base = op0;	/* Base + displacement.  */
		disp = op1;
	      }
	    break;

	  default:
	    return 0;
	  }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	/* HImode and HFmode direct memory references aren't truly
	   offsettable (consider case at end of data page).  We
	   probably get better code by loading a pointer and using an
	   indirect memory reference.  */
	if (mode == HImode || mode == HFmode)
	  return 0;

	if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
	  return 0;

	if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
	  return 1;

	if (GET_CODE (op1) == CONST)
	  return 1;

	return 0;
      }

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
	return 1;
      /* These need to be converted to a LO_SUM (...).
	 LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
	 This is more pain than it's worth, especially for the
	 small memory model where we can't guarantee that
	 this address is within the data page---we don't want
	 to modify the DP register in the small memory model,
	 even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
	return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
	return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
	return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
	return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
	return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
	return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
	return 0;
      if (mode == HImode || mode == HFmode)
	{
	  /* The offset displacement must be legitimate.  */
	  if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
	    return 0;
	}
      else
	{
	  if (! IS_DISP8_CONST (INTVAL (disp)))
	    return 0;
	}
      /* Can't add an index with a disp.  */
      if (indx)
	return 0;
    }
  return 1;
}

rtx
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
      if (mode == HImode || mode == HFmode)
	{
	  /* We need to force the address into
	     a register so that it is offsettable.  */
	  rtx addr_reg = gen_reg_rtx (Pmode);
	  emit_move_insn (addr_reg, orig);
	  return addr_reg;
	}
      else
	{
	  rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

	  if (! TARGET_SMALL)
	    emit_insn (gen_set_ldp (dp_reg, orig));

	  return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
	}
    }

  return orig;
}

/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.
   This is used in cse and loop optimization to determine
   if it is worthwhile storing a common address into a register.
   Unfortunately, the C4x address cost depends on other operands.  */

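/* Illustration (added; values are the relative cost units returned
   below): a plain register indirect reference such as *AR0 is cheapest,
   while a direct symbolic reference through DP costs 3 in the small
   memory model and 4 in the large model, which is why cse prefers to
   keep a common address in an address register.  */
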
static int
c4x_address_cost (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
	rtx op1 = XEXP (addr, 1);

	if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
	  return TARGET_SMALL ? 3 : 4;

	if (GET_CODE (op1) == CONST)
	  {
	    rtx offset = const0_rtx;

	    op1 = eliminate_constant_term (op1, &offset);

	    /* ??? These costs need rethinking...  */
	    if (GET_CODE (op1) == LABEL_REF)
	      return 3;

	    if (GET_CODE (op1) != SYMBOL_REF)
	      return 4;

	    if (INTVAL (offset) == 0)
	      return 3;

	    return 4;
	  }
	fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
	register rtx op0 = XEXP (addr, 0);
	register rtx op1 = XEXP (addr, 1);

	if (GET_CODE (op0) != REG)
	  break;

	switch (GET_CODE (op1))
	  {
	  default:
	    break;

	  case REG:
	    /* This cost for REG+REG must be greater than the cost
	       for REG if we want autoincrement addressing modes.  */
	    return 2;

	  case CONST_INT:
	    /* The following tries to improve GIV combination
	       in strength reduce but appears not to help.  */
	    if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
	      return 1;

	    if (IS_DISP1_CONST (INTVAL (op1)))
	      return 1;

	    if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
	      return 2;

	    return 3;
	  }
      }
    default:
      break;
    }

  return 4;
}

rtx
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  if (mode == CC_NOOVmode
      && (code == LE || code == GE || code == LT || code == GT))
    return NULL_RTX;

  cc_reg = gen_rtx_REG (mode, ST_REGNO);
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
			  gen_rtx_COMPARE (mode, x, y)));
  return cc_reg;
}

char *
c4x_output_cbranch (const char *form, rtx seq)
{
  int delayed = 0;
  int annultrue = 0;
  int annulfalse = 0;
  rtx delay;
  char *cp;
  static char str[100];

  if (final_sequence)
    {
      delay = XVECEXP (final_sequence, 0, 1);
      delayed = ! INSN_ANNULLED_BRANCH_P (seq);
      annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
      annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
    }
  strcpy (str, form);
  cp = &str[strlen (str)];
  if (delayed)
    {
      *cp++ = '%';
      *cp++ = '#';
    }
  if (annultrue)
    {
      *cp++ = 'a';
      *cp++ = 't';
    }
  if (annulfalse)
    {
      *cp++ = 'a';
      *cp++ = 'f';
    }
  *cp++ = '\t';
  *cp++ = '%';
  *cp++ = 'l';
  *cp++ = '1';
  *cp = 0;
  return str;
}

void
c4x_print_operand (FILE *file, rtx op, int letter)
{
  rtx op1;
  enum rtx_code code;

  switch (letter)
    {
    case '#':			/* Delayed.  */
      if (final_sequence)
	fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':			/* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
	fprintf (file, "@");
      break;

    case 'H':			/* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':			/* Reversed condition.  */
      code = reverse_condition (code);
      break;

    case 'L':			/* Log 2 of constant.  */
      if (code != CONST_INT)
	fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':			/* Ones complement of small constant.  */
      if (code != CONST_INT)
	fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
      return;

    case 'K':			/* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL
	  && code == MEM
	  && GET_CODE (XEXP (op, 0)) == LO_SUM
	  && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
	{
	  op1 = XEXP (XEXP (op, 0), 1);
	  if (GET_CODE (op1) == CONST_INT || GET_CODE (op1) == SYMBOL_REF)
	    {
	      fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
	      output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
	      fprintf (file, "\n");
	    }
	}
      break;

    case 'M':			/* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL	/* Only used in asm statements.  */
	  && code == MEM
	  && (GET_CODE (XEXP (op, 0)) == CONST
	      || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
	{
	  fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
	  output_address (XEXP (op, 0));
	  fprintf (file, "\n\t");
	}
      break;

    case 'O':			/* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
	break;
      else if (code == MEM)
	output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
	fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
	fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':			/* Call.  */
      break;

    case 'U':			/* Call/callu.  */
      if (code != SYMBOL_REF)
	fprintf (file, "u");
      return;

    default:
      break;
    }

  switch (code)
    {
    case REG:
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && ! TARGET_TI)
	fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
	fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
	char str[64];

	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
			 sizeof (str), 0, 1);
	fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
      break;

    case NE:
      fprintf (file, "ne");
      break;

    case EQ:
      fprintf (file, "eq");
      break;

    case GE:
      fprintf (file, "ge");
      break;

    case GT:
      fprintf (file, "gt");
      break;

    case LE:
      fprintf (file, "le");
      break;

    case LT:
      fprintf (file, "lt");
      break;

    case GEU:
      fprintf (file, "hs");
      break;

    case GTU:
      fprintf (file, "hi");
      break;

    case LEU:
      fprintf (file, "ls");
      break;

    case LTU:
      fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    case CODE_LABEL:
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}

void
c4x_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:			/* Indirect with displacement.  */
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (REG_P (op0))
	  {
	    if (REG_P (op1))
	      {
		if (IS_INDEX_REG (op0))
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op1)],
			     reg_names[REGNO (op0)]);	/* Index + base.  */
		  }
		else
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op0)],
			     reg_names[REGNO (op1)]);	/* Base + index.  */
		  }
	      }
	    else if (INTVAL (op1) < 0)
	      {
		fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
			 reg_names[REGNO (op0)],
			 -INTVAL (op1));	/* Base - displacement.  */
	      }
	    else
	      {
		fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
			 reg_names[REGNO (op0)],
			 INTVAL (op1));		/* Base + displacement.  */
	      }
	  }
	else
	  fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  c4x_print_operand_address (file, op1);
	else
	  fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}

/* Return nonzero if the floating point operand will fit
   in the immediate field.  */

int
c4x_immed_float_p (rtx op)
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      convval[1] = 0;
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;			/* 0.0  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;			/* Precision doesn't fit.  */
  return (exponent <= 7)	/* Positive exp.  */
    && (exponent >= -7);	/* Negative exp.  */
}

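/* Illustration (added): the short immediate float format keeps a 4-bit
   exponent and a truncated mantissa, so 1.0 (exponent 0) fits, while
   65536.0 (exponent 16) must be loaded from memory; an exponent of
   -128 encodes 0.0, which always fits.  */
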
/* The last instruction in a repeat block cannot be a Bcond, DBcond,
   CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.

   None of the last four instructions from the bottom of the block can
   be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
   BcondAT or RETIcondD.

   This routine scans the four previous insns for a jump insn, and if
   one is found, returns 1 so that we bung in a nop instruction.
   This simple minded strategy will add a nop, when it may not
   be required.  Say when there is a JUMP_INSN near the end of the
   block that doesn't get converted into a delayed branch.

   Note that we cannot have a call insn, since we don't generate
   repeat loops with calls in them (although I suppose we could, but
   there's no benefit.)

   !!! FIXME.  The rptb_top insn may be sucked into a SEQUENCE.  */

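/* For example (added): if the insn four slots above the rptb_end turns
   out to be a conditional branch that was not converted into a delayed
   branch, c4x_rptb_nop_p () below returns 1 and a NOP is emitted so
   that the branch is no longer one of the last four insns of the
   block.  */
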
int
c4x_rptb_nop_p (rtx insn)
{
  rtx start_label;
  int i;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  /* If there is a label at the end of the loop we must insert
     a NOP.  */
  do
    {
      insn = previous_insn (insn);
    }
  while (GET_CODE (insn) == NOTE
	 || GET_CODE (insn) == USE
	 || GET_CODE (insn) == CLOBBER);
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  for (i = 0; i < 4; i++)
    {
      /* Search back for prev non-note and non-label insn.  */
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
	     || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
	{
	  if (insn == start_label)
	    return i == 0;

	  insn = previous_insn (insn);
	}

      /* If we have a jump instruction we should insert a NOP.  If we
	 hit repeat block top we should only insert a NOP if the loop
	 is empty.  */
      if (GET_CODE (insn) == JUMP_INSN)
	return 1;
      insn = previous_insn (insn);
    }
  return 0;
}

/* The C4x looping instruction needs to be emitted at the top of the
   loop.  Emitting the true RTL for a looping instruction at the top of
   the loop can cause problems with flow analysis.  So instead, a dummy
   doloop insn is emitted at the end of the loop.  This routine checks
   for the presence of this doloop insn and then searches back to the
   top of the loop, where it inserts the true looping insn (provided
   there are no instructions in the loop which would cause problems).
   Any additional labels can be emitted at this point.  In addition, if
   the desired loop count register was not allocated, this routine does
   nothing.

   Before we can create a repeat block looping instruction we have to
   verify that no jump jumps out of the loop and that no jump from
   outside the loop jumps into this loop.  This can happen in the basic
   blocks reorder pass.  The C4x cpu cannot handle this.  */

static int
c4x_label_ref_used_p (rtx x, rtx code_label)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == LABEL_REF)
    return INSN_UID (XEXP (x, 0)) == INSN_UID (code_label);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (c4x_label_ref_used_p (XEXP (x, i), code_label))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
	    return 1;
    }
  return 0;
}

static int
c4x_rptb_valid_p (rtx insn, rtx start_label)
{
  rtx end = insn;
  rtx start;
  rtx tmp;

  /* Find the start label.  */
  for (; insn; insn = PREV_INSN (insn))
    if (insn == start_label)
      break;

  /* If not found then we cannot use a rptb or rpts.  The label was
     probably moved by the basic block reorder pass.  */
  if (! insn)
    return 0;

  start = insn;
  /* If any jump jumps inside this block then we must fail.  */
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN (tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN (tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  /* If any jump jumps outside this block then we must fail.  */
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN (tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	  for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN (tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }

  /* All checks OK.  */
  return 1;
}

static void
c4x_rptb_insert (rtx insn)
{
  rtx end_label;
  rtx start_label;
  rtx new_start_label;
  rtx count_reg;

  /* If the count register has not been allocated to RC, say if
     there is a movmem pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop.  */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)
    return;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))
    {
      /* We cannot use the rptb insn.  Replace it so reorg can use
	 the delay slots of the jump insn.  */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
      emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;
      delete_insn (insn);
      return;
    }

  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  for (; insn; insn = PREV_INSN (insn))
    {
      if (insn == start_label)
	break;
      if (GET_CODE (insn) == JUMP_INSN &&
	  JUMP_LABEL (insn) == start_label)
	redirect_jump (insn, new_start_label, 0);
    }
  if (! insn)
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
  else
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
}

/* We need to use direct addressing for large constants and addresses
   that cannot fit within an instruction.  We must check for these
   after the final jump optimization pass, since this may
   introduce a local_move insn for a SYMBOL_REF.  This pass
   must come before delayed branch slot filling since it can generate
   additional instructions.

   This function also fixes up RPTB style loops that didn't get RC
   allocated as the loop counter.  */

static void
c4x_reorg (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Look for insn.  */
      if (INSN_P (insn))
	{
	  int insn_code_number;
	  rtx old;

	  insn_code_number = recog_memoized (insn);

	  if (insn_code_number < 0)
	    continue;

	  /* Insert the RTX for RPTB at the top of the loop
	     and a label at the end of the loop.  */
	  if (insn_code_number == CODE_FOR_rptb_end)
	    c4x_rptb_insert (insn);

	  /* We need to split the insn here.  Otherwise the calls to
	     force_const_mem will not work for load_immed_address.  */
	  old = insn;

	  /* Don't split the insn if it has been deleted.  */
	  if (! INSN_DELETED_P (old))
	    insn = try_split (PATTERN (old), old, 1);

	  /* When not optimizing, the old insn will be still left around
	     with only the 'deleted' bit set.  Transform it into a note
	     to avoid confusion of subsequent processing.  */
	  if (INSN_DELETED_P (old))
	    SET_INSN_DELETED (old);
	}
    }
}

int
c4x_a_register (rtx op)
{
  return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
}


int
c4x_x_register (rtx op)
{
  return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
}


static int
c4x_immed_int_constant (rtx op)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  return GET_MODE (op) == VOIDmode
    || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
    || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
}

static int
c4x_immed_float_constant (rtx op)
{
  if (GET_CODE (op) != CONST_DOUBLE)
    return 0;

  /* Do not check if the CONST_DOUBLE is in memory.  If there is a MEM
     present this only means that a MEM rtx has been generated.  It does
     not mean the rtx is really in memory.  */

  return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
}

int
c4x_shiftable_constant (rtx op)
{
  int i;
  int mask;
  int val = INTVAL (op);

  for (i = 0; i < 16; i++)
    {
      if (val & (1 << i))
        break;
    }
  mask = ((0xffff >> i) << 16) | 0xffff;
  if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
                                      : (val >> i) & mask))
    return i;
  return -1;
}
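
/* Worked example (illustrative, not from the original sources): for
   OP = 0x12340 the lowest set bit within the bottom 16 bits is bit 6,
   so i = 6 and mask = ((0xffff >> 6) << 16) | 0xffff = 0x03ffffff.
   The sign bit is clear, so IS_INT16_CONST tests (0x12340 >> 6)
   & 0x03ffffff = 1165, which fits in 16 signed bits, and 6 is
   returned: the constant can be rebuilt as 1165 << 6.  When the test
   fails, -1 is returned and the constant must be loaded another way.  */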
int
c4x_H_constant (rtx op)
{
  return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
}


int
c4x_I_constant (rtx op)
{
  return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
}


int
c4x_J_constant (rtx op)
{
  if (TARGET_C3X)
    return 0;
  return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
}


int
c4x_K_constant (rtx op)
{
  if (TARGET_C3X || ! c4x_immed_int_constant (op))
    return 0;
  return IS_INT5_CONST (INTVAL (op));
}


int
c4x_L_constant (rtx op)
{
  return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
}


int
c4x_N_constant (rtx op)
{
  return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
}


int
c4x_O_constant (rtx op)
{
  return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
}
/* The constraints do not have to check the register class,
   except when needed to discriminate between the constraints.
   The operand has been checked by the predicates to be valid.  */

/* ARx + 9-bit signed const or IRn
   *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
   We don't include the pre/post inc/dec forms here since
   they are handled by the <> constraints.  */

int
c4x_Q_constraint (rtx op)
{
  enum machine_mode mode = GET_MODE (op);

  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
        rtx op0 = XEXP (op, 0);
        rtx op1 = XEXP (op, 1);

        if (! REG_P (op0))
          return 0;

        if (REG_P (op1))
          return 1;

        if (GET_CODE (op1) != CONST_INT)
          return 0;

        /* HImode and HFmode must be offsettable.  */
        if (mode == HImode || mode == HFmode)
          return IS_DISP8_OFF_CONST (INTVAL (op1));

        return IS_DISP8_CONST (INTVAL (op1));
      }

    default:
      break;
    }
  return 0;
}
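
/* Illustrative examples (assumed assembler syntax): *AR0, *+AR0(5),
   *-AR0(200) and *+AR0(IR1) all satisfy the Q constraint since the
   displacements fit within the 9-bit signed field; *+AR0(300) does
   not, so a displacement that large must first be moved into an
   index register.  */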
/* ARx + 5-bit unsigned const
   *ARx, *+ARx(n) for n < 32.  */

int
c4x_R_constraint (rtx op)
{
  enum machine_mode mode = GET_MODE (op);

  if (TARGET_C3X)
    return 0;
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
        rtx op0 = XEXP (op, 0);
        rtx op1 = XEXP (op, 1);

        if (! REG_P (op0))
          return 0;

        if (GET_CODE (op1) != CONST_INT)
          return 0;

        /* HImode and HFmode must be offsettable.  */
        if (mode == HImode || mode == HFmode)
          return IS_UINT5_CONST (INTVAL (op1) + 1);

        return IS_UINT5_CONST (INTVAL (op1));
      }

    default:
      break;
    }
  return 0;
}
static int
c4x_R_indirect (rtx op)
{
  enum machine_mode mode = GET_MODE (op);

  if (TARGET_C3X || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return IS_ADDR_OR_PSEUDO_REG (op);

    case PLUS:
      {
        rtx op0 = XEXP (op, 0);
        rtx op1 = XEXP (op, 1);

        /* HImode and HFmode must be offsettable.  */
        if (mode == HImode || mode == HFmode)
          return IS_ADDR_OR_PSEUDO_REG (op0)
            && GET_CODE (op1) == CONST_INT
            && IS_UINT5_CONST (INTVAL (op1) + 1);

        return REG_P (op0)
          && IS_ADDR_OR_PSEUDO_REG (op0)
          && GET_CODE (op1) == CONST_INT
          && IS_UINT5_CONST (INTVAL (op1));
      }

    default:
      break;
    }
  return 0;
}
/* ARx + 1-bit unsigned const or IRn
   *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
   We don't include the pre/post inc/dec forms here since
   they are handled by the <> constraints.  */

int
c4x_S_constraint (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_MODIFY:
    case POST_MODIFY:
      {
        rtx op0 = XEXP (op, 0);
        rtx op1 = XEXP (op, 1);

        if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
            || (op0 != XEXP (op1, 0)))
          return 0;

        op0 = XEXP (op1, 0);
        op1 = XEXP (op1, 1);
        return REG_P (op0) && REG_P (op1);
        /* Pre or post_modify with a displacement of 0 or 1
           should not be generated.  */
      }

    case PLUS:
      {
        rtx op0 = XEXP (op, 0);
        rtx op1 = XEXP (op, 1);

        if (! REG_P (op0))
          return 0;

        if (REG_P (op1))
          return 1;

        if (GET_CODE (op1) != CONST_INT)
          return 0;

        /* HImode and HFmode must be offsettable.  */
        if (mode == HImode || mode == HFmode)
          return IS_DISP1_OFF_CONST (INTVAL (op1));

        return IS_DISP1_CONST (INTVAL (op1));
      }

    default:
      break;
    }
  return 0;
}
int
c4x_S_indirect (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
        return 0;
    case PRE_INC:
    case POST_INC:
      op = XEXP (op, 0);

    case REG:
      return IS_ADDR_OR_PSEUDO_REG (op);

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        rtx op0 = XEXP (op, 0);
        rtx op1 = XEXP (op, 1);

        if (mode != QImode && mode != QFmode)
          return 0;

        if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
            || (op0 != XEXP (op1, 0)))
          return 0;

        op0 = XEXP (op1, 0);
        op1 = XEXP (op1, 1);
        return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
          && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
        /* Pre or post_modify with a displacement of 0 or 1
           should not be generated.  */
      }

    case PLUS:
      {
        rtx op0 = XEXP (op, 0);
        rtx op1 = XEXP (op, 1);

        if (! REG_P (op0))
          return 0;

        /* HImode and HFmode must be offsettable.  */
        if (mode == HImode || mode == HFmode)
          return IS_ADDR_OR_PSEUDO_REG (op0)
            && GET_CODE (op1) == CONST_INT
            && IS_DISP1_OFF_CONST (INTVAL (op1));

        if (REG_P (op1))
          return (IS_INDEX_OR_PSEUDO_REG (op1)
                  && IS_ADDR_OR_PSEUDO_REG (op0))
            || (IS_ADDR_OR_PSEUDO_REG (op1)
                && IS_INDEX_OR_PSEUDO_REG (op0));

        return IS_ADDR_OR_PSEUDO_REG (op0)
          && GET_CODE (op1) == CONST_INT
          && IS_DISP1_CONST (INTVAL (op1));
      }

    default:
      break;
    }
  return 0;
}
/* Direct memory operand.  */

int
c4x_T_constraint (rtx op)
{
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);

  if (GET_CODE (op) != LO_SUM)
    {
      /* Allow call operands.  */
      return GET_CODE (op) == SYMBOL_REF
        && GET_MODE (op) == Pmode
        && SYMBOL_REF_FUNCTION_P (op);
    }

  /* HImode and HFmode are not offsettable.  */
  if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
    return 0;

  if ((GET_CODE (XEXP (op, 0)) == REG)
      && (REGNO (XEXP (op, 0)) == DP_REGNO))
    return c4x_U_constraint (XEXP (op, 1));

  return 0;
}
/* Symbolic operand.  */

int
c4x_U_constraint (rtx op)
{
  /* Don't allow direct addressing to an arbitrary constant.  */
  return GET_CODE (op) == CONST
    || GET_CODE (op) == SYMBOL_REF
    || GET_CODE (op) == LABEL_REF;
}
int
c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));

      if (code == PRE_INC
          || code == PRE_DEC
          || code == POST_INC
          || code == POST_DEC
          || code == PRE_MODIFY
          || code == POST_MODIFY)
        return 1;
    }
  return 0;
}
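
/* Illustrative mapping (assumed assembler syntax): PRE_INC is *++AR0,
   POST_INC is *AR0++, PRE_DEC is *--AR0, POST_DEC is *AR0--, while
   PRE_MODIFY/POST_MODIFY cover the *++AR0(IR0) and *AR0++(IR0) style
   updates.  */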
int
mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Allow (subreg:HF (reg:HI)) that can be generated for a union of an
     int and a long double.  */
  if (GET_CODE (op) == SUBREG
      && (GET_MODE (op) == QFmode)
      && (GET_MODE (SUBREG_REG (op)) == QImode
          || GET_MODE (SUBREG_REG (op)) == HImode))
    return 1;
  return 0;
}
int
reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (REG_P (op) || CONSTANT_P (op))
    return 1;
  return 0;
}
int
not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (REG_P (op) || CONSTANT_P (op))
    return 1;
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
        rtx op0 = XEXP (op, 0);
        rtx op1 = XEXP (op, 1);

        if (! REG_P (op0))
          return 0;

        if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
          return 1;
      }
      break;

    case LO_SUM:
      {
        rtx op0 = XEXP (op, 0);

        if (REG_P (op0) && REGNO (op0) == DP_REGNO)
          return 1;
      }
      break;

    default:
      break;
    }
  return 0;
}
int
not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (REG_P (op) && REGNO (op) == RC_REGNO)
    return 0;
  return 1;
}
static void
c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
{
  *base = 0;
  *incdec = 0;
  *index = 0;
  *disp = 0;

  if (GET_CODE (op) != MEM)
    fatal_insn ("invalid indirect memory address", op);

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = -1;
      return;

    case POST_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case PRE_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 1;
      return;

    case POST_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case PRE_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
        {
          *index = REGNO (XEXP (XEXP (op, 1), 1));
          *disp = 0;            /* ??? */
        }
      else
        *disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;
      return;

    case POST_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
        {
          *index = REGNO (XEXP (XEXP (op, 1), 1));
          *disp = 1;            /* ??? */
        }
      else
        *disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;
      return;

    case REG:
      *base = REGNO (op);
      return;

    case PLUS:
      {
        rtx op0 = XEXP (op, 0);
        rtx op1 = XEXP (op, 1);

        if (c4x_a_register (op0))
          {
            if (c4x_x_register (op1))
              {
                *base = REGNO (op0);
                *index = REGNO (op1);
                return;
              }
            else if ((GET_CODE (op1) == CONST_INT
                      && IS_DISP1_CONST (INTVAL (op1))))
              {
                *base = REGNO (op0);
                *disp = INTVAL (op1);
                return;
              }
          }
        else if (c4x_x_register (op0) && c4x_a_register (op1))
          {
            *base = REGNO (op1);
            *index = REGNO (op0);
            return;
          }
      }
      break;

    default:
      break;
    }

  fatal_insn ("invalid indirect (S) memory address", op);
}
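
/* Illustrative results (assumed assembler syntax): *AR3 parses to
   base = AR3 with no index or displacement; *+AR3(1) to base = AR3,
   disp = 1; *AR3++ (POST_INC) to base = AR3, incdec = 1, disp = 0;
   and *--AR3 (PRE_DEC) to base = AR3, incdec = 1, disp = -1.  */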
/* Return nonzero if the memory references OP0 and OP1 may conflict
   when issued in parallel.  STORE0 and STORE1 are nonzero when the
   respective reference is a store.  */

static int
c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
{
  int base0;
  int base1;
  int incdec0;
  int incdec1;
  int index0;
  int index1;
  int disp0;
  int disp1;

  if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
    return 1;

  c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
  c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);

  if (store0 && store1)
    {
      /* If we have two stores in parallel to the same address, then
         the C4x only executes one of the stores.  This is unlikely to
         cause problems except when writing to a hardware device such
         as a FIFO since the second write will be lost.  The user
         should flag the hardware location as being volatile so that
         we don't do this optimization.  While it is unlikely that we
         have an aliased address if both locations are not marked
         volatile, it is probably safer to flag a potential conflict
         if either location is volatile.  */
      if (! flag_argument_noalias)
        {
          if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
            return 1;
        }
    }

  /* If we have a parallel load and a store to the same address, the load
     is performed first, so there is no conflict.  Similarly, there is
     no conflict if we have parallel loads from the same address.  */

  /* Cannot use auto increment or auto decrement twice for the same
     base register.  */
  if (base0 == base1 && incdec0 && incdec1)
    return 1;

  /* It might be too confusing for GCC if we use a base register
     with a side effect and a memory reference using the same register
     in parallel.  */
  if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
    return 1;

  /* We cannot optimize the case where op0 and op1 refer to the same
     address.  */
  if (base0 == base1 && disp0 == disp1 && index0 == index1)
    return 1;

  return 0;
}
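
/* Worked example (illustrative, assumed assembler syntax): parallel
   references to *+AR3(1) and *+AR3(1) share base, displacement and
   index and therefore conflict, whereas *+AR3(0) alongside *+AR3(1)
   differs in displacement and does not.  *AR3++ paired with *AR3
   also conflicts, since a side-effect update of AR3 next to a plain
   use of AR3 is rejected above unless TARGET_DEVEL is set.  */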
/* Check for a while loop inside a decrement and branch loop.  */

int
c4x_label_conflict (rtx insn, rtx jump, rtx db)
{
  while (insn)
    {
      if (GET_CODE (insn) == CODE_LABEL)
        {
          if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
            return 1;
          if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
            return 0;
        }
      insn = PREV_INSN (insn);
    }
  return 1;
}
/* Validate combination of operands for parallel load/store instructions.  */

int
valid_parallel_load_store (rtx *operands,
                           enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op3 = operands[3];

  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);
  if (GET_CODE (op3) == SUBREG)
    op3 = SUBREG_REG (op3);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Thus of the 4 operands, only 2
     should be REGs and the other 2 should be MEMs.  */

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op2 or op3, since
     this combination will require reloading.  */
  if (GET_CODE (op0) == REG
      && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
          || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
    return 0;

  /* Load || load.  */
  if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
    return (REGNO (op0) != REGNO (op2))
      && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op1, op3, 0, 0);

  /* Store || store.  */
  if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op0, op2, 1, 1);

  /* Load || store.  */
  if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op1, op2, 0, 1);

  /* Store || load.  */
  if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op0, op3, 1, 0);

  return 0;
}
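
/* Illustrative pairing (assumed assembler syntax): the load || store
   case corresponds to something like LDI *AR0, R0 issued in parallel
   with STI R1, *AR1, i.e. operands (0,1) form the load and operands
   (2,3) the store; the conflict check then only has to compare the
   two memory addresses.  */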
int
valid_parallel_operands_4 (rtx *operands,
                           enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx op0 = operands[0];
  rtx op2 = operands[2];

  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op2, since this combination
     will require reloading.  */
  if (GET_CODE (op0) == REG
      && GET_CODE (op2) == MEM
      && reg_mentioned_p (op0, XEXP (op2, 0)))
    return 0;

  return 1;
}
int
valid_parallel_operands_5 (rtx *operands,
                           enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int regs = 0;
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op3 = operands[3];

  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Operands 1 and 2 may be commutative
     but only one of them can be a register.  */
  if (GET_CODE (op1) == REG)
    regs++;
  if (GET_CODE (op2) == REG)
    regs++;

  if (regs != 1)
    return 0;

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op3, since this combination
     will require reloading.  */
  if (GET_CODE (op0) == REG
      && GET_CODE (op3) == MEM
      && reg_mentioned_p (op0, XEXP (op3, 0)))
    return 0;

  return 1;
}
int
valid_parallel_operands_6 (rtx *operands,
                           enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int regs = 0;
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op4 = operands[4];
  rtx op5 = operands[5];

  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);
  if (GET_CODE (op4) == SUBREG)
    op4 = SUBREG_REG (op4);
  if (GET_CODE (op5) == SUBREG)
    op5 = SUBREG_REG (op5);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Thus of the 4 input operands, only 2
     should be REGs and the other 2 should be MEMs.  */

  if (GET_CODE (op1) == REG)
    regs++;
  if (GET_CODE (op2) == REG)
    regs++;
  if (GET_CODE (op4) == REG)
    regs++;
  if (GET_CODE (op5) == REG)
    regs++;

  /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
     Perhaps we should count the MEMs as well?  */
  if (regs != 2)
    return 0;

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op4 or op5, since
     this combination will require reloading.  */
  if (GET_CODE (op0) == REG
      && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
          || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
    return 0;

  return 1;
}
/* Validate combination of src operands.  Note that the operands have
   been screened by the src_operand predicate.  We just have to check
   that the combination of operands is valid.  If FORCE is set, ensure
   that the destination regno is valid if we have a 2 operand insn.  */

static int
c4x_valid_operands (enum rtx_code code, rtx *operands,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int force)
{
  rtx op0;
  rtx op1;
  rtx op2;
  enum rtx_code code1;
  enum rtx_code code2;

  /* FIXME, why can't we tighten the operands for IF_THEN_ELSE?  */
  if (code == IF_THEN_ELSE)
    return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);

  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
      op0 = gen_rtx_SCRATCH (GET_MODE (op1));
    }
  else
    {
      op0 = operands[0];
      op1 = operands[1];
      op2 = operands[2];
    }

  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);

  if (code1 == REG && code2 == REG)
    return 1;

  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
        return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  /* We cannot handle two MEMs or two CONSTS, etc.  */
  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
        {
        case CONST_INT:
          if (c4x_J_constant (op2) && c4x_R_indirect (op1))
            return 1;
          break;

        case CONST_DOUBLE:
          if (! c4x_H_constant (op2))
            return 0;
          break;

          /* Any valid memory operand screened by src_operand is OK.  */
        case MEM:
          break;

        default:
          fatal_insn ("c4x_valid_operands: Internal error", op2);
          break;
        }

      if (GET_CODE (op0) == SCRATCH)
        return 1;

      if (! REG_P (op0))
        return 0;

      /* Check that we have a valid destination register for a two operand
         instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
    }

  /* Check non-commutative operators.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));

  /* Assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code2 == REG)
    {
      switch (code1)
        {
        case CONST_INT:
          break;

        case CONST_DOUBLE:
          if (! c4x_H_constant (op1))
            return 0;
          break;

          /* Any valid memory operand screened by src_operand is OK.  */
        case MEM:
          break;

        default:
          fatal_insn ("c4x_valid_operands: Internal error", op1);
          break;
        }

      if (GET_CODE (op0) == SCRATCH)
        return 1;

      if (! REG_P (op0))
        return 0;

      /* Check that we have a valid destination register for a two operand
         instruction.  */
      return ! force || REGNO (op1) == REGNO (op0);
    }

  if (c4x_J_constant (op1) && c4x_R_indirect (op2))
    return 1;

  return 0;
}
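
/* Illustrative consequences (assumed assembler syntax): two register
   sources are always valid; two memory sources are valid only when
   both fit the restricted S or R indirect forms, e.g.
   ADDI3 *AR0, *AR1, R0, but not when one of them needs a larger
   displacement than those forms permit.  */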
int
valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
{
  /* If we are not optimizing then we have to let anything go and let
     reload fix things up.  instantiate_decl in function.c can produce
     invalid insns by changing the offset of a memory operand from a
     valid one into an invalid one, when the second operand is also a
     memory operand.  The alternative is not to allow two memory
     operands for an insn when not optimizing.  The problem only rarely
     occurs, for example with the C-torture program DFcmp.c.  */

  return ! optimize || c4x_valid_operands (code, operands, mode, 0);
}
int
legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
         they can get hoisted out of loops.  This will tie up an extra
         register but can save an extra cycle.  Only do this if loop
         optimization enabled.  (We cannot pull this trick for add and
         sub instructions since the flow pass won't find
         autoincrements etc.)  This allows us to generate compare
         instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
         of LDI *AR0++, R0; CMPI 42, R0.

         Note that expand_binops will try to load an expensive constant
         into a register if it is used within a loop.  Unfortunately,
         the cost mechanism doesn't allow us to look at the other
         operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
          && TARGET_HOIST
          && optimize > 0
          && GET_CODE (operands[1]) == CONST_INT
          && rtx_cost (operands[1], code) > 1)
        operands[1] = force_reg (mode, operands[1]);

      if (! reload_in_progress
          && ! c4x_valid_operands (code, operands, mode, 0))
        operands[0] = force_reg (mode, operands[0]);

      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
         register, preventing reload from having to do this at a later
         stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
        {
          emit_move_insn (operands[0], operands[1]);
          operands[1] = copy_rtx (operands[0]);
        }
      else
        {
          /* Just in case...  */
          if (! c4x_valid_operands (code, operands, mode, 0))
            operands[2] = force_reg (mode, operands[2]);
        }
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));

  /* When the shift count is greater than 32 then the result
     can be implementation dependent.  We truncate the result to
     fit in 5 bits so that we do not emit invalid code when
     optimizing---such as trying to generate lhu2 with 20021124-1.c.  */
  if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
       && (GET_CODE (operands[2]) == CONST_INT))
      && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
    operands[2]
      = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));

  return 1;
}
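
/* Worked example: QImode is 32 bits wide on the C4x, so
   GET_MODE_BITSIZE (QImode) - 1 is 31; a constant shift count of 40
   is rewritten above as 40 & 31 = 8 rather than being emitted as an
   out-of-range count.  */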
/* The following predicates are used for instruction scheduling.  */

int
group1_reg_operand (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
}


int
group1_mem_operand (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS)
        {
          rtx op0 = XEXP (op, 0);
          rtx op1 = XEXP (op, 1);

          if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
              || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
            return 1;
        }
      else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
        return 1;
    }

  return 0;
}
/* Return true if OP is any one of the address registers.  */

int
arx_reg_operand (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
}
static int
c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
}


static int
c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
        {
        case PRE_DEC:
        case POST_DEC:
        case PRE_INC:
        case POST_INC:
          op = XEXP (op, 0);

        case REG:
          return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

        case PRE_MODIFY:
        case POST_MODIFY:
          if (REG_P (XEXP (op, 0)) && (! reload_completed
                                       || (REGNO (XEXP (op, 0)) == regno)))
            return 1;
          if (REG_P (XEXP (XEXP (op, 1), 1))
              && (! reload_completed
                  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
            return 1;
          break;

        case PLUS:
          {
            rtx op0 = XEXP (op, 0);
            rtx op1 = XEXP (op, 1);

            if ((REG_P (op0) && (! reload_completed
                                 || (REGNO (op0) == regno)))
                || (REG_P (op1) && (! reload_completed
                                    || (REGNO (op1) == regno))))
              return 1;
          }
          break;

        default:
          break;
        }
    }
  return 0;
}
int
ar0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}


int
ar0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}


int
ar1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}


int
ar1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}


int
ar2_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}


int
ar2_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}


int
ar3_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}


int
ar3_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}


int
ar4_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}


int
ar4_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}


int
ar5_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}


int
ar5_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}


int
ar6_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}


int
ar6_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}


int
ar7_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}


int
ar7_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}


int
ir0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}


int
ir0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}


int
ir1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}


int
ir1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
/* This is similar to operand_subword but allows autoincrement
   addressing.  */

rtx
c4x_operand_subword (rtx op, int i, int validate_address,
                     enum machine_mode mode)
{
  if (mode != HImode && mode != HFmode)
    fatal_insn ("c4x_operand_subword: invalid mode", op);

  if (mode == HFmode && REG_P (op))
    fatal_insn ("c4x_operand_subword: invalid operand", op);

  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));
      enum machine_mode mode = GET_MODE (XEXP (op, 0));
      enum machine_mode submode;

      submode = mode;
      if (mode == HImode)
        submode = QImode;
      else if (mode == HFmode)
        submode = QFmode;

      switch (code)
        {
        case POST_INC:
        case PRE_INC:
          return gen_rtx_MEM (submode, XEXP (op, 0));

        case POST_DEC:
        case PRE_DEC:
        case PRE_MODIFY:
        case POST_MODIFY:
          /* We could handle these with some difficulty.
             e.g., *p-- => *(p-=2); *(p+1).  */
          fatal_insn ("c4x_operand_subword: invalid autoincrement", op);

        case SYMBOL_REF:
        case LABEL_REF:
        case CONST:
        case CONST_INT:
          fatal_insn ("c4x_operand_subword: invalid address", op);

          /* Even though offsettable_address_p considers (MEM
             (LO_SUM)) to be offsettable, it is not safe if the
             address is at the end of the data page since we also have
             to fix up the associated high part.  In this case where
             we are trying to split a HImode or HFmode memory
             reference, we would have to emit another insn to reload a
             new HIGH value.  It's easier to disable LO_SUM memory references
             in HImode or HFmode and we probably get better code.  */
        case LO_SUM:
          fatal_insn ("c4x_operand_subword: address not offsettable", op);

        default:
          break;
        }
    }

  return operand_subword (op, i, validate_address, mode);
}
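
/* Note on the POST_INC/PRE_INC case above: an HImode access through
   *AR0++ is split into two QImode accesses that reuse the same
   autoincrement address, the increment itself supplying the step from
   the low word to the high word.  */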
struct name_list
{
  struct name_list *next;
  const char *name;
};

static struct name_list *global_head;
static struct name_list *extern_head;


/* Add NAME to list of global symbols and remove from external list if
   present on external list.  */

void
c4x_global_label (const char *name)
{
  struct name_list *p, *last;

  /* Do not insert duplicate names, so linearly search through list of
     existing names.  */
  p = global_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
        return;
      p = p->next;
    }
  p = (struct name_list *) xmalloc (sizeof *p);
  p->next = global_head;
  p->name = name;
  global_head = p;

  /* Remove this name from ref list if present.  */
  last = NULL;
  p = extern_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
        {
          if (last)
            last->next = p->next;
          else
            extern_head = p->next;
          return;
        }
      last = p;
      p = p->next;
    }
}
/* Add NAME to list of external symbols.  */

void
c4x_external_ref (const char *name)
{
  struct name_list *p;

  /* Do not insert duplicate names.  */
  p = extern_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
        return;
      p = p->next;
    }

  /* Do not insert ref if global found.  */
  p = global_head;
  while (p)
    {
      if (strcmp (p->name, name) == 0)
        return;
      p = p->next;
    }
  p = (struct name_list *) xmalloc (sizeof *p);
  p->next = extern_head;
  p->name = name;
  extern_head = p;
}
/* We need to have a data section we can identify so that we can set
   the DP register back to a data pointer in the small memory model.
   This is only required for ISRs if we are paranoid that someone
   may have quietly changed this register on the sly.  */

static void
c4x_file_start (void)
{
  default_file_start ();
  fprintf (asm_out_file, "\t.version\t%d\n", c4x_cpu_version);
  fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
}


static void
c4x_file_end (void)
{
  struct name_list *p;

  /* Output all external names that are not global.  */
  p = extern_head;
  while (p)
    {
      fprintf (asm_out_file, "\t.ref\t");
      assemble_name (asm_out_file, p->name);
      fprintf (asm_out_file, "\n");
      p = p->next;
    }
  fprintf (asm_out_file, "\t.end\n");
}
static void
c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
{
  while (list != NULL_TREE
         && IDENTIFIER_POINTER (TREE_PURPOSE (list))
            != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list != NULL_TREE)
    *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
                             *attributes);
}


static void
c4x_insert_attributes (tree decl, tree *attributes)
{
  switch (TREE_CODE (decl))
    {
    case FUNCTION_DECL:
      c4x_check_attribute ("section", code_tree, decl, attributes);
      c4x_check_attribute ("const", pure_tree, decl, attributes);
      c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
      c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
      c4x_check_attribute ("naked", naked_tree, decl, attributes);
      break;

    case VAR_DECL:
      c4x_check_attribute ("section", data_tree, decl, attributes);
      break;

    default:
      break;
    }
}
/* Table of valid machine attributes.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "naked",        0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { NULL,           0, 0, false, false, false, NULL }
};


/* Handle an attribute requiring a FUNCTION_TYPE;
   arguments as in struct attribute_spec.handler.  */

static tree
c4x_handle_fntype_attribute (tree *node, tree name,
                             tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED,
                             bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qs attribute only applies to functions",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* !!! FIXME to emit RPTS correctly.  */

int
c4x_rptb_rpts_p (rtx insn, rtx op)
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
         and the top label... They were probably destined to
         be moved out of the loop.  For now, let's leave them
         where they are and print a warning.  We should
         probably move these insns before the repeat block insn.  */
      fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
                  insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  if (! INSN_P (insn))
    return 0;

  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  if (TARGET_RPTS)
    return 1;

  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
/* Check if register r11 is used as the destination of an insn.  */

static int
c4x_r11_set_p (rtx x)
{
  rtx set;
  int i, j;
  const char *fmt;

  if (x == 0)
    return 0;

  if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
    x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);

  if (INSN_P (x) && (set = single_set (x)))
    x = SET_DEST (set);

  if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (c4x_r11_set_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (c4x_r11_set_p (XVECEXP (x, i, j)))
            return 1;
    }
  return 0;
}
/* The c4x sometimes has a problem when the insn before the laj insn
   sets the r11 register.  Check for this situation.  */

int
c4x_check_laj_p (rtx insn)
{
  insn = prev_nonnote_insn (insn);

  /* If this is the start of the function no nop is needed.  */
  if (insn == 0)
    return 0;

  /* If the previous insn is a code label we have to insert a nop. This
     could be a jump or table jump. We can find the normal jumps by
     scanning the function but this will not find table jumps.  */
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  /* If the previous insn sets register r11 we have to insert a nop.  */
  if (c4x_r11_set_p (insn))
    return 1;

  /* No nop needed.  */
  return 0;
}
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.
   A set of an address register followed by a use incurs a 2 cycle
   stall (reduced to a single cycle on the c40 using LDA), while
   a read of an address register followed by a use incurs a single
   cycle stall.  */

#define SET_USE_COST    3
#define SETLDA_USE_COST 2
#define READ_USE_COST   2
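
/* Illustrative reading of the numbers above (assumed assembler
   syntax): with a one cycle issue latency, a cost of 3 models the two
   stall cycles after an address register is written, e.g. LDI 10, AR0
   directly followed by LDI *AR0, R0; a cost of 2 models the single
   stall left when the c40's LDA performs the write, or when the
   address register was merely read.  */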
static int
c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
         cycles later.  */
      if (TARGET_C3X)
        {
          if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;
        }
      else
        {
          /* This could be significantly optimized. We should look
             to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
             insn uses ar0-ar7.  We then test if the same register
             is used.  The tricky bit is that some operands will
             use several registers...  */
          if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
          if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
            max = READ_USE_COST > max ? READ_USE_COST : max;

          if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

          if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
            max = SET_USE_COST > max ? SET_USE_COST : max;
          if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
            max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
        }

      if (max)
        cost = max;

      /* For other data dependencies, the default cost specified in the
         md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
         cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
         cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
static void
c4x_init_builtins (void)
{
  tree endlink = void_list_node;

  add_builtin_function ("fast_ftoi",
                        build_function_type
                        (integer_type_node,
                         tree_cons (NULL_TREE, double_type_node,
                                    endlink)),
                        C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
  add_builtin_function ("ansi_ftoi",
                        build_function_type
                        (integer_type_node,
                         tree_cons (NULL_TREE, double_type_node,
                                    endlink)),
                        C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
                        NULL_TREE);
  if (TARGET_C3X)
    add_builtin_function ("fast_imult",
                          build_function_type
                          (integer_type_node,
                           tree_cons (NULL_TREE, integer_type_node,
                                      tree_cons (NULL_TREE,
                                                 integer_type_node,
                                                 endlink))),
                          C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
                          NULL_TREE);
  else
    {
      add_builtin_function ("toieee",
                            build_function_type
                            (double_type_node,
                             tree_cons (NULL_TREE, double_type_node,
                                        endlink)),
                            C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
                            NULL_TREE);
      add_builtin_function ("frieee",
                            build_function_type
                            (double_type_node,
                             tree_cons (NULL_TREE, double_type_node,
                                        endlink)),
                            C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
                            NULL_TREE);
      add_builtin_function ("fast_invf",
                            build_function_type
                            (double_type_node,
                             tree_cons (NULL_TREE, double_type_node,
                                        endlink)),
                            C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
                            NULL_TREE);
    }
}
static rtx
c4x_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      arg0 = CALL_EXPR_ARG (exp, 0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      arg0 = CALL_EXPR_ARG (exp, 0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      if (! TARGET_C3X)
        break;
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      if (! target || ! register_operand (target, QImode))
        target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      if (TARGET_C3X)
        break;
      arg0 = CALL_EXPR_ARG (exp, 0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      if (TARGET_C3X)
        break;
      arg0 = CALL_EXPR_ARG (exp, 0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (register_operand (r0, QFmode))
        {
          r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
          emit_move_insn (r1, r0);
          r0 = r1;
        }
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      if (TARGET_C3X)
        break;
      arg0 = CALL_EXPR_ARG (exp, 0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      if (! target || ! register_operand (target, QFmode))
        target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;

    default:
      break;
    }
  return NULL_RTX;
}
static void
c4x_init_libfuncs (void)
{
  set_optab_libfunc (smul_optab, QImode, "__mulqi3");
  set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
  set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
  set_optab_libfunc (smod_optab, QImode, "__modqi3");
  set_optab_libfunc (umod_optab, QImode, "__umodqi3");
  set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
  set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
  set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
  set_optab_libfunc (smul_optab, HImode, "__mulhi3");
  set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
  set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
  set_optab_libfunc (smod_optab, HImode, "__modhi3");
  set_optab_libfunc (umod_optab, HImode, "__umodhi3");
  set_optab_libfunc (ffs_optab,  QImode, "__ffs");
  smulhi3_libfunc           = init_one_libfunc ("__smulhi3_high");
  umulhi3_libfunc           = init_one_libfunc ("__umulhi3_high");
  fix_truncqfhi2_libfunc    = init_one_libfunc ("__fix_truncqfhi2");
  fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
  fix_trunchfhi2_libfunc    = init_one_libfunc ("__fix_trunchfhi2");
  fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
  floathiqf2_libfunc        = init_one_libfunc ("__floathiqf2");
  floatunshiqf2_libfunc     = init_one_libfunc ("__ufloathiqf2");
  floathihf2_libfunc        = init_one_libfunc ("__floathihf2");
  floatunshihf2_libfunc     = init_one_libfunc ("__ufloathihf2");
}
static void
c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
                       tree decl ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}


static void
c4x_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)

#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  HOST_WIDE_INT val;

  switch (code)
    {
      /* Some small integers are effectively free for the C40.  We should
         also consider if we are using the small memory model.  With
         the big memory model we require an extra insn for a constant
         loaded from memory.  */

    case CONST_INT:
      val = INTVAL (x);
      if (c4x_J_constant (x))
        *total = 0;
      else if (! TARGET_C3X
               && outer_code == AND
               && (val == 255 || val == 65535))
        *total = 0;
      else if (! TARGET_C3X
               && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
               && (val == 16 || val == 24))
        *total = 0;
      else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
        *total = 3;
      else if (LOGICAL_CODE_P (outer_code)
               ? c4x_L_constant (x) : c4x_I_constant (x))
        *total = 2;
      else
        *total = 4;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 4;
      return true;

    case CONST_DOUBLE:
      if (c4x_H_constant (x))
        *total = 2;
      else if (GET_MODE (x) == QFmode)
        *total = 4;
      else
        *total = 8;
      return true;

      /* ??? Note that we return true, rather than false so that rtx_cost
         doesn't include the constant costs.  Otherwise expand_mult will
         think that it is cheaper to synthesize a multiply rather than to
         use a multiply instruction.  I think this is because the algorithm
         synth_mult doesn't take into account the loading of the operands,
         whereas the calculation of mult_cost does.  */

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              || TARGET_MPYI ? 1 : 14);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
                              ? 15 : 50);
      return true;

    default:
      return false;
    }
}
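
/* Example of the MULT case above: without TARGET_MPYI an integer
   multiply is costed at 14 insns, which makes expand_mult prefer a
   shift-and-add synthesis for small constant multipliers; with MPYI
   the single multiply instruction wins.  */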
/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  */

static void
c4x_external_libcall (rtx fun)
{
  /* This is only needed to keep asm30 happy for ___divqf3 etc.  */
  c4x_external_ref (XSTR (fun, 0));
}


/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, AR0_REGNO);
}