1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
4 Free Software Foundation, Inc.
6 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
7 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify
12 it under the terms of the GNU General Public License as published by
13 the Free Software Foundation; either version 2, or (at your option)
16 GCC is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to
23 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
24 Boston, MA 02110-1301, USA. */
26 /* Some output-actions in c4x.md need these. */
29 #include "coretypes.h"
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
37 #include "insn-config.h"
38 #include "insn-attr.h"
39 #include "conditions.h"
52 #include "target-def.h"
53 #include "langhooks.h"
57 rtx fix_truncqfhi2_libfunc
;
58 rtx fixuns_truncqfhi2_libfunc
;
59 rtx fix_trunchfhi2_libfunc
;
60 rtx fixuns_trunchfhi2_libfunc
;
61 rtx floathiqf2_libfunc
;
62 rtx floatunshiqf2_libfunc
;
63 rtx floathihf2_libfunc
;
64 rtx floatunshihf2_libfunc
;
66 static int c4x_leaf_function
;
68 static const char *const float_reg_names
[] = FLOAT_REGISTER_NAMES
;
70 /* Array of the smallest class containing reg number REGNO, indexed by
71 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
72 registers are available and set the class to NO_REGS for registers
73 that the target switches say are unavailable. */
75 enum reg_class c4x_regclass_map
[FIRST_PSEUDO_REGISTER
] =
77 /* Reg Modes Saved. */
78 R0R1_REGS
, /* R0 QI, QF, HF No. */
79 R0R1_REGS
, /* R1 QI, QF, HF No. */
80 R2R3_REGS
, /* R2 QI, QF, HF No. */
81 R2R3_REGS
, /* R3 QI, QF, HF No. */
82 EXT_LOW_REGS
, /* R4 QI, QF, HF QI. */
83 EXT_LOW_REGS
, /* R5 QI, QF, HF QI. */
84 EXT_LOW_REGS
, /* R6 QI, QF, HF QF. */
85 EXT_LOW_REGS
, /* R7 QI, QF, HF QF. */
86 ADDR_REGS
, /* AR0 QI No. */
87 ADDR_REGS
, /* AR1 QI No. */
88 ADDR_REGS
, /* AR2 QI No. */
89 ADDR_REGS
, /* AR3 QI QI. */
90 ADDR_REGS
, /* AR4 QI QI. */
91 ADDR_REGS
, /* AR5 QI QI. */
92 ADDR_REGS
, /* AR6 QI QI. */
93 ADDR_REGS
, /* AR7 QI QI. */
94 DP_REG
, /* DP QI No. */
95 INDEX_REGS
, /* IR0 QI No. */
96 INDEX_REGS
, /* IR1 QI No. */
97 BK_REG
, /* BK QI QI. */
98 SP_REG
, /* SP QI No. */
99 ST_REG
, /* ST CC No. */
100 NO_REGS
, /* DIE/IE No. */
101 NO_REGS
, /* IIE/IF No. */
102 NO_REGS
, /* IIF/IOF No. */
103 INT_REGS
, /* RS QI No. */
104 INT_REGS
, /* RE QI No. */
105 RC_REG
, /* RC QI No. */
106 EXT_REGS
, /* R8 QI, QF, HF QI. */
107 EXT_REGS
, /* R9 QI, QF, HF No. */
108 EXT_REGS
, /* R10 QI, QF, HF No. */
109 EXT_REGS
, /* R11 QI, QF, HF No. */
112 enum machine_mode c4x_caller_save_map
[FIRST_PSEUDO_REGISTER
] =
114 /* Reg Modes Saved. */
115 HFmode
, /* R0 QI, QF, HF No. */
116 HFmode
, /* R1 QI, QF, HF No. */
117 HFmode
, /* R2 QI, QF, HF No. */
118 HFmode
, /* R3 QI, QF, HF No. */
119 QFmode
, /* R4 QI, QF, HF QI. */
120 QFmode
, /* R5 QI, QF, HF QI. */
121 QImode
, /* R6 QI, QF, HF QF. */
122 QImode
, /* R7 QI, QF, HF QF. */
123 QImode
, /* AR0 QI No. */
124 QImode
, /* AR1 QI No. */
125 QImode
, /* AR2 QI No. */
126 QImode
, /* AR3 QI QI. */
127 QImode
, /* AR4 QI QI. */
128 QImode
, /* AR5 QI QI. */
129 QImode
, /* AR6 QI QI. */
130 QImode
, /* AR7 QI QI. */
131 VOIDmode
, /* DP QI No. */
132 QImode
, /* IR0 QI No. */
133 QImode
, /* IR1 QI No. */
134 QImode
, /* BK QI QI. */
135 VOIDmode
, /* SP QI No. */
136 VOIDmode
, /* ST CC No. */
137 VOIDmode
, /* DIE/IE No. */
138 VOIDmode
, /* IIE/IF No. */
139 VOIDmode
, /* IIF/IOF No. */
140 QImode
, /* RS QI No. */
141 QImode
, /* RE QI No. */
142 VOIDmode
, /* RC QI No. */
143 QFmode
, /* R8 QI, QF, HF QI. */
144 HFmode
, /* R9 QI, QF, HF No. */
145 HFmode
, /* R10 QI, QF, HF No. */
146 HFmode
, /* R11 QI, QF, HF No. */
150 /* Test and compare insns in c4x.md store the information needed to
151 generate branch and scc insns here. */
156 int c4x_cpu_version
= 40; /* CPU version C30/31/32/33/40/44. */
158 /* Pragma definitions. */
160 tree code_tree
= NULL_TREE
;
161 tree data_tree
= NULL_TREE
;
162 tree pure_tree
= NULL_TREE
;
163 tree noreturn_tree
= NULL_TREE
;
164 tree interrupt_tree
= NULL_TREE
;
165 tree naked_tree
= NULL_TREE
;
167 /* Forward declarations */
168 static bool c4x_handle_option (size_t, const char *, int);
169 static int c4x_isr_reg_used_p (unsigned int);
170 static int c4x_leaf_function_p (void);
171 static int c4x_naked_function_p (void);
172 static int c4x_immed_int_constant (rtx
);
173 static int c4x_immed_float_constant (rtx
);
174 static int c4x_R_indirect (rtx
);
175 static void c4x_S_address_parse (rtx
, int *, int *, int *, int *);
176 static int c4x_valid_operands (enum rtx_code
, rtx
*, enum machine_mode
, int);
177 static int c4x_arn_reg_operand (rtx
, enum machine_mode
, unsigned int);
178 static int c4x_arn_mem_operand (rtx
, enum machine_mode
, unsigned int);
179 static void c4x_file_start (void);
180 static void c4x_file_end (void);
181 static void c4x_check_attribute (const char *, tree
, tree
, tree
*);
182 static int c4x_r11_set_p (rtx
);
183 static int c4x_rptb_valid_p (rtx
, rtx
);
184 static void c4x_reorg (void);
185 static int c4x_label_ref_used_p (rtx
, rtx
);
186 static tree
c4x_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
187 const struct attribute_spec c4x_attribute_table
[];
188 static void c4x_insert_attributes (tree
, tree
*);
189 static void c4x_asm_named_section (const char *, unsigned int, tree
);
190 static int c4x_adjust_cost (rtx
, rtx
, rtx
, int);
191 static void c4x_globalize_label (FILE *, const char *);
192 static bool c4x_rtx_costs (rtx
, int, int, int *);
193 static int c4x_address_cost (rtx
);
194 static void c4x_init_libfuncs (void);
195 static void c4x_external_libcall (rtx
);
196 static rtx
c4x_struct_value_rtx (tree
, int);
197 static tree
c4x_gimplify_va_arg_expr (tree
, tree
, tree
*, tree
*);
199 /* Initialize the GCC target structure. */
200 #undef TARGET_ASM_BYTE_OP
201 #define TARGET_ASM_BYTE_OP "\t.word\t"
202 #undef TARGET_ASM_ALIGNED_HI_OP
203 #define TARGET_ASM_ALIGNED_HI_OP NULL
204 #undef TARGET_ASM_ALIGNED_SI_OP
205 #define TARGET_ASM_ALIGNED_SI_OP NULL
206 #undef TARGET_ASM_FILE_START
207 #define TARGET_ASM_FILE_START c4x_file_start
208 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
209 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
210 #undef TARGET_ASM_FILE_END
211 #define TARGET_ASM_FILE_END c4x_file_end
213 #undef TARGET_ASM_EXTERNAL_LIBCALL
214 #define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall
216 /* Play safe, not the fastest code. */
217 #undef TARGET_DEFAULT_TARGET_FLAGS
218 #define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
219 | MASK_PARALLEL_MPY | MASK_RPTB)
220 #undef TARGET_HANDLE_OPTION
221 #define TARGET_HANDLE_OPTION c4x_handle_option
223 #undef TARGET_ATTRIBUTE_TABLE
224 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
226 #undef TARGET_INSERT_ATTRIBUTES
227 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
229 #undef TARGET_INIT_BUILTINS
230 #define TARGET_INIT_BUILTINS c4x_init_builtins
232 #undef TARGET_EXPAND_BUILTIN
233 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
235 #undef TARGET_SCHED_ADJUST_COST
236 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
238 #undef TARGET_ASM_GLOBALIZE_LABEL
239 #define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
241 #undef TARGET_RTX_COSTS
242 #define TARGET_RTX_COSTS c4x_rtx_costs
243 #undef TARGET_ADDRESS_COST
244 #define TARGET_ADDRESS_COST c4x_address_cost
246 #undef TARGET_MACHINE_DEPENDENT_REORG
247 #define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg
249 #undef TARGET_INIT_LIBFUNCS
250 #define TARGET_INIT_LIBFUNCS c4x_init_libfuncs
252 #undef TARGET_STRUCT_VALUE_RTX
253 #define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx
255 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
256 #define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr
258 struct gcc_target targetm
= TARGET_INITIALIZER
;
260 /* Implement TARGET_HANDLE_OPTION. */
/* NOTE(review): this listing is garbled -- the `static bool` return type
   (see the forward declaration earlier in this file), the braces and the
   `switch (code)` scaffolding were dropped by the extraction.  The visible
   cases show each -m30/-m31/-m32/-m33/-m40/-m44 option setting
   c4x_cpu_version to the matching CPU number and accepting the option.  */
263 c4x_handle_option (size_t code
, const char *arg
, int value
)
267 case OPT_m30
: c4x_cpu_version
= 30; return true;
268 case OPT_m31
: c4x_cpu_version
= 31; return true;
269 case OPT_m32
: c4x_cpu_version
= 32; return true;
270 case OPT_m33
: c4x_cpu_version
= 33; return true;
271 case OPT_m40
: c4x_cpu_version
= 40; return true;
272 case OPT_m44
: c4x_cpu_version
= 44; return true;
/* NOTE(review): an -mcpu=NN style case presumably follows here: a leading
   'c' or 'C' in ARG is skipped and the numeric value validated against the
   known CPU numbers below -- confirm against the upstream c4x.c.  */
275 if (arg
[0] == 'c' || arg
[0] == 'C')
280 case 30: case 31: case 32: case 33: case 40: case 44:
281 c4x_cpu_version
= value
;
291 /* Override command line options.
292 Called once after all options have been parsed.
293 Mostly we process the processor
294 type and sometimes adjust other TARGET_ options. */
/* NOTE(review): the `void` return type, braces, and the conditions
   guarding the two target_flags updates below were lost in extraction
   (which CPU family / optimization level they test is not visible here)
   -- restore them from the upstream c4x.c before editing.  */
297 c4x_override_options (void)
299 /* Convert foo / 8.0 into foo * 0.125, etc. */
300 set_fast_math_flags (1);
302 /* We should phase out the following at some stage.
303 This provides compatibility with the old -mno-aliases option. */
304 if (! TARGET_ALIASES
&& ! flag_argument_noalias
)
305 flag_argument_noalias
= 1;
/* Enables integer multiply and delayed-branch masks (guard dropped).  */
308 target_flags
|= MASK_MPYI
| MASK_DB
;
/* Disables repeat-block and parallel-insn masks (guard dropped).  */
311 target_flags
&= ~(MASK_RPTB
| MASK_PARALLEL
);
313 if (!TARGET_PARALLEL
)
314 target_flags
&= ~MASK_PARALLEL_MPY
;
318 /* This is called before c4x_override_options. */
321 c4x_optimization_options (int level ATTRIBUTE_UNUSED
,
322 int size ATTRIBUTE_UNUSED
)
324 /* Scheduling before register allocation can screw up global
325 register allocation, especially for functions that use MPY||ADD
326 instructions. The benefit we gain we get by scheduling before
327 register allocation is probably marginal anyhow. */
328 flag_schedule_insns
= 0;
332 /* Write an ASCII string. */
334 #define C4X_ASCII_LIMIT 40
/* NOTE(review): emits LEN bytes at PTR to STREAM as `.byte` assembler
   directives, buffering printable runs in SBUF (quoted) and emitting
   non-printable bytes as decimal.  This listing is garbled: the return
   type, braces, and the buffer-flush/line-wrap logic between the visible
   fprintf calls were dropped -- consult upstream c4x.c before editing.  */
337 c4x_output_ascii (FILE *stream
, const char *ptr
, int len
)
339 char sbuf
[C4X_ASCII_LIMIT
+ 1];
340 int s
, l
, special
, first
= 1, onlys
;
343 fprintf (stream
, "\t.byte\t");
345 for (s
= l
= 0; len
> 0; --len
, ++ptr
)
349 /* Escape " and \ with a \". */
350 special
= *ptr
== '\"' || *ptr
== '\\';
352 /* If printable - add to buff. */
353 if ((! TARGET_TI
|| ! special
) && *ptr
>= 0x20 && *ptr
< 0x7f)
358 if (s
< C4X_ASCII_LIMIT
- 1)
/* Flush the quoted buffer when it fills.  */
373 fprintf (stream
, "\"%s\"", sbuf
);
/* For TI assemblers, wrap long output lines (l tracks line length).  */
375 if (TARGET_TI
&& l
>= 80 && len
> 1)
377 fprintf (stream
, "\n\t.byte\t");
/* Non-printable byte: emit as a decimal value.  */
395 fprintf (stream
, "%d", *ptr
);
397 if (TARGET_TI
&& l
>= 80 && len
> 1)
399 fprintf (stream
, "\n\t.byte\t");
/* Final flush of any buffered printable characters.  */
410 fprintf (stream
, "\"%s\"", sbuf
);
413 fputc ('\n', stream
);
/* NOTE(review): returns nonzero if hard register REGNO can hold a value
   of MODE.  The `int` return type, braces, the `switch (mode)` header,
   the `break`s/`#if` guards between cases, and the tail of the HImode
   condition (lines after `IS_INT_REGNO (regno + 1)`, presumably an
   even-regno constraint) were lost in extraction -- confirm upstream.  */
418 c4x_hard_regno_mode_ok (unsigned int regno
, enum machine_mode mode
)
423 case Pmode
: /* Pointer (24/32 bits). */
425 case QImode
: /* Integer (32 bits). */
426 return IS_INT_REGNO (regno
);
428 case QFmode
: /* Float, Double (32 bits). */
429 case HFmode
: /* Long Double (40 bits). */
430 return IS_EXT_REGNO (regno
);
432 case CCmode
: /* Condition Codes. */
433 case CC_NOOVmode
: /* Condition Codes. */
434 return IS_ST_REGNO (regno
);
436 case HImode
: /* Long Long (64 bits). */
437 /* We need two registers to store long longs. Note that
438 it is much easier to constrain the first register
439 to start on an even boundary. */
440 return IS_INT_REGNO (regno
)
441 && IS_INT_REGNO (regno
+ 1)
445 return 0; /* We don't support these modes. */
/* Return nonzero if hard register REGNO1 can be renamed to REGNO2.  */
int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}
471 /* The TI C3x C compiler register argument runtime model uses 6 registers,
472 AR2, R2, R3, RC, RS, RE.
474 The first two floating point arguments (float, double, long double)
475 that are found scanning from left to right are assigned to R2 and R3.
477 The remaining integer (char, short, int, long) or pointer arguments
478 are assigned to the remaining registers in the order AR2, R2, R3,
479 RC, RS, RE when scanning left to right, except for the last named
480 argument prior to an ellipsis denoting variable number of
481 arguments. We don't have to worry about the latter condition since
482 function.c treats the last named argument as anonymous (unnamed).
484 All arguments that cannot be passed in registers are pushed onto
485 the stack in reverse order (right to left). GCC handles that for us.
487 c4x_init_cumulative_args() is called at the start, so we can parse
488 the args to see how many floating point arguments and how many
489 integer (or pointer) arguments there are. c4x_function_arg() is
490 then called (sometimes repeatedly) for each argument (parsed left
491 to right) to obtain the register to pass the argument in, or zero
492 if the argument is to be passed on the stack. Once the compiler is
493 happy, c4x_function_arg_advance() is called.
495 Don't use R0 to pass arguments in, we use 0 to indicate a stack
498 static const int c4x_int_reglist
[3][6] =
500 {AR2_REGNO
, R2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
},
501 {AR2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0},
502 {AR2_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0, 0}
505 static const int c4x_fp_reglist
[2] = {R2_REGNO
, R3_REGNO
};
508 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
509 function whose data type is FNTYPE.
510 For a library call, FNTYPE is 0. */
/* NOTE(review): this listing is garbled -- the return type, braces,
   the statements that count float/int arguments into cum->floats and
   cum->ints inside the classification branches, and the guard around
   the fprintf diagnostics (presumably a debug-only flag -- confirm)
   were dropped by the extraction.  */
513 c4x_init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
)
515 tree param
, next_param
;
517 cum
->floats
= cum
->ints
= 0;
/* Debug trace of the call (guard condition dropped).  */
524 fprintf (stderr
, "\nc4x_init_cumulative_args (");
527 tree ret_type
= TREE_TYPE (fntype
);
529 fprintf (stderr
, "fntype code = %s, ret code = %s",
530 tree_code_name
[(int) TREE_CODE (fntype
)],
531 tree_code_name
[(int) TREE_CODE (ret_type
)]);
534 fprintf (stderr
, "no fntype");
537 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
/* A prototype exists when the function type carries argument types.  */
540 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
/* Walk the declared parameter types, classifying each by mode.  */
542 for (param
= fntype
? TYPE_ARG_TYPES (fntype
) : 0;
543 param
; param
= next_param
)
547 next_param
= TREE_CHAIN (param
);
549 type
= TREE_VALUE (param
);
550 if (type
&& type
!= void_type_node
)
552 enum machine_mode mode
;
554 /* If the last arg doesn't have void type then we have
555 variable arguments. */
559 if ((mode
= TYPE_MODE (type
)))
561 if (! targetm
.calls
.must_pass_in_stack (mode
, type
))
563 /* Look for float, double, or long double argument. */
564 if (mode
== QFmode
|| mode
== HFmode
)
566 /* Look for integer, enumeral, boolean, char, or pointer
568 else if (mode
== QImode
|| mode
== Pmode
)
577 fprintf (stderr
, "%s%s, args = %d)\n",
578 cum
->prototype
? ", prototype" : "",
579 cum
->var
? ", variable args" : "",
584 /* Update the data in CUM to advance over an argument
585 of mode MODE and data type TYPE.
586 (TYPE is null for libcalls where that information may not be available.) */
/* NOTE(review): garbled listing -- the return type, braces, and the
   increment statements inside each classification branch (presumably
   bumping cum->floats / cum->ints) were dropped by the extraction;
   confirm against the upstream c4x.c.  */
589 c4x_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
590 tree type
, int named
)
/* Debug trace (guard condition dropped).  */
593 fprintf (stderr
, "c4x_function_adv(mode=%s, named=%d)\n\n",
594 GET_MODE_NAME (mode
), named
);
/* Classify register-passable prototyped arguments by mode.  */
598 && ! targetm
.calls
.must_pass_in_stack (mode
, type
))
600 /* Look for float, double, or long double argument. */
601 if (mode
== QFmode
|| mode
== HFmode
)
603 /* Look for integer, enumeral, boolean, char, or pointer argument. */
604 else if (mode
== QImode
|| mode
== Pmode
)
607 else if (! TARGET_MEMPARM
&& ! type
)
609 /* Handle libcall arguments. */
610 if (mode
== QFmode
|| mode
== HFmode
)
612 else if (mode
== QImode
|| mode
== Pmode
)
619 /* Define where to put the arguments to a function. Value is zero to
620 push the argument on the stack, or a hard register in which to
623 MODE is the argument's machine mode.
624 TYPE is the data type of the argument (as a tree).
625 This is null for libcalls where that information may
627 CUM is a variable of type CUMULATIVE_ARGS which gives info about
628 the preceding args and about the function being called.
629 NAMED is nonzero if this argument is a named parameter
630 (otherwise it is an extra parameter matching an ellipsis). */
/* NOTE(review): garbled listing -- the return type, braces, several
   guard conditions, and the stack-return path (the `reg == 0` case)
   were dropped by the extraction; confirm against upstream c4x.c.  */
633 c4x_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
634 tree type
, int named
)
636 int reg
= 0; /* Default to passing argument on stack. */
640 /* We can handle at most 2 floats in R2, R3. */
641 cum
->maxfloats
= (cum
->floats
> 2) ? 2 : cum
->floats
;
643 /* We can handle at most 6 integers minus number of floats passed
645 cum
->maxints
= (cum
->ints
> 6 - cum
->maxfloats
) ?
646 6 - cum
->maxfloats
: cum
->ints
;
648 /* If there is no prototype, assume all the arguments are integers. */
649 if (! cum
->prototype
)
652 cum
->ints
= cum
->floats
= 0;
656 /* This marks the last argument. We don't need to pass this through
658 if (type
== void_type_node
)
/* Prototyped argument that can go in a register.  */
664 && ! targetm
.calls
.must_pass_in_stack (mode
, type
))
666 /* Look for float, double, or long double argument. */
667 if (mode
== QFmode
|| mode
== HFmode
)
669 if (cum
->floats
< cum
->maxfloats
)
670 reg
= c4x_fp_reglist
[cum
->floats
];
672 /* Look for integer, enumeral, boolean, char, or pointer argument. */
673 else if (mode
== QImode
|| mode
== Pmode
)
675 if (cum
->ints
< cum
->maxints
)
676 reg
= c4x_int_reglist
[cum
->maxfloats
][cum
->ints
];
679 else if (! TARGET_MEMPARM
&& ! type
)
681 /* We could use a different argument calling model for libcalls,
682 since we're only calling functions in libgcc. Thus we could
683 pass arguments for long longs in registers rather than on the
684 stack. In the meantime, use the odd TI format. We make the
685 assumption that we won't have more than two floating point
686 args, six integer args, and that all the arguments are of the
688 if (mode
== QFmode
|| mode
== HFmode
)
689 reg
= c4x_fp_reglist
[cum
->floats
];
690 else if (mode
== QImode
|| mode
== Pmode
)
691 reg
= c4x_int_reglist
[0][cum
->ints
];
/* Debug trace of the decision (guard condition dropped).  */
696 fprintf (stderr
, "c4x_function_arg(mode=%s, named=%d",
697 GET_MODE_NAME (mode
), named
);
699 fprintf (stderr
, ", reg=%s", reg_names
[reg
]);
701 fprintf (stderr
, ", stack");
702 fprintf (stderr
, ")\n");
/* Register case: hand back the chosen hard register.  */
705 return gen_rtx_REG (mode
, reg
);
710 /* C[34]x arguments grow in weird ways (downwards) that the standard
711 varargs stuff can't handle.. */
/* NOTE(review): garbled listing -- the `static tree` return type,
   braces, the local declarations (`t`, `indirect`), the guards around
   the indirect path, and the final `return t;` were dropped by the
   extraction; confirm against the upstream c4x.c.  */
714 c4x_gimplify_va_arg_expr (tree valist
, tree type
,
715 tree
*pre_p ATTRIBUTE_UNUSED
,
716 tree
*post_p ATTRIBUTE_UNUSED
)
/* Decide whether this type is passed by reference.  */
721 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
723 type
= build_pointer_type (type
);
/* Arguments grow downwards: pre-decrement the arg pointer then load.  */
725 t
= build2 (PREDECREMENT_EXPR
, TREE_TYPE (valist
), valist
,
726 build_int_cst (NULL_TREE
, int_size_in_bytes (type
)));
727 t
= fold_convert (build_pointer_type (type
), t
);
728 t
= build_va_arg_indirect_ref (t
);
/* Extra dereference for the by-reference case.  */
731 t
= build_va_arg_indirect_ref (t
);
738 c4x_isr_reg_used_p (unsigned int regno
)
740 /* Don't save/restore FP or ST, we handle them separately. */
741 if (regno
== FRAME_POINTER_REGNUM
742 || IS_ST_REGNO (regno
))
745 /* We could be a little smarter abut saving/restoring DP.
746 We'll only save if for the big memory model or if
747 we're paranoid. ;-) */
748 if (IS_DP_REGNO (regno
))
749 return ! TARGET_SMALL
|| TARGET_PARANOID
;
751 /* Only save/restore regs in leaf function that are used. */
752 if (c4x_leaf_function
)
753 return regs_ever_live
[regno
] && fixed_regs
[regno
] == 0;
755 /* Only save/restore regs that are used by the ISR and regs
756 that are likely to be used by functions the ISR calls
757 if they are not fixed. */
758 return IS_EXT_REGNO (regno
)
759 || ((regs_ever_live
[regno
] || call_used_regs
[regno
])
760 && fixed_regs
[regno
] == 0);
765 c4x_leaf_function_p (void)
767 /* A leaf function makes no calls, so we only need
768 to save/restore the registers we actually use.
769 For the global variable leaf_function to be set, we need
770 to define LEAF_REGISTERS and all that it entails.
771 Let's check ourselves.... */
773 if (lookup_attribute ("leaf_pretend",
774 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
777 /* Use the leaf_pretend attribute at your own risk. This is a hack
778 to speed up ISRs that call a function infrequently where the
779 overhead of saving and restoring the additional registers is not
780 warranted. You must save and restore the additional registers
781 required by the called function. Caveat emptor. Here's enough
784 if (leaf_function_p ())
792 c4x_naked_function_p (void)
796 type
= TREE_TYPE (current_function_decl
);
797 return lookup_attribute ("naked", TYPE_ATTRIBUTES (type
)) != NULL
;
802 c4x_interrupt_function_p (void)
804 const char *cfun_name
;
805 if (lookup_attribute ("interrupt",
806 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
809 /* Look for TI style c_intnn. */
810 cfun_name
= current_function_name ();
811 return cfun_name
[0] == 'c'
812 && cfun_name
[1] == '_'
813 && cfun_name
[2] == 'i'
814 && cfun_name
[3] == 'n'
815 && cfun_name
[4] == 't'
816 && ISDIGIT (cfun_name
[5])
817 && ISDIGIT (cfun_name
[6]);
/* NOTE(review): c4x_expand_prologue -- emits the prologue RTL for the
   current function.  This listing is heavily garbled: the `void` return
   type, braces, the `int regno; rtx insn;` style declarations, the
   `dont_push_ar3` setup, several guard conditions, and the GEN_INT size
   operands of the add insns were dropped by the extraction.  Do not
   edit without consulting the upstream c4x.c.  */
821 c4x_expand_prologue (void)
824 int size
= get_frame_size ();
827 /* In functions where ar3 is not used but frame pointers are still
828 specified, frame pointers are not adjusted (if >= -O2) and this
829 is used so it won't needlessly push the frame pointer. */
832 /* For __naked__ function don't build a prologue. */
833 if (c4x_naked_function_p ())
838 /* For __interrupt__ function build specific prologue. */
839 if (c4x_interrupt_function_p ())
841 c4x_leaf_function
= c4x_leaf_function_p ();
/* Save the status register first.  */
843 insn
= emit_insn (gen_push_st ());
844 RTX_FRAME_RELATED_P (insn
) = 1;
/* Push the old frame pointer (AR3) and point it at the new frame.  */
847 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
848 RTX_FRAME_RELATED_P (insn
) = 1;
849 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
850 gen_rtx_REG (QImode
, SP_REGNO
)));
851 RTX_FRAME_RELATED_P (insn
) = 1;
852 /* We require that an ISR uses fewer than 32768 words of
853 local variables, otherwise we have to go to lots of
854 effort to save a register, load it with the desired size,
855 adjust the stack pointer, and then restore the modified
856 register. Frankly, I think it is a poor ISR that
857 requires more than 32767 words of local temporary
860 error ("ISR %s requires %d words of local vars, max is 32767",
861 current_function_name (), size
);
/* Allocate the ISR's local frame (size operand dropped in listing).  */
863 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
864 gen_rtx_REG (QImode
, SP_REGNO
),
866 RTX_FRAME_RELATED_P (insn
) = 1;
/* Save every register the ISR might clobber.  */
868 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
870 if (c4x_isr_reg_used_p (regno
))
872 if (regno
== DP_REGNO
)
874 insn
= emit_insn (gen_push_dp ());
875 RTX_FRAME_RELATED_P (insn
) = 1;
879 insn
= emit_insn (gen_pushqi (gen_rtx_REG (QImode
, regno
)));
880 RTX_FRAME_RELATED_P (insn
) = 1;
/* Extended registers are saved twice: QI part and QF part.  */
881 if (IS_EXT_REGNO (regno
))
883 insn
= emit_insn (gen_pushqf
884 (gen_rtx_REG (QFmode
, regno
)));
885 RTX_FRAME_RELATED_P (insn
) = 1;
890 /* We need to clear the repeat mode flag if the ISR is
891 going to use a RPTB instruction or uses the RC, RS, or RE
893 if (regs_ever_live
[RC_REGNO
]
894 || regs_ever_live
[RS_REGNO
]
895 || regs_ever_live
[RE_REGNO
])
/* Clear the RM bit (0x100) in the status register.  */
897 insn
= emit_insn (gen_andn_st (GEN_INT(~0x100)));
898 RTX_FRAME_RELATED_P (insn
) = 1;
901 /* Reload DP reg if we are paranoid about some turkey
902 violating small memory model rules. */
903 if (TARGET_SMALL
&& TARGET_PARANOID
)
905 insn
= emit_insn (gen_set_ldp_prologue
906 (gen_rtx_REG (QImode
, DP_REGNO
),
907 gen_rtx_SYMBOL_REF (QImode
, "data_sec")));
908 RTX_FRAME_RELATED_P (insn
) = 1;
/* Ordinary (non-interrupt) function prologue from here on.  */
913 if (frame_pointer_needed
)
916 || (current_function_args_size
!= 0)
919 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
920 RTX_FRAME_RELATED_P (insn
) = 1;
921 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
922 gen_rtx_REG (QImode
, SP_REGNO
)));
923 RTX_FRAME_RELATED_P (insn
) = 1;
928 /* Since ar3 is not used, we don't need to push it. */
934 /* If we use ar3, we need to push it. */
936 if ((size
!= 0) || (current_function_args_size
!= 0))
938 /* If we are omitting the frame pointer, we still have
939 to make space for it so the offsets are correct
940 unless we don't use anything on the stack at all. */
947 /* Local vars are too big, it will take multiple operations
/* Build the 32-bit frame size in R1 (high half, shift, or low half).  */
951 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
952 GEN_INT(size
>> 16)));
953 RTX_FRAME_RELATED_P (insn
) = 1;
954 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
955 gen_rtx_REG (QImode
, R1_REGNO
),
957 RTX_FRAME_RELATED_P (insn
) = 1;
961 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
962 GEN_INT(size
& ~0xffff)));
963 RTX_FRAME_RELATED_P (insn
) = 1;
965 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
966 gen_rtx_REG (QImode
, R1_REGNO
),
967 GEN_INT(size
& 0xffff)));
968 RTX_FRAME_RELATED_P (insn
) = 1;
/* SP += R1 to allocate the large frame.  */
969 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
970 gen_rtx_REG (QImode
, SP_REGNO
),
971 gen_rtx_REG (QImode
, R1_REGNO
)));
972 RTX_FRAME_RELATED_P (insn
) = 1;
976 /* Local vars take up less than 32767 words, so we can directly
/* Small frame: single add of the size constant (operand dropped).  */
978 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
979 gen_rtx_REG (QImode
, SP_REGNO
),
981 RTX_FRAME_RELATED_P (insn
) = 1;
/* Save the call-saved registers this function uses.  */
984 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
986 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
988 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
990 if (TARGET_PRESERVE_FLOAT
)
992 insn
= emit_insn (gen_pushqi
993 (gen_rtx_REG (QImode
, regno
)));
994 RTX_FRAME_RELATED_P (insn
) = 1;
996 insn
= emit_insn (gen_pushqf (gen_rtx_REG (QFmode
, regno
)));
997 RTX_FRAME_RELATED_P (insn
) = 1;
999 else if ((! dont_push_ar3
) || (regno
!= AR3_REGNO
))
1001 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, regno
)));
1002 RTX_FRAME_RELATED_P (insn
) = 1;
/* NOTE(review): c4x_expand_epilogue -- emits the epilogue RTL, mirroring
   c4x_expand_prologue.  This listing is heavily garbled: the `void`
   return type, braces, local declarations, the `dont_pop_ar3` setup,
   several guard conditions, and some insn operands (e.g. the subqi3
   size constants and part of the R2 restore) were dropped by the
   extraction.  Do not edit without consulting the upstream c4x.c.  */
1011 c4x_expand_epilogue(void)
1017 int size
= get_frame_size ();
1019 /* For __naked__ function build no epilogue. */
1020 if (c4x_naked_function_p ())
1022 insn
= emit_jump_insn (gen_return_from_epilogue ());
1023 RTX_FRAME_RELATED_P (insn
) = 1;
1027 /* For __interrupt__ function build specific epilogue. */
1028 if (c4x_interrupt_function_p ())
/* Pop the ISR-saved registers in reverse order of the prologue.  */
1030 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; --regno
)
1032 if (! c4x_isr_reg_used_p (regno
))
1034 if (regno
== DP_REGNO
)
1036 insn
= emit_insn (gen_pop_dp ());
1037 RTX_FRAME_RELATED_P (insn
) = 1;
1041 /* We have to use unspec because the compiler will delete insns
1042 that are not call-saved. */
1043 if (IS_EXT_REGNO (regno
))
1045 insn
= emit_insn (gen_popqf_unspec
1046 (gen_rtx_REG (QFmode
, regno
)));
1047 RTX_FRAME_RELATED_P (insn
) = 1;
1049 insn
= emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode
, regno
)));
1050 RTX_FRAME_RELATED_P (insn
) = 1;
/* Deallocate the ISR frame (size operand dropped in listing).  */
1055 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1056 gen_rtx_REG (QImode
, SP_REGNO
),
1058 RTX_FRAME_RELATED_P (insn
) = 1;
/* Restore the old frame pointer and status register, then return.  */
1059 insn
= emit_insn (gen_popqi
1060 (gen_rtx_REG (QImode
, AR3_REGNO
)));
1061 RTX_FRAME_RELATED_P (insn
) = 1;
1063 insn
= emit_insn (gen_pop_st ());
1064 RTX_FRAME_RELATED_P (insn
) = 1;
1065 insn
= emit_jump_insn (gen_return_from_interrupt_epilogue ());
1066 RTX_FRAME_RELATED_P (insn
) = 1;
/* Ordinary (non-interrupt) function epilogue from here on.  */
1070 if (frame_pointer_needed
)
1073 || (current_function_args_size
!= 0)
/* Load the saved return address into R2 (insn head dropped).  */
1077 (gen_movqi (gen_rtx_REG (QImode
, R2_REGNO
),
1078 gen_rtx_MEM (QImode
,
1080 (QImode
, gen_rtx_REG (QImode
,
1083 RTX_FRAME_RELATED_P (insn
) = 1;
1085 /* We already have the return value and the fp,
1086 so we need to add those to the stack. */
1093 /* Since ar3 is not used for anything, we don't need to
1100 dont_pop_ar3
= 0; /* If we use ar3, we need to pop it. */
1101 if (size
|| current_function_args_size
)
1103 /* If we are omitting the frame pointer, we still have
1104 to make space for it so the offsets are correct
1105 unless we don't use anything on the stack at all. */
1110 /* Now restore the saved registers, putting in the delayed branch
1112 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1114 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1116 if (regno
== AR3_REGNO
&& dont_pop_ar3
)
1119 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
1121 insn
= emit_insn (gen_popqf_unspec
1122 (gen_rtx_REG (QFmode
, regno
)));
1123 RTX_FRAME_RELATED_P (insn
) = 1;
1124 if (TARGET_PRESERVE_FLOAT
)
1126 insn
= emit_insn (gen_popqi_unspec
1127 (gen_rtx_REG (QImode
, regno
)));
1128 RTX_FRAME_RELATED_P (insn
) = 1;
1133 insn
= emit_insn (gen_popqi (gen_rtx_REG (QImode
, regno
)));
1134 RTX_FRAME_RELATED_P (insn
) = 1;
1139 if (frame_pointer_needed
)
1142 || (current_function_args_size
!= 0)
1145 /* Restore the old FP. */
/* (insn head dropped in listing)  */
1148 (gen_rtx_REG (QImode
, AR3_REGNO
),
1149 gen_rtx_MEM (QImode
, gen_rtx_REG (QImode
, AR3_REGNO
))));
1151 RTX_FRAME_RELATED_P (insn
) = 1;
1157 /* Local vars are too big, it will take multiple operations
/* Build the 32-bit frame size in R3, mirroring the prologue's R1.  */
1161 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1162 GEN_INT(size
>> 16)));
1163 RTX_FRAME_RELATED_P (insn
) = 1;
1164 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1165 gen_rtx_REG (QImode
, R3_REGNO
),
1167 RTX_FRAME_RELATED_P (insn
) = 1;
1171 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1172 GEN_INT(size
& ~0xffff)));
1173 RTX_FRAME_RELATED_P (insn
) = 1;
1175 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1176 gen_rtx_REG (QImode
, R3_REGNO
),
1177 GEN_INT(size
& 0xffff)));
1178 RTX_FRAME_RELATED_P (insn
) = 1;
/* SP -= R3 to deallocate the large frame.  */
1179 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1180 gen_rtx_REG (QImode
, SP_REGNO
),
1181 gen_rtx_REG (QImode
, R3_REGNO
)));
1182 RTX_FRAME_RELATED_P (insn
) = 1;
1186 /* Local vars take up less than 32768 words, so we can directly
1187 subtract the number. */
1188 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1189 gen_rtx_REG (QImode
, SP_REGNO
),
1191 RTX_FRAME_RELATED_P (insn
) = 1;
/* Return via the address loaded into R2 earlier ...  */
1196 insn
= emit_jump_insn (gen_return_indirect_internal
1197 (gen_rtx_REG (QImode
, R2_REGNO
)));
1198 RTX_FRAME_RELATED_P (insn
) = 1;
/* ... or via the plain epilogue return.  */
1202 insn
= emit_jump_insn (gen_return_from_epilogue ());
1203 RTX_FRAME_RELATED_P (insn
) = 1;
/* NOTE(review): determines whether the function needs no epilogue at
   all.  The return type, braces, `int regno;`, one condition line
   (between the args-size and frame-size tests), and the return
   statements were dropped by the extraction -- which branch returns
   nonzero cannot be read from this listing; confirm upstream.  */
1210 c4x_null_epilogue_p (void)
1214 if (reload_completed
1215 && ! c4x_naked_function_p ()
1216 && ! c4x_interrupt_function_p ()
1217 && ! current_function_calls_alloca
1218 && ! current_function_args_size
1220 && ! get_frame_size ())
/* Scan for any call-saved register (other than AR3) needing a pop.  */
1222 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1223 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
]
1224 && (regno
!= AR3_REGNO
))
1233 c4x_emit_move_sequence (rtx
*operands
, enum machine_mode mode
)
1235 rtx op0
= operands
[0];
1236 rtx op1
= operands
[1];
1238 if (! reload_in_progress
1241 && ! (stik_const_operand (op1
, mode
) && ! push_operand (op0
, mode
)))
1242 op1
= force_reg (mode
, op1
);
1244 if (GET_CODE (op1
) == LO_SUM
1245 && GET_MODE (op1
) == Pmode
1246 && dp_reg_operand (XEXP (op1
, 0), mode
))
1248 /* expand_increment will sometimes create a LO_SUM immediate
1249 address. Undo this silliness. */
1250 op1
= XEXP (op1
, 1);
1253 if (symbolic_address_operand (op1
, mode
))
1255 if (TARGET_LOAD_ADDRESS
)
1257 /* Alias analysis seems to do a better job if we force
1258 constant addresses to memory after reload. */
1259 emit_insn (gen_load_immed_address (op0
, op1
));
1264 /* Stick symbol or label address into the constant pool. */
1265 op1
= force_const_mem (Pmode
, op1
);
1268 else if (mode
== HFmode
&& CONSTANT_P (op1
) && ! LEGITIMATE_CONSTANT_P (op1
))
1270 /* We could be a lot smarter about loading some of these
1272 op1
= force_const_mem (mode
, op1
);
1275 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1276 and emit associated (HIGH (SYMREF)) if large memory model.
1277 c4x_legitimize_address could be used to do this,
1278 perhaps by calling validize_address. */
1279 if (TARGET_EXPOSE_LDP
1280 && ! (reload_in_progress
|| reload_completed
)
1281 && GET_CODE (op1
) == MEM
1282 && symbolic_address_operand (XEXP (op1
, 0), Pmode
))
1284 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1286 emit_insn (gen_set_ldp (dp_reg
, XEXP (op1
, 0)));
1287 op1
= change_address (op1
, mode
,
1288 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op1
, 0)));
1291 if (TARGET_EXPOSE_LDP
1292 && ! (reload_in_progress
|| reload_completed
)
1293 && GET_CODE (op0
) == MEM
1294 && symbolic_address_operand (XEXP (op0
, 0), Pmode
))
1296 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1298 emit_insn (gen_set_ldp (dp_reg
, XEXP (op0
, 0)));
1299 op0
= change_address (op0
, mode
,
1300 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op0
, 0)));
1303 if (GET_CODE (op0
) == SUBREG
1304 && mixed_subreg_operand (op0
, mode
))
1306 /* We should only generate these mixed mode patterns
1307 during RTL generation. If we need do it later on
1308 then we'll have to emit patterns that won't clobber CC. */
1309 if (reload_in_progress
|| reload_completed
)
1311 if (GET_MODE (SUBREG_REG (op0
)) == QImode
)
1312 op0
= SUBREG_REG (op0
);
1313 else if (GET_MODE (SUBREG_REG (op0
)) == HImode
)
1315 op0
= copy_rtx (op0
);
1316 PUT_MODE (op0
, QImode
);
1322 emit_insn (gen_storeqf_int_clobber (op0
, op1
));
1328 if (GET_CODE (op1
) == SUBREG
1329 && mixed_subreg_operand (op1
, mode
))
1331 /* We should only generate these mixed mode patterns
1332 during RTL generation. If we need do it later on
1333 then we'll have to emit patterns that won't clobber CC. */
1334 if (reload_in_progress
|| reload_completed
)
1336 if (GET_MODE (SUBREG_REG (op1
)) == QImode
)
1337 op1
= SUBREG_REG (op1
);
1338 else if (GET_MODE (SUBREG_REG (op1
)) == HImode
)
1340 op1
= copy_rtx (op1
);
1341 PUT_MODE (op1
, QImode
);
1347 emit_insn (gen_loadqf_int_clobber (op0
, op1
));
1354 && reg_operand (op0
, mode
)
1355 && const_int_operand (op1
, mode
)
1356 && ! IS_INT16_CONST (INTVAL (op1
))
1357 && ! IS_HIGH_CONST (INTVAL (op1
)))
1359 emit_insn (gen_loadqi_big_constant (op0
, op1
));
1364 && reg_operand (op0
, mode
)
1365 && const_int_operand (op1
, mode
))
1367 emit_insn (gen_loadhi_big_constant (op0
, op1
));
1371 /* Adjust operands in case we have modified them. */
1375 /* Emit normal pattern. */
1381 c4x_emit_libcall (rtx libcall
, enum rtx_code code
,
1382 enum machine_mode dmode
, enum machine_mode smode
,
1383 int noperands
, rtx
*operands
)
1393 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 1,
1394 operands
[1], smode
);
1395 equiv
= gen_rtx_fmt_e (code
, dmode
, operands
[1]);
1399 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 2,
1400 operands
[1], smode
, operands
[2], smode
);
1401 equiv
= gen_rtx_fmt_ee (code
, dmode
, operands
[1], operands
[2]);
1408 insns
= get_insns ();
1410 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1415 c4x_emit_libcall3 (rtx libcall
, enum rtx_code code
,
1416 enum machine_mode mode
, rtx
*operands
)
1418 c4x_emit_libcall (libcall
, code
, mode
, mode
, 3, operands
);
1423 c4x_emit_libcall_mulhi (rtx libcall
, enum rtx_code code
,
1424 enum machine_mode mode
, rtx
*operands
)
1431 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, mode
, 2,
1432 operands
[1], mode
, operands
[2], mode
);
1433 equiv
= gen_rtx_TRUNCATE (mode
,
1434 gen_rtx_LSHIFTRT (HImode
,
1435 gen_rtx_MULT (HImode
,
1436 gen_rtx_fmt_e (code
, HImode
, operands
[1]),
1437 gen_rtx_fmt_e (code
, HImode
, operands
[2])),
1439 insns
= get_insns ();
1441 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1446 c4x_legitimate_address_p (enum machine_mode mode
, rtx addr
, int strict
)
1448 rtx base
= NULL_RTX
; /* Base register (AR0-AR7). */
1449 rtx indx
= NULL_RTX
; /* Index register (IR0,IR1). */
1450 rtx disp
= NULL_RTX
; /* Displacement. */
1453 code
= GET_CODE (addr
);
1456 /* Register indirect with auto increment/decrement. We don't
1457 allow SP here---push_operand should recognize an operand
1458 being pushed on the stack. */
1463 if (mode
!= QImode
&& mode
!= QFmode
)
1467 base
= XEXP (addr
, 0);
1475 rtx op0
= XEXP (addr
, 0);
1476 rtx op1
= XEXP (addr
, 1);
1478 if (mode
!= QImode
&& mode
!= QFmode
)
1482 || (GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
))
1484 base
= XEXP (op1
, 0);
1487 if (REGNO (base
) != REGNO (op0
))
1489 if (REG_P (XEXP (op1
, 1)))
1490 indx
= XEXP (op1
, 1);
1492 disp
= XEXP (op1
, 1);
1496 /* Register indirect. */
1501 /* Register indirect with displacement or index. */
1504 rtx op0
= XEXP (addr
, 0);
1505 rtx op1
= XEXP (addr
, 1);
1506 enum rtx_code code0
= GET_CODE (op0
);
1513 base
= op0
; /* Base + index. */
1515 if (IS_INDEX_REG (base
) || IS_ADDR_REG (indx
))
1523 base
= op0
; /* Base + displacement. */
1534 /* Direct addressing with DP register. */
1537 rtx op0
= XEXP (addr
, 0);
1538 rtx op1
= XEXP (addr
, 1);
1540 /* HImode and HFmode direct memory references aren't truly
1541 offsettable (consider case at end of data page). We
1542 probably get better code by loading a pointer and using an
1543 indirect memory reference. */
1544 if (mode
== HImode
|| mode
== HFmode
)
1547 if (!REG_P (op0
) || REGNO (op0
) != DP_REGNO
)
1550 if ((GET_CODE (op1
) == SYMBOL_REF
|| GET_CODE (op1
) == LABEL_REF
))
1553 if (GET_CODE (op1
) == CONST
)
1559 /* Direct addressing with some work for the assembler... */
1561 /* Direct addressing. */
1564 if (! TARGET_EXPOSE_LDP
&& ! strict
&& mode
!= HFmode
&& mode
!= HImode
)
1566 /* These need to be converted to a LO_SUM (...).
1567 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1570 /* Do not allow direct memory access to absolute addresses.
1571 This is more pain than it's worth, especially for the
1572 small memory model where we can't guarantee that
1573 this address is within the data page---we don't want
1574 to modify the DP register in the small memory model,
1575 even temporarily, since an interrupt can sneak in.... */
1579 /* Indirect indirect addressing. */
1584 fatal_insn ("using CONST_DOUBLE for address", addr
);
1590 /* Validate the base register. */
1593 /* Check that the address is offsettable for HImode and HFmode. */
1594 if (indx
&& (mode
== HImode
|| mode
== HFmode
))
1597 /* Handle DP based stuff. */
1598 if (REGNO (base
) == DP_REGNO
)
1600 if (strict
&& ! REGNO_OK_FOR_BASE_P (REGNO (base
)))
1602 else if (! strict
&& ! IS_ADDR_OR_PSEUDO_REG (base
))
1606 /* Now validate the index register. */
1609 if (GET_CODE (indx
) != REG
)
1611 if (strict
&& ! REGNO_OK_FOR_INDEX_P (REGNO (indx
)))
1613 else if (! strict
&& ! IS_INDEX_OR_PSEUDO_REG (indx
))
1617 /* Validate displacement. */
1620 if (GET_CODE (disp
) != CONST_INT
)
1622 if (mode
== HImode
|| mode
== HFmode
)
1624 /* The offset displacement must be legitimate. */
1625 if (! IS_DISP8_OFF_CONST (INTVAL (disp
)))
1630 if (! IS_DISP8_CONST (INTVAL (disp
)))
1633 /* Can't add an index with a disp. */
1642 c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED
,
1643 enum machine_mode mode ATTRIBUTE_UNUSED
)
1645 if (GET_CODE (orig
) == SYMBOL_REF
1646 || GET_CODE (orig
) == LABEL_REF
)
1648 if (mode
== HImode
|| mode
== HFmode
)
1650 /* We need to force the address into
1651 a register so that it is offsettable. */
1652 rtx addr_reg
= gen_reg_rtx (Pmode
);
1653 emit_move_insn (addr_reg
, orig
);
1658 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1661 emit_insn (gen_set_ldp (dp_reg
, orig
));
1663 return gen_rtx_LO_SUM (Pmode
, dp_reg
, orig
);
1671 /* Provide the costs of an addressing mode that contains ADDR.
1672 If ADDR is not a valid address, its cost is irrelevant.
1673 This is used in cse and loop optimization to determine
1674 if it is worthwhile storing a common address into a register.
1675 Unfortunately, the C4x address cost depends on other operands. */
1678 c4x_address_cost (rtx addr
)
1680 switch (GET_CODE (addr
))
1691 /* These shouldn't be directly generated. */
1699 rtx op1
= XEXP (addr
, 1);
1701 if (GET_CODE (op1
) == LABEL_REF
|| GET_CODE (op1
) == SYMBOL_REF
)
1702 return TARGET_SMALL
? 3 : 4;
1704 if (GET_CODE (op1
) == CONST
)
1706 rtx offset
= const0_rtx
;
1708 op1
= eliminate_constant_term (op1
, &offset
);
1710 /* ??? These costs need rethinking... */
1711 if (GET_CODE (op1
) == LABEL_REF
)
1714 if (GET_CODE (op1
) != SYMBOL_REF
)
1717 if (INTVAL (offset
) == 0)
1722 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr
);
1728 register rtx op0
= XEXP (addr
, 0);
1729 register rtx op1
= XEXP (addr
, 1);
1731 if (GET_CODE (op0
) != REG
)
1734 switch (GET_CODE (op1
))
1740 /* This cost for REG+REG must be greater than the cost
1741 for REG if we want autoincrement addressing modes. */
1745 /* The following tries to improve GIV combination
1746 in strength reduce but appears not to help. */
1747 if (TARGET_DEVEL
&& IS_UINT5_CONST (INTVAL (op1
)))
1750 if (IS_DISP1_CONST (INTVAL (op1
)))
1753 if (! TARGET_C3X
&& IS_UINT5_CONST (INTVAL (op1
)))
1768 c4x_gen_compare_reg (enum rtx_code code
, rtx x
, rtx y
)
1770 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
1773 if (mode
== CC_NOOVmode
1774 && (code
== LE
|| code
== GE
|| code
== LT
|| code
== GT
))
1777 cc_reg
= gen_rtx_REG (mode
, ST_REGNO
);
1778 emit_insn (gen_rtx_SET (VOIDmode
, cc_reg
,
1779 gen_rtx_COMPARE (mode
, x
, y
)));
1784 c4x_output_cbranch (const char *form
, rtx seq
)
1791 static char str
[100];
1795 delay
= XVECEXP (final_sequence
, 0, 1);
1796 delayed
= ! INSN_ANNULLED_BRANCH_P (seq
);
1797 annultrue
= INSN_ANNULLED_BRANCH_P (seq
) && ! INSN_FROM_TARGET_P (delay
);
1798 annulfalse
= INSN_ANNULLED_BRANCH_P (seq
) && INSN_FROM_TARGET_P (delay
);
1801 cp
= &str
[strlen (str
)];
1826 c4x_print_operand (FILE *file
, rtx op
, int letter
)
1833 case '#': /* Delayed. */
1835 fprintf (file
, "d");
1839 code
= GET_CODE (op
);
1842 case 'A': /* Direct address. */
1843 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== CONST
)
1844 fprintf (file
, "@");
1847 case 'H': /* Sethi. */
1848 output_addr_const (file
, op
);
1851 case 'I': /* Reversed condition. */
1852 code
= reverse_condition (code
);
1855 case 'L': /* Log 2 of constant. */
1856 if (code
!= CONST_INT
)
1857 fatal_insn ("c4x_print_operand: %%L inconsistency", op
);
1858 fprintf (file
, "%d", exact_log2 (INTVAL (op
)));
1861 case 'N': /* Ones complement of small constant. */
1862 if (code
!= CONST_INT
)
1863 fatal_insn ("c4x_print_operand: %%N inconsistency", op
);
1864 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~INTVAL (op
));
1867 case 'K': /* Generate ldp(k) if direct address. */
1870 && GET_CODE (XEXP (op
, 0)) == LO_SUM
1871 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == REG
1872 && REGNO (XEXP (XEXP (op
, 0), 0)) == DP_REGNO
)
1874 op1
= XEXP (XEXP (op
, 0), 1);
1875 if (GET_CODE(op1
) == CONST_INT
|| GET_CODE(op1
) == SYMBOL_REF
)
1877 fprintf (file
, "\t%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1878 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1879 fprintf (file
, "\n");
1884 case 'M': /* Generate ldp(k) if direct address. */
1885 if (! TARGET_SMALL
/* Only used in asm statements. */
1887 && (GET_CODE (XEXP (op
, 0)) == CONST
1888 || GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
))
1890 fprintf (file
, "%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1891 output_address (XEXP (op
, 0));
1892 fprintf (file
, "\n\t");
1896 case 'O': /* Offset address. */
1897 if (code
== MEM
&& c4x_autoinc_operand (op
, Pmode
))
1899 else if (code
== MEM
)
1900 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1901 else if (code
== REG
)
1902 fprintf (file
, "%s", reg_names
[REGNO (op
) + 1]);
1904 fatal_insn ("c4x_print_operand: %%O inconsistency", op
);
1907 case 'C': /* Call. */
1910 case 'U': /* Call/callu. */
1911 if (code
!= SYMBOL_REF
)
1912 fprintf (file
, "u");
1922 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
1924 fprintf (file
, "%s", float_reg_names
[REGNO (op
)]);
1926 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
1930 output_address (XEXP (op
, 0));
1937 real_to_decimal (str
, CONST_DOUBLE_REAL_VALUE (op
),
1938 sizeof (str
), 0, 1);
1939 fprintf (file
, "%s", str
);
1944 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (op
));
1948 fprintf (file
, "ne");
1952 fprintf (file
, "eq");
1956 fprintf (file
, "ge");
1960 fprintf (file
, "gt");
1964 fprintf (file
, "le");
1968 fprintf (file
, "lt");
1972 fprintf (file
, "hs");
1976 fprintf (file
, "hi");
1980 fprintf (file
, "ls");
1984 fprintf (file
, "lo");
1988 output_addr_const (file
, op
);
1992 output_addr_const (file
, XEXP (op
, 0));
1999 fatal_insn ("c4x_print_operand: Bad operand case", op
);
2006 c4x_print_operand_address (FILE *file
, rtx addr
)
2008 switch (GET_CODE (addr
))
2011 fprintf (file
, "*%s", reg_names
[REGNO (addr
)]);
2015 fprintf (file
, "*--%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2019 fprintf (file
, "*%s++", reg_names
[REGNO (XEXP (addr
, 0))]);
2024 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2025 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2027 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2028 fprintf (file
, "*%s++(%s)", reg_names
[REGNO (op0
)],
2029 reg_names
[REGNO (op1
)]);
2030 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2031 fprintf (file
, "*%s++(" HOST_WIDE_INT_PRINT_DEC
")",
2032 reg_names
[REGNO (op0
)], INTVAL (op1
));
2033 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2034 fprintf (file
, "*%s--(" HOST_WIDE_INT_PRINT_DEC
")",
2035 reg_names
[REGNO (op0
)], -INTVAL (op1
));
2036 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2037 fprintf (file
, "*%s--(%s)", reg_names
[REGNO (op0
)],
2038 reg_names
[REGNO (op1
)]);
2040 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr
);
2046 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2047 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2049 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2050 fprintf (file
, "*++%s(%s)", reg_names
[REGNO (op0
)],
2051 reg_names
[REGNO (op1
)]);
2052 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2053 fprintf (file
, "*++%s(" HOST_WIDE_INT_PRINT_DEC
")",
2054 reg_names
[REGNO (op0
)], INTVAL (op1
));
2055 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2056 fprintf (file
, "*--%s(" HOST_WIDE_INT_PRINT_DEC
")",
2057 reg_names
[REGNO (op0
)], -INTVAL (op1
));
2058 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2059 fprintf (file
, "*--%s(%s)", reg_names
[REGNO (op0
)],
2060 reg_names
[REGNO (op1
)]);
2062 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr
);
2067 fprintf (file
, "*++%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2071 fprintf (file
, "*%s--", reg_names
[REGNO (XEXP (addr
, 0))]);
2074 case PLUS
: /* Indirect with displacement. */
2076 rtx op0
= XEXP (addr
, 0);
2077 rtx op1
= XEXP (addr
, 1);
2083 if (IS_INDEX_REG (op0
))
2085 fprintf (file
, "*+%s(%s)",
2086 reg_names
[REGNO (op1
)],
2087 reg_names
[REGNO (op0
)]); /* Index + base. */
2091 fprintf (file
, "*+%s(%s)",
2092 reg_names
[REGNO (op0
)],
2093 reg_names
[REGNO (op1
)]); /* Base + index. */
2096 else if (INTVAL (op1
) < 0)
2098 fprintf (file
, "*-%s(" HOST_WIDE_INT_PRINT_DEC
")",
2099 reg_names
[REGNO (op0
)],
2100 -INTVAL (op1
)); /* Base - displacement. */
2104 fprintf (file
, "*+%s(" HOST_WIDE_INT_PRINT_DEC
")",
2105 reg_names
[REGNO (op0
)],
2106 INTVAL (op1
)); /* Base + displacement. */
2110 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2116 rtx op0
= XEXP (addr
, 0);
2117 rtx op1
= XEXP (addr
, 1);
2119 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
2120 c4x_print_operand_address (file
, op1
);
2122 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2129 fprintf (file
, "@");
2130 output_addr_const (file
, addr
);
2133 /* We shouldn't access CONST_INT addresses. */
2137 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2143 /* Return nonzero if the floating point operand will fit
2144 in the immediate field. */
2147 c4x_immed_float_p (rtx op
)
2153 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
2154 if (GET_MODE (op
) == HFmode
)
2155 REAL_VALUE_TO_TARGET_DOUBLE (r
, convval
);
2158 REAL_VALUE_TO_TARGET_SINGLE (r
, convval
[0]);
2162 /* Sign extend exponent. */
2163 exponent
= (((convval
[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2164 if (exponent
== -128)
2166 if ((convval
[0] & 0x00000fff) != 0 || convval
[1] != 0)
2167 return 0; /* Precision doesn't fit. */
2168 return (exponent
<= 7) /* Positive exp. */
2169 && (exponent
>= -7); /* Negative exp. */
2173 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2174 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2176 None of the last four instructions from the bottom of the block can
2177 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2178 BcondAT or RETIcondD.
2180 This routine scans the four previous insns for a jump insn, and if
2181 one is found, returns 1 so that we bung in a nop instruction.
2182 This simple minded strategy will add a nop, when it may not
2183 be required. Say when there is a JUMP_INSN near the end of the
2184 block that doesn't get converted into a delayed branch.
2186 Note that we cannot have a call insn, since we don't generate
2187 repeat loops with calls in them (although I suppose we could, but
2188 there's no benefit.)
2190 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2193 c4x_rptb_nop_p (rtx insn
)
2198 /* Extract the start label from the jump pattern (rptb_end). */
2199 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2201 /* If there is a label at the end of the loop we must insert
2204 insn
= previous_insn (insn
);
2205 } while (GET_CODE (insn
) == NOTE
2206 || GET_CODE (insn
) == USE
2207 || GET_CODE (insn
) == CLOBBER
);
2208 if (GET_CODE (insn
) == CODE_LABEL
)
2211 for (i
= 0; i
< 4; i
++)
2213 /* Search back for prev non-note and non-label insn. */
2214 while (GET_CODE (insn
) == NOTE
|| GET_CODE (insn
) == CODE_LABEL
2215 || GET_CODE (insn
) == USE
|| GET_CODE (insn
) == CLOBBER
)
2217 if (insn
== start_label
)
2220 insn
= previous_insn (insn
);
2223 /* If we have a jump instruction we should insert a NOP. If we
2224 hit repeat block top we should only insert a NOP if the loop
2226 if (GET_CODE (insn
) == JUMP_INSN
)
2228 insn
= previous_insn (insn
);
2234 /* The C4x looping instruction needs to be emitted at the top of the
2235 loop. Emitting the true RTL for a looping instruction at the top of
2236 the loop can cause problems with flow analysis. So instead, a dummy
2237 doloop insn is emitted at the end of the loop. This routine checks
2238 for the presence of this doloop insn and then searches back to the
2239 top of the loop, where it inserts the true looping insn (provided
2240 there are no instructions in the loop which would cause problems).
2241 Any additional labels can be emitted at this point. In addition, if
2242 the desired loop count register was not allocated, this routine does
2245 Before we can create a repeat block looping instruction we have to
2246 verify that there are no jumps outside the loop and no jumps outside
2247 the loop go into this loop. This can happen in the basic blocks reorder
2248 pass. The C4x cpu cannot handle this. */
2251 c4x_label_ref_used_p (rtx x
, rtx code_label
)
2260 code
= GET_CODE (x
);
2261 if (code
== LABEL_REF
)
2262 return INSN_UID (XEXP (x
,0)) == INSN_UID (code_label
);
2264 fmt
= GET_RTX_FORMAT (code
);
2265 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2269 if (c4x_label_ref_used_p (XEXP (x
, i
), code_label
))
2272 else if (fmt
[i
] == 'E')
2273 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2274 if (c4x_label_ref_used_p (XVECEXP (x
, i
, j
), code_label
))
2282 c4x_rptb_valid_p (rtx insn
, rtx start_label
)
2288 /* Find the start label. */
2289 for (; insn
; insn
= PREV_INSN (insn
))
2290 if (insn
== start_label
)
2293 /* Note found then we cannot use a rptb or rpts. The label was
2294 probably moved by the basic block reorder pass. */
2299 /* If any jump jumps inside this block then we must fail. */
2300 for (insn
= PREV_INSN (start
); insn
; insn
= PREV_INSN (insn
))
2302 if (GET_CODE (insn
) == CODE_LABEL
)
2304 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2305 if (GET_CODE (tmp
) == JUMP_INSN
2306 && c4x_label_ref_used_p (tmp
, insn
))
2310 for (insn
= NEXT_INSN (end
); insn
; insn
= NEXT_INSN (insn
))
2312 if (GET_CODE (insn
) == CODE_LABEL
)
2314 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2315 if (GET_CODE (tmp
) == JUMP_INSN
2316 && c4x_label_ref_used_p (tmp
, insn
))
2320 /* If any jump jumps outside this block then we must fail. */
2321 for (insn
= NEXT_INSN (start
); insn
!= end
; insn
= NEXT_INSN (insn
))
2323 if (GET_CODE (insn
) == CODE_LABEL
)
2325 for (tmp
= NEXT_INSN (end
); tmp
; tmp
= NEXT_INSN(tmp
))
2326 if (GET_CODE (tmp
) == JUMP_INSN
2327 && c4x_label_ref_used_p (tmp
, insn
))
2329 for (tmp
= PREV_INSN (start
); tmp
; tmp
= PREV_INSN(tmp
))
2330 if (GET_CODE (tmp
) == JUMP_INSN
2331 && c4x_label_ref_used_p (tmp
, insn
))
2336 /* All checks OK. */
2342 c4x_rptb_insert (rtx insn
)
2346 rtx new_start_label
;
2349 /* If the count register has not been allocated to RC, say if
2350 there is a movmem pattern in the loop, then do not insert a
2351 RPTB instruction. Instead we emit a decrement and branch
2352 at the end of the loop. */
2353 count_reg
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 0), 0);
2354 if (REGNO (count_reg
) != RC_REGNO
)
2357 /* Extract the start label from the jump pattern (rptb_end). */
2358 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2360 if (! c4x_rptb_valid_p (insn
, start_label
))
2362 /* We cannot use the rptb insn. Replace it so reorg can use
2363 the delay slots of the jump insn. */
2364 emit_insn_before (gen_addqi3 (count_reg
, count_reg
, constm1_rtx
), insn
);
2365 emit_insn_before (gen_cmpqi (count_reg
, const0_rtx
), insn
);
2366 emit_insn_before (gen_bge (start_label
), insn
);
2367 LABEL_NUSES (start_label
)++;
2372 end_label
= gen_label_rtx ();
2373 LABEL_NUSES (end_label
)++;
2374 emit_label_after (end_label
, insn
);
2376 new_start_label
= gen_label_rtx ();
2377 LABEL_NUSES (new_start_label
)++;
2379 for (; insn
; insn
= PREV_INSN (insn
))
2381 if (insn
== start_label
)
2383 if (GET_CODE (insn
) == JUMP_INSN
&&
2384 JUMP_LABEL (insn
) == start_label
)
2385 redirect_jump (insn
, new_start_label
, 0);
2388 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label
);
2390 emit_label_after (new_start_label
, insn
);
2392 if (TARGET_RPTS
&& c4x_rptb_rpts_p (PREV_INSN (insn
), 0))
2393 emit_insn_after (gen_rpts_top (new_start_label
, end_label
), insn
);
2395 emit_insn_after (gen_rptb_top (new_start_label
, end_label
), insn
);
2396 if (LABEL_NUSES (start_label
) == 0)
2397 delete_insn (start_label
);
2401 /* We need to use direct addressing for large constants and addresses
2402 that cannot fit within an instruction. We must check for these
2403 after after the final jump optimization pass, since this may
2404 introduce a local_move insn for a SYMBOL_REF. This pass
2405 must come before delayed branch slot filling since it can generate
2406 additional instructions.
2408 This function also fixes up RTPB style loops that didn't get RC
2409 allocated as the loop counter. */
2416 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2418 /* Look for insn. */
2421 int insn_code_number
;
2424 insn_code_number
= recog_memoized (insn
);
2426 if (insn_code_number
< 0)
2429 /* Insert the RTX for RPTB at the top of the loop
2430 and a label at the end of the loop. */
2431 if (insn_code_number
== CODE_FOR_rptb_end
)
2432 c4x_rptb_insert(insn
);
2434 /* We need to split the insn here. Otherwise the calls to
2435 force_const_mem will not work for load_immed_address. */
2438 /* Don't split the insn if it has been deleted. */
2439 if (! INSN_DELETED_P (old
))
2440 insn
= try_split (PATTERN(old
), old
, 1);
2442 /* When not optimizing, the old insn will be still left around
2443 with only the 'deleted' bit set. Transform it into a note
2444 to avoid confusion of subsequent processing. */
2445 if (INSN_DELETED_P (old
))
2447 PUT_CODE (old
, NOTE
);
2448 NOTE_LINE_NUMBER (old
) = NOTE_INSN_DELETED
;
2449 NOTE_SOURCE_FILE (old
) = 0;
2457 c4x_a_register (rtx op
)
2459 return REG_P (op
) && IS_ADDR_OR_PSEUDO_REG (op
);
2464 c4x_x_register (rtx op
)
2466 return REG_P (op
) && IS_INDEX_OR_PSEUDO_REG (op
);
2471 c4x_immed_int_constant (rtx op
)
2473 if (GET_CODE (op
) != CONST_INT
)
2476 return GET_MODE (op
) == VOIDmode
2477 || GET_MODE_CLASS (GET_MODE (op
)) == MODE_INT
2478 || GET_MODE_CLASS (GET_MODE (op
)) == MODE_PARTIAL_INT
;
2483 c4x_immed_float_constant (rtx op
)
2485 if (GET_CODE (op
) != CONST_DOUBLE
)
2488 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2489 present this only means that a MEM rtx has been generated. It does
2490 not mean the rtx is really in memory. */
2492 return GET_MODE (op
) == QFmode
|| GET_MODE (op
) == HFmode
;
2497 c4x_shiftable_constant (rtx op
)
2501 int val
= INTVAL (op
);
2503 for (i
= 0; i
< 16; i
++)
2508 mask
= ((0xffff >> i
) << 16) | 0xffff;
2509 if (IS_INT16_CONST (val
& (1 << 31) ? (val
>> i
) | ~mask
2510 : (val
>> i
) & mask
))
2517 c4x_H_constant (rtx op
)
2519 return c4x_immed_float_constant (op
) && c4x_immed_float_p (op
);
2524 c4x_I_constant (rtx op
)
2526 return c4x_immed_int_constant (op
) && IS_INT16_CONST (INTVAL (op
));
2531 c4x_J_constant (rtx op
)
2535 return c4x_immed_int_constant (op
) && IS_INT8_CONST (INTVAL (op
));
2540 c4x_K_constant (rtx op
)
2542 if (TARGET_C3X
|| ! c4x_immed_int_constant (op
))
2544 return IS_INT5_CONST (INTVAL (op
));
2549 c4x_L_constant (rtx op
)
2551 return c4x_immed_int_constant (op
) && IS_UINT16_CONST (INTVAL (op
));
2556 c4x_N_constant (rtx op
)
2558 return c4x_immed_int_constant (op
) && IS_NOT_UINT16_CONST (INTVAL (op
));
2563 c4x_O_constant (rtx op
)
2565 return c4x_immed_int_constant (op
) && IS_HIGH_CONST (INTVAL (op
));
2569 /* The constraints do not have to check the register class,
2570 except when needed to discriminate between the constraints.
2571 The operand has been checked by the predicates to be valid. */
2573 /* ARx + 9-bit signed const or IRn
2574 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2575 We don't include the pre/post inc/dec forms here since
2576 they are handled by the <> constraints. */
2579 c4x_Q_constraint (rtx op
)
2581 enum machine_mode mode
= GET_MODE (op
);
2583 if (GET_CODE (op
) != MEM
)
2586 switch (GET_CODE (op
))
2593 rtx op0
= XEXP (op
, 0);
2594 rtx op1
= XEXP (op
, 1);
2602 if (GET_CODE (op1
) != CONST_INT
)
2605 /* HImode and HFmode must be offsettable. */
2606 if (mode
== HImode
|| mode
== HFmode
)
2607 return IS_DISP8_OFF_CONST (INTVAL (op1
));
2609 return IS_DISP8_CONST (INTVAL (op1
));
2620 /* ARx + 5-bit unsigned const
2621 *ARx, *+ARx(n) for n < 32. */
2624 c4x_R_constraint (rtx op
)
2626 enum machine_mode mode
= GET_MODE (op
);
2630 if (GET_CODE (op
) != MEM
)
2633 switch (GET_CODE (op
))
2640 rtx op0
= XEXP (op
, 0);
2641 rtx op1
= XEXP (op
, 1);
2646 if (GET_CODE (op1
) != CONST_INT
)
2649 /* HImode and HFmode must be offsettable. */
2650 if (mode
== HImode
|| mode
== HFmode
)
2651 return IS_UINT5_CONST (INTVAL (op1
) + 1);
2653 return IS_UINT5_CONST (INTVAL (op1
));
2665 c4x_R_indirect (rtx op
)
2667 enum machine_mode mode
= GET_MODE (op
);
2669 if (TARGET_C3X
|| GET_CODE (op
) != MEM
)
2673 switch (GET_CODE (op
))
2676 return IS_ADDR_OR_PSEUDO_REG (op
);
2680 rtx op0
= XEXP (op
, 0);
2681 rtx op1
= XEXP (op
, 1);
2683 /* HImode and HFmode must be offsettable. */
2684 if (mode
== HImode
|| mode
== HFmode
)
2685 return IS_ADDR_OR_PSEUDO_REG (op0
)
2686 && GET_CODE (op1
) == CONST_INT
2687 && IS_UINT5_CONST (INTVAL (op1
) + 1);
2690 && IS_ADDR_OR_PSEUDO_REG (op0
)
2691 && GET_CODE (op1
) == CONST_INT
2692 && IS_UINT5_CONST (INTVAL (op1
));
2703 /* ARx + 1-bit unsigned const or IRn
2704 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2705 We don't include the pre/post inc/dec forms here since
2706 they are handled by the <> constraints. */
2709 c4x_S_constraint (rtx op
)
2711 enum machine_mode mode
= GET_MODE (op
);
2712 if (GET_CODE (op
) != MEM
)
2715 switch (GET_CODE (op
))
2723 rtx op0
= XEXP (op
, 0);
2724 rtx op1
= XEXP (op
, 1);
2726 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2727 || (op0
!= XEXP (op1
, 0)))
2730 op0
= XEXP (op1
, 0);
2731 op1
= XEXP (op1
, 1);
2732 return REG_P (op0
) && REG_P (op1
);
2733 /* Pre or post_modify with a displacement of 0 or 1
2734 should not be generated. */
2740 rtx op0
= XEXP (op
, 0);
2741 rtx op1
= XEXP (op
, 1);
2749 if (GET_CODE (op1
) != CONST_INT
)
2752 /* HImode and HFmode must be offsettable. */
2753 if (mode
== HImode
|| mode
== HFmode
)
2754 return IS_DISP1_OFF_CONST (INTVAL (op1
));
2756 return IS_DISP1_CONST (INTVAL (op1
));
2768 c4x_S_indirect (rtx op
)
2770 enum machine_mode mode
= GET_MODE (op
);
2771 if (GET_CODE (op
) != MEM
)
2775 switch (GET_CODE (op
))
2779 if (mode
!= QImode
&& mode
!= QFmode
)
2786 return IS_ADDR_OR_PSEUDO_REG (op
);
2791 rtx op0
= XEXP (op
, 0);
2792 rtx op1
= XEXP (op
, 1);
2794 if (mode
!= QImode
&& mode
!= QFmode
)
2797 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2798 || (op0
!= XEXP (op1
, 0)))
2801 op0
= XEXP (op1
, 0);
2802 op1
= XEXP (op1
, 1);
2803 return REG_P (op0
) && IS_ADDR_OR_PSEUDO_REG (op0
)
2804 && REG_P (op1
) && IS_INDEX_OR_PSEUDO_REG (op1
);
2805 /* Pre or post_modify with a displacement of 0 or 1
2806 should not be generated. */
2811 rtx op0
= XEXP (op
, 0);
2812 rtx op1
= XEXP (op
, 1);
2816 /* HImode and HFmode must be offsettable. */
2817 if (mode
== HImode
|| mode
== HFmode
)
2818 return IS_ADDR_OR_PSEUDO_REG (op0
)
2819 && GET_CODE (op1
) == CONST_INT
2820 && IS_DISP1_OFF_CONST (INTVAL (op1
));
2823 return (IS_INDEX_OR_PSEUDO_REG (op1
)
2824 && IS_ADDR_OR_PSEUDO_REG (op0
))
2825 || (IS_ADDR_OR_PSEUDO_REG (op1
)
2826 && IS_INDEX_OR_PSEUDO_REG (op0
));
2828 return IS_ADDR_OR_PSEUDO_REG (op0
)
2829 && GET_CODE (op1
) == CONST_INT
2830 && IS_DISP1_CONST (INTVAL (op1
));
2842 /* Direct memory operand. */
2845 c4x_T_constraint (rtx op
)
2847 if (GET_CODE (op
) != MEM
)
2851 if (GET_CODE (op
) != LO_SUM
)
2853 /* Allow call operands. */
2854 return GET_CODE (op
) == SYMBOL_REF
2855 && GET_MODE (op
) == Pmode
2856 && SYMBOL_REF_FUNCTION_P (op
);
2859 /* HImode and HFmode are not offsettable. */
2860 if (GET_MODE (op
) == HImode
|| GET_CODE (op
) == HFmode
)
2863 if ((GET_CODE (XEXP (op
, 0)) == REG
)
2864 && (REGNO (XEXP (op
, 0)) == DP_REGNO
))
2865 return c4x_U_constraint (XEXP (op
, 1));
2871 /* Symbolic operand. */
2874 c4x_U_constraint (rtx op
)
2876 /* Don't allow direct addressing to an arbitrary constant. */
2877 return GET_CODE (op
) == CONST
2878 || GET_CODE (op
) == SYMBOL_REF
2879 || GET_CODE (op
) == LABEL_REF
;
2884 c4x_autoinc_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2886 if (GET_CODE (op
) == MEM
)
2888 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
2894 || code
== PRE_MODIFY
2895 || code
== POST_MODIFY
2904 mixed_subreg_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2906 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
2907 int and a long double. */
2908 if (GET_CODE (op
) == SUBREG
2909 && (GET_MODE (op
) == QFmode
)
2910 && (GET_MODE (SUBREG_REG (op
)) == QImode
2911 || GET_MODE (SUBREG_REG (op
)) == HImode
))
2918 reg_imm_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2920 if (REG_P (op
) || CONSTANT_P (op
))
2927 not_modify_reg (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2929 if (REG_P (op
) || CONSTANT_P (op
))
2931 if (GET_CODE (op
) != MEM
)
2934 switch (GET_CODE (op
))
2941 rtx op0
= XEXP (op
, 0);
2942 rtx op1
= XEXP (op
, 1);
2947 if (REG_P (op1
) || GET_CODE (op1
) == CONST_INT
)
2953 rtx op0
= XEXP (op
, 0);
2955 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
2973 not_rc_reg (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2975 if (REG_P (op
) && REGNO (op
) == RC_REGNO
)
2982 c4x_S_address_parse (rtx op
, int *base
, int *incdec
, int *index
, int *disp
)
2989 if (GET_CODE (op
) != MEM
)
2990 fatal_insn ("invalid indirect memory address", op
);
2993 switch (GET_CODE (op
))
2996 *base
= REGNO (XEXP (op
, 0));
3002 *base
= REGNO (XEXP (op
, 0));
3008 *base
= REGNO (XEXP (op
, 0));
3014 *base
= REGNO (XEXP (op
, 0));
3020 *base
= REGNO (XEXP (op
, 0));
3021 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3023 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3024 *disp
= 0; /* ??? */
3027 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3032 *base
= REGNO (XEXP (op
, 0));
3033 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3035 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3036 *disp
= 1; /* ??? */
3039 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3050 rtx op0
= XEXP (op
, 0);
3051 rtx op1
= XEXP (op
, 1);
3053 if (c4x_a_register (op0
))
3055 if (c4x_x_register (op1
))
3057 *base
= REGNO (op0
);
3058 *index
= REGNO (op1
);
3061 else if ((GET_CODE (op1
) == CONST_INT
3062 && IS_DISP1_CONST (INTVAL (op1
))))
3064 *base
= REGNO (op0
);
3065 *disp
= INTVAL (op1
);
3069 else if (c4x_x_register (op0
) && c4x_a_register (op1
))
3071 *base
= REGNO (op1
);
3072 *index
= REGNO (op0
);
3079 fatal_insn ("invalid indirect (S) memory address", op
);
3085 c4x_address_conflict (rtx op0
, rtx op1
, int store0
, int store1
)
3096 if (MEM_VOLATILE_P (op0
) && MEM_VOLATILE_P (op1
))
3099 c4x_S_address_parse (op0
, &base0
, &incdec0
, &index0
, &disp0
);
3100 c4x_S_address_parse (op1
, &base1
, &incdec1
, &index1
, &disp1
);
3102 if (store0
&& store1
)
3104 /* If we have two stores in parallel to the same address, then
3105 the C4x only executes one of the stores. This is unlikely to
3106 cause problems except when writing to a hardware device such
3107 as a FIFO since the second write will be lost. The user
3108 should flag the hardware location as being volatile so that
3109 we don't do this optimization. While it is unlikely that we
3110 have an aliased address if both locations are not marked
3111 volatile, it is probably safer to flag a potential conflict
3112 if either location is volatile. */
3113 if (! flag_argument_noalias
)
3115 if (MEM_VOLATILE_P (op0
) || MEM_VOLATILE_P (op1
))
3120 /* If have a parallel load and a store to the same address, the load
3121 is performed first, so there is no conflict. Similarly, there is
3122 no conflict if have parallel loads from the same address. */
3124 /* Cannot use auto increment or auto decrement twice for same
3126 if (base0
== base1
&& incdec0
&& incdec0
)
3129 /* It might be too confusing for GCC if we have use a base register
3130 with a side effect and a memory reference using the same register
3132 if (! TARGET_DEVEL
&& base0
== base1
&& (incdec0
|| incdec1
))
3135 /* We cannot optimize the case where op1 and op2 refer to the same
3137 if (base0
== base1
&& disp0
== disp1
&& index0
== index1
)
3145 /* Check for while loop inside a decrement and branch loop. */
3148 c4x_label_conflict (rtx insn
, rtx jump
, rtx db
)
3152 if (GET_CODE (insn
) == CODE_LABEL
)
3154 if (CODE_LABEL_NUMBER (jump
) == CODE_LABEL_NUMBER (insn
))
3156 if (CODE_LABEL_NUMBER (db
) == CODE_LABEL_NUMBER (insn
))
3159 insn
= PREV_INSN (insn
);
3165 /* Validate combination of operands for parallel load/store instructions. */
3168 valid_parallel_load_store (rtx
*operands
,
3169 enum machine_mode mode ATTRIBUTE_UNUSED
)
3171 rtx op0
= operands
[0];
3172 rtx op1
= operands
[1];
3173 rtx op2
= operands
[2];
3174 rtx op3
= operands
[3];
3176 if (GET_CODE (op0
) == SUBREG
)
3177 op0
= SUBREG_REG (op0
);
3178 if (GET_CODE (op1
) == SUBREG
)
3179 op1
= SUBREG_REG (op1
);
3180 if (GET_CODE (op2
) == SUBREG
)
3181 op2
= SUBREG_REG (op2
);
3182 if (GET_CODE (op3
) == SUBREG
)
3183 op3
= SUBREG_REG (op3
);
3185 /* The patterns should only allow ext_low_reg_operand() or
3186 par_ind_operand() operands. Thus of the 4 operands, only 2
3187 should be REGs and the other 2 should be MEMs. */
3189 /* This test prevents the multipack pass from using this pattern if
3190 op0 is used as an index or base register in op2 or op3, since
3191 this combination will require reloading. */
3192 if (GET_CODE (op0
) == REG
3193 && ((GET_CODE (op2
) == MEM
&& reg_mentioned_p (op0
, XEXP (op2
, 0)))
3194 || (GET_CODE (op3
) == MEM
&& reg_mentioned_p (op0
, XEXP (op3
, 0)))))
3198 if (GET_CODE (op0
) == REG
&& GET_CODE (op2
) == REG
)
3199 return (REGNO (op0
) != REGNO (op2
))
3200 && GET_CODE (op1
) == MEM
&& GET_CODE (op3
) == MEM
3201 && ! c4x_address_conflict (op1
, op3
, 0, 0);
3204 if (GET_CODE (op1
) == REG
&& GET_CODE (op3
) == REG
)
3205 return GET_CODE (op0
) == MEM
&& GET_CODE (op2
) == MEM
3206 && ! c4x_address_conflict (op0
, op2
, 1, 1);
3209 if (GET_CODE (op0
) == REG
&& GET_CODE (op3
) == REG
)
3210 return GET_CODE (op1
) == MEM
&& GET_CODE (op2
) == MEM
3211 && ! c4x_address_conflict (op1
, op2
, 0, 1);
3214 if (GET_CODE (op1
) == REG
&& GET_CODE (op2
) == REG
)
3215 return GET_CODE (op0
) == MEM
&& GET_CODE (op3
) == MEM
3216 && ! c4x_address_conflict (op0
, op3
, 1, 0);
3223 valid_parallel_operands_4 (rtx
*operands
,
3224 enum machine_mode mode ATTRIBUTE_UNUSED
)
3226 rtx op0
= operands
[0];
3227 rtx op2
= operands
[2];
3229 if (GET_CODE (op0
) == SUBREG
)
3230 op0
= SUBREG_REG (op0
);
3231 if (GET_CODE (op2
) == SUBREG
)
3232 op2
= SUBREG_REG (op2
);
3234 /* This test prevents the multipack pass from using this pattern if
3235 op0 is used as an index or base register in op2, since this combination
3236 will require reloading. */
3237 if (GET_CODE (op0
) == REG
3238 && GET_CODE (op2
) == MEM
3239 && reg_mentioned_p (op0
, XEXP (op2
, 0)))
3247 valid_parallel_operands_5 (rtx
*operands
,
3248 enum machine_mode mode ATTRIBUTE_UNUSED
)
3251 rtx op0
= operands
[0];
3252 rtx op1
= operands
[1];
3253 rtx op2
= operands
[2];
3254 rtx op3
= operands
[3];
3256 if (GET_CODE (op0
) == SUBREG
)
3257 op0
= SUBREG_REG (op0
);
3258 if (GET_CODE (op1
) == SUBREG
)
3259 op1
= SUBREG_REG (op1
);
3260 if (GET_CODE (op2
) == SUBREG
)
3261 op2
= SUBREG_REG (op2
);
3263 /* The patterns should only allow ext_low_reg_operand() or
3264 par_ind_operand() operands. Operands 1 and 2 may be commutative
3265 but only one of them can be a register. */
3266 if (GET_CODE (op1
) == REG
)
3268 if (GET_CODE (op2
) == REG
)
3274 /* This test prevents the multipack pass from using this pattern if
3275 op0 is used as an index or base register in op3, since this combination
3276 will require reloading. */
3277 if (GET_CODE (op0
) == REG
3278 && GET_CODE (op3
) == MEM
3279 && reg_mentioned_p (op0
, XEXP (op3
, 0)))
3287 valid_parallel_operands_6 (rtx
*operands
,
3288 enum machine_mode mode ATTRIBUTE_UNUSED
)
3291 rtx op0
= operands
[0];
3292 rtx op1
= operands
[1];
3293 rtx op2
= operands
[2];
3294 rtx op4
= operands
[4];
3295 rtx op5
= operands
[5];
3297 if (GET_CODE (op1
) == SUBREG
)
3298 op1
= SUBREG_REG (op1
);
3299 if (GET_CODE (op2
) == SUBREG
)
3300 op2
= SUBREG_REG (op2
);
3301 if (GET_CODE (op4
) == SUBREG
)
3302 op4
= SUBREG_REG (op4
);
3303 if (GET_CODE (op5
) == SUBREG
)
3304 op5
= SUBREG_REG (op5
);
3306 /* The patterns should only allow ext_low_reg_operand() or
3307 par_ind_operand() operands. Thus of the 4 input operands, only 2
3308 should be REGs and the other 2 should be MEMs. */
3310 if (GET_CODE (op1
) == REG
)
3312 if (GET_CODE (op2
) == REG
)
3314 if (GET_CODE (op4
) == REG
)
3316 if (GET_CODE (op5
) == REG
)
3319 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3320 Perhaps we should count the MEMs as well? */
3324 /* This test prevents the multipack pass from using this pattern if
3325 op0 is used as an index or base register in op4 or op5, since
3326 this combination will require reloading. */
3327 if (GET_CODE (op0
) == REG
3328 && ((GET_CODE (op4
) == MEM
&& reg_mentioned_p (op0
, XEXP (op4
, 0)))
3329 || (GET_CODE (op5
) == MEM
&& reg_mentioned_p (op0
, XEXP (op5
, 0)))))
3336 /* Validate combination of src operands. Note that the operands have
3337 been screened by the src_operand predicate. We just have to check
3338 that the combination of operands is valid. If FORCE is set, ensure
3339 that the destination regno is valid if we have a 2 operand insn. */
3342 c4x_valid_operands (enum rtx_code code
, rtx
*operands
,
3343 enum machine_mode mode ATTRIBUTE_UNUSED
,
3349 enum rtx_code code1
;
3350 enum rtx_code code2
;
3353 /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
3354 if (code
== IF_THEN_ELSE
)
3355 return 1 || (operands
[0] == operands
[2] || operands
[0] == operands
[3]);
3357 if (code
== COMPARE
)
3370 if (GET_CODE (op0
) == SUBREG
)
3371 op0
= SUBREG_REG (op0
);
3372 if (GET_CODE (op1
) == SUBREG
)
3373 op1
= SUBREG_REG (op1
);
3374 if (GET_CODE (op2
) == SUBREG
)
3375 op2
= SUBREG_REG (op2
);
3377 code1
= GET_CODE (op1
);
3378 code2
= GET_CODE (op2
);
3381 if (code1
== REG
&& code2
== REG
)
3384 if (code1
== MEM
&& code2
== MEM
)
3386 if (c4x_S_indirect (op1
) && c4x_S_indirect (op2
))
3388 return c4x_R_indirect (op1
) && c4x_R_indirect (op2
);
3391 /* We cannot handle two MEMs or two CONSTS, etc. */
3400 if (c4x_J_constant (op2
) && c4x_R_indirect (op1
))
3405 if (! c4x_H_constant (op2
))
3409 /* Any valid memory operand screened by src_operand is OK. */
3414 fatal_insn ("c4x_valid_operands: Internal error", op2
);
3418 if (GET_CODE (op0
) == SCRATCH
)
3424 /* Check that we have a valid destination register for a two operand
3426 return ! force
|| code
== COMPARE
|| REGNO (op1
) == REGNO (op0
);
3430 /* Check non-commutative operators. */
3431 if (code
== ASHIFTRT
|| code
== LSHIFTRT
3432 || code
== ASHIFT
|| code
== COMPARE
)
3434 && (c4x_S_indirect (op1
) || c4x_R_indirect (op1
));
3437 /* Assume MINUS is commutative since the subtract patterns
3438 also support the reverse subtract instructions. Since op1
3439 is not a register, and op2 is a register, op1 can only
3440 be a restricted memory operand for a shift instruction. */
3449 if (! c4x_H_constant (op1
))
3453 /* Any valid memory operand screened by src_operand is OK. */
3462 if (GET_CODE (op0
) == SCRATCH
)
3468 /* Check that we have a valid destination register for a two operand
3470 return ! force
|| REGNO (op1
) == REGNO (op0
);
3473 if (c4x_J_constant (op1
) && c4x_R_indirect (op2
))
3480 int valid_operands (enum rtx_code code
, rtx
*operands
, enum machine_mode mode
)
3483 /* If we are not optimizing then we have to let anything go and let
3484 reload fix things up. instantiate_decl in function.c can produce
3485 invalid insns by changing the offset of a memory operand from a
3486 valid one into an invalid one, when the second operand is also a
3487 memory operand. The alternative is not to allow two memory
3488 operands for an insn when not optimizing. The problem only rarely
3489 occurs, for example with the C-torture program DFcmp.c. */
3491 return ! optimize
|| c4x_valid_operands (code
, operands
, mode
, 0);
3496 legitimize_operands (enum rtx_code code
, rtx
*operands
, enum machine_mode mode
)
3498 /* Compare only has 2 operands. */
3499 if (code
== COMPARE
)
3501 /* During RTL generation, force constants into pseudos so that
3502 they can get hoisted out of loops. This will tie up an extra
3503 register but can save an extra cycle. Only do this if loop
3504 optimization enabled. (We cannot pull this trick for add and
3505 sub instructions since the flow pass won't find
3506 autoincrements etc.) This allows us to generate compare
3507 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
3508 of LDI *AR0++, R0; CMPI 42, R0.
3510 Note that expand_binops will try to load an expensive constant
3511 into a register if it is used within a loop. Unfortunately,
3512 the cost mechanism doesn't allow us to look at the other
3513 operand to decide whether the constant is expensive. */
3515 if (! reload_in_progress
3518 && GET_CODE (operands
[1]) == CONST_INT
3519 && rtx_cost (operands
[1], code
) > 1)
3520 operands
[1] = force_reg (mode
, operands
[1]);
3522 if (! reload_in_progress
3523 && ! c4x_valid_operands (code
, operands
, mode
, 0))
3524 operands
[0] = force_reg (mode
, operands
[0]);
3528 /* We cannot do this for ADDI/SUBI insns since we will
3529 defeat the flow pass from finding autoincrement addressing
3531 if (! reload_in_progress
3532 && ! ((code
== PLUS
|| code
== MINUS
) && mode
== Pmode
)
3535 && GET_CODE (operands
[2]) == CONST_INT
3536 && rtx_cost (operands
[2], code
) > 1)
3537 operands
[2] = force_reg (mode
, operands
[2]);
3539 /* We can get better code on a C30 if we force constant shift counts
3540 into a register. This way they can get hoisted out of loops,
3541 tying up a register but saving an instruction. The downside is
3542 that they may get allocated to an address or index register, and
3543 thus we will get a pipeline conflict if there is a nearby
3544 indirect address using an address register.
3546 Note that expand_binops will not try to load an expensive constant
3547 into a register if it is used within a loop for a shift insn. */
3549 if (! reload_in_progress
3550 && ! c4x_valid_operands (code
, operands
, mode
, TARGET_FORCE
))
3552 /* If the operand combination is invalid, we force operand1 into a
3553 register, preventing reload from having doing to do this at a
3555 operands
[1] = force_reg (mode
, operands
[1]);
3558 emit_move_insn (operands
[0], operands
[1]);
3559 operands
[1] = copy_rtx (operands
[0]);
3563 /* Just in case... */
3564 if (! c4x_valid_operands (code
, operands
, mode
, 0))
3565 operands
[2] = force_reg (mode
, operands
[2]);
3569 /* Right shifts require a negative shift count, but GCC expects
3570 a positive count, so we emit a NEG. */
3571 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
3572 && (GET_CODE (operands
[2]) != CONST_INT
))
3573 operands
[2] = gen_rtx_NEG (mode
, negate_rtx (mode
, operands
[2]));
3576 /* When the shift count is greater than 32 then the result
3577 can be implementation dependent. We truncate the result to
3578 fit in 5 bits so that we do not emit invalid code when
3579 optimizing---such as trying to generate lhu2 with 20021124-1.c. */
3580 if (((code
== ASHIFTRT
|| code
== LSHIFTRT
|| code
== ASHIFT
)
3581 && (GET_CODE (operands
[2]) == CONST_INT
))
3582 && INTVAL (operands
[2]) > (GET_MODE_BITSIZE (mode
) - 1))
3584 = GEN_INT (INTVAL (operands
[2]) & (GET_MODE_BITSIZE (mode
) - 1));
3590 /* The following predicates are used for instruction scheduling. */
3593 group1_reg_operand (rtx op
, enum machine_mode mode
)
3595 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3597 if (GET_CODE (op
) == SUBREG
)
3598 op
= SUBREG_REG (op
);
3599 return REG_P (op
) && (! reload_completed
|| IS_GROUP1_REG (op
));
3604 group1_mem_operand (rtx op
, enum machine_mode mode
)
3606 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3609 if (GET_CODE (op
) == MEM
)
3612 if (GET_CODE (op
) == PLUS
)
3614 rtx op0
= XEXP (op
, 0);
3615 rtx op1
= XEXP (op
, 1);
3617 if ((REG_P (op0
) && (! reload_completed
|| IS_GROUP1_REG (op0
)))
3618 || (REG_P (op1
) && (! reload_completed
|| IS_GROUP1_REG (op1
))))
3621 else if ((REG_P (op
)) && (! reload_completed
|| IS_GROUP1_REG (op
)))
3629 /* Return true if any one of the address registers. */
3632 arx_reg_operand (rtx op
, enum machine_mode mode
)
3634 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3636 if (GET_CODE (op
) == SUBREG
)
3637 op
= SUBREG_REG (op
);
3638 return REG_P (op
) && (! reload_completed
|| IS_ADDR_REG (op
));
3643 c4x_arn_reg_operand (rtx op
, enum machine_mode mode
, unsigned int regno
)
3645 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3647 if (GET_CODE (op
) == SUBREG
)
3648 op
= SUBREG_REG (op
);
3649 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
3654 c4x_arn_mem_operand (rtx op
, enum machine_mode mode
, unsigned int regno
)
3656 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3659 if (GET_CODE (op
) == MEM
)
3662 switch (GET_CODE (op
))
3671 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
3675 if (REG_P (XEXP (op
, 0)) && (! reload_completed
3676 || (REGNO (XEXP (op
, 0)) == regno
)))
3678 if (REG_P (XEXP (XEXP (op
, 1), 1))
3679 && (! reload_completed
3680 || (REGNO (XEXP (XEXP (op
, 1), 1)) == regno
)))
3686 rtx op0
= XEXP (op
, 0);
3687 rtx op1
= XEXP (op
, 1);
3689 if ((REG_P (op0
) && (! reload_completed
3690 || (REGNO (op0
) == regno
)))
3691 || (REG_P (op1
) && (! reload_completed
3692 || (REGNO (op1
) == regno
))))
3706 ar0_reg_operand (rtx op
, enum machine_mode mode
)
3708 return c4x_arn_reg_operand (op
, mode
, AR0_REGNO
);
3713 ar0_mem_operand (rtx op
, enum machine_mode mode
)
3715 return c4x_arn_mem_operand (op
, mode
, AR0_REGNO
);
3720 ar1_reg_operand (rtx op
, enum machine_mode mode
)
3722 return c4x_arn_reg_operand (op
, mode
, AR1_REGNO
);
3727 ar1_mem_operand (rtx op
, enum machine_mode mode
)
3729 return c4x_arn_mem_operand (op
, mode
, AR1_REGNO
);
3734 ar2_reg_operand (rtx op
, enum machine_mode mode
)
3736 return c4x_arn_reg_operand (op
, mode
, AR2_REGNO
);
3741 ar2_mem_operand (rtx op
, enum machine_mode mode
)
3743 return c4x_arn_mem_operand (op
, mode
, AR2_REGNO
);
3748 ar3_reg_operand (rtx op
, enum machine_mode mode
)
3750 return c4x_arn_reg_operand (op
, mode
, AR3_REGNO
);
3755 ar3_mem_operand (rtx op
, enum machine_mode mode
)
3757 return c4x_arn_mem_operand (op
, mode
, AR3_REGNO
);
3762 ar4_reg_operand (rtx op
, enum machine_mode mode
)
3764 return c4x_arn_reg_operand (op
, mode
, AR4_REGNO
);
3769 ar4_mem_operand (rtx op
, enum machine_mode mode
)
3771 return c4x_arn_mem_operand (op
, mode
, AR4_REGNO
);
3776 ar5_reg_operand (rtx op
, enum machine_mode mode
)
3778 return c4x_arn_reg_operand (op
, mode
, AR5_REGNO
);
3783 ar5_mem_operand (rtx op
, enum machine_mode mode
)
3785 return c4x_arn_mem_operand (op
, mode
, AR5_REGNO
);
3790 ar6_reg_operand (rtx op
, enum machine_mode mode
)
3792 return c4x_arn_reg_operand (op
, mode
, AR6_REGNO
);
3797 ar6_mem_operand (rtx op
, enum machine_mode mode
)
3799 return c4x_arn_mem_operand (op
, mode
, AR6_REGNO
);
3804 ar7_reg_operand (rtx op
, enum machine_mode mode
)
3806 return c4x_arn_reg_operand (op
, mode
, AR7_REGNO
);
3811 ar7_mem_operand (rtx op
, enum machine_mode mode
)
3813 return c4x_arn_mem_operand (op
, mode
, AR7_REGNO
);
3818 ir0_reg_operand (rtx op
, enum machine_mode mode
)
3820 return c4x_arn_reg_operand (op
, mode
, IR0_REGNO
);
3825 ir0_mem_operand (rtx op
, enum machine_mode mode
)
3827 return c4x_arn_mem_operand (op
, mode
, IR0_REGNO
);
3832 ir1_reg_operand (rtx op
, enum machine_mode mode
)
3834 return c4x_arn_reg_operand (op
, mode
, IR1_REGNO
);
3839 ir1_mem_operand (rtx op
, enum machine_mode mode
)
3841 return c4x_arn_mem_operand (op
, mode
, IR1_REGNO
);
3845 /* This is similar to operand_subword but allows autoincrement
3849 c4x_operand_subword (rtx op
, int i
, int validate_address
,
3850 enum machine_mode mode
)
3852 if (mode
!= HImode
&& mode
!= HFmode
)
3853 fatal_insn ("c4x_operand_subword: invalid mode", op
);
3855 if (mode
== HFmode
&& REG_P (op
))
3856 fatal_insn ("c4x_operand_subword: invalid operand", op
);
3858 if (GET_CODE (op
) == MEM
)
3860 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
3861 enum machine_mode mode
= GET_MODE (XEXP (op
, 0));
3862 enum machine_mode submode
;
3867 else if (mode
== HFmode
)
3874 return gen_rtx_MEM (submode
, XEXP (op
, 0));
3880 /* We could handle these with some difficulty.
3881 e.g., *p-- => *(p-=2); *(p+1). */
3882 fatal_insn ("c4x_operand_subword: invalid autoincrement", op
);
3888 fatal_insn ("c4x_operand_subword: invalid address", op
);
3890 /* Even though offsettable_address_p considers (MEM
3891 (LO_SUM)) to be offsettable, it is not safe if the
3892 address is at the end of the data page since we also have
3893 to fix up the associated high PART. In this case where
3894 we are trying to split a HImode or HFmode memory
3895 reference, we would have to emit another insn to reload a
3896 new HIGH value. It's easier to disable LO_SUM memory references
3897 in HImode or HFmode and we probably get better code. */
3899 fatal_insn ("c4x_operand_subword: address not offsettable", op
);
3906 return operand_subword (op
, i
, validate_address
, mode
);
3911 struct name_list
*next
;
3915 static struct name_list
*global_head
;
3916 static struct name_list
*extern_head
;
3919 /* Add NAME to list of global symbols and remove from external list if
3920 present on external list. */
3923 c4x_global_label (const char *name
)
3925 struct name_list
*p
, *last
;
3927 /* Do not insert duplicate names, so linearly search through list of
3932 if (strcmp (p
->name
, name
) == 0)
3936 p
= (struct name_list
*) xmalloc (sizeof *p
);
3937 p
->next
= global_head
;
3941 /* Remove this name from ref list if present. */
3946 if (strcmp (p
->name
, name
) == 0)
3949 last
->next
= p
->next
;
3951 extern_head
= p
->next
;
3960 /* Add NAME to list of external symbols. */
3963 c4x_external_ref (const char *name
)
3965 struct name_list
*p
;
3967 /* Do not insert duplicate names. */
3971 if (strcmp (p
->name
, name
) == 0)
3976 /* Do not insert ref if global found. */
3980 if (strcmp (p
->name
, name
) == 0)
3984 p
= (struct name_list
*) xmalloc (sizeof *p
);
3985 p
->next
= extern_head
;
3990 /* We need to have a data section we can identify so that we can set
3991 the DP register back to a data pointer in the small memory model.
3992 This is only required for ISRs if we are paranoid that someone
3993 may have quietly changed this register on the sly. */
3995 c4x_file_start (void)
3997 default_file_start ();
3998 fprintf (asm_out_file
, "\t.version\t%d\n", c4x_cpu_version
);
3999 fputs ("\n\t.data\ndata_sec:\n", asm_out_file
);
4006 struct name_list
*p
;
4008 /* Output all external names that are not global. */
4012 fprintf (asm_out_file
, "\t.ref\t");
4013 assemble_name (asm_out_file
, p
->name
);
4014 fprintf (asm_out_file
, "\n");
4017 fprintf (asm_out_file
, "\t.end\n");
4022 c4x_check_attribute (const char *attrib
, tree list
, tree decl
, tree
*attributes
)
4024 while (list
!= NULL_TREE
4025 && IDENTIFIER_POINTER (TREE_PURPOSE (list
))
4026 != IDENTIFIER_POINTER (DECL_NAME (decl
)))
4027 list
= TREE_CHAIN (list
);
4029 *attributes
= tree_cons (get_identifier (attrib
), TREE_VALUE (list
),
4035 c4x_insert_attributes (tree decl
, tree
*attributes
)
4037 switch (TREE_CODE (decl
))
4040 c4x_check_attribute ("section", code_tree
, decl
, attributes
);
4041 c4x_check_attribute ("const", pure_tree
, decl
, attributes
);
4042 c4x_check_attribute ("noreturn", noreturn_tree
, decl
, attributes
);
4043 c4x_check_attribute ("interrupt", interrupt_tree
, decl
, attributes
);
4044 c4x_check_attribute ("naked", naked_tree
, decl
, attributes
);
4048 c4x_check_attribute ("section", data_tree
, decl
, attributes
);
4056 /* Table of valid machine attributes. */
4057 const struct attribute_spec c4x_attribute_table
[] =
4059 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4060 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4061 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4062 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4063 { NULL
, 0, 0, false, false, false, NULL
}
4066 /* Handle an attribute requiring a FUNCTION_TYPE;
4067 arguments as in struct attribute_spec.handler. */
4069 c4x_handle_fntype_attribute (tree
*node
, tree name
,
4070 tree args ATTRIBUTE_UNUSED
,
4071 int flags ATTRIBUTE_UNUSED
,
4074 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4076 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
4077 IDENTIFIER_POINTER (name
));
4078 *no_add_attrs
= true;
4085 /* !!! FIXME to emit RPTS correctly. */
4088 c4x_rptb_rpts_p (rtx insn
, rtx op
)
4090 /* The next insn should be our label marking where the
4091 repeat block starts. */
4092 insn
= NEXT_INSN (insn
);
4093 if (GET_CODE (insn
) != CODE_LABEL
)
4095 /* Some insns may have been shifted between the RPTB insn
4096 and the top label... They were probably destined to
4097 be moved out of the loop. For now, let's leave them
4098 where they are and print a warning. We should
4099 probably move these insns before the repeat block insn. */
4101 fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
4106 /* Skip any notes. */
4107 insn
= next_nonnote_insn (insn
);
4109 /* This should be our first insn in the loop. */
4110 if (! INSN_P (insn
))
4113 /* Skip any notes. */
4114 insn
= next_nonnote_insn (insn
);
4116 if (! INSN_P (insn
))
4119 if (recog_memoized (insn
) != CODE_FOR_rptb_end
)
4125 return (GET_CODE (op
) == CONST_INT
) && TARGET_RPTS_CYCLES (INTVAL (op
));
4129 /* Check if register r11 is used as the destination of an insn. */
4132 c4x_r11_set_p(rtx x
)
4141 if (INSN_P (x
) && GET_CODE (PATTERN (x
)) == SEQUENCE
)
4142 x
= XVECEXP (PATTERN (x
), 0, XVECLEN (PATTERN (x
), 0) - 1);
4144 if (INSN_P (x
) && (set
= single_set (x
)))
4147 if (GET_CODE (x
) == REG
&& REGNO (x
) == R11_REGNO
)
4150 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4151 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4155 if (c4x_r11_set_p (XEXP (x
, i
)))
4158 else if (fmt
[i
] == 'E')
4159 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4160 if (c4x_r11_set_p (XVECEXP (x
, i
, j
)))
4167 /* The c4x sometimes has a problem when the insn before the laj insn
4168 sets the r11 register. Check for this situation. */
4171 c4x_check_laj_p (rtx insn
)
4173 insn
= prev_nonnote_insn (insn
);
4175 /* If this is the start of the function no nop is needed. */
4179 /* If the previous insn is a code label we have to insert a nop. This
4180 could be a jump or table jump. We can find the normal jumps by
4181 scanning the function but this will not find table jumps. */
4182 if (GET_CODE (insn
) == CODE_LABEL
)
4185 /* If the previous insn sets register r11 we have to insert a nop. */
4186 if (c4x_r11_set_p (insn
))
4189 /* No nop needed. */
4194 /* Adjust the cost of a scheduling dependency. Return the new cost of
4195 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4196 A set of an address register followed by a use occurs a 2 cycle
4197 stall (reduced to a single cycle on the c40 using LDA), while
4198 a read of an address register followed by a use occurs a single cycle. */
4200 #define SET_USE_COST 3
4201 #define SETLDA_USE_COST 2
4202 #define READ_USE_COST 2
4205 c4x_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
4207 /* Don't worry about this until we know what registers have been
4209 if (flag_schedule_insns
== 0 && ! reload_completed
)
4212 /* How do we handle dependencies where a read followed by another
4213 read causes a pipeline stall? For example, a read of ar0 followed
4214 by the use of ar0 for a memory reference. It looks like we
4215 need to extend the scheduler to handle this case. */
4217 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4218 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4219 so only deal with insns we know about. */
4220 if (recog_memoized (dep_insn
) < 0)
4223 if (REG_NOTE_KIND (link
) == 0)
4227 /* Data dependency; DEP_INSN writes a register that INSN reads some
4231 if (get_attr_setgroup1 (dep_insn
) && get_attr_usegroup1 (insn
))
4232 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4233 if (get_attr_readarx (dep_insn
) && get_attr_usegroup1 (insn
))
4234 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4238 /* This could be significantly optimized. We should look
4239 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4240 insn uses ar0-ar7. We then test if the same register
4241 is used. The tricky bit is that some operands will
4242 use several registers... */
4243 if (get_attr_setar0 (dep_insn
) && get_attr_usear0 (insn
))
4244 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4245 if (get_attr_setlda_ar0 (dep_insn
) && get_attr_usear0 (insn
))
4246 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4247 if (get_attr_readar0 (dep_insn
) && get_attr_usear0 (insn
))
4248 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4250 if (get_attr_setar1 (dep_insn
) && get_attr_usear1 (insn
))
4251 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4252 if (get_attr_setlda_ar1 (dep_insn
) && get_attr_usear1 (insn
))
4253 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4254 if (get_attr_readar1 (dep_insn
) && get_attr_usear1 (insn
))
4255 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4257 if (get_attr_setar2 (dep_insn
) && get_attr_usear2 (insn
))
4258 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4259 if (get_attr_setlda_ar2 (dep_insn
) && get_attr_usear2 (insn
))
4260 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4261 if (get_attr_readar2 (dep_insn
) && get_attr_usear2 (insn
))
4262 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4264 if (get_attr_setar3 (dep_insn
) && get_attr_usear3 (insn
))
4265 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4266 if (get_attr_setlda_ar3 (dep_insn
) && get_attr_usear3 (insn
))
4267 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4268 if (get_attr_readar3 (dep_insn
) && get_attr_usear3 (insn
))
4269 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4271 if (get_attr_setar4 (dep_insn
) && get_attr_usear4 (insn
))
4272 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4273 if (get_attr_setlda_ar4 (dep_insn
) && get_attr_usear4 (insn
))
4274 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4275 if (get_attr_readar4 (dep_insn
) && get_attr_usear4 (insn
))
4276 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4278 if (get_attr_setar5 (dep_insn
) && get_attr_usear5 (insn
))
4279 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4280 if (get_attr_setlda_ar5 (dep_insn
) && get_attr_usear5 (insn
))
4281 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4282 if (get_attr_readar5 (dep_insn
) && get_attr_usear5 (insn
))
4283 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4285 if (get_attr_setar6 (dep_insn
) && get_attr_usear6 (insn
))
4286 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4287 if (get_attr_setlda_ar6 (dep_insn
) && get_attr_usear6 (insn
))
4288 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4289 if (get_attr_readar6 (dep_insn
) && get_attr_usear6 (insn
))
4290 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4292 if (get_attr_setar7 (dep_insn
) && get_attr_usear7 (insn
))
4293 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4294 if (get_attr_setlda_ar7 (dep_insn
) && get_attr_usear7 (insn
))
4295 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4296 if (get_attr_readar7 (dep_insn
) && get_attr_usear7 (insn
))
4297 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4299 if (get_attr_setir0 (dep_insn
) && get_attr_useir0 (insn
))
4300 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4301 if (get_attr_setlda_ir0 (dep_insn
) && get_attr_useir0 (insn
))
4302 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4304 if (get_attr_setir1 (dep_insn
) && get_attr_useir1 (insn
))
4305 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4306 if (get_attr_setlda_ir1 (dep_insn
) && get_attr_useir1 (insn
))
4307 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4313 /* For other data dependencies, the default cost specified in the
4317 else if (REG_NOTE_KIND (link
) == REG_DEP_ANTI
)
4319 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4322 /* For c4x anti dependencies, the cost is 0. */
4325 else if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
)
4327 /* Output dependency; DEP_INSN writes a register that INSN writes some
4330 /* For c4x output dependencies, the cost is 0. */
4338 c4x_init_builtins (void)
4340 tree endlink
= void_list_node
;
4342 add_builtin_function ("fast_ftoi",
4345 tree_cons (NULL_TREE
, double_type_node
,
4347 C4X_BUILTIN_FIX
, BUILT_IN_MD
, NULL
, NULL_TREE
);
4348 add_builtin_function ("ansi_ftoi",
4351 tree_cons (NULL_TREE
, double_type_node
,
4353 C4X_BUILTIN_FIX_ANSI
, BUILT_IN_MD
, NULL
,
4356 add_builtin_function ("fast_imult",
4359 tree_cons (NULL_TREE
, integer_type_node
,
4360 tree_cons (NULL_TREE
,
4363 C4X_BUILTIN_MPYI
, BUILT_IN_MD
, NULL
,
4367 add_builtin_function ("toieee",
4370 tree_cons (NULL_TREE
, double_type_node
,
4372 C4X_BUILTIN_TOIEEE
, BUILT_IN_MD
, NULL
,
4374 add_builtin_function ("frieee",
4377 tree_cons (NULL_TREE
, double_type_node
,
4379 C4X_BUILTIN_FRIEEE
, BUILT_IN_MD
, NULL
,
4381 add_builtin_function ("fast_invf",
4384 tree_cons (NULL_TREE
, double_type_node
,
4386 C4X_BUILTIN_RCPF
, BUILT_IN_MD
, NULL
,
4393 c4x_expand_builtin (tree exp
, rtx target
,
4394 rtx subtarget ATTRIBUTE_UNUSED
,
4395 enum machine_mode mode ATTRIBUTE_UNUSED
,
4396 int ignore ATTRIBUTE_UNUSED
)
4398 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4399 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4400 tree arglist
= TREE_OPERAND (exp
, 1);
4406 case C4X_BUILTIN_FIX
:
4407 arg0
= TREE_VALUE (arglist
);
4408 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
4409 if (! target
|| ! register_operand (target
, QImode
))
4410 target
= gen_reg_rtx (QImode
);
4411 emit_insn (gen_fixqfqi_clobber (target
, r0
));
4414 case C4X_BUILTIN_FIX_ANSI
:
4415 arg0
= TREE_VALUE (arglist
);
4416 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
4417 if (! target
|| ! register_operand (target
, QImode
))
4418 target
= gen_reg_rtx (QImode
);
4419 emit_insn (gen_fix_truncqfqi2 (target
, r0
));
4422 case C4X_BUILTIN_MPYI
:
4425 arg0
= TREE_VALUE (arglist
);
4426 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4427 r0
= expand_expr (arg0
, NULL_RTX
, QImode
, 0);
4428 r1
= expand_expr (arg1
, NULL_RTX
, QImode
, 0);
4429 if (! target
|| ! register_operand (target
, QImode
))
4430 target
= gen_reg_rtx (QImode
);
4431 emit_insn (gen_mulqi3_24_clobber (target
, r0
, r1
));
4434 case C4X_BUILTIN_TOIEEE
:
4437 arg0
= TREE_VALUE (arglist
);
4438 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
4439 if (! target
|| ! register_operand (target
, QFmode
))
4440 target
= gen_reg_rtx (QFmode
);
4441 emit_insn (gen_toieee (target
, r0
));
4444 case C4X_BUILTIN_FRIEEE
:
4447 arg0
= TREE_VALUE (arglist
);
4448 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
4449 if (register_operand (r0
, QFmode
))
4451 r1
= assign_stack_local (QFmode
, GET_MODE_SIZE (QFmode
), 0);
4452 emit_move_insn (r1
, r0
);
4455 if (! target
|| ! register_operand (target
, QFmode
))
4456 target
= gen_reg_rtx (QFmode
);
4457 emit_insn (gen_frieee (target
, r0
));
4460 case C4X_BUILTIN_RCPF
:
4463 arg0
= TREE_VALUE (arglist
);
4464 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
4465 if (! target
|| ! register_operand (target
, QFmode
))
4466 target
= gen_reg_rtx (QFmode
);
4467 emit_insn (gen_rcpfqf_clobber (target
, r0
));
4474 c4x_init_libfuncs (void)
4476 set_optab_libfunc (smul_optab
, QImode
, "__mulqi3");
4477 set_optab_libfunc (sdiv_optab
, QImode
, "__divqi3");
4478 set_optab_libfunc (udiv_optab
, QImode
, "__udivqi3");
4479 set_optab_libfunc (smod_optab
, QImode
, "__modqi3");
4480 set_optab_libfunc (umod_optab
, QImode
, "__umodqi3");
4481 set_optab_libfunc (sdiv_optab
, QFmode
, "__divqf3");
4482 set_optab_libfunc (smul_optab
, HFmode
, "__mulhf3");
4483 set_optab_libfunc (sdiv_optab
, HFmode
, "__divhf3");
4484 set_optab_libfunc (smul_optab
, HImode
, "__mulhi3");
4485 set_optab_libfunc (sdiv_optab
, HImode
, "__divhi3");
4486 set_optab_libfunc (udiv_optab
, HImode
, "__udivhi3");
4487 set_optab_libfunc (smod_optab
, HImode
, "__modhi3");
4488 set_optab_libfunc (umod_optab
, HImode
, "__umodhi3");
4489 set_optab_libfunc (ffs_optab
, QImode
, "__ffs");
4490 smulhi3_libfunc
= init_one_libfunc ("__smulhi3_high");
4491 umulhi3_libfunc
= init_one_libfunc ("__umulhi3_high");
4492 fix_truncqfhi2_libfunc
= init_one_libfunc ("__fix_truncqfhi2");
4493 fixuns_truncqfhi2_libfunc
= init_one_libfunc ("__ufix_truncqfhi2");
4494 fix_trunchfhi2_libfunc
= init_one_libfunc ("__fix_trunchfhi2");
4495 fixuns_trunchfhi2_libfunc
= init_one_libfunc ("__ufix_trunchfhi2");
4496 floathiqf2_libfunc
= init_one_libfunc ("__floathiqf2");
4497 floatunshiqf2_libfunc
= init_one_libfunc ("__ufloathiqf2");
4498 floathihf2_libfunc
= init_one_libfunc ("__floathihf2");
4499 floatunshihf2_libfunc
= init_one_libfunc ("__ufloathihf2");
4503 c4x_asm_named_section (const char *name
, unsigned int flags ATTRIBUTE_UNUSED
,
4504 tree decl ATTRIBUTE_UNUSED
)
4506 fprintf (asm_out_file
, "\t.sect\t\"%s\"\n", name
);
/* Implement TARGET_ASM_GLOBALIZE_LABEL: emit the generic globalizing
   directive for NAME and also record it via c4x_global_label.  */

static void
c4x_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
/* Predicates classifying an rtx code for the cost computation below:
   shift codes and bitwise-logical codes respectively.  */
#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
4521 /* Compute a (partial) cost for rtx X. Return true if the complete
4522 cost has been computed, and false if subexpressions should be
4523 scanned. In either case, *TOTAL contains the cost result. */
4526 c4x_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
4532 /* Some small integers are effectively free for the C40. We should
4533 also consider if we are using the small memory model. With
4534 the big memory model we require an extra insn for a constant
4535 loaded from memory. */
4539 if (c4x_J_constant (x
))
4541 else if (! TARGET_C3X
4542 && outer_code
== AND
4543 && (val
== 255 || val
== 65535))
4545 else if (! TARGET_C3X
4546 && (outer_code
== ASHIFTRT
|| outer_code
== LSHIFTRT
)
4547 && (val
== 16 || val
== 24))
4549 else if (TARGET_C3X
&& SHIFT_CODE_P (outer_code
))
4551 else if (LOGICAL_CODE_P (outer_code
)
4552 ? c4x_L_constant (x
) : c4x_I_constant (x
))
4565 if (c4x_H_constant (x
))
4567 else if (GET_MODE (x
) == QFmode
)
4573 /* ??? Note that we return true, rather than false so that rtx_cost
4574 doesn't include the constant costs. Otherwise expand_mult will
4575 think that it is cheaper to synthesize a multiply rather than to
4576 use a multiply instruction. I think this is because the algorithm
4577 synth_mult doesn't take into account the loading of the operands,
4578 whereas the calculation of mult_cost does. */
4587 *total
= COSTS_N_INSNS (1);
4591 *total
= COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
4592 || TARGET_MPYI
? 1 : 14);
4599 *total
= COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
4608 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
4611 c4x_external_libcall (rtx fun
)
4613 /* This is only needed to keep asm30 happy for ___divqf3 etc. */
4614 c4x_external_ref (XSTR (fun
, 0));
4617 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
4620 c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
4621 int incoming ATTRIBUTE_UNUSED
)
4623 return gen_rtx_REG (Pmode
, AR0_REGNO
);