1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static int avr_naked_function_p (tree
);
53 static int interrupt_function_p (tree
);
54 static int signal_function_p (tree
);
55 static int avr_OS_task_function_p (tree
);
56 static int avr_OS_main_function_p (tree
);
57 static int avr_regs_to_save (HARD_REG_SET
*);
58 static int get_sequence_length (rtx insns
);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code
);
62 static int avr_num_arg_regs (enum machine_mode
, tree
);
64 static RTX_CODE
compare_condition (rtx insn
);
65 static int compare_sign_p (rtx insn
);
66 static tree
avr_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
67 static tree
avr_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
68 static tree
avr_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
69 const struct attribute_spec avr_attribute_table
[];
70 static bool avr_assemble_integer (rtx
, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static void avr_asm_function_end_prologue (FILE *);
74 static void avr_asm_function_begin_epilogue (FILE *);
75 static rtx
avr_function_value (const_tree
, const_tree
, bool);
76 static void avr_insert_attributes (tree
, tree
*);
77 static void avr_asm_init_sections (void);
78 static unsigned int avr_section_type_flags (tree
, const char *, int);
80 static void avr_reorg (void);
81 static void avr_asm_out_ctor (rtx
, int);
82 static void avr_asm_out_dtor (rtx
, int);
83 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
, bool);
84 static bool avr_rtx_costs (rtx
, int, int, int *, bool);
85 static int avr_address_cost (rtx
, bool);
86 static bool avr_return_in_memory (const_tree
, const_tree
);
87 static struct machine_function
* avr_init_machine_status (void);
88 static rtx
avr_builtin_setjmp_frame_value (void);
89 static bool avr_hard_regno_scratch_ok (unsigned int);
91 /* Allocate registers from r25 to r8 for parameters for function calls. */
92 #define FIRST_CUM_REG 26
94 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
95 static GTY(()) rtx tmp_reg_rtx
;
97 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
98 static GTY(()) rtx zero_reg_rtx
;
100 /* AVR register names {"r0", "r1", ..., "r31"} */
101 static const char *const avr_regnames
[] = REGISTER_NAMES
;
103 /* This holds the last insn address. */
104 static int last_insn_address
= 0;
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro
;
109 /* Current architecture. */
110 const struct base_arch_s
*avr_current_arch
;
112 section
*progmem_section
;
114 static const struct base_arch_s avr_arch_types
[] = {
115 { 1, 0, 0, 0, 0, 0, 0, 0, NULL
}, /* unknown device specified */
116 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
117 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
118 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
119 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
120 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
121 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
122 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
123 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
124 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
125 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
/* These names are used as the index into the avr_arch_types[] table
   above.  NOTE(review): the enum of ARCH_* index values that this
   comment refers to was lost in extraction and must be restored from
   the pristine file.  */

/* One entry per supported MCU; the struct tag and braces were lost in
   extraction and are restored here (tag grounded by the
   avr_mcu_types[] declaration below).  */
struct mcu_type_s {
  const char *const name;
  int arch;   /* index in avr_arch_types[] */
  /* Must lie outside user's namespace.  NULL == no macro.  */
  const char *const macro;
};
153 /* List of all known AVR MCU types - if updated, it has to be kept
154 in sync in several places (FIXME: is there a better way?):
156 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
157 - t-avr (MULTILIB_MATCHES)
158 - gas/config/tc-avr.c
161 static const struct mcu_type_s avr_mcu_types
[] = {
162 /* Classic, <= 8K. */
163 { "avr2", ARCH_AVR2
, NULL
},
164 { "at90s2313", ARCH_AVR2
, "__AVR_AT90S2313__" },
165 { "at90s2323", ARCH_AVR2
, "__AVR_AT90S2323__" },
166 { "at90s2333", ARCH_AVR2
, "__AVR_AT90S2333__" },
167 { "at90s2343", ARCH_AVR2
, "__AVR_AT90S2343__" },
168 { "attiny22", ARCH_AVR2
, "__AVR_ATtiny22__" },
169 { "attiny26", ARCH_AVR2
, "__AVR_ATtiny26__" },
170 { "at90s4414", ARCH_AVR2
, "__AVR_AT90S4414__" },
171 { "at90s4433", ARCH_AVR2
, "__AVR_AT90S4433__" },
172 { "at90s4434", ARCH_AVR2
, "__AVR_AT90S4434__" },
173 { "at90s8515", ARCH_AVR2
, "__AVR_AT90S8515__" },
174 { "at90c8534", ARCH_AVR2
, "__AVR_AT90C8534__" },
175 { "at90s8535", ARCH_AVR2
, "__AVR_AT90S8535__" },
176 /* Classic + MOVW, <= 8K. */
177 { "avr25", ARCH_AVR25
, NULL
},
178 { "ata6289", ARCH_AVR25
, "__AVR_ATA6289__" },
179 { "attiny13", ARCH_AVR25
, "__AVR_ATtiny13__" },
180 { "attiny13a", ARCH_AVR25
, "__AVR_ATtiny13A__" },
181 { "attiny2313", ARCH_AVR25
, "__AVR_ATtiny2313__" },
182 { "attiny24", ARCH_AVR25
, "__AVR_ATtiny24__" },
183 { "attiny44", ARCH_AVR25
, "__AVR_ATtiny44__" },
184 { "attiny84", ARCH_AVR25
, "__AVR_ATtiny84__" },
185 { "attiny25", ARCH_AVR25
, "__AVR_ATtiny25__" },
186 { "attiny45", ARCH_AVR25
, "__AVR_ATtiny45__" },
187 { "attiny85", ARCH_AVR25
, "__AVR_ATtiny85__" },
188 { "attiny261", ARCH_AVR25
, "__AVR_ATtiny261__" },
189 { "attiny461", ARCH_AVR25
, "__AVR_ATtiny461__" },
190 { "attiny861", ARCH_AVR25
, "__AVR_ATtiny861__" },
191 { "attiny43u", ARCH_AVR25
, "__AVR_ATtiny43U__" },
192 { "attiny87", ARCH_AVR25
, "__AVR_ATtiny87__" },
193 { "attiny48", ARCH_AVR25
, "__AVR_ATtiny48__" },
194 { "attiny88", ARCH_AVR25
, "__AVR_ATtiny88__" },
195 { "at86rf401", ARCH_AVR25
, "__AVR_AT86RF401__" },
196 /* Classic, > 8K, <= 64K. */
197 { "avr3", ARCH_AVR3
, NULL
},
198 { "at43usb355", ARCH_AVR3
, "__AVR_AT43USB355__" },
199 { "at76c711", ARCH_AVR3
, "__AVR_AT76C711__" },
200 /* Classic, == 128K. */
201 { "avr31", ARCH_AVR31
, NULL
},
202 { "atmega103", ARCH_AVR31
, "__AVR_ATmega103__" },
203 { "at43usb320", ARCH_AVR31
, "__AVR_AT43USB320__" },
204 /* Classic + MOVW + JMP/CALL. */
205 { "avr35", ARCH_AVR35
, NULL
},
206 { "at90usb82", ARCH_AVR35
, "__AVR_AT90USB82__" },
207 { "at90usb162", ARCH_AVR35
, "__AVR_AT90USB162__" },
208 { "attiny167", ARCH_AVR35
, "__AVR_ATtiny167__" },
209 { "attiny327", ARCH_AVR35
, "__AVR_ATtiny327__" },
210 /* Enhanced, <= 8K. */
211 { "avr4", ARCH_AVR4
, NULL
},
212 { "atmega8", ARCH_AVR4
, "__AVR_ATmega8__" },
213 { "atmega48", ARCH_AVR4
, "__AVR_ATmega48__" },
214 { "atmega48p", ARCH_AVR4
, "__AVR_ATmega48P__" },
215 { "atmega88", ARCH_AVR4
, "__AVR_ATmega88__" },
216 { "atmega88p", ARCH_AVR4
, "__AVR_ATmega88P__" },
217 { "atmega8515", ARCH_AVR4
, "__AVR_ATmega8515__" },
218 { "atmega8535", ARCH_AVR4
, "__AVR_ATmega8535__" },
219 { "atmega8hva", ARCH_AVR4
, "__AVR_ATmega8HVA__" },
220 { "atmega4hvd", ARCH_AVR4
, "__AVR_ATmega4HVD__" },
221 { "atmega8hvd", ARCH_AVR4
, "__AVR_ATmega8HVD__" },
222 { "at90pwm1", ARCH_AVR4
, "__AVR_AT90PWM1__" },
223 { "at90pwm2", ARCH_AVR4
, "__AVR_AT90PWM2__" },
224 { "at90pwm2b", ARCH_AVR4
, "__AVR_AT90PWM2B__" },
225 { "at90pwm3", ARCH_AVR4
, "__AVR_AT90PWM3__" },
226 { "at90pwm3b", ARCH_AVR4
, "__AVR_AT90PWM3B__" },
227 { "at90pwm81", ARCH_AVR4
, "__AVR_AT90PWM81__" },
228 /* Enhanced, > 8K, <= 64K. */
229 { "avr5", ARCH_AVR5
, NULL
},
230 { "atmega16", ARCH_AVR5
, "__AVR_ATmega16__" },
231 { "atmega161", ARCH_AVR5
, "__AVR_ATmega161__" },
232 { "atmega162", ARCH_AVR5
, "__AVR_ATmega162__" },
233 { "atmega163", ARCH_AVR5
, "__AVR_ATmega163__" },
234 { "atmega164p", ARCH_AVR5
, "__AVR_ATmega164P__" },
235 { "atmega165", ARCH_AVR5
, "__AVR_ATmega165__" },
236 { "atmega165p", ARCH_AVR5
, "__AVR_ATmega165P__" },
237 { "atmega168", ARCH_AVR5
, "__AVR_ATmega168__" },
238 { "atmega168p", ARCH_AVR5
, "__AVR_ATmega168P__" },
239 { "atmega169", ARCH_AVR5
, "__AVR_ATmega169__" },
240 { "atmega169p", ARCH_AVR5
, "__AVR_ATmega169P__" },
241 { "atmega32", ARCH_AVR5
, "__AVR_ATmega32__" },
242 { "atmega323", ARCH_AVR5
, "__AVR_ATmega323__" },
243 { "atmega324p", ARCH_AVR5
, "__AVR_ATmega324P__" },
244 { "atmega325", ARCH_AVR5
, "__AVR_ATmega325__" },
245 { "atmega325p", ARCH_AVR5
, "__AVR_ATmega325P__" },
246 { "atmega3250", ARCH_AVR5
, "__AVR_ATmega3250__" },
247 { "atmega3250p", ARCH_AVR5
, "__AVR_ATmega3250P__" },
248 { "atmega328p", ARCH_AVR5
, "__AVR_ATmega328P__" },
249 { "atmega329", ARCH_AVR5
, "__AVR_ATmega329__" },
250 { "atmega329p", ARCH_AVR5
, "__AVR_ATmega329P__" },
251 { "atmega3290", ARCH_AVR5
, "__AVR_ATmega3290__" },
252 { "atmega3290p", ARCH_AVR5
, "__AVR_ATmega3290P__" },
253 { "atmega406", ARCH_AVR5
, "__AVR_ATmega406__" },
254 { "atmega64", ARCH_AVR5
, "__AVR_ATmega64__" },
255 { "atmega640", ARCH_AVR5
, "__AVR_ATmega640__" },
256 { "atmega644", ARCH_AVR5
, "__AVR_ATmega644__" },
257 { "atmega644p", ARCH_AVR5
, "__AVR_ATmega644P__" },
258 { "atmega645", ARCH_AVR5
, "__AVR_ATmega645__" },
259 { "atmega6450", ARCH_AVR5
, "__AVR_ATmega6450__" },
260 { "atmega649", ARCH_AVR5
, "__AVR_ATmega649__" },
261 { "atmega6490", ARCH_AVR5
, "__AVR_ATmega6490__" },
262 { "atmega16hva", ARCH_AVR5
, "__AVR_ATmega16HVA__" },
263 { "atmega16hvb", ARCH_AVR5
, "__AVR_ATmega16HVB__" },
264 { "atmega32hvb", ARCH_AVR5
, "__AVR_ATmega32HVB__" },
265 { "at90can32", ARCH_AVR5
, "__AVR_AT90CAN32__" },
266 { "at90can64", ARCH_AVR5
, "__AVR_AT90CAN64__" },
267 { "at90pwm216", ARCH_AVR5
, "__AVR_AT90PWM216__" },
268 { "at90pwm316", ARCH_AVR5
, "__AVR_AT90PWM316__" },
269 { "atmega32c1", ARCH_AVR5
, "__AVR_ATmega32C1__" },
270 { "atmega64c1", ARCH_AVR5
, "__AVR_ATmega64C1__" },
271 { "atmega16m1", ARCH_AVR5
, "__AVR_ATmega16M1__" },
272 { "atmega32m1", ARCH_AVR5
, "__AVR_ATmega32M1__" },
273 { "atmega64m1", ARCH_AVR5
, "__AVR_ATmega64M1__" },
274 { "atmega16u4", ARCH_AVR5
, "__AVR_ATmega16U4__" },
275 { "atmega32u4", ARCH_AVR5
, "__AVR_ATmega32U4__" },
276 { "atmega32u6", ARCH_AVR5
, "__AVR_ATmega32U6__" },
277 { "at90scr100", ARCH_AVR5
, "__AVR_AT90SCR100__" },
278 { "at90usb646", ARCH_AVR5
, "__AVR_AT90USB646__" },
279 { "at90usb647", ARCH_AVR5
, "__AVR_AT90USB647__" },
280 { "at94k", ARCH_AVR5
, "__AVR_AT94K__" },
281 /* Enhanced, == 128K. */
282 { "avr51", ARCH_AVR51
, NULL
},
283 { "atmega128", ARCH_AVR51
, "__AVR_ATmega128__" },
284 { "atmega1280", ARCH_AVR51
, "__AVR_ATmega1280__" },
285 { "atmega1281", ARCH_AVR51
, "__AVR_ATmega1281__" },
286 { "atmega1284p", ARCH_AVR51
, "__AVR_ATmega1284P__" },
287 { "atmega128rfa1", ARCH_AVR51
, "__AVR_ATmega128RFA1__" },
288 { "at90can128", ARCH_AVR51
, "__AVR_AT90CAN128__" },
289 { "at90usb1286", ARCH_AVR51
, "__AVR_AT90USB1286__" },
290 { "at90usb1287", ARCH_AVR51
, "__AVR_AT90USB1287__" },
291 { "m3000f", ARCH_AVR51
, "__AVR_M3000F__" },
292 { "m3000s", ARCH_AVR51
, "__AVR_M3000S__" },
293 { "m3001b", ARCH_AVR51
, "__AVR_M3001B__" },
295 { "avr6", ARCH_AVR6
, NULL
},
296 { "atmega2560", ARCH_AVR6
, "__AVR_ATmega2560__" },
297 { "atmega2561", ARCH_AVR6
, "__AVR_ATmega2561__" },
298 /* Assembler only. */
299 { "avr1", ARCH_AVR1
, NULL
},
300 { "at90s1200", ARCH_AVR1
, "__AVR_AT90S1200__" },
301 { "attiny11", ARCH_AVR1
, "__AVR_ATtiny11__" },
302 { "attiny12", ARCH_AVR1
, "__AVR_ATtiny12__" },
303 { "attiny15", ARCH_AVR1
, "__AVR_ATtiny15__" },
304 { "attiny28", ARCH_AVR1
, "__AVR_ATtiny28__" },
305 { NULL
, ARCH_UNKNOWN
, NULL
}
308 int avr_case_values_threshold
= 30000;
310 /* Initialize the GCC target structure. */
311 #undef TARGET_ASM_ALIGNED_HI_OP
312 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
313 #undef TARGET_ASM_ALIGNED_SI_OP
314 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
315 #undef TARGET_ASM_UNALIGNED_HI_OP
316 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
317 #undef TARGET_ASM_UNALIGNED_SI_OP
318 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
319 #undef TARGET_ASM_INTEGER
320 #define TARGET_ASM_INTEGER avr_assemble_integer
321 #undef TARGET_ASM_FILE_START
322 #define TARGET_ASM_FILE_START avr_file_start
323 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
324 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
325 #undef TARGET_ASM_FILE_END
326 #define TARGET_ASM_FILE_END avr_file_end
328 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
329 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
330 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
331 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
332 #undef TARGET_FUNCTION_VALUE
333 #define TARGET_FUNCTION_VALUE avr_function_value
334 #undef TARGET_ATTRIBUTE_TABLE
335 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
336 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
337 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
338 #undef TARGET_INSERT_ATTRIBUTES
339 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
340 #undef TARGET_SECTION_TYPE_FLAGS
341 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
342 #undef TARGET_RTX_COSTS
343 #define TARGET_RTX_COSTS avr_rtx_costs
344 #undef TARGET_ADDRESS_COST
345 #define TARGET_ADDRESS_COST avr_address_cost
346 #undef TARGET_MACHINE_DEPENDENT_REORG
347 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
349 #undef TARGET_RETURN_IN_MEMORY
350 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
352 #undef TARGET_STRICT_ARGUMENT_NAMING
353 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
355 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
356 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
358 #undef TARGET_HARD_REGNO_SCRATCH_OK
359 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
361 struct gcc_target targetm
= TARGET_INITIALIZER
;
/* NOTE(review): garbled listing extract — leading integers are listing
   line numbers, statements are split mid-token, and several original
   lines (return type, braces, the MCU-not-found error exit, the loop
   break) are missing.  Text kept byte-identical; restore from the
   pristine avr.c.  Purpose (from visible code): handle -mmcu= lookup,
   set avr_current_arch/avr_extra_arch_macro, tune parameters, and
   create the tmp/zero register RTXes.  */
364 avr_override_options (void)
366 const struct mcu_type_s
*t
;
368 flag_delete_null_pointer_checks
= 0;
370 if (!PARAM_SET_P (PARAM_INLINE_CALL_COST
))
371 set_param_value ("inline-call-cost", 5);
/* Look up the requested MCU name in avr_mcu_types[].  */
373 for (t
= avr_mcu_types
; t
->name
; t
++)
374 if (strcmp (t
->name
, avr_mcu_name
) == 0)
/* Not found: print all known MCU names (the exit on this error path
   was lost in extraction).  */
379 fprintf (stderr
, "unknown MCU '%s' specified\nKnown MCU names:\n",
381 for (t
= avr_mcu_types
; t
->name
; t
++)
382 fprintf (stderr
," %s\n", t
->name
);
/* Record the selected architecture and its predefine macro.  */
385 avr_current_arch
= &avr_arch_types
[t
->arch
];
386 avr_extra_arch_macro
= t
->macro
;
/* When optimizing with tablejumps usable, lower the switch-table
   threshold.  */
388 if (optimize
&& !TARGET_NO_TABLEJUMP
)
389 avr_case_values_threshold
=
390 (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
392 tmp_reg_rtx
= gen_rtx_REG (QImode
, TMP_REGNO
);
393 zero_reg_rtx
= gen_rtx_REG (QImode
, ZERO_REGNO
);
395 init_machine_status
= avr_init_machine_status
;
398 /* return register class from register number. */
400 static const int reg_class_tab
[]={
401 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
402 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
403 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
404 GENERAL_REGS
, /* r0 - r15 */
405 LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,
406 LD_REGS
, /* r16 - 23 */
407 ADDW_REGS
,ADDW_REGS
, /* r24,r25 */
408 POINTER_X_REGS
,POINTER_X_REGS
, /* r26,27 */
409 POINTER_Y_REGS
,POINTER_Y_REGS
, /* r28,r29 */
410 POINTER_Z_REGS
,POINTER_Z_REGS
, /* r30,r31 */
411 STACK_REG
,STACK_REG
/* SPL,SPH */
414 /* Function to set up the backend function structure. */
416 static struct machine_function
*
417 avr_init_machine_status (void)
419 return ((struct machine_function
*)
420 ggc_alloc_cleared (sizeof (struct machine_function
)));
/* NOTE(review): garbled extract.  The return-type line (presumably
   `enum reg_class` — confirm), braces, and the range check / fallback
   return that precede this indexing were lost; restore from the
   pristine file.  Indexes reg_class_tab[] above.  */
423 /* Return register class for register R. */
426 avr_regno_reg_class (int r
)
429 return reg_class_tab
[r
];
433 /* Return nonzero if FUNC is a naked function. */
436 avr_naked_function_p (tree func
)
440 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
442 a
= lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
443 return a
!= NULL_TREE
;
446 /* Return nonzero if FUNC is an interrupt function as specified
447 by the "interrupt" attribute. */
450 interrupt_function_p (tree func
)
454 if (TREE_CODE (func
) != FUNCTION_DECL
)
457 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
458 return a
!= NULL_TREE
;
461 /* Return nonzero if FUNC is a signal function as specified
462 by the "signal" attribute. */
465 signal_function_p (tree func
)
469 if (TREE_CODE (func
) != FUNCTION_DECL
)
472 a
= lookup_attribute ("signal", DECL_ATTRIBUTES (func
));
473 return a
!= NULL_TREE
;
476 /* Return nonzero if FUNC is a OS_task function. */
479 avr_OS_task_function_p (tree func
)
483 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
485 a
= lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
486 return a
!= NULL_TREE
;
489 /* Return nonzero if FUNC is a OS_main function. */
492 avr_OS_main_function_p (tree func
)
496 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
498 a
= lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
499 return a
!= NULL_TREE
;
/* NOTE(review): garbled listing extract — return type, braces, local
   declarations (reg/count), the early-return on the OS_task/OS_main
   path, the fixed-regs skip inside the loop, and the final return of
   the count were all lost.  Text kept byte-identical; restore from
   the pristine avr.c.  */
502 /* Return the number of hard registers to push/pop in the prologue/epilogue
503 of the current function, and optionally store these registers in SET. */
506 avr_regs_to_save (HARD_REG_SET
*set
)
/* A register must be saved across calls in interrupt/signal handlers
   even if it is normally call-used.  */
509 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
510 || signal_function_p (current_function_decl
))
;
512 if (!reload_completed
)
513 cfun
->machine
->is_leaf
= leaf_function_p ();
516 CLEAR_HARD_REG_SET (*set
);
519 /* No need to save any registers if the function never returns or
520 has the "OS_task" or "OS_main" attribute. */
521 if (TREE_THIS_VOLATILE (current_function_decl
)
522 || cfun
->machine
->is_OS_task
523 || cfun
->machine
->is_OS_main
)
526 for (reg
= 0; reg
< 32; reg
++)
528 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
529 any global register variables. */
533 if ((int_or_sig_p
&& !cfun
->machine
->is_leaf
&& call_used_regs
[reg
])
534 || (df_regs_ever_live_p (reg
)
535 && (int_or_sig_p
|| !call_used_regs
[reg
])
/* Y (r28/r29) is handled separately when it serves as frame pointer.  */
536 && !(frame_pointer_needed
537 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
540 SET_HARD_REG_BIT (*set
, reg
);
/* NOTE(review): garbled listing extract — return type, braces, and the
   value returned on the FRAME_POINTER->STACK_POINTER branch were lost;
   restore from the pristine avr.c.  The visible path computes the
   arg_pointer elimination offset: frame size + saved return-address
   bytes (2 or 3 depending on EIJMP/EICALL) + 1 + saved registers
   (+2 when the frame pointer itself is pushed).  */
547 /* Compute offset between arg_pointer and frame_pointer. */
550 initial_elimination_offset (int from
, int to
)
552 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
556 int offset
= frame_pointer_needed
? 2 : 0;
557 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
559 offset
+= avr_regs_to_save (NULL
);
560 return get_frame_size () + (avr_pc_size
) + 1 + offset
;
564 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
565 frame pointer by +STARTING_FRAME_OFFSET.
566 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
567 avoids creating add/sub of offset in nonlocal goto and setjmp. */
569 rtx
avr_builtin_setjmp_frame_value (void)
571 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
572 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
575 /* Return 1 if the function epilogue is just a single "ret". */
578 avr_simple_epilogue (void)
580 return (! frame_pointer_needed
581 && get_frame_size () == 0
582 && avr_regs_to_save (NULL
) == 0
583 && ! interrupt_function_p (current_function_decl
)
584 && ! signal_function_p (current_function_decl
)
585 && ! avr_naked_function_p (current_function_decl
)
586 && ! TREE_THIS_VOLATILE (current_function_decl
));
/* NOTE(review): garbled listing extract — return type, braces, the
   local counters (visibly cur_seq/live_seq), and most of the counting
   logic between the visible conditions were lost; restore from the
   pristine avr.c.  From the visible code: scans r0..r17 plus the Y
   pair and returns the live-sequence length only when the live
   registers form one contiguous run, else 0 (used by the
   call-prologues minimize path).  */
589 /* This function checks sequence of live registers. */
592 sequent_regs_live (void)
598 for (reg
= 0; reg
< 18; ++reg
)
600 if (!call_used_regs
[reg
])
602 if (df_regs_ever_live_p (reg
))
612 if (!frame_pointer_needed
)
614 if (df_regs_ever_live_p (REG_Y
))
622 if (df_regs_ever_live_p (REG_Y
+1))
635 return (cur_seq
== live_seq
) ? live_seq
: 0;
638 /* Obtain the length sequence of insns. */
641 get_sequence_length (rtx insns
)
646 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
647 length
+= get_attr_length (insn
);
/* NOTE(review): garbled listing extract — return type, braces, local
   declarations (insn, set, live_seq, minimize, myfp, fp_plus_insns,
   start/end_sequence calls, several sub-expressions such as the
   added frame-size operand) were lost; restore from the pristine
   avr.c.  Visible structure: emit the RTL prologue — naked early-out,
   interrupt/signal entry (sei, save zero/tmp/SREG/RAMPZ), clear
   __zero_reg__, optional call-prologues minimized save, per-register
   pushes, frame-pointer setup, then two candidate frame-allocation
   sequences with the shorter one emitted.  */
652 /* Output function prologue. */
655 expand_prologue (void)
660 HOST_WIDE_INT size
= get_frame_size();
661 /* Define templates for push instructions. */
662 rtx pushbyte
= gen_rtx_MEM (QImode
,
663 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
664 rtx pushword
= gen_rtx_MEM (HImode
,
665 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
668 last_insn_address
= 0;
670 /* Init cfun->machine. */
671 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
672 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
673 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
674 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
675 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
677 /* Prologue: naked. */
678 if (cfun
->machine
->is_naked
)
683 avr_regs_to_save (&set
);
684 live_seq
= sequent_regs_live ();
685 minimize
= (TARGET_CALL_PROLOGUES
686 && !cfun
->machine
->is_interrupt
687 && !cfun
->machine
->is_signal
688 && !cfun
->machine
->is_OS_task
689 && !cfun
->machine
->is_OS_main
/* Interrupt/signal entry: optionally re-enable interrupts, then save
   __zero_reg__, __tmp_reg__ and SREG (via tmp as scratch).  */
692 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
694 if (cfun
->machine
->is_interrupt
)
696 /* Enable interrupts. */
697 insn
= emit_insn (gen_enable_interrupt ());
698 RTX_FRAME_RELATED_P (insn
) = 1;
702 insn
= emit_move_insn (pushbyte
, zero_reg_rtx
);
703 RTX_FRAME_RELATED_P (insn
) = 1;
706 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
707 RTX_FRAME_RELATED_P (insn
) = 1;
710 insn
= emit_move_insn (tmp_reg_rtx
,
711 gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
712 RTX_FRAME_RELATED_P (insn
) = 1;
713 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
714 RTX_FRAME_RELATED_P (insn
) = 1;
/* Save RAMPZ too when the Z pair is saved (condition's first half was
   lost in extraction).  */
718 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
720 insn
= emit_move_insn (tmp_reg_rtx
,
721 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
722 RTX_FRAME_RELATED_P (insn
) = 1;
723 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
724 RTX_FRAME_RELATED_P (insn
) = 1;
727 /* Clear zero reg. */
728 insn
= emit_move_insn (zero_reg_rtx
, const0_rtx
);
729 RTX_FRAME_RELATED_P (insn
) = 1;
731 /* Prevent any attempt to delete the setting of ZERO_REG! */
732 emit_use (zero_reg_rtx
);
/* Minimized prologue: delegate saving to the library sequence.  */
734 if (minimize
&& (frame_pointer_needed
735 || (AVR_2_BYTE_PC
&& live_seq
> 6)
738 insn
= emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
739 gen_int_mode (size
, HImode
));
740 RTX_FRAME_RELATED_P (insn
) = 1;
743 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
744 gen_int_mode (size
+ live_seq
, HImode
)));
745 RTX_FRAME_RELATED_P (insn
) = 1;
/* Otherwise push each register that needs saving.  */
750 for (reg
= 0; reg
< 32; ++reg
)
752 if (TEST_HARD_REG_BIT (set
, reg
))
754 /* Emit push of register to save. */
755 insn
=emit_move_insn (pushbyte
, gen_rtx_REG (QImode
, reg
));
756 RTX_FRAME_RELATED_P (insn
) = 1;
759 if (frame_pointer_needed
)
761 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
763 /* Push frame pointer. */
764 insn
= emit_move_insn (pushword
, frame_pointer_rtx
);
765 RTX_FRAME_RELATED_P (insn
) = 1;
770 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
771 RTX_FRAME_RELATED_P (insn
) = 1;
775 /* Creating a frame can be done by direct manipulation of the
776 stack or via the frame pointer. These two methods are:
783 the optimum method depends on function type, stack and frame size.
784 To avoid a complex logic, both methods are tested and shortest
788 rtx sp_plus_insns
= NULL_RTX
;
790 if (TARGET_TINY_STACK
)
792 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
793 over 'sbiw' (2 cycles, same size). */
794 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
798 /* Normal sized addition. */
799 myfp
= frame_pointer_rtx
;
802 /* Method 1-Adjust frame pointer. */
805 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
806 RTX_FRAME_RELATED_P (insn
) = 1;
809 emit_move_insn (myfp
,
810 gen_rtx_PLUS (GET_MODE(myfp
), myfp
,
813 RTX_FRAME_RELATED_P (insn
) = 1;
815 /* Copy to stack pointer. */
816 if (TARGET_TINY_STACK
)
818 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
819 RTX_FRAME_RELATED_P (insn
) = 1;
821 else if (TARGET_NO_INTERRUPTS
822 || cfun
->machine
->is_signal
823 || cfun
->machine
->is_OS_main
)
826 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
828 RTX_FRAME_RELATED_P (insn
) = 1;
830 else if (cfun
->machine
->is_interrupt
)
832 insn
= emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
834 RTX_FRAME_RELATED_P (insn
) = 1;
838 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
839 RTX_FRAME_RELATED_P (insn
) = 1;
842 fp_plus_insns
= get_insns ();
845 /* Method 2-Adjust Stack pointer. */
851 emit_move_insn (stack_pointer_rtx
,
852 gen_rtx_PLUS (HImode
,
856 RTX_FRAME_RELATED_P (insn
) = 1;
859 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
860 RTX_FRAME_RELATED_P (insn
) = 1;
862 sp_plus_insns
= get_insns ();
866 /* Use shortest method. */
867 if (size
<= 6 && (get_sequence_length (sp_plus_insns
)
868 < get_sequence_length (fp_plus_insns
)))
869 emit_insn (sp_plus_insns
);
871 emit_insn (fp_plus_insns
);
/* NOTE(review): garbled listing extract — return type (static void per
   the forward declaration), braces, else keywords, an early return on
   the naked path, and the fprintf frame-size argument (presumably
   get_frame_size ()) were lost; restore from the pristine avr.c.
   Emits a human-readable prologue summary comment into the asm
   output.  */
877 /* Output summary at end of function prologue. */
880 avr_asm_function_end_prologue (FILE *file
)
882 if (cfun
->machine
->is_naked
)
884 fputs ("/* prologue: naked */\n", file
);
888 if (cfun
->machine
->is_interrupt
)
890 fputs ("/* prologue: Interrupt */\n", file
);
892 else if (cfun
->machine
->is_signal
)
894 fputs ("/* prologue: Signal */\n", file
);
897 fputs ("/* prologue: function */\n", file
);
899 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
/* NOTE(review): garbled listing extract — return type, braces, the
   first half of the condition (visibly only its tail survives;
   presumably a reload_completed / cfun->machine guard — confirm) and
   the return statements were lost; restore from the pristine avr.c.
   Implements EPILOGUE_USES: every register is treated as used by the
   epilogue of an interrupt/signal handler, which keeps their restores
   alive.  */
904 /* Implement EPILOGUE_USES. */
907 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
911 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
/* NOTE(review): garbled listing extract — return type, braces, local
   declarations (reg, set, live_seq, minimize, myfp, fp_plus_insns,
   sequence start/end calls, the subtracted frame-size operands and
   the early return after the minimized path) were lost; restore from
   the pristine avr.c.  Visible structure: mirror of expand_prologue —
   naked early-out, optional minimized library restore, frame
   deallocation via the shorter of two candidate sequences, register
   pops, SREG/RAMPZ/tmp/zero restores for interrupt/signal handlers,
   and the final return insn.  */
916 /* Output RTL epilogue. */
919 expand_epilogue (void)
925 HOST_WIDE_INT size
= get_frame_size();
927 /* epilogue: naked */
928 if (cfun
->machine
->is_naked
)
930 emit_jump_insn (gen_return ());
934 avr_regs_to_save (&set
);
935 live_seq
= sequent_regs_live ();
936 minimize
= (TARGET_CALL_PROLOGUES
937 && !cfun
->machine
->is_interrupt
938 && !cfun
->machine
->is_signal
939 && !cfun
->machine
->is_OS_task
940 && !cfun
->machine
->is_OS_main
/* Minimized epilogue: delegate restoring to the library sequence.  */
943 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
945 if (frame_pointer_needed
)
947 /* Get rid of frame. */
948 emit_move_insn(frame_pointer_rtx
,
949 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
950 gen_int_mode (size
, HImode
)));
954 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
957 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
961 if (frame_pointer_needed
)
965 /* Try two methods to adjust stack and select shortest. */
968 rtx sp_plus_insns
= NULL_RTX
;
970 if (TARGET_TINY_STACK
)
972 /* The high byte (r29) doesn't change - prefer 'subi'
973 (1 cycle) over 'sbiw' (2 cycles, same size). */
974 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
978 /* Normal sized addition. */
979 myfp
= frame_pointer_rtx
;
982 /* Method 1-Adjust frame pointer. */
985 emit_move_insn (myfp
,
986 gen_rtx_PLUS (HImode
, myfp
,
990 /* Copy to stack pointer. */
991 if (TARGET_TINY_STACK
)
993 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
995 else if (TARGET_NO_INTERRUPTS
996 || cfun
->machine
->is_signal
)
998 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
1001 else if (cfun
->machine
->is_interrupt
)
1003 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
1004 frame_pointer_rtx
));
1008 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1011 fp_plus_insns
= get_insns ();
1014 /* Method 2-Adjust Stack pointer. */
1019 emit_move_insn (stack_pointer_rtx
,
1020 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
1024 sp_plus_insns
= get_insns ();
1028 /* Use shortest method. */
1029 if (size
<= 5 && (get_sequence_length (sp_plus_insns
)
1030 < get_sequence_length (fp_plus_insns
)))
1031 emit_insn (sp_plus_insns
);
1033 emit_insn (fp_plus_insns
);
1035 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1037 /* Restore previous frame_pointer. */
1038 emit_insn (gen_pophi (frame_pointer_rtx
));
1041 /* Restore used registers. */
1042 for (reg
= 31; reg
>= 0; --reg
)
1044 if (TEST_HARD_REG_BIT (set
, reg
))
1045 emit_insn (gen_popqi (gen_rtx_REG (QImode
, reg
)));
1047 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1049 /* Restore RAMPZ using tmp reg as scratch. */
1051 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
1053 emit_insn (gen_popqi (tmp_reg_rtx
));
1054 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(RAMPZ_ADDR
)),
1058 /* Restore SREG using tmp reg as scratch. */
1059 emit_insn (gen_popqi (tmp_reg_rtx
));
1061 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(SREG_ADDR
)),
1064 /* Restore tmp REG. */
1065 emit_insn (gen_popqi (tmp_reg_rtx
));
1067 /* Restore zero REG. */
1068 emit_insn (gen_popqi (zero_reg_rtx
));
1071 emit_jump_insn (gen_return ());
1075 /* Output summary messages at beginning of function epilogue. */
1078 avr_asm_function_begin_epilogue (FILE *file
)
1080 fprintf (file
, "/* epilogue start */\n");
/* NOTE(review): garbled listing extract — return type, braces, several
   `r = ...` assignments on the accepting branches (POINTER_REGS /
   ALL_REGS — confirm), and parts of the PLUS-offset classification
   were lost; restore from the pristine avr.c.  Classifies X as an AVR
   address: base register, constant address, base+constant within
   MAX_LD_OFFSET, or PRE_DEC/POST_INC; returns the accepting register
   class as int, 0 for NO_REGS.  */
1083 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1084 machine for a memory operand of mode MODE. */
1087 legitimate_address_p (enum machine_mode mode
, rtx x
, int strict
)
1089 enum reg_class r
= NO_REGS
;
1091 if (TARGET_ALL_DEBUG
)
1093 fprintf (stderr
, "mode: (%s) %s %s %s %s:",
1094 GET_MODE_NAME(mode
),
1095 strict
? "(strict)": "",
1096 reload_completed
? "(reload_completed)": "",
1097 reload_in_progress
? "(reload_in_progress)": "",
1098 reg_renumber
? "(reg_renumber)" : "");
1099 if (GET_CODE (x
) == PLUS
1100 && REG_P (XEXP (x
, 0))
1101 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1102 && INTVAL (XEXP (x
, 1)) >= 0
1103 && INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
1106 fprintf (stderr
, "(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1107 true_regnum (XEXP (x
, 0)));
1110 if (!strict
&& GET_CODE (x
) == SUBREG
)
/* Plain base register.  */
1112 if (REG_P (x
) && (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
1113 : REG_OK_FOR_BASE_NOSTRICT_P (x
)))
1115 else if (CONSTANT_ADDRESS_P (x
))
/* Base register plus non-negative constant displacement.  */
1117 else if (GET_CODE (x
) == PLUS
1118 && REG_P (XEXP (x
, 0))
1119 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1120 && INTVAL (XEXP (x
, 1)) >= 0)
1122 int fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1126 || REGNO (XEXP (x
,0)) == REG_X
1127 || REGNO (XEXP (x
,0)) == REG_Y
1128 || REGNO (XEXP (x
,0)) == REG_Z
)
1129 r
= BASE_POINTER_REGS
;
1130 if (XEXP (x
,0) == frame_pointer_rtx
1131 || XEXP (x
,0) == arg_pointer_rtx
)
1132 r
= BASE_POINTER_REGS
;
1134 else if (frame_pointer_needed
&& XEXP (x
,0) == frame_pointer_rtx
)
/* Auto-modify addressing.  */
1137 else if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1138 && REG_P (XEXP (x
, 0))
1139 && (strict
? REG_OK_FOR_BASE_STRICT_P (XEXP (x
, 0))
1140 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x
, 0))))
1144 if (TARGET_ALL_DEBUG
)
1146 fprintf (stderr
, " ret = %c\n", r
+ '0');
1148 return r
== NO_REGS
? 0 : (int)r
;
/* NOTE(review): garbled listing extract — return type (rtx,
   presumably), braces, and the final `return x;` were lost; restore
   from the pristine avr.c.  Forces (reg+reg) sums and over-large
   constant displacements into a register so the result is a valid AVR
   address for MODE.  */
1151 /* Attempts to replace X with a valid
1152 memory address for an operand of mode MODE */
1155 legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1158 if (TARGET_ALL_DEBUG
)
1160 fprintf (stderr
, "legitimize_address mode: %s", GET_MODE_NAME(mode
));
1164 if (GET_CODE (oldx
) == PLUS
1165 && REG_P (XEXP (oldx
,0)))
/* reg+reg sums are never directly addressable on AVR.  */
1167 if (REG_P (XEXP (oldx
,1)))
1168 x
= force_reg (GET_MODE (oldx
), oldx
);
1169 else if (GET_CODE (XEXP (oldx
, 1)) == CONST_INT
)
/* Displacements beyond the LDD range must go through a register.  */
1171 int offs
= INTVAL (XEXP (oldx
,1));
1172 if (frame_pointer_rtx
!= XEXP (oldx
,0))
1173 if (offs
> MAX_LD_OFFSET (mode
))
1175 if (TARGET_ALL_DEBUG
)
1176 fprintf (stderr
, "force_reg (big offset)\n");
1177 x
= force_reg (GET_MODE (oldx
), oldx
);
1185 /* Return a pointer register name as a string. */
1188 ptrreg_to_str (int regno
)
1192 case REG_X
: return "X";
1193 case REG_Y
: return "Y";
1194 case REG_Z
: return "Z";
1196 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1201 /* Return the condition name as a string.
1202 Used in conditional jump constructing */
1205 cond_string (enum rtx_code code
)
1214 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1219 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1232 /* Output ADDR to FILE as address. */
1235 print_operand_address (FILE *file
, rtx addr
)
1237 switch (GET_CODE (addr
))
1240 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1244 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1248 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1252 if (CONSTANT_ADDRESS_P (addr
)
1253 && ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (addr
))
1254 || GET_CODE (addr
) == LABEL_REF
))
1256 fprintf (file
, "gs(");
1257 output_addr_const (file
,addr
);
1258 fprintf (file
,")");
1261 output_addr_const (file
, addr
);
1266 /* Output X as assembler operand to file FILE. */
1269 print_operand (FILE *file
, rtx x
, int code
)
1273 if (code
>= 'A' && code
<= 'D')
1278 if (!AVR_HAVE_JMP_CALL
)
1281 else if (code
== '!')
1283 if (AVR_HAVE_EIJMP_EICALL
)
1288 if (x
== zero_reg_rtx
)
1289 fprintf (file
, "__zero_reg__");
1291 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1293 else if (GET_CODE (x
) == CONST_INT
)
1294 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1295 else if (GET_CODE (x
) == MEM
)
1297 rtx addr
= XEXP (x
,0);
1299 if (CONSTANT_P (addr
) && abcd
)
1302 output_address (addr
);
1303 fprintf (file
, ")+%d", abcd
);
1305 else if (code
== 'o')
1307 if (GET_CODE (addr
) != PLUS
)
1308 fatal_insn ("bad address, not (reg+disp):", addr
);
1310 print_operand (file
, XEXP (addr
, 1), 0);
1312 else if (code
== 'p' || code
== 'r')
1314 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1315 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1318 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1320 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1322 else if (GET_CODE (addr
) == PLUS
)
1324 print_operand_address (file
, XEXP (addr
,0));
1325 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1326 fatal_insn ("internal compiler error. Bad address:"
1329 print_operand (file
, XEXP (addr
,1), code
);
1332 print_operand_address (file
, addr
);
1334 else if (GET_CODE (x
) == CONST_DOUBLE
)
1338 if (GET_MODE (x
) != SFmode
)
1339 fatal_insn ("internal compiler error. Unknown mode:", x
);
1340 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1341 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1342 fprintf (file
, "0x%lx", val
);
1344 else if (code
== 'j')
1345 fputs (cond_string (GET_CODE (x
)), file
);
1346 else if (code
== 'k')
1347 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1349 print_operand_address (file
, x
);
1352 /* Update the condition code in the INSN. */
1355 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1359 switch (get_attr_cc (insn
))
1362 /* Insn does not affect CC at all. */
1370 set
= single_set (insn
);
1374 cc_status
.flags
|= CC_NO_OVERFLOW
;
1375 cc_status
.value1
= SET_DEST (set
);
1380 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1381 The V flag may or may not be known but that's ok because
1382 alter_cond will change tests to use EQ/NE. */
1383 set
= single_set (insn
);
1387 cc_status
.value1
= SET_DEST (set
);
1388 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1393 set
= single_set (insn
);
1396 cc_status
.value1
= SET_SRC (set
);
1400 /* Insn doesn't leave CC in a usable state. */
1403 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1404 set
= single_set (insn
);
1407 rtx src
= SET_SRC (set
);
1409 if (GET_CODE (src
) == ASHIFTRT
1410 && GET_MODE (src
) == QImode
)
1412 rtx x
= XEXP (src
, 1);
1414 if (GET_CODE (x
) == CONST_INT
1418 cc_status
.value1
= SET_DEST (set
);
1419 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1427 /* Return maximum number of consecutive registers of
1428 class CLASS needed to hold a value of mode MODE. */
1431 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED
,enum machine_mode mode
)
1433 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
1436 /* Choose mode for jump insn:
1437 1 - relative jump in range -63 <= x <= 62 ;
1438 2 - relative jump in range -2046 <= x <= 2045 ;
1439 3 - absolute jump (only for ATmega[16]03). */
1442 avr_jump_mode (rtx x
, rtx insn
)
1444 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_MODE (x
) == LABEL_REF
1445 ? XEXP (x
, 0) : x
));
1446 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
1447 int jump_distance
= cur_addr
- dest_addr
;
1449 if (-63 <= jump_distance
&& jump_distance
<= 62)
1451 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
1453 else if (AVR_HAVE_JMP_CALL
)
1459 /* return an AVR condition jump commands.
1460 X is a comparison RTX.
1461 LEN is a number returned by avr_jump_mode function.
1462 if REVERSE nonzero then condition code in X must be reversed. */
1465 ret_cond_branch (rtx x
, int len
, int reverse
)
1467 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
1472 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1473 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1475 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1476 AS1 (brmi
,.+2) CR_TAB
1478 (AS1 (breq
,.+6) CR_TAB
1479 AS1 (brmi
,.+4) CR_TAB
1483 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1485 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1486 AS1 (brlt
,.+2) CR_TAB
1488 (AS1 (breq
,.+6) CR_TAB
1489 AS1 (brlt
,.+4) CR_TAB
1492 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1494 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1495 AS1 (brlo
,.+2) CR_TAB
1497 (AS1 (breq
,.+6) CR_TAB
1498 AS1 (brlo
,.+4) CR_TAB
1501 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1502 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1504 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1505 AS1 (brpl
,.+2) CR_TAB
1507 (AS1 (breq
,.+2) CR_TAB
1508 AS1 (brpl
,.+4) CR_TAB
1511 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1513 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1514 AS1 (brge
,.+2) CR_TAB
1516 (AS1 (breq
,.+2) CR_TAB
1517 AS1 (brge
,.+4) CR_TAB
1520 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1522 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1523 AS1 (brsh
,.+2) CR_TAB
1525 (AS1 (breq
,.+2) CR_TAB
1526 AS1 (brsh
,.+4) CR_TAB
1534 return AS1 (br
%k1
,%0);
1536 return (AS1 (br
%j1
,.+2) CR_TAB
1539 return (AS1 (br
%j1
,.+4) CR_TAB
1548 return AS1 (br
%j1
,%0);
1550 return (AS1 (br
%k1
,.+2) CR_TAB
1553 return (AS1 (br
%k1
,.+4) CR_TAB
1561 /* Predicate function for immediate operand which fits to byte (8bit) */
1564 byte_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1566 return (GET_CODE (op
) == CONST_INT
1567 && INTVAL (op
) <= 0xff && INTVAL (op
) >= 0);
1570 /* Output all insn addresses and their sizes into the assembly language
1571 output file. This is helpful for debugging whether the length attributes
1572 in the md file are correct.
1573 Output insn cost for next insn. */
1576 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
1577 int num_operands ATTRIBUTE_UNUSED
)
1579 int uid
= INSN_UID (insn
);
1581 if (TARGET_INSN_SIZE_DUMP
|| TARGET_ALL_DEBUG
)
1583 fprintf (asm_out_file
, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1584 INSN_ADDRESSES (uid
),
1585 INSN_ADDRESSES (uid
) - last_insn_address
,
1586 rtx_cost (PATTERN (insn
), INSN
, !optimize_size
));
1588 last_insn_address
= INSN_ADDRESSES (uid
);
1591 /* Return 0 if undefined, 1 if always true or always false. */
1594 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
1596 unsigned int max
= (mode
== QImode
? 0xff :
1597 mode
== HImode
? 0xffff :
1598 mode
== SImode
? 0xffffffff : 0);
1599 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
1601 if (unsigned_condition (op
) != op
)
1604 if (max
!= (INTVAL (x
) & max
)
1605 && INTVAL (x
) != 0xff)
1612 /* Returns nonzero if REGNO is the number of a hard
1613 register in which function arguments are sometimes passed. */
1616 function_arg_regno_p(int r
)
1618 return (r
>= 8 && r
<= 25);
1621 /* Initializing the variable cum for the state at the beginning
1622 of the argument list. */
1625 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
1626 tree fndecl ATTRIBUTE_UNUSED
)
1629 cum
->regno
= FIRST_CUM_REG
;
1630 if (!libname
&& fntype
)
1632 int stdarg
= (TYPE_ARG_TYPES (fntype
) != 0
1633 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
1634 != void_type_node
));
1640 /* Returns the number of registers to allocate for a function argument. */
1643 avr_num_arg_regs (enum machine_mode mode
, tree type
)
1647 if (mode
== BLKmode
)
1648 size
= int_size_in_bytes (type
);
1650 size
= GET_MODE_SIZE (mode
);
1652 /* Align all function arguments to start in even-numbered registers.
1653 Odd-sized arguments leave holes above them. */
1655 return (size
+ 1) & ~1;
1658 /* Controls whether a function argument is passed
1659 in a register, and which register. */
1662 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1663 int named ATTRIBUTE_UNUSED
)
1665 int bytes
= avr_num_arg_regs (mode
, type
);
1667 if (cum
->nregs
&& bytes
<= cum
->nregs
)
1668 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
1673 /* Update the summarizer variable CUM to advance past an argument
1674 in the argument list. */
1677 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1678 int named ATTRIBUTE_UNUSED
)
1680 int bytes
= avr_num_arg_regs (mode
, type
);
1682 cum
->nregs
-= bytes
;
1683 cum
->regno
-= bytes
;
1685 if (cum
->nregs
<= 0)
1688 cum
->regno
= FIRST_CUM_REG
;
1692 /***********************************************************************
1693 Functions for outputting various mov's for a various modes
1694 ************************************************************************/
1696 output_movqi (rtx insn
, rtx operands
[], int *l
)
1699 rtx dest
= operands
[0];
1700 rtx src
= operands
[1];
1708 if (register_operand (dest
, QImode
))
1710 if (register_operand (src
, QImode
)) /* mov r,r */
1712 if (test_hard_reg_class (STACK_REG
, dest
))
1713 return AS2 (out
,%0,%1);
1714 else if (test_hard_reg_class (STACK_REG
, src
))
1715 return AS2 (in
,%0,%1);
1717 return AS2 (mov
,%0,%1);
1719 else if (CONSTANT_P (src
))
1721 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1722 return AS2 (ldi
,%0,lo8(%1));
1724 if (GET_CODE (src
) == CONST_INT
)
1726 if (src
== const0_rtx
) /* mov r,L */
1727 return AS1 (clr
,%0);
1728 else if (src
== const1_rtx
)
1731 return (AS1 (clr
,%0) CR_TAB
1734 else if (src
== constm1_rtx
)
1736 /* Immediate constants -1 to any register */
1738 return (AS1 (clr
,%0) CR_TAB
1743 int bit_nr
= exact_log2 (INTVAL (src
));
1749 output_asm_insn ((AS1 (clr
,%0) CR_TAB
1752 avr_output_bld (operands
, bit_nr
);
1759 /* Last resort, larger than loading from memory. */
1761 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1762 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1763 AS2 (mov
,%0,r31
) CR_TAB
1764 AS2 (mov
,r31
,__tmp_reg__
));
1766 else if (GET_CODE (src
) == MEM
)
1767 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1769 else if (GET_CODE (dest
) == MEM
)
1773 if (src
== const0_rtx
)
1774 operands
[1] = zero_reg_rtx
;
1776 templ
= out_movqi_mr_r (insn
, operands
, real_l
);
1779 output_asm_insn (templ
, operands
);
1788 output_movhi (rtx insn
, rtx operands
[], int *l
)
1791 rtx dest
= operands
[0];
1792 rtx src
= operands
[1];
1798 if (register_operand (dest
, HImode
))
1800 if (register_operand (src
, HImode
)) /* mov r,r */
1802 if (test_hard_reg_class (STACK_REG
, dest
))
1804 if (TARGET_TINY_STACK
)
1805 return *l
= 1, AS2 (out
,__SP_L__
,%A1
);
1806 /* Use simple load of stack pointer if no interrupts are
1808 else if (TARGET_NO_INTERRUPTS
)
1809 return *l
= 2, (AS2 (out
,__SP_H__
,%B1
) CR_TAB
1810 AS2 (out
,__SP_L__
,%A1
));
1812 return (AS2 (in
,__tmp_reg__
,__SREG__
) CR_TAB
1814 AS2 (out
,__SP_H__
,%B1
) CR_TAB
1815 AS2 (out
,__SREG__
,__tmp_reg__
) CR_TAB
1816 AS2 (out
,__SP_L__
,%A1
));
1818 else if (test_hard_reg_class (STACK_REG
, src
))
1821 return (AS2 (in
,%A0
,__SP_L__
) CR_TAB
1822 AS2 (in
,%B0
,__SP_H__
));
1828 return (AS2 (movw
,%0,%1));
1833 return (AS2 (mov
,%A0
,%A1
) CR_TAB
1837 else if (CONSTANT_P (src
))
1839 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1842 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
1843 AS2 (ldi
,%B0
,hi8(%1)));
1846 if (GET_CODE (src
) == CONST_INT
)
1848 if (src
== const0_rtx
) /* mov r,L */
1851 return (AS1 (clr
,%A0
) CR_TAB
1854 else if (src
== const1_rtx
)
1857 return (AS1 (clr
,%A0
) CR_TAB
1858 AS1 (clr
,%B0
) CR_TAB
1861 else if (src
== constm1_rtx
)
1863 /* Immediate constants -1 to any register */
1865 return (AS1 (clr
,%0) CR_TAB
1866 AS1 (dec
,%A0
) CR_TAB
1871 int bit_nr
= exact_log2 (INTVAL (src
));
1877 output_asm_insn ((AS1 (clr
,%A0
) CR_TAB
1878 AS1 (clr
,%B0
) CR_TAB
1881 avr_output_bld (operands
, bit_nr
);
1887 if ((INTVAL (src
) & 0xff) == 0)
1890 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1891 AS1 (clr
,%A0
) CR_TAB
1892 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1893 AS2 (mov
,%B0
,r31
) CR_TAB
1894 AS2 (mov
,r31
,__tmp_reg__
));
1896 else if ((INTVAL (src
) & 0xff00) == 0)
1899 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1900 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1901 AS2 (mov
,%A0
,r31
) CR_TAB
1902 AS1 (clr
,%B0
) CR_TAB
1903 AS2 (mov
,r31
,__tmp_reg__
));
1907 /* Last resort, equal to loading from memory. */
1909 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1910 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1911 AS2 (mov
,%A0
,r31
) CR_TAB
1912 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1913 AS2 (mov
,%B0
,r31
) CR_TAB
1914 AS2 (mov
,r31
,__tmp_reg__
));
1916 else if (GET_CODE (src
) == MEM
)
1917 return out_movhi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1919 else if (GET_CODE (dest
) == MEM
)
1923 if (src
== const0_rtx
)
1924 operands
[1] = zero_reg_rtx
;
1926 templ
= out_movhi_mr_r (insn
, operands
, real_l
);
1929 output_asm_insn (templ
, operands
);
1934 fatal_insn ("invalid insn:", insn
);
1939 out_movqi_r_mr (rtx insn
, rtx op
[], int *l
)
1943 rtx x
= XEXP (src
, 0);
1949 if (CONSTANT_ADDRESS_P (x
))
1951 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
1954 return AS2 (in
,%0,__SREG__
);
1956 if (optimize
> 0 && io_address_operand (x
, QImode
))
1959 return AS2 (in
,%0,%1-0x20);
1962 return AS2 (lds
,%0,%1);
1964 /* memory access by reg+disp */
1965 else if (GET_CODE (x
) == PLUS
1966 && REG_P (XEXP (x
,0))
1967 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
1969 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (src
))) >= 63)
1971 int disp
= INTVAL (XEXP (x
,1));
1972 if (REGNO (XEXP (x
,0)) != REG_Y
)
1973 fatal_insn ("incorrect insn:",insn
);
1975 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1976 return *l
= 3, (AS2 (adiw
,r28
,%o1
-63) CR_TAB
1977 AS2 (ldd
,%0,Y
+63) CR_TAB
1978 AS2 (sbiw
,r28
,%o1
-63));
1980 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
1981 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
1982 AS2 (ld
,%0,Y
) CR_TAB
1983 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
1984 AS2 (sbci
,r29
,hi8(%o1
)));
1986 else if (REGNO (XEXP (x
,0)) == REG_X
)
1988 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1989 it but I have this situation with extremal optimizing options. */
1990 if (reg_overlap_mentioned_p (dest
, XEXP (x
,0))
1991 || reg_unused_after (insn
, XEXP (x
,0)))
1992 return *l
= 2, (AS2 (adiw
,r26
,%o1
) CR_TAB
1995 return *l
= 3, (AS2 (adiw
,r26
,%o1
) CR_TAB
1996 AS2 (ld
,%0,X
) CR_TAB
1997 AS2 (sbiw
,r26
,%o1
));
2000 return AS2 (ldd
,%0,%1);
2003 return AS2 (ld
,%0,%1);
2007 out_movhi_r_mr (rtx insn
, rtx op
[], int *l
)
2011 rtx base
= XEXP (src
, 0);
2012 int reg_dest
= true_regnum (dest
);
2013 int reg_base
= true_regnum (base
);
2014 /* "volatile" forces reading low byte first, even if less efficient,
2015 for correct operation with 16-bit I/O registers. */
2016 int mem_volatile_p
= MEM_VOLATILE_P (src
);
2024 if (reg_dest
== reg_base
) /* R = (R) */
2027 return (AS2 (ld
,__tmp_reg__
,%1+) CR_TAB
2028 AS2 (ld
,%B0
,%1) CR_TAB
2029 AS2 (mov
,%A0
,__tmp_reg__
));
2031 else if (reg_base
== REG_X
) /* (R26) */
2033 if (reg_unused_after (insn
, base
))
2036 return (AS2 (ld
,%A0
,X
+) CR_TAB
2040 return (AS2 (ld
,%A0
,X
+) CR_TAB
2041 AS2 (ld
,%B0
,X
) CR_TAB
2047 return (AS2 (ld
,%A0
,%1) CR_TAB
2048 AS2 (ldd
,%B0
,%1+1));
2051 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2053 int disp
= INTVAL (XEXP (base
, 1));
2054 int reg_base
= true_regnum (XEXP (base
, 0));
2056 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2058 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2059 fatal_insn ("incorrect insn:",insn
);
2061 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2062 return *l
= 4, (AS2 (adiw
,r28
,%o1
-62) CR_TAB
2063 AS2 (ldd
,%A0
,Y
+62) CR_TAB
2064 AS2 (ldd
,%B0
,Y
+63) CR_TAB
2065 AS2 (sbiw
,r28
,%o1
-62));
2067 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2068 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2069 AS2 (ld
,%A0
,Y
) CR_TAB
2070 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2071 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2072 AS2 (sbci
,r29
,hi8(%o1
)));
2074 if (reg_base
== REG_X
)
2076 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2077 it but I have this situation with extremal
2078 optimization options. */
2081 if (reg_base
== reg_dest
)
2082 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2083 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2084 AS2 (ld
,%B0
,X
) CR_TAB
2085 AS2 (mov
,%A0
,__tmp_reg__
));
2087 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2088 AS2 (ld
,%A0
,X
+) CR_TAB
2089 AS2 (ld
,%B0
,X
) CR_TAB
2090 AS2 (sbiw
,r26
,%o1
+1));
2093 if (reg_base
== reg_dest
)
2096 return (AS2 (ldd
,__tmp_reg__
,%A1
) CR_TAB
2097 AS2 (ldd
,%B0
,%B1
) CR_TAB
2098 AS2 (mov
,%A0
,__tmp_reg__
));
2102 return (AS2 (ldd
,%A0
,%A1
) CR_TAB
2105 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2107 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2108 fatal_insn ("incorrect insn:", insn
);
2112 if (REGNO (XEXP (base
, 0)) == REG_X
)
2115 return (AS2 (sbiw
,r26
,2) CR_TAB
2116 AS2 (ld
,%A0
,X
+) CR_TAB
2117 AS2 (ld
,%B0
,X
) CR_TAB
2123 return (AS2 (sbiw
,%r1
,2) CR_TAB
2124 AS2 (ld
,%A0
,%p1
) CR_TAB
2125 AS2 (ldd
,%B0
,%p1
+1));
2130 return (AS2 (ld
,%B0
,%1) CR_TAB
2133 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2135 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2136 fatal_insn ("incorrect insn:", insn
);
2139 return (AS2 (ld
,%A0
,%1) CR_TAB
2142 else if (CONSTANT_ADDRESS_P (base
))
2144 if (optimize
> 0 && io_address_operand (base
, HImode
))
2147 return (AS2 (in
,%A0
,%A1
-0x20) CR_TAB
2148 AS2 (in
,%B0
,%B1
-0x20));
2151 return (AS2 (lds
,%A0
,%A1
) CR_TAB
2155 fatal_insn ("unknown move insn:",insn
);
2160 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
2164 rtx base
= XEXP (src
, 0);
2165 int reg_dest
= true_regnum (dest
);
2166 int reg_base
= true_regnum (base
);
2174 if (reg_base
== REG_X
) /* (R26) */
2176 if (reg_dest
== REG_X
)
2177 /* "ld r26,-X" is undefined */
2178 return *l
=7, (AS2 (adiw
,r26
,3) CR_TAB
2179 AS2 (ld
,r29
,X
) CR_TAB
2180 AS2 (ld
,r28
,-X
) CR_TAB
2181 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2182 AS2 (sbiw
,r26
,1) CR_TAB
2183 AS2 (ld
,r26
,X
) CR_TAB
2184 AS2 (mov
,r27
,__tmp_reg__
));
2185 else if (reg_dest
== REG_X
- 2)
2186 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2187 AS2 (ld
,%B0
,X
+) CR_TAB
2188 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2189 AS2 (ld
,%D0
,X
) CR_TAB
2190 AS2 (mov
,%C0
,__tmp_reg__
));
2191 else if (reg_unused_after (insn
, base
))
2192 return *l
=4, (AS2 (ld
,%A0
,X
+) CR_TAB
2193 AS2 (ld
,%B0
,X
+) CR_TAB
2194 AS2 (ld
,%C0
,X
+) CR_TAB
2197 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2198 AS2 (ld
,%B0
,X
+) CR_TAB
2199 AS2 (ld
,%C0
,X
+) CR_TAB
2200 AS2 (ld
,%D0
,X
) CR_TAB
2205 if (reg_dest
== reg_base
)
2206 return *l
=5, (AS2 (ldd
,%D0
,%1+3) CR_TAB
2207 AS2 (ldd
,%C0
,%1+2) CR_TAB
2208 AS2 (ldd
,__tmp_reg__
,%1+1) CR_TAB
2209 AS2 (ld
,%A0
,%1) CR_TAB
2210 AS2 (mov
,%B0
,__tmp_reg__
));
2211 else if (reg_base
== reg_dest
+ 2)
2212 return *l
=5, (AS2 (ld
,%A0
,%1) CR_TAB
2213 AS2 (ldd
,%B0
,%1+1) CR_TAB
2214 AS2 (ldd
,__tmp_reg__
,%1+2) CR_TAB
2215 AS2 (ldd
,%D0
,%1+3) CR_TAB
2216 AS2 (mov
,%C0
,__tmp_reg__
));
2218 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2219 AS2 (ldd
,%B0
,%1+1) CR_TAB
2220 AS2 (ldd
,%C0
,%1+2) CR_TAB
2221 AS2 (ldd
,%D0
,%1+3));
2224 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2226 int disp
= INTVAL (XEXP (base
, 1));
2228 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2230 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2231 fatal_insn ("incorrect insn:",insn
);
2233 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2234 return *l
= 6, (AS2 (adiw
,r28
,%o1
-60) CR_TAB
2235 AS2 (ldd
,%A0
,Y
+60) CR_TAB
2236 AS2 (ldd
,%B0
,Y
+61) CR_TAB
2237 AS2 (ldd
,%C0
,Y
+62) CR_TAB
2238 AS2 (ldd
,%D0
,Y
+63) CR_TAB
2239 AS2 (sbiw
,r28
,%o1
-60));
2241 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2242 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2243 AS2 (ld
,%A0
,Y
) CR_TAB
2244 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2245 AS2 (ldd
,%C0
,Y
+2) CR_TAB
2246 AS2 (ldd
,%D0
,Y
+3) CR_TAB
2247 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2248 AS2 (sbci
,r29
,hi8(%o1
)));
2251 reg_base
= true_regnum (XEXP (base
, 0));
2252 if (reg_base
== REG_X
)
2255 if (reg_dest
== REG_X
)
2258 /* "ld r26,-X" is undefined */
2259 return (AS2 (adiw
,r26
,%o1
+3) CR_TAB
2260 AS2 (ld
,r29
,X
) CR_TAB
2261 AS2 (ld
,r28
,-X
) CR_TAB
2262 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2263 AS2 (sbiw
,r26
,1) CR_TAB
2264 AS2 (ld
,r26
,X
) CR_TAB
2265 AS2 (mov
,r27
,__tmp_reg__
));
2268 if (reg_dest
== REG_X
- 2)
2269 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2270 AS2 (ld
,r24
,X
+) CR_TAB
2271 AS2 (ld
,r25
,X
+) CR_TAB
2272 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2273 AS2 (ld
,r27
,X
) CR_TAB
2274 AS2 (mov
,r26
,__tmp_reg__
));
2276 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2277 AS2 (ld
,%A0
,X
+) CR_TAB
2278 AS2 (ld
,%B0
,X
+) CR_TAB
2279 AS2 (ld
,%C0
,X
+) CR_TAB
2280 AS2 (ld
,%D0
,X
) CR_TAB
2281 AS2 (sbiw
,r26
,%o1
+3));
2283 if (reg_dest
== reg_base
)
2284 return *l
=5, (AS2 (ldd
,%D0
,%D1
) CR_TAB
2285 AS2 (ldd
,%C0
,%C1
) CR_TAB
2286 AS2 (ldd
,__tmp_reg__
,%B1
) CR_TAB
2287 AS2 (ldd
,%A0
,%A1
) CR_TAB
2288 AS2 (mov
,%B0
,__tmp_reg__
));
2289 else if (reg_dest
== reg_base
- 2)
2290 return *l
=5, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2291 AS2 (ldd
,%B0
,%B1
) CR_TAB
2292 AS2 (ldd
,__tmp_reg__
,%C1
) CR_TAB
2293 AS2 (ldd
,%D0
,%D1
) CR_TAB
2294 AS2 (mov
,%C0
,__tmp_reg__
));
2295 return *l
=4, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2296 AS2 (ldd
,%B0
,%B1
) CR_TAB
2297 AS2 (ldd
,%C0
,%C1
) CR_TAB
2300 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2301 return *l
=4, (AS2 (ld
,%D0
,%1) CR_TAB
2302 AS2 (ld
,%C0
,%1) CR_TAB
2303 AS2 (ld
,%B0
,%1) CR_TAB
2305 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2306 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2307 AS2 (ld
,%B0
,%1) CR_TAB
2308 AS2 (ld
,%C0
,%1) CR_TAB
2310 else if (CONSTANT_ADDRESS_P (base
))
2311 return *l
=8, (AS2 (lds
,%A0
,%A1
) CR_TAB
2312 AS2 (lds
,%B0
,%B1
) CR_TAB
2313 AS2 (lds
,%C0
,%C1
) CR_TAB
2316 fatal_insn ("unknown move insn:",insn
);
2321 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
2325 rtx base
= XEXP (dest
, 0);
2326 int reg_base
= true_regnum (base
);
2327 int reg_src
= true_regnum (src
);
2333 if (CONSTANT_ADDRESS_P (base
))
2334 return *l
=8,(AS2 (sts
,%A0
,%A1
) CR_TAB
2335 AS2 (sts
,%B0
,%B1
) CR_TAB
2336 AS2 (sts
,%C0
,%C1
) CR_TAB
2338 if (reg_base
> 0) /* (r) */
2340 if (reg_base
== REG_X
) /* (R26) */
2342 if (reg_src
== REG_X
)
2344 /* "st X+,r26" is undefined */
2345 if (reg_unused_after (insn
, base
))
2346 return *l
=6, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2347 AS2 (st
,X
,r26
) CR_TAB
2348 AS2 (adiw
,r26
,1) CR_TAB
2349 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2350 AS2 (st
,X
+,r28
) CR_TAB
2353 return *l
=7, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2354 AS2 (st
,X
,r26
) CR_TAB
2355 AS2 (adiw
,r26
,1) CR_TAB
2356 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2357 AS2 (st
,X
+,r28
) CR_TAB
2358 AS2 (st
,X
,r29
) CR_TAB
2361 else if (reg_base
== reg_src
+ 2)
2363 if (reg_unused_after (insn
, base
))
2364 return *l
=7, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2365 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2366 AS2 (st
,%0+,%A1
) CR_TAB
2367 AS2 (st
,%0+,%B1
) CR_TAB
2368 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2369 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2370 AS1 (clr
,__zero_reg__
));
2372 return *l
=8, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2373 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2374 AS2 (st
,%0+,%A1
) CR_TAB
2375 AS2 (st
,%0+,%B1
) CR_TAB
2376 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2377 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2378 AS1 (clr
,__zero_reg__
) CR_TAB
2381 return *l
=5, (AS2 (st
,%0+,%A1
) CR_TAB
2382 AS2 (st
,%0+,%B1
) CR_TAB
2383 AS2 (st
,%0+,%C1
) CR_TAB
2384 AS2 (st
,%0,%D1
) CR_TAB
2388 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2389 AS2 (std
,%0+1,%B1
) CR_TAB
2390 AS2 (std
,%0+2,%C1
) CR_TAB
2391 AS2 (std
,%0+3,%D1
));
2393 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2395 int disp
= INTVAL (XEXP (base
, 1));
2396 reg_base
= REGNO (XEXP (base
, 0));
2397 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2399 if (reg_base
!= REG_Y
)
2400 fatal_insn ("incorrect insn:",insn
);
2402 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2403 return *l
= 6, (AS2 (adiw
,r28
,%o0
-60) CR_TAB
2404 AS2 (std
,Y
+60,%A1
) CR_TAB
2405 AS2 (std
,Y
+61,%B1
) CR_TAB
2406 AS2 (std
,Y
+62,%C1
) CR_TAB
2407 AS2 (std
,Y
+63,%D1
) CR_TAB
2408 AS2 (sbiw
,r28
,%o0
-60));
2410 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2411 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2412 AS2 (st
,Y
,%A1
) CR_TAB
2413 AS2 (std
,Y
+1,%B1
) CR_TAB
2414 AS2 (std
,Y
+2,%C1
) CR_TAB
2415 AS2 (std
,Y
+3,%D1
) CR_TAB
2416 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2417 AS2 (sbci
,r29
,hi8(%o0
)));
2419 if (reg_base
== REG_X
)
2422 if (reg_src
== REG_X
)
2425 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2426 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2427 AS2 (adiw
,r26
,%o0
) CR_TAB
2428 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2429 AS2 (st
,X
+,__zero_reg__
) CR_TAB
2430 AS2 (st
,X
+,r28
) CR_TAB
2431 AS2 (st
,X
,r29
) CR_TAB
2432 AS1 (clr
,__zero_reg__
) CR_TAB
2433 AS2 (sbiw
,r26
,%o0
+3));
2435 else if (reg_src
== REG_X
- 2)
2438 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2439 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2440 AS2 (adiw
,r26
,%o0
) CR_TAB
2441 AS2 (st
,X
+,r24
) CR_TAB
2442 AS2 (st
,X
+,r25
) CR_TAB
2443 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2444 AS2 (st
,X
,__zero_reg__
) CR_TAB
2445 AS1 (clr
,__zero_reg__
) CR_TAB
2446 AS2 (sbiw
,r26
,%o0
+3));
2449 return (AS2 (adiw
,r26
,%o0
) CR_TAB
2450 AS2 (st
,X
+,%A1
) CR_TAB
2451 AS2 (st
,X
+,%B1
) CR_TAB
2452 AS2 (st
,X
+,%C1
) CR_TAB
2453 AS2 (st
,X
,%D1
) CR_TAB
2454 AS2 (sbiw
,r26
,%o0
+3));
2456 return *l
=4, (AS2 (std
,%A0
,%A1
) CR_TAB
2457 AS2 (std
,%B0
,%B1
) CR_TAB
2458 AS2 (std
,%C0
,%C1
) CR_TAB
2461 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2462 return *l
=4, (AS2 (st
,%0,%D1
) CR_TAB
2463 AS2 (st
,%0,%C1
) CR_TAB
2464 AS2 (st
,%0,%B1
) CR_TAB
2466 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2467 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2468 AS2 (st
,%0,%B1
) CR_TAB
2469 AS2 (st
,%0,%C1
) CR_TAB
2471 fatal_insn ("unknown move insn:",insn
);
2476 output_movsisf(rtx insn
, rtx operands
[], int *l
)
2479 rtx dest
= operands
[0];
2480 rtx src
= operands
[1];
2486 if (register_operand (dest
, VOIDmode
))
2488 if (register_operand (src
, VOIDmode
)) /* mov r,r */
2490 if (true_regnum (dest
) > true_regnum (src
))
2495 return (AS2 (movw
,%C0
,%C1
) CR_TAB
2496 AS2 (movw
,%A0
,%A1
));
2499 return (AS2 (mov
,%D0
,%D1
) CR_TAB
2500 AS2 (mov
,%C0
,%C1
) CR_TAB
2501 AS2 (mov
,%B0
,%B1
) CR_TAB
2509 return (AS2 (movw
,%A0
,%A1
) CR_TAB
2510 AS2 (movw
,%C0
,%C1
));
2513 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2514 AS2 (mov
,%B0
,%B1
) CR_TAB
2515 AS2 (mov
,%C0
,%C1
) CR_TAB
2519 else if (CONSTANT_P (src
))
2521 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2524 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2525 AS2 (ldi
,%B0
,hi8(%1)) CR_TAB
2526 AS2 (ldi
,%C0
,hlo8(%1)) CR_TAB
2527 AS2 (ldi
,%D0
,hhi8(%1)));
2530 if (GET_CODE (src
) == CONST_INT
)
2532 const char *const clr_op0
=
2533 AVR_HAVE_MOVW
? (AS1 (clr
,%A0
) CR_TAB
2534 AS1 (clr
,%B0
) CR_TAB
2536 : (AS1 (clr
,%A0
) CR_TAB
2537 AS1 (clr
,%B0
) CR_TAB
2538 AS1 (clr
,%C0
) CR_TAB
2541 if (src
== const0_rtx
) /* mov r,L */
2543 *l
= AVR_HAVE_MOVW
? 3 : 4;
2546 else if (src
== const1_rtx
)
2549 output_asm_insn (clr_op0
, operands
);
2550 *l
= AVR_HAVE_MOVW
? 4 : 5;
2551 return AS1 (inc
,%A0
);
2553 else if (src
== constm1_rtx
)
2555 /* Immediate constants -1 to any register */
2559 return (AS1 (clr
,%A0
) CR_TAB
2560 AS1 (dec
,%A0
) CR_TAB
2561 AS2 (mov
,%B0
,%A0
) CR_TAB
2562 AS2 (movw
,%C0
,%A0
));
2565 return (AS1 (clr
,%A0
) CR_TAB
2566 AS1 (dec
,%A0
) CR_TAB
2567 AS2 (mov
,%B0
,%A0
) CR_TAB
2568 AS2 (mov
,%C0
,%A0
) CR_TAB
2573 int bit_nr
= exact_log2 (INTVAL (src
));
2577 *l
= AVR_HAVE_MOVW
? 5 : 6;
2580 output_asm_insn (clr_op0
, operands
);
2581 output_asm_insn ("set", operands
);
2584 avr_output_bld (operands
, bit_nr
);
2591 /* Last resort, better than loading from memory. */
2593 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2594 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2595 AS2 (mov
,%A0
,r31
) CR_TAB
2596 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2597 AS2 (mov
,%B0
,r31
) CR_TAB
2598 AS2 (ldi
,r31
,hlo8(%1)) CR_TAB
2599 AS2 (mov
,%C0
,r31
) CR_TAB
2600 AS2 (ldi
,r31
,hhi8(%1)) CR_TAB
2601 AS2 (mov
,%D0
,r31
) CR_TAB
2602 AS2 (mov
,r31
,__tmp_reg__
));
2604 else if (GET_CODE (src
) == MEM
)
2605 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2607 else if (GET_CODE (dest
) == MEM
)
2611 if (src
== const0_rtx
)
2612 operands
[1] = zero_reg_rtx
;
2614 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
2617 output_asm_insn (templ
, operands
);
2622 fatal_insn ("invalid insn:", insn
);
2627 out_movqi_mr_r (rtx insn
, rtx op
[], int *l
)
2631 rtx x
= XEXP (dest
, 0);
2637 if (CONSTANT_ADDRESS_P (x
))
2639 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2642 return AS2 (out
,__SREG__
,%1);
2644 if (optimize
> 0 && io_address_operand (x
, QImode
))
2647 return AS2 (out
,%0-0x20,%1);
2650 return AS2 (sts
,%0,%1);
2652 /* memory access by reg+disp */
2653 else if (GET_CODE (x
) == PLUS
2654 && REG_P (XEXP (x
,0))
2655 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2657 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (dest
))) >= 63)
2659 int disp
= INTVAL (XEXP (x
,1));
2660 if (REGNO (XEXP (x
,0)) != REG_Y
)
2661 fatal_insn ("incorrect insn:",insn
);
2663 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2664 return *l
= 3, (AS2 (adiw
,r28
,%o0
-63) CR_TAB
2665 AS2 (std
,Y
+63,%1) CR_TAB
2666 AS2 (sbiw
,r28
,%o0
-63));
2668 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2669 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2670 AS2 (st
,Y
,%1) CR_TAB
2671 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2672 AS2 (sbci
,r29
,hi8(%o0
)));
2674 else if (REGNO (XEXP (x
,0)) == REG_X
)
2676 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
2678 if (reg_unused_after (insn
, XEXP (x
,0)))
2679 return *l
= 3, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2680 AS2 (adiw
,r26
,%o0
) CR_TAB
2681 AS2 (st
,X
,__tmp_reg__
));
2683 return *l
= 4, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2684 AS2 (adiw
,r26
,%o0
) CR_TAB
2685 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2686 AS2 (sbiw
,r26
,%o0
));
2690 if (reg_unused_after (insn
, XEXP (x
,0)))
2691 return *l
= 2, (AS2 (adiw
,r26
,%o0
) CR_TAB
2694 return *l
= 3, (AS2 (adiw
,r26
,%o0
) CR_TAB
2695 AS2 (st
,X
,%1) CR_TAB
2696 AS2 (sbiw
,r26
,%o0
));
2700 return AS2 (std
,%0,%1);
2703 return AS2 (st
,%0,%1);
2707 out_movhi_mr_r (rtx insn
, rtx op
[], int *l
)
2711 rtx base
= XEXP (dest
, 0);
2712 int reg_base
= true_regnum (base
);
2713 int reg_src
= true_regnum (src
);
2714 /* "volatile" forces writing high byte first, even if less efficient,
2715 for correct operation with 16-bit I/O registers. */
2716 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
2721 if (CONSTANT_ADDRESS_P (base
))
2723 if (optimize
> 0 && io_address_operand (base
, HImode
))
2726 return (AS2 (out
,%B0
-0x20,%B1
) CR_TAB
2727 AS2 (out
,%A0
-0x20,%A1
));
2729 return *l
= 4, (AS2 (sts
,%B0
,%B1
) CR_TAB
2734 if (reg_base
== REG_X
)
2736 if (reg_src
== REG_X
)
2738 /* "st X+,r26" and "st -X,r26" are undefined. */
2739 if (!mem_volatile_p
&& reg_unused_after (insn
, src
))
2740 return *l
=4, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2741 AS2 (st
,X
,r26
) CR_TAB
2742 AS2 (adiw
,r26
,1) CR_TAB
2743 AS2 (st
,X
,__tmp_reg__
));
2745 return *l
=5, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2746 AS2 (adiw
,r26
,1) CR_TAB
2747 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2748 AS2 (sbiw
,r26
,1) CR_TAB
2753 if (!mem_volatile_p
&& reg_unused_after (insn
, base
))
2754 return *l
=2, (AS2 (st
,X
+,%A1
) CR_TAB
2757 return *l
=3, (AS2 (adiw
,r26
,1) CR_TAB
2758 AS2 (st
,X
,%B1
) CR_TAB
2763 return *l
=2, (AS2 (std
,%0+1,%B1
) CR_TAB
2766 else if (GET_CODE (base
) == PLUS
)
2768 int disp
= INTVAL (XEXP (base
, 1));
2769 reg_base
= REGNO (XEXP (base
, 0));
2770 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2772 if (reg_base
!= REG_Y
)
2773 fatal_insn ("incorrect insn:",insn
);
2775 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2776 return *l
= 4, (AS2 (adiw
,r28
,%o0
-62) CR_TAB
2777 AS2 (std
,Y
+63,%B1
) CR_TAB
2778 AS2 (std
,Y
+62,%A1
) CR_TAB
2779 AS2 (sbiw
,r28
,%o0
-62));
2781 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2782 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2783 AS2 (std
,Y
+1,%B1
) CR_TAB
2784 AS2 (st
,Y
,%A1
) CR_TAB
2785 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2786 AS2 (sbci
,r29
,hi8(%o0
)));
2788 if (reg_base
== REG_X
)
2791 if (reg_src
== REG_X
)
2794 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2795 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2796 AS2 (adiw
,r26
,%o0
+1) CR_TAB
2797 AS2 (st
,X
,__zero_reg__
) CR_TAB
2798 AS2 (st
,-X
,__tmp_reg__
) CR_TAB
2799 AS1 (clr
,__zero_reg__
) CR_TAB
2800 AS2 (sbiw
,r26
,%o0
));
2803 return (AS2 (adiw
,r26
,%o0
+1) CR_TAB
2804 AS2 (st
,X
,%B1
) CR_TAB
2805 AS2 (st
,-X
,%A1
) CR_TAB
2806 AS2 (sbiw
,r26
,%o0
));
2808 return *l
=2, (AS2 (std
,%B0
,%B1
) CR_TAB
2811 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2812 return *l
=2, (AS2 (st
,%0,%B1
) CR_TAB
2814 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2818 if (REGNO (XEXP (base
, 0)) == REG_X
)
2821 return (AS2 (adiw
,r26
,1) CR_TAB
2822 AS2 (st
,X
,%B1
) CR_TAB
2823 AS2 (st
,-X
,%A1
) CR_TAB
2829 return (AS2 (std
,%p0
+1,%B1
) CR_TAB
2830 AS2 (st
,%p0
,%A1
) CR_TAB
2836 return (AS2 (st
,%0,%A1
) CR_TAB
2839 fatal_insn ("unknown move insn:",insn
);
2843 /* Return 1 if frame pointer for current function required. */
2846 frame_pointer_required_p (void)
2848 return (cfun
->calls_alloca
2849 || crtl
->args
.info
.nregs
== 0
2850 || get_frame_size () > 0);
2853 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2856 compare_condition (rtx insn
)
2858 rtx next
= next_real_insn (insn
);
2859 RTX_CODE cond
= UNKNOWN
;
2860 if (next
&& GET_CODE (next
) == JUMP_INSN
)
2862 rtx pat
= PATTERN (next
);
2863 rtx src
= SET_SRC (pat
);
2864 rtx t
= XEXP (src
, 0);
2865 cond
= GET_CODE (t
);
2870 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2873 compare_sign_p (rtx insn
)
2875 RTX_CODE cond
= compare_condition (insn
);
2876 return (cond
== GE
|| cond
== LT
);
2879 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2880 that needs to be swapped (GT, GTU, LE, LEU). */
2883 compare_diff_p (rtx insn
)
2885 RTX_CODE cond
= compare_condition (insn
);
2886 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
2889 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2892 compare_eq_p (rtx insn
)
2894 RTX_CODE cond
= compare_condition (insn
);
2895 return (cond
== EQ
|| cond
== NE
);
2899 /* Output test instruction for HImode. */
2902 out_tsthi (rtx insn
, int *l
)
2904 if (compare_sign_p (insn
))
2907 return AS1 (tst
,%B0
);
2909 if (reg_unused_after (insn
, SET_SRC (PATTERN (insn
)))
2910 && compare_eq_p (insn
))
2912 /* Faster than sbiw if we can clobber the operand. */
2914 return AS2 (or,%A0
,%B0
);
2916 if (test_hard_reg_class (ADDW_REGS
, SET_SRC (PATTERN (insn
))))
2919 return AS2 (sbiw
,%0,0);
2922 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2923 AS2 (cpc
,%B0
,__zero_reg__
));
2927 /* Output test instruction for SImode. */
2930 out_tstsi (rtx insn
, int *l
)
2932 if (compare_sign_p (insn
))
2935 return AS1 (tst
,%D0
);
2937 if (test_hard_reg_class (ADDW_REGS
, SET_SRC (PATTERN (insn
))))
2940 return (AS2 (sbiw
,%A0
,0) CR_TAB
2941 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2942 AS2 (cpc
,%D0
,__zero_reg__
));
2945 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2946 AS2 (cpc
,%B0
,__zero_reg__
) CR_TAB
2947 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2948 AS2 (cpc
,%D0
,__zero_reg__
));
2952 /* Generate asm equivalent for various shifts.
2953 Shift count is a CONST_INT, MEM or REG.
2954 This only handles cases that are not already
2955 carefully hand-optimized in ?sh??i3_out. */
2958 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
2959 int *len
, int t_len
)
2963 int second_label
= 1;
2964 int saved_in_tmp
= 0;
2965 int use_zero_reg
= 0;
2967 op
[0] = operands
[0];
2968 op
[1] = operands
[1];
2969 op
[2] = operands
[2];
2970 op
[3] = operands
[3];
2976 if (GET_CODE (operands
[2]) == CONST_INT
)
2978 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
2979 int count
= INTVAL (operands
[2]);
2980 int max_len
= 10; /* If larger than this, always use a loop. */
2989 if (count
< 8 && !scratch
)
2993 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
2995 if (t_len
* count
<= max_len
)
2997 /* Output shifts inline with no loop - faster. */
2999 *len
= t_len
* count
;
3003 output_asm_insn (templ
, op
);
3012 strcat (str
, AS2 (ldi
,%3,%2));
3014 else if (use_zero_reg
)
3016 /* Hack to save one word: use __zero_reg__ as loop counter.
3017 Set one bit, then shift in a loop until it is 0 again. */
3019 op
[3] = zero_reg_rtx
;
3023 strcat (str
, ("set" CR_TAB
3024 AS2 (bld
,%3,%2-1)));
3028 /* No scratch register available, use one from LD_REGS (saved in
3029 __tmp_reg__) that doesn't overlap with registers to shift. */
3031 op
[3] = gen_rtx_REG (QImode
,
3032 ((true_regnum (operands
[0]) - 1) & 15) + 16);
3033 op
[4] = tmp_reg_rtx
;
3037 *len
= 3; /* Includes "mov %3,%4" after the loop. */
3039 strcat (str
, (AS2 (mov
,%4,%3) CR_TAB
3045 else if (GET_CODE (operands
[2]) == MEM
)
3049 op
[3] = op_mov
[0] = tmp_reg_rtx
;
3053 out_movqi_r_mr (insn
, op_mov
, len
);
3055 output_asm_insn (out_movqi_r_mr (insn
, op_mov
, NULL
), op_mov
);
3057 else if (register_operand (operands
[2], QImode
))
3059 if (reg_unused_after (insn
, operands
[2]))
3063 op
[3] = tmp_reg_rtx
;
3065 strcat (str
, (AS2 (mov
,%3,%2) CR_TAB
));
3069 fatal_insn ("bad shift insn:", insn
);
3076 strcat (str
, AS1 (rjmp
,2f
));
3080 *len
+= t_len
+ 2; /* template + dec + brXX */
3083 strcat (str
, "\n1:\t");
3084 strcat (str
, templ
);
3085 strcat (str
, second_label
? "\n2:\t" : "\n\t");
3086 strcat (str
, use_zero_reg
? AS1 (lsr
,%3) : AS1 (dec
,%3));
3087 strcat (str
, CR_TAB
);
3088 strcat (str
, second_label
? AS1 (brpl
,1b
) : AS1 (brne
,1b
));
3090 strcat (str
, (CR_TAB
AS2 (mov
,%3,%4)));
3091 output_asm_insn (str
, op
);
3096 /* 8bit shift left ((char)x << i) */
3099 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
3101 if (GET_CODE (operands
[2]) == CONST_INT
)
3108 switch (INTVAL (operands
[2]))
3111 if (INTVAL (operands
[2]) < 8)
3115 return AS1 (clr
,%0);
3119 return AS1 (lsl
,%0);
3123 return (AS1 (lsl
,%0) CR_TAB
3128 return (AS1 (lsl
,%0) CR_TAB
3133 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3136 return (AS1 (swap
,%0) CR_TAB
3137 AS2 (andi
,%0,0xf0));
3140 return (AS1 (lsl
,%0) CR_TAB
3146 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3149 return (AS1 (swap
,%0) CR_TAB
3151 AS2 (andi
,%0,0xe0));
3154 return (AS1 (lsl
,%0) CR_TAB
3161 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3164 return (AS1 (swap
,%0) CR_TAB
3167 AS2 (andi
,%0,0xc0));
3170 return (AS1 (lsl
,%0) CR_TAB
3179 return (AS1 (ror
,%0) CR_TAB
3184 else if (CONSTANT_P (operands
[2]))
3185 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3187 out_shift_with_cnt (AS1 (lsl
,%0),
3188 insn
, operands
, len
, 1);
3193 /* 16bit shift left ((short)x << i) */
3196 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
3198 if (GET_CODE (operands
[2]) == CONST_INT
)
3200 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3201 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3208 switch (INTVAL (operands
[2]))
3211 if (INTVAL (operands
[2]) < 16)
3215 return (AS1 (clr
,%B0
) CR_TAB
3219 if (optimize_size
&& scratch
)
3224 return (AS1 (swap
,%A0
) CR_TAB
3225 AS1 (swap
,%B0
) CR_TAB
3226 AS2 (andi
,%B0
,0xf0) CR_TAB
3227 AS2 (eor
,%B0
,%A0
) CR_TAB
3228 AS2 (andi
,%A0
,0xf0) CR_TAB
3234 return (AS1 (swap
,%A0
) CR_TAB
3235 AS1 (swap
,%B0
) CR_TAB
3236 AS2 (ldi
,%3,0xf0) CR_TAB
3237 AS2 (and,%B0
,%3) CR_TAB
3238 AS2 (eor
,%B0
,%A0
) CR_TAB
3239 AS2 (and,%A0
,%3) CR_TAB
3242 break; /* optimize_size ? 6 : 8 */
3246 break; /* scratch ? 5 : 6 */
3250 return (AS1 (lsl
,%A0
) CR_TAB
3251 AS1 (rol
,%B0
) CR_TAB
3252 AS1 (swap
,%A0
) CR_TAB
3253 AS1 (swap
,%B0
) CR_TAB
3254 AS2 (andi
,%B0
,0xf0) CR_TAB
3255 AS2 (eor
,%B0
,%A0
) CR_TAB
3256 AS2 (andi
,%A0
,0xf0) CR_TAB
3262 return (AS1 (lsl
,%A0
) CR_TAB
3263 AS1 (rol
,%B0
) CR_TAB
3264 AS1 (swap
,%A0
) CR_TAB
3265 AS1 (swap
,%B0
) CR_TAB
3266 AS2 (ldi
,%3,0xf0) CR_TAB
3267 AS2 (and,%B0
,%3) CR_TAB
3268 AS2 (eor
,%B0
,%A0
) CR_TAB
3269 AS2 (and,%A0
,%3) CR_TAB
3276 break; /* scratch ? 5 : 6 */
3278 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3279 AS1 (lsr
,%B0
) CR_TAB
3280 AS1 (ror
,%A0
) CR_TAB
3281 AS1 (ror
,__tmp_reg__
) CR_TAB
3282 AS1 (lsr
,%B0
) CR_TAB
3283 AS1 (ror
,%A0
) CR_TAB
3284 AS1 (ror
,__tmp_reg__
) CR_TAB
3285 AS2 (mov
,%B0
,%A0
) CR_TAB
3286 AS2 (mov
,%A0
,__tmp_reg__
));
3290 return (AS1 (lsr
,%B0
) CR_TAB
3291 AS2 (mov
,%B0
,%A0
) CR_TAB
3292 AS1 (clr
,%A0
) CR_TAB
3293 AS1 (ror
,%B0
) CR_TAB
3297 return *len
= 2, (AS2 (mov
,%B0
,%A1
) CR_TAB
3302 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3303 AS1 (clr
,%A0
) CR_TAB
3308 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3309 AS1 (clr
,%A0
) CR_TAB
3310 AS1 (lsl
,%B0
) CR_TAB
3315 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3316 AS1 (clr
,%A0
) CR_TAB
3317 AS1 (lsl
,%B0
) CR_TAB
3318 AS1 (lsl
,%B0
) CR_TAB
3325 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3326 AS1 (clr
,%A0
) CR_TAB
3327 AS1 (swap
,%B0
) CR_TAB
3328 AS2 (andi
,%B0
,0xf0));
3333 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3334 AS1 (clr
,%A0
) CR_TAB
3335 AS1 (swap
,%B0
) CR_TAB
3336 AS2 (ldi
,%3,0xf0) CR_TAB
3340 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3341 AS1 (clr
,%A0
) CR_TAB
3342 AS1 (lsl
,%B0
) CR_TAB
3343 AS1 (lsl
,%B0
) CR_TAB
3344 AS1 (lsl
,%B0
) CR_TAB
3351 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3352 AS1 (clr
,%A0
) CR_TAB
3353 AS1 (swap
,%B0
) CR_TAB
3354 AS1 (lsl
,%B0
) CR_TAB
3355 AS2 (andi
,%B0
,0xe0));
3357 if (AVR_HAVE_MUL
&& scratch
)
3360 return (AS2 (ldi
,%3,0x20) CR_TAB
3361 AS2 (mul
,%A0
,%3) CR_TAB
3362 AS2 (mov
,%B0
,r0
) CR_TAB
3363 AS1 (clr
,%A0
) CR_TAB
3364 AS1 (clr
,__zero_reg__
));
3366 if (optimize_size
&& scratch
)
3371 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3372 AS1 (clr
,%A0
) CR_TAB
3373 AS1 (swap
,%B0
) CR_TAB
3374 AS1 (lsl
,%B0
) CR_TAB
3375 AS2 (ldi
,%3,0xe0) CR_TAB
3381 return ("set" CR_TAB
3382 AS2 (bld
,r1
,5) CR_TAB
3383 AS2 (mul
,%A0
,r1
) CR_TAB
3384 AS2 (mov
,%B0
,r0
) CR_TAB
3385 AS1 (clr
,%A0
) CR_TAB
3386 AS1 (clr
,__zero_reg__
));
3389 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3390 AS1 (clr
,%A0
) CR_TAB
3391 AS1 (lsl
,%B0
) CR_TAB
3392 AS1 (lsl
,%B0
) CR_TAB
3393 AS1 (lsl
,%B0
) CR_TAB
3394 AS1 (lsl
,%B0
) CR_TAB
3398 if (AVR_HAVE_MUL
&& ldi_ok
)
3401 return (AS2 (ldi
,%B0
,0x40) CR_TAB
3402 AS2 (mul
,%A0
,%B0
) CR_TAB
3403 AS2 (mov
,%B0
,r0
) CR_TAB
3404 AS1 (clr
,%A0
) CR_TAB
3405 AS1 (clr
,__zero_reg__
));
3407 if (AVR_HAVE_MUL
&& scratch
)
3410 return (AS2 (ldi
,%3,0x40) CR_TAB
3411 AS2 (mul
,%A0
,%3) CR_TAB
3412 AS2 (mov
,%B0
,r0
) CR_TAB
3413 AS1 (clr
,%A0
) CR_TAB
3414 AS1 (clr
,__zero_reg__
));
3416 if (optimize_size
&& ldi_ok
)
3419 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3420 AS2 (ldi
,%A0
,6) "\n1:\t"
3421 AS1 (lsl
,%B0
) CR_TAB
3422 AS1 (dec
,%A0
) CR_TAB
3425 if (optimize_size
&& scratch
)
3428 return (AS1 (clr
,%B0
) CR_TAB
3429 AS1 (lsr
,%A0
) CR_TAB
3430 AS1 (ror
,%B0
) CR_TAB
3431 AS1 (lsr
,%A0
) CR_TAB
3432 AS1 (ror
,%B0
) CR_TAB
3437 return (AS1 (clr
,%B0
) CR_TAB
3438 AS1 (lsr
,%A0
) CR_TAB
3439 AS1 (ror
,%B0
) CR_TAB
3444 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3446 insn
, operands
, len
, 2);
3451 /* 32bit shift left ((long)x << i) */
3454 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
3456 if (GET_CODE (operands
[2]) == CONST_INT
)
3464 switch (INTVAL (operands
[2]))
3467 if (INTVAL (operands
[2]) < 32)
3471 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
3472 AS1 (clr
,%C0
) CR_TAB
3473 AS2 (movw
,%A0
,%C0
));
3475 return (AS1 (clr
,%D0
) CR_TAB
3476 AS1 (clr
,%C0
) CR_TAB
3477 AS1 (clr
,%B0
) CR_TAB
3482 int reg0
= true_regnum (operands
[0]);
3483 int reg1
= true_regnum (operands
[1]);
3486 return (AS2 (mov
,%D0
,%C1
) CR_TAB
3487 AS2 (mov
,%C0
,%B1
) CR_TAB
3488 AS2 (mov
,%B0
,%A1
) CR_TAB
3491 return (AS1 (clr
,%A0
) CR_TAB
3492 AS2 (mov
,%B0
,%A1
) CR_TAB
3493 AS2 (mov
,%C0
,%B1
) CR_TAB
3499 int reg0
= true_regnum (operands
[0]);
3500 int reg1
= true_regnum (operands
[1]);
3501 if (reg0
+ 2 == reg1
)
3502 return *len
= 2, (AS1 (clr
,%B0
) CR_TAB
3505 return *len
= 3, (AS2 (movw
,%C0
,%A1
) CR_TAB
3506 AS1 (clr
,%B0
) CR_TAB
3509 return *len
= 4, (AS2 (mov
,%C0
,%A1
) CR_TAB
3510 AS2 (mov
,%D0
,%B1
) CR_TAB
3511 AS1 (clr
,%B0
) CR_TAB
3517 return (AS2 (mov
,%D0
,%A1
) CR_TAB
3518 AS1 (clr
,%C0
) CR_TAB
3519 AS1 (clr
,%B0
) CR_TAB
3524 return (AS1 (clr
,%D0
) CR_TAB
3525 AS1 (lsr
,%A0
) CR_TAB
3526 AS1 (ror
,%D0
) CR_TAB
3527 AS1 (clr
,%C0
) CR_TAB
3528 AS1 (clr
,%B0
) CR_TAB
3533 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3534 AS1 (rol
,%B0
) CR_TAB
3535 AS1 (rol
,%C0
) CR_TAB
3537 insn
, operands
, len
, 4);
3541 /* 8bit arithmetic shift right ((signed char)x >> i) */
3544 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
3546 if (GET_CODE (operands
[2]) == CONST_INT
)
3553 switch (INTVAL (operands
[2]))
3557 return AS1 (asr
,%0);
3561 return (AS1 (asr
,%0) CR_TAB
3566 return (AS1 (asr
,%0) CR_TAB
3572 return (AS1 (asr
,%0) CR_TAB
3579 return (AS1 (asr
,%0) CR_TAB
3587 return (AS2 (bst
,%0,6) CR_TAB
3589 AS2 (sbc
,%0,%0) CR_TAB
3593 if (INTVAL (operands
[2]) < 8)
3600 return (AS1 (lsl
,%0) CR_TAB
3604 else if (CONSTANT_P (operands
[2]))
3605 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3607 out_shift_with_cnt (AS1 (asr
,%0),
3608 insn
, operands
, len
, 1);
3613 /* 16bit arithmetic shift right ((signed short)x >> i) */
3616 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
3618 if (GET_CODE (operands
[2]) == CONST_INT
)
3620 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3621 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3628 switch (INTVAL (operands
[2]))
3632 /* XXX try to optimize this too? */
3637 break; /* scratch ? 5 : 6 */
3639 return (AS2 (mov
,__tmp_reg__
,%A0
) CR_TAB
3640 AS2 (mov
,%A0
,%B0
) CR_TAB
3641 AS1 (lsl
,__tmp_reg__
) CR_TAB
3642 AS1 (rol
,%A0
) CR_TAB
3643 AS2 (sbc
,%B0
,%B0
) CR_TAB
3644 AS1 (lsl
,__tmp_reg__
) CR_TAB
3645 AS1 (rol
,%A0
) CR_TAB
3650 return (AS1 (lsl
,%A0
) CR_TAB
3651 AS2 (mov
,%A0
,%B0
) CR_TAB
3652 AS1 (rol
,%A0
) CR_TAB
3657 int reg0
= true_regnum (operands
[0]);
3658 int reg1
= true_regnum (operands
[1]);
3661 return *len
= 3, (AS2 (mov
,%A0
,%B0
) CR_TAB
3662 AS1 (lsl
,%B0
) CR_TAB
3665 return *len
= 4, (AS2 (mov
,%A0
,%B1
) CR_TAB
3666 AS1 (clr
,%B0
) CR_TAB
3667 AS2 (sbrc
,%A0
,7) CR_TAB
3673 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3674 AS1 (lsl
,%B0
) CR_TAB
3675 AS2 (sbc
,%B0
,%B0
) CR_TAB
3680 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3681 AS1 (lsl
,%B0
) CR_TAB
3682 AS2 (sbc
,%B0
,%B0
) CR_TAB
3683 AS1 (asr
,%A0
) CR_TAB
3687 if (AVR_HAVE_MUL
&& ldi_ok
)
3690 return (AS2 (ldi
,%A0
,0x20) CR_TAB
3691 AS2 (muls
,%B0
,%A0
) CR_TAB
3692 AS2 (mov
,%A0
,r1
) CR_TAB
3693 AS2 (sbc
,%B0
,%B0
) CR_TAB
3694 AS1 (clr
,__zero_reg__
));
3696 if (optimize_size
&& scratch
)
3699 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3700 AS1 (lsl
,%B0
) CR_TAB
3701 AS2 (sbc
,%B0
,%B0
) CR_TAB
3702 AS1 (asr
,%A0
) CR_TAB
3703 AS1 (asr
,%A0
) CR_TAB
3707 if (AVR_HAVE_MUL
&& ldi_ok
)
3710 return (AS2 (ldi
,%A0
,0x10) CR_TAB
3711 AS2 (muls
,%B0
,%A0
) CR_TAB
3712 AS2 (mov
,%A0
,r1
) CR_TAB
3713 AS2 (sbc
,%B0
,%B0
) CR_TAB
3714 AS1 (clr
,__zero_reg__
));
3716 if (optimize_size
&& scratch
)
3719 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3720 AS1 (lsl
,%B0
) CR_TAB
3721 AS2 (sbc
,%B0
,%B0
) CR_TAB
3722 AS1 (asr
,%A0
) CR_TAB
3723 AS1 (asr
,%A0
) CR_TAB
3724 AS1 (asr
,%A0
) CR_TAB
3728 if (AVR_HAVE_MUL
&& ldi_ok
)
3731 return (AS2 (ldi
,%A0
,0x08) CR_TAB
3732 AS2 (muls
,%B0
,%A0
) CR_TAB
3733 AS2 (mov
,%A0
,r1
) CR_TAB
3734 AS2 (sbc
,%B0
,%B0
) CR_TAB
3735 AS1 (clr
,__zero_reg__
));
3738 break; /* scratch ? 5 : 7 */
3740 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3741 AS1 (lsl
,%B0
) CR_TAB
3742 AS2 (sbc
,%B0
,%B0
) CR_TAB
3743 AS1 (asr
,%A0
) CR_TAB
3744 AS1 (asr
,%A0
) CR_TAB
3745 AS1 (asr
,%A0
) CR_TAB
3746 AS1 (asr
,%A0
) CR_TAB
3751 return (AS1 (lsl
,%B0
) CR_TAB
3752 AS2 (sbc
,%A0
,%A0
) CR_TAB
3753 AS1 (lsl
,%B0
) CR_TAB
3754 AS2 (mov
,%B0
,%A0
) CR_TAB
3758 if (INTVAL (operands
[2]) < 16)
3764 return *len
= 3, (AS1 (lsl
,%B0
) CR_TAB
3765 AS2 (sbc
,%A0
,%A0
) CR_TAB
3770 out_shift_with_cnt ((AS1 (asr
,%B0
) CR_TAB
3772 insn
, operands
, len
, 2);
3777 /* 32bit arithmetic shift right ((signed long)x >> i) */
3780 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
3782 if (GET_CODE (operands
[2]) == CONST_INT
)
3790 switch (INTVAL (operands
[2]))
3794 int reg0
= true_regnum (operands
[0]);
3795 int reg1
= true_regnum (operands
[1]);
3798 return (AS2 (mov
,%A0
,%B1
) CR_TAB
3799 AS2 (mov
,%B0
,%C1
) CR_TAB
3800 AS2 (mov
,%C0
,%D1
) CR_TAB
3801 AS1 (clr
,%D0
) CR_TAB
3802 AS2 (sbrc
,%C0
,7) CR_TAB
3805 return (AS1 (clr
,%D0
) CR_TAB
3806 AS2 (sbrc
,%D1
,7) CR_TAB
3807 AS1 (dec
,%D0
) CR_TAB
3808 AS2 (mov
,%C0
,%D1
) CR_TAB
3809 AS2 (mov
,%B0
,%C1
) CR_TAB
3815 int reg0
= true_regnum (operands
[0]);
3816 int reg1
= true_regnum (operands
[1]);
3818 if (reg0
== reg1
+ 2)
3819 return *len
= 4, (AS1 (clr
,%D0
) CR_TAB
3820 AS2 (sbrc
,%B0
,7) CR_TAB
3821 AS1 (com
,%D0
) CR_TAB
3824 return *len
= 5, (AS2 (movw
,%A0
,%C1
) CR_TAB
3825 AS1 (clr
,%D0
) CR_TAB
3826 AS2 (sbrc
,%B0
,7) CR_TAB
3827 AS1 (com
,%D0
) CR_TAB
3830 return *len
= 6, (AS2 (mov
,%B0
,%D1
) CR_TAB
3831 AS2 (mov
,%A0
,%C1
) CR_TAB
3832 AS1 (clr
,%D0
) CR_TAB
3833 AS2 (sbrc
,%B0
,7) CR_TAB
3834 AS1 (com
,%D0
) CR_TAB
3839 return *len
= 6, (AS2 (mov
,%A0
,%D1
) CR_TAB
3840 AS1 (clr
,%D0
) CR_TAB
3841 AS2 (sbrc
,%A0
,7) CR_TAB
3842 AS1 (com
,%D0
) CR_TAB
3843 AS2 (mov
,%B0
,%D0
) CR_TAB
3847 if (INTVAL (operands
[2]) < 32)
3854 return *len
= 4, (AS1 (lsl
,%D0
) CR_TAB
3855 AS2 (sbc
,%A0
,%A0
) CR_TAB
3856 AS2 (mov
,%B0
,%A0
) CR_TAB
3857 AS2 (movw
,%C0
,%A0
));
3859 return *len
= 5, (AS1 (lsl
,%D0
) CR_TAB
3860 AS2 (sbc
,%A0
,%A0
) CR_TAB
3861 AS2 (mov
,%B0
,%A0
) CR_TAB
3862 AS2 (mov
,%C0
,%A0
) CR_TAB
3867 out_shift_with_cnt ((AS1 (asr
,%D0
) CR_TAB
3868 AS1 (ror
,%C0
) CR_TAB
3869 AS1 (ror
,%B0
) CR_TAB
3871 insn
, operands
, len
, 4);
3875 /* 8bit logic shift right ((unsigned char)x >> i) */
3878 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
3880 if (GET_CODE (operands
[2]) == CONST_INT
)
3887 switch (INTVAL (operands
[2]))
3890 if (INTVAL (operands
[2]) < 8)
3894 return AS1 (clr
,%0);
3898 return AS1 (lsr
,%0);
3902 return (AS1 (lsr
,%0) CR_TAB
3906 return (AS1 (lsr
,%0) CR_TAB
3911 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3914 return (AS1 (swap
,%0) CR_TAB
3915 AS2 (andi
,%0,0x0f));
3918 return (AS1 (lsr
,%0) CR_TAB
3924 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3927 return (AS1 (swap
,%0) CR_TAB
3932 return (AS1 (lsr
,%0) CR_TAB
3939 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3942 return (AS1 (swap
,%0) CR_TAB
3948 return (AS1 (lsr
,%0) CR_TAB
3957 return (AS1 (rol
,%0) CR_TAB
3962 else if (CONSTANT_P (operands
[2]))
3963 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3965 out_shift_with_cnt (AS1 (lsr
,%0),
3966 insn
, operands
, len
, 1);
3970 /* 16bit logic shift right ((unsigned short)x >> i) */
3973 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
3975 if (GET_CODE (operands
[2]) == CONST_INT
)
3977 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3978 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3985 switch (INTVAL (operands
[2]))
3988 if (INTVAL (operands
[2]) < 16)
3992 return (AS1 (clr
,%B0
) CR_TAB
3996 if (optimize_size
&& scratch
)
4001 return (AS1 (swap
,%B0
) CR_TAB
4002 AS1 (swap
,%A0
) CR_TAB
4003 AS2 (andi
,%A0
,0x0f) CR_TAB
4004 AS2 (eor
,%A0
,%B0
) CR_TAB
4005 AS2 (andi
,%B0
,0x0f) CR_TAB
4011 return (AS1 (swap
,%B0
) CR_TAB
4012 AS1 (swap
,%A0
) CR_TAB
4013 AS2 (ldi
,%3,0x0f) CR_TAB
4014 AS2 (and,%A0
,%3) CR_TAB
4015 AS2 (eor
,%A0
,%B0
) CR_TAB
4016 AS2 (and,%B0
,%3) CR_TAB
4019 break; /* optimize_size ? 6 : 8 */
4023 break; /* scratch ? 5 : 6 */
4027 return (AS1 (lsr
,%B0
) CR_TAB
4028 AS1 (ror
,%A0
) CR_TAB
4029 AS1 (swap
,%B0
) CR_TAB
4030 AS1 (swap
,%A0
) CR_TAB
4031 AS2 (andi
,%A0
,0x0f) CR_TAB
4032 AS2 (eor
,%A0
,%B0
) CR_TAB
4033 AS2 (andi
,%B0
,0x0f) CR_TAB
4039 return (AS1 (lsr
,%B0
) CR_TAB
4040 AS1 (ror
,%A0
) CR_TAB
4041 AS1 (swap
,%B0
) CR_TAB
4042 AS1 (swap
,%A0
) CR_TAB
4043 AS2 (ldi
,%3,0x0f) CR_TAB
4044 AS2 (and,%A0
,%3) CR_TAB
4045 AS2 (eor
,%A0
,%B0
) CR_TAB
4046 AS2 (and,%B0
,%3) CR_TAB
4053 break; /* scratch ? 5 : 6 */
4055 return (AS1 (clr
,__tmp_reg__
) CR_TAB
4056 AS1 (lsl
,%A0
) CR_TAB
4057 AS1 (rol
,%B0
) CR_TAB
4058 AS1 (rol
,__tmp_reg__
) CR_TAB
4059 AS1 (lsl
,%A0
) CR_TAB
4060 AS1 (rol
,%B0
) CR_TAB
4061 AS1 (rol
,__tmp_reg__
) CR_TAB
4062 AS2 (mov
,%A0
,%B0
) CR_TAB
4063 AS2 (mov
,%B0
,__tmp_reg__
));
4067 return (AS1 (lsl
,%A0
) CR_TAB
4068 AS2 (mov
,%A0
,%B0
) CR_TAB
4069 AS1 (rol
,%A0
) CR_TAB
4070 AS2 (sbc
,%B0
,%B0
) CR_TAB
4074 return *len
= 2, (AS2 (mov
,%A0
,%B1
) CR_TAB
4079 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4080 AS1 (clr
,%B0
) CR_TAB
4085 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4086 AS1 (clr
,%B0
) CR_TAB
4087 AS1 (lsr
,%A0
) CR_TAB
4092 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4093 AS1 (clr
,%B0
) CR_TAB
4094 AS1 (lsr
,%A0
) CR_TAB
4095 AS1 (lsr
,%A0
) CR_TAB
4102 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4103 AS1 (clr
,%B0
) CR_TAB
4104 AS1 (swap
,%A0
) CR_TAB
4105 AS2 (andi
,%A0
,0x0f));
4110 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4111 AS1 (clr
,%B0
) CR_TAB
4112 AS1 (swap
,%A0
) CR_TAB
4113 AS2 (ldi
,%3,0x0f) CR_TAB
4117 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4118 AS1 (clr
,%B0
) CR_TAB
4119 AS1 (lsr
,%A0
) CR_TAB
4120 AS1 (lsr
,%A0
) CR_TAB
4121 AS1 (lsr
,%A0
) CR_TAB
4128 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4129 AS1 (clr
,%B0
) CR_TAB
4130 AS1 (swap
,%A0
) CR_TAB
4131 AS1 (lsr
,%A0
) CR_TAB
4132 AS2 (andi
,%A0
,0x07));
4134 if (AVR_HAVE_MUL
&& scratch
)
4137 return (AS2 (ldi
,%3,0x08) CR_TAB
4138 AS2 (mul
,%B0
,%3) CR_TAB
4139 AS2 (mov
,%A0
,r1
) CR_TAB
4140 AS1 (clr
,%B0
) CR_TAB
4141 AS1 (clr
,__zero_reg__
));
4143 if (optimize_size
&& scratch
)
4148 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4149 AS1 (clr
,%B0
) CR_TAB
4150 AS1 (swap
,%A0
) CR_TAB
4151 AS1 (lsr
,%A0
) CR_TAB
4152 AS2 (ldi
,%3,0x07) CR_TAB
4158 return ("set" CR_TAB
4159 AS2 (bld
,r1
,3) CR_TAB
4160 AS2 (mul
,%B0
,r1
) CR_TAB
4161 AS2 (mov
,%A0
,r1
) CR_TAB
4162 AS1 (clr
,%B0
) CR_TAB
4163 AS1 (clr
,__zero_reg__
));
4166 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4167 AS1 (clr
,%B0
) CR_TAB
4168 AS1 (lsr
,%A0
) CR_TAB
4169 AS1 (lsr
,%A0
) CR_TAB
4170 AS1 (lsr
,%A0
) CR_TAB
4171 AS1 (lsr
,%A0
) CR_TAB
4175 if (AVR_HAVE_MUL
&& ldi_ok
)
4178 return (AS2 (ldi
,%A0
,0x04) CR_TAB
4179 AS2 (mul
,%B0
,%A0
) CR_TAB
4180 AS2 (mov
,%A0
,r1
) CR_TAB
4181 AS1 (clr
,%B0
) CR_TAB
4182 AS1 (clr
,__zero_reg__
));
4184 if (AVR_HAVE_MUL
&& scratch
)
4187 return (AS2 (ldi
,%3,0x04) CR_TAB
4188 AS2 (mul
,%B0
,%3) CR_TAB
4189 AS2 (mov
,%A0
,r1
) CR_TAB
4190 AS1 (clr
,%B0
) CR_TAB
4191 AS1 (clr
,__zero_reg__
));
4193 if (optimize_size
&& ldi_ok
)
4196 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4197 AS2 (ldi
,%B0
,6) "\n1:\t"
4198 AS1 (lsr
,%A0
) CR_TAB
4199 AS1 (dec
,%B0
) CR_TAB
4202 if (optimize_size
&& scratch
)
4205 return (AS1 (clr
,%A0
) CR_TAB
4206 AS1 (lsl
,%B0
) CR_TAB
4207 AS1 (rol
,%A0
) CR_TAB
4208 AS1 (lsl
,%B0
) CR_TAB
4209 AS1 (rol
,%A0
) CR_TAB
4214 return (AS1 (clr
,%A0
) CR_TAB
4215 AS1 (lsl
,%B0
) CR_TAB
4216 AS1 (rol
,%A0
) CR_TAB
4221 out_shift_with_cnt ((AS1 (lsr
,%B0
) CR_TAB
4223 insn
, operands
, len
, 2);
4227 /* 32bit logic shift right ((unsigned int)x >> i) */
4230 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
4232 if (GET_CODE (operands
[2]) == CONST_INT
)
4240 switch (INTVAL (operands
[2]))
4243 if (INTVAL (operands
[2]) < 32)
4247 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
4248 AS1 (clr
,%C0
) CR_TAB
4249 AS2 (movw
,%A0
,%C0
));
4251 return (AS1 (clr
,%D0
) CR_TAB
4252 AS1 (clr
,%C0
) CR_TAB
4253 AS1 (clr
,%B0
) CR_TAB
4258 int reg0
= true_regnum (operands
[0]);
4259 int reg1
= true_regnum (operands
[1]);
4262 return (AS2 (mov
,%A0
,%B1
) CR_TAB
4263 AS2 (mov
,%B0
,%C1
) CR_TAB
4264 AS2 (mov
,%C0
,%D1
) CR_TAB
4267 return (AS1 (clr
,%D0
) CR_TAB
4268 AS2 (mov
,%C0
,%D1
) CR_TAB
4269 AS2 (mov
,%B0
,%C1
) CR_TAB
4275 int reg0
= true_regnum (operands
[0]);
4276 int reg1
= true_regnum (operands
[1]);
4278 if (reg0
== reg1
+ 2)
4279 return *len
= 2, (AS1 (clr
,%C0
) CR_TAB
4282 return *len
= 3, (AS2 (movw
,%A0
,%C1
) CR_TAB
4283 AS1 (clr
,%C0
) CR_TAB
4286 return *len
= 4, (AS2 (mov
,%B0
,%D1
) CR_TAB
4287 AS2 (mov
,%A0
,%C1
) CR_TAB
4288 AS1 (clr
,%C0
) CR_TAB
4293 return *len
= 4, (AS2 (mov
,%A0
,%D1
) CR_TAB
4294 AS1 (clr
,%B0
) CR_TAB
4295 AS1 (clr
,%C0
) CR_TAB
4300 return (AS1 (clr
,%A0
) CR_TAB
4301 AS2 (sbrc
,%D0
,7) CR_TAB
4302 AS1 (inc
,%A0
) CR_TAB
4303 AS1 (clr
,%B0
) CR_TAB
4304 AS1 (clr
,%C0
) CR_TAB
4309 out_shift_with_cnt ((AS1 (lsr
,%D0
) CR_TAB
4310 AS1 (ror
,%C0
) CR_TAB
4311 AS1 (ror
,%B0
) CR_TAB
4313 insn
, operands
, len
, 4);
4317 /* Modifies the length assigned to instruction INSN
4318 LEN is the initially computed length of the insn. */
4321 adjust_insn_length (rtx insn
, int len
)
4323 rtx patt
= PATTERN (insn
);
4326 if (GET_CODE (patt
) == SET
)
4329 op
[1] = SET_SRC (patt
);
4330 op
[0] = SET_DEST (patt
);
4331 if (general_operand (op
[1], VOIDmode
)
4332 && general_operand (op
[0], VOIDmode
))
4334 switch (GET_MODE (op
[0]))
4337 output_movqi (insn
, op
, &len
);
4340 output_movhi (insn
, op
, &len
);
4344 output_movsisf (insn
, op
, &len
);
4350 else if (op
[0] == cc0_rtx
&& REG_P (op
[1]))
4352 switch (GET_MODE (op
[1]))
4354 case HImode
: out_tsthi (insn
,&len
); break;
4355 case SImode
: out_tstsi (insn
,&len
); break;
4359 else if (GET_CODE (op
[1]) == AND
)
4361 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4363 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4364 if (GET_MODE (op
[1]) == SImode
)
4365 len
= (((mask
& 0xff) != 0xff)
4366 + ((mask
& 0xff00) != 0xff00)
4367 + ((mask
& 0xff0000L
) != 0xff0000L
)
4368 + ((mask
& 0xff000000L
) != 0xff000000L
));
4369 else if (GET_MODE (op
[1]) == HImode
)
4370 len
= (((mask
& 0xff) != 0xff)
4371 + ((mask
& 0xff00) != 0xff00));
4374 else if (GET_CODE (op
[1]) == IOR
)
4376 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4378 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4379 if (GET_MODE (op
[1]) == SImode
)
4380 len
= (((mask
& 0xff) != 0)
4381 + ((mask
& 0xff00) != 0)
4382 + ((mask
& 0xff0000L
) != 0)
4383 + ((mask
& 0xff000000L
) != 0));
4384 else if (GET_MODE (op
[1]) == HImode
)
4385 len
= (((mask
& 0xff) != 0)
4386 + ((mask
& 0xff00) != 0));
4390 set
= single_set (insn
);
4395 op
[1] = SET_SRC (set
);
4396 op
[0] = SET_DEST (set
);
4398 if (GET_CODE (patt
) == PARALLEL
4399 && general_operand (op
[1], VOIDmode
)
4400 && general_operand (op
[0], VOIDmode
))
4402 if (XVECLEN (patt
, 0) == 2)
4403 op
[2] = XVECEXP (patt
, 0, 1);
4405 switch (GET_MODE (op
[0]))
4411 output_reload_inhi (insn
, op
, &len
);
4415 output_reload_insisf (insn
, op
, &len
);
4421 else if (GET_CODE (op
[1]) == ASHIFT
4422 || GET_CODE (op
[1]) == ASHIFTRT
4423 || GET_CODE (op
[1]) == LSHIFTRT
)
4427 ops
[1] = XEXP (op
[1],0);
4428 ops
[2] = XEXP (op
[1],1);
4429 switch (GET_CODE (op
[1]))
4432 switch (GET_MODE (op
[0]))
4434 case QImode
: ashlqi3_out (insn
,ops
,&len
); break;
4435 case HImode
: ashlhi3_out (insn
,ops
,&len
); break;
4436 case SImode
: ashlsi3_out (insn
,ops
,&len
); break;
4441 switch (GET_MODE (op
[0]))
4443 case QImode
: ashrqi3_out (insn
,ops
,&len
); break;
4444 case HImode
: ashrhi3_out (insn
,ops
,&len
); break;
4445 case SImode
: ashrsi3_out (insn
,ops
,&len
); break;
4450 switch (GET_MODE (op
[0]))
4452 case QImode
: lshrqi3_out (insn
,ops
,&len
); break;
4453 case HImode
: lshrhi3_out (insn
,ops
,&len
); break;
4454 case SImode
: lshrsi3_out (insn
,ops
,&len
); break;
4466 /* Return nonzero if register REG dead after INSN. */
4469 reg_unused_after (rtx insn
, rtx reg
)
4471 return (dead_or_set_p (insn
, reg
)
4472 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
4475 /* Return nonzero if REG is not used after INSN.
4476 We assume REG is a reload reg, and therefore does
4477 not live past labels. It may live past calls or jumps though. */
4480 _reg_unused_after (rtx insn
, rtx reg
)
4485 /* If the reg is set by this instruction, then it is safe for our
4486 case. Disregard the case where this is a store to memory, since
4487 we are checking a register used in the store address. */
4488 set
= single_set (insn
);
4489 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
4490 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4493 while ((insn
= NEXT_INSN (insn
)))
4496 code
= GET_CODE (insn
);
4499 /* If this is a label that existed before reload, then the register
4500 if dead here. However, if this is a label added by reorg, then
4501 the register may still be live here. We can't tell the difference,
4502 so we just ignore labels completely. */
4503 if (code
== CODE_LABEL
)
4511 if (code
== JUMP_INSN
)
4514 /* If this is a sequence, we must handle them all at once.
4515 We could have for instance a call that sets the target register,
4516 and an insn in a delay slot that uses the register. In this case,
4517 we must return 0. */
4518 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4523 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
4525 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
4526 rtx set
= single_set (this_insn
);
4528 if (GET_CODE (this_insn
) == CALL_INSN
)
4530 else if (GET_CODE (this_insn
) == JUMP_INSN
)
4532 if (INSN_ANNULLED_BRANCH_P (this_insn
))
4537 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4539 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4541 if (GET_CODE (SET_DEST (set
)) != MEM
)
4547 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
4552 else if (code
== JUMP_INSN
)
4556 if (code
== CALL_INSN
)
4559 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4560 if (GET_CODE (XEXP (tem
, 0)) == USE
4561 && REG_P (XEXP (XEXP (tem
, 0), 0))
4562 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
4564 if (call_used_regs
[REGNO (reg
)])
4568 set
= single_set (insn
);
4570 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4572 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4573 return GET_CODE (SET_DEST (set
)) != MEM
;
4574 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
4580 /* Target hook for assembling integer objects. The AVR version needs
4581 special handling for references to certain labels. */
4584 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4586 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
4587 && ((GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (x
))
4588 || GET_CODE (x
) == LABEL_REF
))
4590 fputs ("\t.word\tgs(", asm_out_file
);
4591 output_addr_const (asm_out_file
, x
);
4592 fputs (")\n", asm_out_file
);
4595 return default_assemble_integer (x
, size
, aligned_p
);
4598 /* The routine used to output NUL terminated strings. We use a special
4599 version of this for most svr4 targets because doing so makes the
4600 generated assembly code more compact (and thus faster to assemble)
4601 as well as more readable, especially for targets like the i386
4602 (where the only alternative is to output character sequences as
4603 comma separated lists of numbers). */
4606 gas_output_limited_string(FILE *file
, const char *str
)
4608 const unsigned char *_limited_str
= (const unsigned char *) str
;
4610 fprintf (file
, "%s\"", STRING_ASM_OP
);
4611 for (; (ch
= *_limited_str
); _limited_str
++)
4614 switch (escape
= ESCAPES
[ch
])
4620 fprintf (file
, "\\%03o", ch
);
4624 putc (escape
, file
);
4628 fprintf (file
, "\"\n");
4631 /* The routine used to output sequences of byte values. We use a special
4632 version of this for most svr4 targets because doing so makes the
4633 generated assembly code more compact (and thus faster to assemble)
4634 as well as more readable. Note that if we find subparts of the
4635 character sequence which end with NUL (and which are shorter than
4636 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4639 gas_output_ascii(FILE *file
, const char *str
, size_t length
)
4641 const unsigned char *_ascii_bytes
= (const unsigned char *) str
;
4642 const unsigned char *limit
= _ascii_bytes
+ length
;
4643 unsigned bytes_in_chunk
= 0;
4644 for (; _ascii_bytes
< limit
; _ascii_bytes
++)
4646 const unsigned char *p
;
4647 if (bytes_in_chunk
>= 60)
4649 fprintf (file
, "\"\n");
4652 for (p
= _ascii_bytes
; p
< limit
&& *p
!= '\0'; p
++)
4654 if (p
< limit
&& (p
- _ascii_bytes
) <= (signed)STRING_LIMIT
)
4656 if (bytes_in_chunk
> 0)
4658 fprintf (file
, "\"\n");
4661 gas_output_limited_string (file
, (const char*)_ascii_bytes
);
4668 if (bytes_in_chunk
== 0)
4669 fprintf (file
, "\t.ascii\t\"");
4670 switch (escape
= ESCAPES
[ch
= *_ascii_bytes
])
4677 fprintf (file
, "\\%03o", ch
);
4678 bytes_in_chunk
+= 4;
4682 putc (escape
, file
);
4683 bytes_in_chunk
+= 2;
4688 if (bytes_in_chunk
> 0)
4689 fprintf (file
, "\"\n");
4692 /* Return value is nonzero if pseudos that have been
4693 assigned to registers of class CLASS would likely be spilled
4694 because registers of CLASS are needed for spill registers. */
4697 class_likely_spilled_p (int c
)
4699 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
4702 /* Valid attributes:
4703 progmem - put data to program memory;
4704 signal - make a function to be hardware interrupt. After function
4705 prologue interrupts are disabled;
4706 interrupt - make a function to be hardware interrupt. After function
4707 prologue interrupts are enabled;
4708 naked - don't generate function prologue/epilogue and `ret' command.
4710 Only `progmem' attribute valid for type. */
4712 const struct attribute_spec avr_attribute_table
[] =
4714 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4715 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
},
4716 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
4717 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
4718 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4719 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4720 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4721 { NULL
, 0, 0, false, false, false, NULL
}
4724 /* Handle a "progmem" attribute; arguments as in
4725 struct attribute_spec.handler. */
4727 avr_handle_progmem_attribute (tree
*node
, tree name
,
4728 tree args ATTRIBUTE_UNUSED
,
4729 int flags ATTRIBUTE_UNUSED
,
4734 if (TREE_CODE (*node
) == TYPE_DECL
)
4736 /* This is really a decl attribute, not a type attribute,
4737 but try to handle it for GCC 3.0 backwards compatibility. */
4739 tree type
= TREE_TYPE (*node
);
4740 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
4741 tree newtype
= build_type_attribute_variant (type
, attr
);
4743 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
4744 TREE_TYPE (*node
) = newtype
;
4745 *no_add_attrs
= true;
4747 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
4749 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
4751 warning (0, "only initialized variables can be placed into "
4752 "program memory area");
4753 *no_add_attrs
= true;
4758 warning (OPT_Wattributes
, "%qs attribute ignored",
4759 IDENTIFIER_POINTER (name
));
4760 *no_add_attrs
= true;
4767 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4768 struct attribute_spec.handler. */
4771 avr_handle_fndecl_attribute (tree
*node
, tree name
,
4772 tree args ATTRIBUTE_UNUSED
,
4773 int flags ATTRIBUTE_UNUSED
,
4776 if (TREE_CODE (*node
) != FUNCTION_DECL
)
4778 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
4779 IDENTIFIER_POINTER (name
));
4780 *no_add_attrs
= true;
4784 const char *func_name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node
));
4785 const char *attr
= IDENTIFIER_POINTER (name
);
4787 /* If the function has the 'signal' or 'interrupt' attribute, test to
4788 make sure that the name of the function is "__vector_NN" so as to
4789 catch when the user misspells the interrupt vector name. */
4791 if (strncmp (attr
, "interrupt", strlen ("interrupt")) == 0)
4793 if (strncmp (func_name
, "__vector", strlen ("__vector")) != 0)
4795 warning (0, "%qs appears to be a misspelled interrupt handler",
4799 else if (strncmp (attr
, "signal", strlen ("signal")) == 0)
4801 if (strncmp (func_name
, "__vector", strlen ("__vector")) != 0)
4803 warning (0, "%qs appears to be a misspelled signal handler",
4813 avr_handle_fntype_attribute (tree
*node
, tree name
,
4814 tree args ATTRIBUTE_UNUSED
,
4815 int flags ATTRIBUTE_UNUSED
,
4818 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4820 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
4821 IDENTIFIER_POINTER (name
));
4822 *no_add_attrs
= true;
4828 /* Look for attribute `progmem' in DECL
4829 if found return 1, otherwise 0. */
4832 avr_progmem_p (tree decl
, tree attributes
)
4836 if (TREE_CODE (decl
) != VAR_DECL
)
4840 != lookup_attribute ("progmem", attributes
))
4846 while (TREE_CODE (a
) == ARRAY_TYPE
);
4848 if (a
== error_mark_node
)
4851 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
4857 /* Add the section attribute if the variable is in progmem. */
4860 avr_insert_attributes (tree node
, tree
*attributes
)
4862 if (TREE_CODE (node
) == VAR_DECL
4863 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
4864 && avr_progmem_p (node
, *attributes
))
4866 static const char dsec
[] = ".progmem.data";
4867 *attributes
= tree_cons (get_identifier ("section"),
4868 build_tree_list (NULL
, build_string (strlen (dsec
), dsec
)),
4871 /* ??? This seems sketchy. Why can't the user declare the
4872 thing const in the first place? */
4873 TREE_READONLY (node
) = 1;
4877 /* A get_unnamed_section callback for switching to progmem_section. */
4880 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED
)
4882 fprintf (asm_out_file
,
4883 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4884 AVR_HAVE_JMP_CALL
? "a" : "ax");
4885 /* Should already be aligned, this is just to be safe if it isn't. */
4886 fprintf (asm_out_file
, "\t.p2align 1\n");
4889 /* Implement TARGET_ASM_INIT_SECTIONS. */
4892 avr_asm_init_sections (void)
4894 progmem_section
= get_unnamed_section (AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
,
4895 avr_output_progmem_section_asm_op
,
4897 readonly_data_section
= data_section
;
4901 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
4903 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4905 if (strncmp (name
, ".noinit", 7) == 0)
4907 if (decl
&& TREE_CODE (decl
) == VAR_DECL
4908 && DECL_INITIAL (decl
) == NULL_TREE
)
4909 flags
|= SECTION_BSS
; /* @nobits */
4911 warning (0, "only uninitialized variables can be placed in the "
4918 /* Outputs some appropriate text to go at the start of an assembler
4922 avr_file_start (void)
4924 if (avr_current_arch
->asm_only
)
4925 error ("MCU %qs supported for assembler only", avr_mcu_name
);
4927 default_file_start ();
4929 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4930 fputs ("__SREG__ = 0x3f\n"
4932 "__SP_L__ = 0x3d\n", asm_out_file
);
4934 fputs ("__tmp_reg__ = 0\n"
4935 "__zero_reg__ = 1\n", asm_out_file
);
4937 /* FIXME: output these only if there is anything in the .data / .bss
4938 sections - some code size could be saved by not linking in the
4939 initialization code from libgcc if one or both sections are empty. */
4940 fputs ("\t.global __do_copy_data\n", asm_out_file
);
4941 fputs ("\t.global __do_clear_bss\n", asm_out_file
);
4944 /* Outputs to the stdio stream FILE some
4945 appropriate text to go at the end of an assembler file. */
4952 /* Choose the order in which to allocate hard registers for
4953 pseudo-registers local to a basic block.
4955 Store the desired register order in the array `reg_alloc_order'.
4956 Element 0 should be the register to allocate first; element 1, the
4957 next register; and so on. */
4960 order_regs_for_local_alloc (void)
4963 static const int order_0
[] = {
4971 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4975 static const int order_1
[] = {
4983 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4987 static const int order_2
[] = {
4996 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5001 const int *order
= (TARGET_ORDER_1
? order_1
:
5002 TARGET_ORDER_2
? order_2
:
5004 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
5005 reg_alloc_order
[i
] = order
[i
];
5009 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5010 cost of an RTX operand given its context. X is the rtx of the
5011 operand, MODE is its mode, and OUTER is the rtx_code of this
5012 operand's parent operator. */
5015 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
5018 enum rtx_code code
= GET_CODE (x
);
5029 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5036 avr_rtx_costs (x
, code
, outer
, &total
, speed
);
5040 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5041 is to be calculated. Return true if the complete cost has been
5042 computed, and false if subexpressions should be scanned. In either
5043 case, *TOTAL contains the cost result. */
5046 avr_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
, int *total
,
5049 enum machine_mode mode
= GET_MODE (x
);
5056 /* Immediate constants are as cheap as registers. */
5064 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5072 *total
= COSTS_N_INSNS (1);
5076 *total
= COSTS_N_INSNS (3);
5080 *total
= COSTS_N_INSNS (7);
5086 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5094 *total
= COSTS_N_INSNS (1);
5100 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5104 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5105 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5109 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
5110 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5111 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5115 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
5116 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5117 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5124 *total
= COSTS_N_INSNS (1);
5125 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5126 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5130 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5132 *total
= COSTS_N_INSNS (2);
5133 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5135 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5136 *total
= COSTS_N_INSNS (1);
5138 *total
= COSTS_N_INSNS (2);
5142 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5144 *total
= COSTS_N_INSNS (4);
5145 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5147 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5148 *total
= COSTS_N_INSNS (1);
5150 *total
= COSTS_N_INSNS (4);
5156 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5162 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5163 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5164 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5165 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5169 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5170 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5171 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5179 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
5181 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5188 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
5190 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5198 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5199 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5207 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5210 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5211 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5218 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
5219 *total
= COSTS_N_INSNS (1);
5224 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
5225 *total
= COSTS_N_INSNS (3);
5230 if (CONST_INT_P (XEXP (x
, 1)))
5231 switch (INTVAL (XEXP (x
, 1)))
5235 *total
= COSTS_N_INSNS (5);
5238 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
5246 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5253 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5255 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5256 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5260 val
= INTVAL (XEXP (x
, 1));
5262 *total
= COSTS_N_INSNS (3);
5263 else if (val
>= 0 && val
<= 7)
5264 *total
= COSTS_N_INSNS (val
);
5266 *total
= COSTS_N_INSNS (1);
5271 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5273 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5274 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5277 switch (INTVAL (XEXP (x
, 1)))
5284 *total
= COSTS_N_INSNS (2);
5287 *total
= COSTS_N_INSNS (3);
5293 *total
= COSTS_N_INSNS (4);
5298 *total
= COSTS_N_INSNS (5);
5301 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5304 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5307 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
5310 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5311 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5316 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5318 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5319 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5322 switch (INTVAL (XEXP (x
, 1)))
5328 *total
= COSTS_N_INSNS (3);
5333 *total
= COSTS_N_INSNS (4);
5336 *total
= COSTS_N_INSNS (6);
5339 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5342 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5343 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5350 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5357 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5359 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5360 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5364 val
= INTVAL (XEXP (x
, 1));
5366 *total
= COSTS_N_INSNS (4);
5368 *total
= COSTS_N_INSNS (2);
5369 else if (val
>= 0 && val
<= 7)
5370 *total
= COSTS_N_INSNS (val
);
5372 *total
= COSTS_N_INSNS (1);
5377 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5379 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5380 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5383 switch (INTVAL (XEXP (x
, 1)))
5389 *total
= COSTS_N_INSNS (2);
5392 *total
= COSTS_N_INSNS (3);
5398 *total
= COSTS_N_INSNS (4);
5402 *total
= COSTS_N_INSNS (5);
5405 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5408 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5412 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5415 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5416 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5421 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5423 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5424 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5427 switch (INTVAL (XEXP (x
, 1)))
5433 *total
= COSTS_N_INSNS (4);
5438 *total
= COSTS_N_INSNS (6);
5441 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5444 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
5447 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5448 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5455 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5462 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5464 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5465 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5469 val
= INTVAL (XEXP (x
, 1));
5471 *total
= COSTS_N_INSNS (3);
5472 else if (val
>= 0 && val
<= 7)
5473 *total
= COSTS_N_INSNS (val
);
5475 *total
= COSTS_N_INSNS (1);
5480 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5482 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5483 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5486 switch (INTVAL (XEXP (x
, 1)))
5493 *total
= COSTS_N_INSNS (2);
5496 *total
= COSTS_N_INSNS (3);
5501 *total
= COSTS_N_INSNS (4);
5505 *total
= COSTS_N_INSNS (5);
5511 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5514 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5518 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5521 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5522 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5527 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5529 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5530 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5533 switch (INTVAL (XEXP (x
, 1)))
5539 *total
= COSTS_N_INSNS (4);
5542 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5547 *total
= COSTS_N_INSNS (4);
5550 *total
= COSTS_N_INSNS (6);
5553 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5554 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5561 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5565 switch (GET_MODE (XEXP (x
, 0)))
5568 *total
= COSTS_N_INSNS (1);
5569 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5570 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5574 *total
= COSTS_N_INSNS (2);
5575 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5576 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5577 else if (INTVAL (XEXP (x
, 1)) != 0)
5578 *total
+= COSTS_N_INSNS (1);
5582 *total
= COSTS_N_INSNS (4);
5583 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5584 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5585 else if (INTVAL (XEXP (x
, 1)) != 0)
5586 *total
+= COSTS_N_INSNS (3);
5592 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5601 /* Calculate the cost of a memory address. */
5604 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
5606 if (GET_CODE (x
) == PLUS
5607 && GET_CODE (XEXP (x
,1)) == CONST_INT
5608 && (REG_P (XEXP (x
,0)) || GET_CODE (XEXP (x
,0)) == SUBREG
)
5609 && INTVAL (XEXP (x
,1)) >= 61)
5611 if (CONSTANT_ADDRESS_P (x
))
5613 if (optimize
> 0 && io_address_operand (x
, QImode
))
5620 /* Test for extra memory constraint 'Q'.
5621 It's a memory address based on Y or Z pointer with valid displacement. */
5624 extra_constraint_Q (rtx x
)
5626 if (GET_CODE (XEXP (x
,0)) == PLUS
5627 && REG_P (XEXP (XEXP (x
,0), 0))
5628 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
5629 && (INTVAL (XEXP (XEXP (x
,0), 1))
5630 <= MAX_LD_OFFSET (GET_MODE (x
))))
5632 rtx xx
= XEXP (XEXP (x
,0), 0);
5633 int regno
= REGNO (xx
);
5634 if (TARGET_ALL_DEBUG
)
5636 fprintf (stderr
, ("extra_constraint:\n"
5637 "reload_completed: %d\n"
5638 "reload_in_progress: %d\n"),
5639 reload_completed
, reload_in_progress
);
5642 if (regno
>= FIRST_PSEUDO_REGISTER
)
5643 return 1; /* allocate pseudos */
5644 else if (regno
== REG_Z
|| regno
== REG_Y
)
5645 return 1; /* strictly check */
5646 else if (xx
== frame_pointer_rtx
5647 || xx
== arg_pointer_rtx
)
5648 return 1; /* XXX frame & arg pointer checks */
5653 /* Convert condition code CONDITION to the valid AVR condition code. */
5656 avr_normalize_condition (RTX_CODE condition
)
5673 /* This function optimizes conditional jumps. */
5680 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5682 if (! (GET_CODE (insn
) == INSN
5683 || GET_CODE (insn
) == CALL_INSN
5684 || GET_CODE (insn
) == JUMP_INSN
)
5685 || !single_set (insn
))
5688 pattern
= PATTERN (insn
);
5690 if (GET_CODE (pattern
) == PARALLEL
)
5691 pattern
= XVECEXP (pattern
, 0, 0);
5692 if (GET_CODE (pattern
) == SET
5693 && SET_DEST (pattern
) == cc0_rtx
5694 && compare_diff_p (insn
))
5696 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
5698 /* Now we work under compare insn. */
5700 pattern
= SET_SRC (pattern
);
5701 if (true_regnum (XEXP (pattern
,0)) >= 0
5702 && true_regnum (XEXP (pattern
,1)) >= 0 )
5704 rtx x
= XEXP (pattern
,0);
5705 rtx next
= next_real_insn (insn
);
5706 rtx pat
= PATTERN (next
);
5707 rtx src
= SET_SRC (pat
);
5708 rtx t
= XEXP (src
,0);
5709 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5710 XEXP (pattern
,0) = XEXP (pattern
,1);
5711 XEXP (pattern
,1) = x
;
5712 INSN_CODE (next
) = -1;
5714 else if (true_regnum (XEXP (pattern
,0)) >= 0
5715 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
5717 rtx x
= XEXP (pattern
,1);
5718 rtx next
= next_real_insn (insn
);
5719 rtx pat
= PATTERN (next
);
5720 rtx src
= SET_SRC (pat
);
5721 rtx t
= XEXP (src
,0);
5722 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
5724 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
5726 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
5727 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
5728 INSN_CODE (next
) = -1;
5729 INSN_CODE (insn
) = -1;
5733 else if (true_regnum (SET_SRC (pattern
)) >= 0)
5735 /* This is a tst insn */
5736 rtx next
= next_real_insn (insn
);
5737 rtx pat
= PATTERN (next
);
5738 rtx src
= SET_SRC (pat
);
5739 rtx t
= XEXP (src
,0);
5741 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5742 SET_SRC (pattern
) = gen_rtx_COMPARE (GET_MODE (SET_SRC (pattern
)), const0_rtx
,
5744 INSN_CODE (next
) = -1;
5745 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.  */

int
avr_ret_register (void)
{
  return 24;
}
5759 /* Create an RTX representing the place where a
5760 library function returns a value of mode MODE. */
5763 avr_libcall_value (enum machine_mode mode
)
5765 int offs
= GET_MODE_SIZE (mode
);
5768 return gen_rtx_REG (mode
, RET_REGISTER
+ 2 - offs
);
5771 /* Create an RTX representing the place where a
5772 function returns a value of data type VALTYPE. */
5775 avr_function_value (const_tree type
,
5776 const_tree func ATTRIBUTE_UNUSED
,
5777 bool outgoing ATTRIBUTE_UNUSED
)
5781 if (TYPE_MODE (type
) != BLKmode
)
5782 return avr_libcall_value (TYPE_MODE (type
));
5784 offs
= int_size_in_bytes (type
);
5787 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
5788 offs
= GET_MODE_SIZE (SImode
);
5789 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
5790 offs
= GET_MODE_SIZE (DImode
);
5792 return gen_rtx_REG (BLKmode
, RET_REGISTER
+ 2 - offs
);
5795 /* Places additional restrictions on the register class to
5796 use when it is necessary to copy value X into a register
5800 preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class rclass
)
5806 test_hard_reg_class (enum reg_class rclass
, rtx x
)
5808 int regno
= true_regnum (x
);
5812 if (TEST_HARD_REG_CLASS (rclass
, regno
))
5820 jump_over_one_insn_p (rtx insn
, rtx dest
)
5822 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
5825 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
5826 int dest_addr
= INSN_ADDRESSES (uid
);
5827 return dest_addr
- jump_addr
== get_attr_length (insn
) + 1;
5830 /* Returns 1 if a value of mode MODE can be stored starting with hard
5831 register number REGNO. On the enhanced core, anything larger than
5832 1 byte must start in even numbered register for "movw" to work
5833 (this way we don't have to check for odd registers everywhere). */
5836 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
5838 /* Disallow QImode in stack pointer regs. */
5839 if ((regno
== REG_SP
|| regno
== (REG_SP
+ 1)) && mode
== QImode
)
5842 /* The only thing that can go into registers r28:r29 is a Pmode. */
5843 if (regno
== REG_Y
&& mode
== Pmode
)
5846 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5847 if (regno
<= (REG_Y
+ 1) && (regno
+ GET_MODE_SIZE (mode
)) >= (REG_Y
+ 1))
5853 /* Modes larger than QImode occupy consecutive registers. */
5854 if (regno
+ GET_MODE_SIZE (mode
) > FIRST_PSEUDO_REGISTER
)
5857 /* All modes larger than QImode should start in an even register. */
5858 return !(regno
& 1);
5862 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5868 if (GET_CODE (operands
[1]) == CONST_INT
)
5870 int val
= INTVAL (operands
[1]);
5871 if ((val
& 0xff) == 0)
5874 return (AS2 (mov
,%A0
,__zero_reg__
) CR_TAB
5875 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5878 else if ((val
& 0xff00) == 0)
5881 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5882 AS2 (mov
,%A0
,%2) CR_TAB
5883 AS2 (mov
,%B0
,__zero_reg__
));
5885 else if ((val
& 0xff) == ((val
& 0xff00) >> 8))
5888 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5889 AS2 (mov
,%A0
,%2) CR_TAB
5894 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5895 AS2 (mov
,%A0
,%2) CR_TAB
5896 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5902 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5904 rtx src
= operands
[1];
5905 int cnst
= (GET_CODE (src
) == CONST_INT
);
5910 *len
= 4 + ((INTVAL (src
) & 0xff) != 0)
5911 + ((INTVAL (src
) & 0xff00) != 0)
5912 + ((INTVAL (src
) & 0xff0000) != 0)
5913 + ((INTVAL (src
) & 0xff000000) != 0);
5920 if (cnst
&& ((INTVAL (src
) & 0xff) == 0))
5921 output_asm_insn (AS2 (mov
, %A0
, __zero_reg__
), operands
);
5924 output_asm_insn (AS2 (ldi
, %2, lo8(%1)), operands
);
5925 output_asm_insn (AS2 (mov
, %A0
, %2), operands
);
5927 if (cnst
&& ((INTVAL (src
) & 0xff00) == 0))
5928 output_asm_insn (AS2 (mov
, %B0
, __zero_reg__
), operands
);
5931 output_asm_insn (AS2 (ldi
, %2, hi8(%1)), operands
);
5932 output_asm_insn (AS2 (mov
, %B0
, %2), operands
);
5934 if (cnst
&& ((INTVAL (src
) & 0xff0000) == 0))
5935 output_asm_insn (AS2 (mov
, %C0
, __zero_reg__
), operands
);
5938 output_asm_insn (AS2 (ldi
, %2, hlo8(%1)), operands
);
5939 output_asm_insn (AS2 (mov
, %C0
, %2), operands
);
5941 if (cnst
&& ((INTVAL (src
) & 0xff000000) == 0))
5942 output_asm_insn (AS2 (mov
, %D0
, __zero_reg__
), operands
);
5945 output_asm_insn (AS2 (ldi
, %2, hhi8(%1)), operands
);
5946 output_asm_insn (AS2 (mov
, %D0
, %2), operands
);
5952 avr_output_bld (rtx operands
[], int bit_nr
)
5954 static char s
[] = "bld %A0,0";
5956 s
[5] = 'A' + (bit_nr
>> 3);
5957 s
[8] = '0' + (bit_nr
& 7);
5958 output_asm_insn (s
, operands
);
5962 avr_output_addr_vec_elt (FILE *stream
, int value
)
5964 switch_to_section (progmem_section
);
5965 if (AVR_HAVE_JMP_CALL
)
5966 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
5968 fprintf (stream
, "\trjmp .L%d\n", value
);
5971 /* Returns true if SCRATCH are safe to be allocated as a scratch
5972 registers (for a define_peephole2) in the current function. */
5975 avr_hard_regno_scratch_ok (unsigned int regno
)
5977 /* Interrupt functions can only use registers that have already been saved
5978 by the prologue, even if they would normally be call-clobbered. */
5980 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
5981 && !df_regs_ever_live_p (regno
))
5987 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5990 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
5991 unsigned int new_reg
)
5993 /* Interrupt functions can only use registers that have already been
5994 saved by the prologue, even if they would normally be
5997 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
5998 && !df_regs_ever_live_p (new_reg
))
6004 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
6005 or memory location in the I/O space (QImode only).
6007 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6008 Operand 1: register operand to test, or CONST_INT memory address.
6009 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
6010 Operand 3: label to jump to if the test is true. */
6013 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
6015 enum rtx_code comp
= GET_CODE (operands
[0]);
6016 int long_jump
= (get_attr_length (insn
) >= 4);
6017 int reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
6021 else if (comp
== LT
)
6025 comp
= reverse_condition (comp
);
6027 if (GET_CODE (operands
[1]) == CONST_INT
)
6029 if (INTVAL (operands
[1]) < 0x40)
6032 output_asm_insn (AS2 (sbis
,%1-0x20,%2), operands
);
6034 output_asm_insn (AS2 (sbic
,%1-0x20,%2), operands
);
6038 output_asm_insn (AS2 (in
,__tmp_reg__
,%1-0x20), operands
);
6040 output_asm_insn (AS2 (sbrs
,__tmp_reg__
,%2), operands
);
6042 output_asm_insn (AS2 (sbrc
,__tmp_reg__
,%2), operands
);
6045 else /* GET_CODE (operands[1]) == REG */
6047 if (GET_MODE (operands
[1]) == QImode
)
6050 output_asm_insn (AS2 (sbrs
,%1,%2), operands
);
6052 output_asm_insn (AS2 (sbrc
,%1,%2), operands
);
6054 else /* HImode or SImode */
6056 static char buf
[] = "sbrc %A1,0";
6057 int bit_nr
= exact_log2 (INTVAL (operands
[2])
6058 & GET_MODE_MASK (GET_MODE (operands
[1])));
6060 buf
[3] = (comp
== EQ
) ? 's' : 'c';
6061 buf
[6] = 'A' + (bit_nr
>> 3);
6062 buf
[9] = '0' + (bit_nr
& 7);
6063 output_asm_insn (buf
, operands
);
6068 return (AS1 (rjmp
,.+4) CR_TAB
6071 return AS1 (rjmp
,%3);
6075 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6078 avr_asm_out_ctor (rtx symbol
, int priority
)
6080 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
6081 default_ctor_section_asm_out_constructor (symbol
, priority
);
6084 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6087 avr_asm_out_dtor (rtx symbol
, int priority
)
6089 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
6090 default_dtor_section_asm_out_destructor (symbol
, priority
);
6093 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6096 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
6098 if (TYPE_MODE (type
) == BLKmode
)
6100 HOST_WIDE_INT size
= int_size_in_bytes (type
);
6101 return (size
== -1 || size
> 8);