gcc/config/avr/avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "df.h"
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
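/* For example, MAX_LD_OFFSET (QImode) == 63, MAX_LD_OFFSET (HImode) == 62
   and MAX_LD_OFFSET (SImode) == 60; the mode size is subtracted so that,
   presumably, the last byte of the operand still fits in the 0..63
   displacement range of the LDD/STD instructions.  */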
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_OS_main_function_p (tree);
56 static int avr_regs_to_save (HARD_REG_SET *);
57 static int get_sequence_length (rtx insns);
58 static int sequent_regs_live (void);
59 static const char *ptrreg_to_str (int);
60 static const char *cond_string (enum rtx_code);
61 static int avr_num_arg_regs (enum machine_mode, tree);
63 static RTX_CODE compare_condition (rtx insn);
64 static int compare_sign_p (rtx insn);
65 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
66 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
68 const struct attribute_spec avr_attribute_table[];
69 static bool avr_assemble_integer (rtx, unsigned int, int);
70 static void avr_file_start (void);
71 static void avr_file_end (void);
72 static void avr_asm_function_end_prologue (FILE *);
73 static void avr_asm_function_begin_epilogue (FILE *);
74 static void avr_insert_attributes (tree, tree *);
75 static void avr_asm_init_sections (void);
76 static unsigned int avr_section_type_flags (tree, const char *, int);
78 static void avr_reorg (void);
79 static void avr_asm_out_ctor (rtx, int);
80 static void avr_asm_out_dtor (rtx, int);
81 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
82 static bool avr_rtx_costs (rtx, int, int, int *);
83 static int avr_address_cost (rtx);
84 static bool avr_return_in_memory (const_tree, const_tree);
85 static struct machine_function * avr_init_machine_status (void);
86 static rtx avr_builtin_setjmp_frame_value (void);
88 /* Allocate registers from r25 to r8 for parameters for function calls. */
89 #define FIRST_CUM_REG 26
91 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
92 static GTY(()) rtx tmp_reg_rtx;
94 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
95 static GTY(()) rtx zero_reg_rtx;
97 /* AVR register names {"r0", "r1", ..., "r31"} */
98 static const char *const avr_regnames[] = REGISTER_NAMES;
100 /* This holds the last insn address. */
101 static int last_insn_address = 0;
103 /* Preprocessor macros to define depending on MCU type. */
104 const char *avr_extra_arch_macro;
106 /* Current architecture. */
107 const struct base_arch_s *avr_current_arch;
109 section *progmem_section;
111 static const struct base_arch_s avr_arch_types[] = {
112 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
113 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
114 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
115 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
116 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
117 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
118 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
119 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
120 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
121 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
122 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
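/* A reading of the table above, matched against the MCU list below: the
   flag columns appear to mirror the fields of struct base_arch_s in avr.h,
   i.e. assembler-only core, hardware MUL, JMP/CALL (>8K flash), MOVW,
   ELPM, ELPM Rx,Z, EIJMP/EICALL and a reserved field, followed by the
   __AVR_ARCH__ macro to predefine.  */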
125 /* These names are used as the index into the avr_arch_types[] table
126 above. */
128 enum avr_arch
130 ARCH_UNKNOWN,
131 ARCH_AVR1,
132 ARCH_AVR2,
133 ARCH_AVR25,
134 ARCH_AVR3,
135 ARCH_AVR31,
136 ARCH_AVR35,
137 ARCH_AVR4,
138 ARCH_AVR5,
139 ARCH_AVR51,
140 ARCH_AVR6
143 struct mcu_type_s {
144 const char *const name;
145 int arch; /* index in avr_arch_types[] */
146 /* Must lie outside user's namespace. NULL == no macro. */
147 const char *const macro;
150 /* List of all known AVR MCU types - if updated, it has to be kept
151 in sync in several places (FIXME: is there a better way?):
152 - here
153 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
154 - t-avr (MULTILIB_MATCHES)
155 - gas/config/tc-avr.c
156 - avr-libc */
158 static const struct mcu_type_s avr_mcu_types[] = {
159 /* Classic, <= 8K. */
160 { "avr2", ARCH_AVR2, NULL },
161 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
162 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
163 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
164 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
165 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
166 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
167 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
168 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
169 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
170 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
171 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
172 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
173 /* Classic + MOVW, <= 8K. */
174 { "avr25", ARCH_AVR25, NULL },
175 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
176 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
177 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
178 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
179 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
180 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
181 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
182 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
183 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
184 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
185 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
186 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
187 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
188 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
189 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
190 /* Classic, > 8K, <= 64K. */
191 { "avr3", ARCH_AVR3, NULL },
192 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
193 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
194 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
195 /* Classic, == 128K. */
196 { "avr31", ARCH_AVR31, NULL },
197 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
198 /* Classic + MOVW + JMP/CALL. */
199 { "avr35", ARCH_AVR35, NULL },
200 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
201 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
202 /* Enhanced, <= 8K. */
203 { "avr4", ARCH_AVR4, NULL },
204 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
205 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
206 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
207 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
208 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
209 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
210 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
211 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
212 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
213 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
214 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
215 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
216 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
217 /* Enhanced, > 8K, <= 64K. */
218 { "avr5", ARCH_AVR5, NULL },
219 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
220 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
221 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
222 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
223 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
224 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
225 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
226 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
227 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
228 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
229 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
230 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
231 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
232 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
233 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
234 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
235 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
236 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
237 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
238 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
239 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
240 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
241 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
242 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
243 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
244 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
245 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
246 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
247 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
248 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
249 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
250 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
251 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
252 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
253 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
254 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
255 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
256 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
257 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
258 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
259 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
260 /* Enhanced, == 128K. */
261 { "avr51", ARCH_AVR51, NULL },
262 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
263 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
264 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
265 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
266 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
267 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
268 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
269 /* 3-Byte PC. */
270 { "avr6", ARCH_AVR6, NULL },
271 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
272 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
273 /* Assembler only. */
274 { "avr1", ARCH_AVR1, NULL },
275 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
276 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
277 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
278 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
279 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
280 { NULL, ARCH_UNKNOWN, NULL }
283 int avr_case_values_threshold = 30000;
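/* The large default effectively disables tablejumps; avr_override_options
   below lowers it to 8 or 17 when optimizing and tablejumps are allowed.  */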
285 /* Initialize the GCC target structure. */
286 #undef TARGET_ASM_ALIGNED_HI_OP
287 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
288 #undef TARGET_ASM_ALIGNED_SI_OP
289 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
290 #undef TARGET_ASM_UNALIGNED_HI_OP
291 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
292 #undef TARGET_ASM_UNALIGNED_SI_OP
293 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
294 #undef TARGET_ASM_INTEGER
295 #define TARGET_ASM_INTEGER avr_assemble_integer
296 #undef TARGET_ASM_FILE_START
297 #define TARGET_ASM_FILE_START avr_file_start
298 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
299 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
300 #undef TARGET_ASM_FILE_END
301 #define TARGET_ASM_FILE_END avr_file_end
303 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
304 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
305 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
306 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
307 #undef TARGET_ATTRIBUTE_TABLE
308 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
309 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
310 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
311 #undef TARGET_INSERT_ATTRIBUTES
312 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
313 #undef TARGET_SECTION_TYPE_FLAGS
314 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
315 #undef TARGET_RTX_COSTS
316 #define TARGET_RTX_COSTS avr_rtx_costs
317 #undef TARGET_ADDRESS_COST
318 #define TARGET_ADDRESS_COST avr_address_cost
319 #undef TARGET_MACHINE_DEPENDENT_REORG
320 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
322 #undef TARGET_RETURN_IN_MEMORY
323 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
325 #undef TARGET_STRICT_ARGUMENT_NAMING
326 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
328 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
329 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
331 struct gcc_target targetm = TARGET_INITIALIZER;
333 void
334 avr_override_options (void)
336 const struct mcu_type_s *t;
338 flag_delete_null_pointer_checks = 0;
340 for (t = avr_mcu_types; t->name; t++)
341 if (strcmp (t->name, avr_mcu_name) == 0)
342 break;
344 if (!t->name)
346 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
347 avr_mcu_name);
348 for (t = avr_mcu_types; t->name; t++)
349 fprintf (stderr," %s\n", t->name);
352 avr_current_arch = &avr_arch_types[t->arch];
353 avr_extra_arch_macro = t->macro;
355 if (optimize && !TARGET_NO_TABLEJUMP)
356 avr_case_values_threshold =
357 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
359 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
360 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
362 init_machine_status = avr_init_machine_status;
 365 /* Map a register number to its register class (used by avr_regno_reg_class).  */
367 static const int reg_class_tab[]={
368 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
369 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
370 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
371 GENERAL_REGS, /* r0 - r15 */
372 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
373 LD_REGS, /* r16 - 23 */
374 ADDW_REGS,ADDW_REGS, /* r24,r25 */
375 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
376 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
377 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
378 STACK_REG,STACK_REG /* SPL,SPH */
381 /* Function to set up the backend function structure. */
383 static struct machine_function *
384 avr_init_machine_status (void)
386 return ((struct machine_function *)
387 ggc_alloc_cleared (sizeof (struct machine_function)));
390 /* Return register class for register R. */
392 enum reg_class
393 avr_regno_reg_class (int r)
395 if (r <= 33)
396 return reg_class_tab[r];
397 return ALL_REGS;
400 /* Return nonzero if FUNC is a naked function. */
402 static int
403 avr_naked_function_p (tree func)
405 tree a;
407 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
409 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
410 return a != NULL_TREE;
413 /* Return nonzero if FUNC is an interrupt function as specified
414 by the "interrupt" attribute. */
416 static int
417 interrupt_function_p (tree func)
419 tree a;
421 if (TREE_CODE (func) != FUNCTION_DECL)
422 return 0;
424 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
425 return a != NULL_TREE;
428 /* Return nonzero if FUNC is a signal function as specified
429 by the "signal" attribute. */
431 static int
432 signal_function_p (tree func)
434 tree a;
436 if (TREE_CODE (func) != FUNCTION_DECL)
437 return 0;
439 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
440 return a != NULL_TREE;
 443 /* Return nonzero if FUNC is an OS_task function.  */
445 static int
446 avr_OS_task_function_p (tree func)
448 tree a;
450 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
452 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
453 return a != NULL_TREE;
 456 /* Return nonzero if FUNC is an OS_main function.  */
458 static int
459 avr_OS_main_function_p (tree func)
461 tree a;
463 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
465 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
466 return a != NULL_TREE;
469 /* Return the number of hard registers to push/pop in the prologue/epilogue
470 of the current function, and optionally store these registers in SET. */
472 static int
473 avr_regs_to_save (HARD_REG_SET *set)
475 int reg, count;
476 int int_or_sig_p = (interrupt_function_p (current_function_decl)
477 || signal_function_p (current_function_decl));
479 if (!reload_completed)
480 cfun->machine->is_leaf = leaf_function_p ();
482 if (set)
483 CLEAR_HARD_REG_SET (*set);
484 count = 0;
 486 /* No need to save any registers if the function never returns or
 487 has the "OS_task" or "OS_main" attribute. */
488 if (TREE_THIS_VOLATILE (current_function_decl)
489 || cfun->machine->is_OS_task
490 || cfun->machine->is_OS_main)
491 return 0;
493 for (reg = 0; reg < 32; reg++)
 495 /* Do not push/pop __tmp_reg__, __zero_reg__, or
 496 any global register variables. */
497 if (fixed_regs[reg])
498 continue;
500 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
501 || (df_regs_ever_live_p (reg)
502 && (int_or_sig_p || !call_used_regs[reg])
503 && !(frame_pointer_needed
504 && (reg == REG_Y || reg == (REG_Y+1)))))
506 if (set)
507 SET_HARD_REG_BIT (*set, reg);
508 count++;
511 return count;
514 /* Compute offset between arg_pointer and frame_pointer. */
517 initial_elimination_offset (int from, int to)
519 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
520 return 0;
521 else
523 int offset = frame_pointer_needed ? 2 : 0;
524 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
526 offset += avr_regs_to_save (NULL);
527 return get_frame_size () + (avr_pc_size) + 1 + offset;
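/* A hypothetical example of the arithmetic above: a 2-byte-PC device with a
   10-byte frame, a saved frame pointer (2 bytes) and three other saved
   registers gives 10 + 2 + 1 + (2 + 3) = 18.  */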
 531 /* The actual start of the frame is virtual_stack_vars_rtx, which is offset
 532 from the frame pointer by +STARTING_FRAME_OFFSET.
 533 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
 534 avoids creating an add/sub of the offset in nonlocal goto and setjmp. */
536 rtx avr_builtin_setjmp_frame_value (void)
538 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
539 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
542 /* Return 1 if the function epilogue is just a single "ret". */
545 avr_simple_epilogue (void)
547 return (! frame_pointer_needed
548 && get_frame_size () == 0
549 && avr_regs_to_save (NULL) == 0
550 && ! interrupt_function_p (current_function_decl)
551 && ! signal_function_p (current_function_decl)
552 && ! avr_naked_function_p (current_function_decl)
553 && ! TREE_THIS_VOLATILE (current_function_decl));
 556 /* Check the sequence of live registers; return the count if all of them form a single contiguous sequence, and 0 otherwise. */
558 static int
559 sequent_regs_live (void)
561 int reg;
562 int live_seq=0;
563 int cur_seq=0;
565 for (reg = 0; reg < 18; ++reg)
567 if (!call_used_regs[reg])
569 if (df_regs_ever_live_p (reg))
571 ++live_seq;
572 ++cur_seq;
574 else
575 cur_seq = 0;
579 if (!frame_pointer_needed)
581 if (df_regs_ever_live_p (REG_Y))
583 ++live_seq;
584 ++cur_seq;
586 else
587 cur_seq = 0;
589 if (df_regs_ever_live_p (REG_Y+1))
591 ++live_seq;
592 ++cur_seq;
594 else
595 cur_seq = 0;
597 else
599 cur_seq += 2;
600 live_seq += 2;
602 return (cur_seq == live_seq) ? live_seq : 0;
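/* So a nonzero result presumably means all live registers form one
   contiguous block, which is what the -mcall-prologues path in
   expand_prologue/expand_epilogue below relies on when it passes live_seq
   to the out-of-line save/restore sequences.  */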
 605 /* Return the total length of the sequence of insns INSNS. */
608 get_sequence_length (rtx insns)
610 rtx insn;
611 int length;
613 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
614 length += get_attr_length (insn);
616 return length;
 619 /* Output RTL prologue. */
621 void
622 expand_prologue (void)
624 int live_seq;
625 HARD_REG_SET set;
626 int minimize;
627 HOST_WIDE_INT size = get_frame_size();
628 /* Define templates for push instructions. */
629 rtx pushbyte = gen_rtx_MEM (QImode,
630 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
631 rtx pushword = gen_rtx_MEM (HImode,
632 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
633 rtx insn;
635 last_insn_address = 0;
637 /* Init cfun->machine. */
638 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
639 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
640 cfun->machine->is_signal = signal_function_p (current_function_decl);
641 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
642 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
644 /* Prologue: naked. */
645 if (cfun->machine->is_naked)
647 return;
650 avr_regs_to_save (&set);
651 live_seq = sequent_regs_live ();
652 minimize = (TARGET_CALL_PROLOGUES
653 && !cfun->machine->is_interrupt
654 && !cfun->machine->is_signal
655 && !cfun->machine->is_OS_task
656 && !cfun->machine->is_OS_main
657 && live_seq);
659 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
661 if (cfun->machine->is_interrupt)
663 /* Enable interrupts. */
664 insn = emit_insn (gen_enable_interrupt ());
665 RTX_FRAME_RELATED_P (insn) = 1;
668 /* Push zero reg. */
669 insn = emit_move_insn (pushbyte, zero_reg_rtx);
670 RTX_FRAME_RELATED_P (insn) = 1;
672 /* Push tmp reg. */
673 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
674 RTX_FRAME_RELATED_P (insn) = 1;
676 /* Push SREG. */
677 insn = emit_move_insn (tmp_reg_rtx,
678 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
679 RTX_FRAME_RELATED_P (insn) = 1;
680 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
681 RTX_FRAME_RELATED_P (insn) = 1;
683 /* Push RAMPZ. */
684 if(AVR_HAVE_RAMPZ
685 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
687 insn = emit_move_insn (tmp_reg_rtx,
688 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
689 RTX_FRAME_RELATED_P (insn) = 1;
690 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
691 RTX_FRAME_RELATED_P (insn) = 1;
694 /* Clear zero reg. */
695 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
696 RTX_FRAME_RELATED_P (insn) = 1;
698 /* Prevent any attempt to delete the setting of ZERO_REG! */
699 emit_use (zero_reg_rtx);
701 if (minimize && (frame_pointer_needed
702 || (AVR_2_BYTE_PC && live_seq > 6)
703 || live_seq > 7))
705 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
706 gen_int_mode (size, HImode));
707 RTX_FRAME_RELATED_P (insn) = 1;
709 insn =
710 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
711 gen_int_mode (size + live_seq, HImode)));
712 RTX_FRAME_RELATED_P (insn) = 1;
714 else
716 int reg;
717 for (reg = 0; reg < 32; ++reg)
719 if (TEST_HARD_REG_BIT (set, reg))
721 /* Emit push of register to save. */
722 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
723 RTX_FRAME_RELATED_P (insn) = 1;
726 if (frame_pointer_needed)
728 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
730 /* Push frame pointer. */
731 insn = emit_move_insn (pushword, frame_pointer_rtx);
732 RTX_FRAME_RELATED_P (insn) = 1;
735 if (!size)
737 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
738 RTX_FRAME_RELATED_P (insn) = 1;
740 else
 742 /* Creating a frame can be done by direct manipulation of the
 743 stack or via the frame pointer. These two methods are:
 744 fp = sp
 745 fp -= size
 746 sp = fp
 747 or
 748 sp -= size
 749 fp = sp
 750 The optimum method depends on function type, stack and frame size.
 751 To avoid complex logic, both methods are tried and the shorter
 752 sequence is selected. */
753 rtx myfp;
754 rtx fp_plus_insns;
755 rtx sp_plus_insns = NULL_RTX;
757 if (TARGET_TINY_STACK)
759 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
760 over 'sbiw' (2 cycles, same size). */
761 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
763 else
765 /* Normal sized addition. */
766 myfp = frame_pointer_rtx;
769 /* Method 1-Adjust frame pointer. */
770 start_sequence ();
772 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
773 RTX_FRAME_RELATED_P (insn) = 1;
775 insn =
776 emit_move_insn (myfp,
777 gen_rtx_PLUS (GET_MODE(myfp), myfp,
778 gen_int_mode (-size,
779 GET_MODE(myfp))));
780 RTX_FRAME_RELATED_P (insn) = 1;
782 /* Copy to stack pointer. */
783 if (TARGET_TINY_STACK)
785 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
786 RTX_FRAME_RELATED_P (insn) = 1;
788 else if (TARGET_NO_INTERRUPTS
789 || cfun->machine->is_signal
790 || cfun->machine->is_OS_main)
792 insn =
793 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
794 frame_pointer_rtx));
795 RTX_FRAME_RELATED_P (insn) = 1;
797 else if (cfun->machine->is_interrupt)
799 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
800 frame_pointer_rtx));
801 RTX_FRAME_RELATED_P (insn) = 1;
803 else
805 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
806 RTX_FRAME_RELATED_P (insn) = 1;
809 fp_plus_insns = get_insns ();
810 end_sequence ();
812 /* Method 2-Adjust Stack pointer. */
813 if (size <= 6)
815 start_sequence ();
817 insn =
818 emit_move_insn (stack_pointer_rtx,
819 gen_rtx_PLUS (HImode,
820 stack_pointer_rtx,
821 gen_int_mode (-size,
822 HImode)));
823 RTX_FRAME_RELATED_P (insn) = 1;
825 insn =
826 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
827 RTX_FRAME_RELATED_P (insn) = 1;
829 sp_plus_insns = get_insns ();
830 end_sequence ();
833 /* Use shortest method. */
834 if (size <= 6 && (get_sequence_length (sp_plus_insns)
835 < get_sequence_length (fp_plus_insns)))
836 emit_insn (sp_plus_insns);
837 else
838 emit_insn (fp_plus_insns);
844 /* Output summary at end of function prologue. */
846 static void
847 avr_asm_function_end_prologue (FILE *file)
849 if (cfun->machine->is_naked)
851 fputs ("/* prologue: naked */\n", file);
853 else
855 if (cfun->machine->is_interrupt)
857 fputs ("/* prologue: Interrupt */\n", file);
859 else if (cfun->machine->is_signal)
861 fputs ("/* prologue: Signal */\n", file);
863 else
864 fputs ("/* prologue: function */\n", file);
866 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
867 get_frame_size());
871 /* Implement EPILOGUE_USES. */
874 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
876 if (reload_completed
877 && cfun->machine
878 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
879 return 1;
880 return 0;
883 /* Output RTL epilogue. */
885 void
886 expand_epilogue (void)
888 int reg;
889 int live_seq;
890 HARD_REG_SET set;
891 int minimize;
892 HOST_WIDE_INT size = get_frame_size();
894 /* epilogue: naked */
895 if (cfun->machine->is_naked)
897 emit_jump_insn (gen_return ());
898 return;
901 avr_regs_to_save (&set);
902 live_seq = sequent_regs_live ();
903 minimize = (TARGET_CALL_PROLOGUES
904 && !cfun->machine->is_interrupt
905 && !cfun->machine->is_signal
906 && !cfun->machine->is_OS_task
907 && !cfun->machine->is_OS_main
908 && live_seq);
910 if (minimize && (frame_pointer_needed || live_seq > 4))
912 if (frame_pointer_needed)
914 /* Get rid of frame. */
915 emit_move_insn(frame_pointer_rtx,
916 gen_rtx_PLUS (HImode, frame_pointer_rtx,
917 gen_int_mode (size, HImode)));
919 else
921 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
924 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
926 else
928 if (frame_pointer_needed)
930 if (size)
932 /* Try two methods to adjust stack and select shortest. */
933 rtx myfp;
934 rtx fp_plus_insns;
935 rtx sp_plus_insns = NULL_RTX;
937 if (TARGET_TINY_STACK)
939 /* The high byte (r29) doesn't change - prefer 'subi'
940 (1 cycle) over 'sbiw' (2 cycles, same size). */
941 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
943 else
945 /* Normal sized addition. */
946 myfp = frame_pointer_rtx;
949 /* Method 1-Adjust frame pointer. */
950 start_sequence ();
952 emit_move_insn (myfp,
953 gen_rtx_PLUS (HImode, myfp,
954 gen_int_mode (size,
955 GET_MODE(myfp))));
957 /* Copy to stack pointer. */
958 if (TARGET_TINY_STACK)
960 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
962 else if (TARGET_NO_INTERRUPTS
963 || cfun->machine->is_signal)
965 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
966 frame_pointer_rtx));
968 else if (cfun->machine->is_interrupt)
970 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
971 frame_pointer_rtx));
973 else
975 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
978 fp_plus_insns = get_insns ();
979 end_sequence ();
981 /* Method 2-Adjust Stack pointer. */
982 if (size <= 5)
984 start_sequence ();
986 emit_move_insn (stack_pointer_rtx,
987 gen_rtx_PLUS (HImode, stack_pointer_rtx,
988 gen_int_mode (size,
989 HImode)));
991 sp_plus_insns = get_insns ();
992 end_sequence ();
995 /* Use shortest method. */
996 if (size <= 5 && (get_sequence_length (sp_plus_insns)
997 < get_sequence_length (fp_plus_insns)))
998 emit_insn (sp_plus_insns);
999 else
1000 emit_insn (fp_plus_insns);
1002 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1004 /* Restore previous frame_pointer. */
1005 emit_insn (gen_pophi (frame_pointer_rtx));
1008 /* Restore used registers. */
1009 for (reg = 31; reg >= 0; --reg)
1011 if (TEST_HARD_REG_BIT (set, reg))
1012 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
1014 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1016 /* Restore RAMPZ using tmp reg as scratch. */
1017 if(AVR_HAVE_RAMPZ
1018 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
1020 emit_insn (gen_popqi (tmp_reg_rtx));
1021 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
1022 tmp_reg_rtx);
1025 /* Restore SREG using tmp reg as scratch. */
1026 emit_insn (gen_popqi (tmp_reg_rtx));
1028 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
1029 tmp_reg_rtx);
1031 /* Restore tmp REG. */
1032 emit_insn (gen_popqi (tmp_reg_rtx));
1034 /* Restore zero REG. */
1035 emit_insn (gen_popqi (zero_reg_rtx));
1038 emit_jump_insn (gen_return ());
1042 /* Output summary messages at beginning of function epilogue. */
1044 static void
1045 avr_asm_function_begin_epilogue (FILE *file)
1047 fprintf (file, "/* epilogue start */\n");
1050 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1051 machine for a memory operand of mode MODE. */
1054 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1056 enum reg_class r = NO_REGS;
1058 if (TARGET_ALL_DEBUG)
1060 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1061 GET_MODE_NAME(mode),
1062 strict ? "(strict)": "",
1063 reload_completed ? "(reload_completed)": "",
1064 reload_in_progress ? "(reload_in_progress)": "",
1065 reg_renumber ? "(reg_renumber)" : "");
1066 if (GET_CODE (x) == PLUS
1067 && REG_P (XEXP (x, 0))
1068 && GET_CODE (XEXP (x, 1)) == CONST_INT
1069 && INTVAL (XEXP (x, 1)) >= 0
1070 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1071 && reg_renumber
1073 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1074 true_regnum (XEXP (x, 0)));
1075 debug_rtx (x);
1077 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1078 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1079 r = POINTER_REGS;
1080 else if (CONSTANT_ADDRESS_P (x))
1081 r = ALL_REGS;
1082 else if (GET_CODE (x) == PLUS
1083 && REG_P (XEXP (x, 0))
1084 && GET_CODE (XEXP (x, 1)) == CONST_INT
1085 && INTVAL (XEXP (x, 1)) >= 0)
1087 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1088 if (fit)
1090 if (! strict
1091 || REGNO (XEXP (x,0)) == REG_Y
1092 || REGNO (XEXP (x,0)) == REG_Z)
1093 r = BASE_POINTER_REGS;
1094 if (XEXP (x,0) == frame_pointer_rtx
1095 || XEXP (x,0) == arg_pointer_rtx)
1096 r = BASE_POINTER_REGS;
1098 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1099 r = POINTER_Y_REGS;
1101 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1102 && REG_P (XEXP (x, 0))
1103 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1104 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1106 r = POINTER_REGS;
1108 if (TARGET_ALL_DEBUG)
1110 fprintf (stderr, " ret = %c\n", r + '0');
1112 return r == NO_REGS ? 0 : (int)r;
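/* Summarizing the cases above: accepted addresses are a plain pointer
   register, a constant address, a pointer register plus a small constant
   displacement (restricted to Y/Z under strict checking), or a
   pre-decrement/post-increment of a pointer register.  */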
 1115 /* Attempt to replace X with a valid
 1116 memory address for an operand of mode MODE. */
1119 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1121 x = oldx;
1122 if (TARGET_ALL_DEBUG)
1124 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1125 debug_rtx (oldx);
1128 if (GET_CODE (oldx) == PLUS
1129 && REG_P (XEXP (oldx,0)))
1131 if (REG_P (XEXP (oldx,1)))
1132 x = force_reg (GET_MODE (oldx), oldx);
1133 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1135 int offs = INTVAL (XEXP (oldx,1));
1136 if (frame_pointer_rtx != XEXP (oldx,0))
1137 if (offs > MAX_LD_OFFSET (mode))
1139 if (TARGET_ALL_DEBUG)
1140 fprintf (stderr, "force_reg (big offset)\n");
1141 x = force_reg (GET_MODE (oldx), oldx);
1145 return x;
1149 /* Return a pointer register name as a string. */
1151 static const char *
1152 ptrreg_to_str (int regno)
1154 switch (regno)
1156 case REG_X: return "X";
1157 case REG_Y: return "Y";
1158 case REG_Z: return "Z";
1159 default:
1160 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1162 return NULL;
 1165 /* Return the condition name as a string.
 1166 Used when constructing conditional jumps. */
1168 static const char *
1169 cond_string (enum rtx_code code)
1171 switch (code)
1173 case NE:
1174 return "ne";
1175 case EQ:
1176 return "eq";
1177 case GE:
1178 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1179 return "pl";
1180 else
1181 return "ge";
1182 case LT:
1183 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1184 return "mi";
1185 else
1186 return "lt";
1187 case GEU:
1188 return "sh";
1189 case LTU:
1190 return "lo";
1191 default:
1192 gcc_unreachable ();
1196 /* Output ADDR to FILE as address. */
1198 void
1199 print_operand_address (FILE *file, rtx addr)
1201 switch (GET_CODE (addr))
1203 case REG:
1204 fprintf (file, ptrreg_to_str (REGNO (addr)));
1205 break;
1207 case PRE_DEC:
1208 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1209 break;
1211 case POST_INC:
1212 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1213 break;
1215 default:
1216 if (CONSTANT_ADDRESS_P (addr)
1217 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1218 || GET_CODE (addr) == LABEL_REF))
1220 fprintf (file, "gs(");
1221 output_addr_const (file,addr);
1222 fprintf (file ,")");
1224 else
1225 output_addr_const (file, addr);
1230 /* Output X as assembler operand to file FILE. */
1232 void
1233 print_operand (FILE *file, rtx x, int code)
1235 int abcd = 0;
1237 if (code >= 'A' && code <= 'D')
1238 abcd = code - 'A';
1240 if (code == '~')
1242 if (!AVR_HAVE_JMP_CALL)
1243 fputc ('r', file);
1245 else if (code == '!')
1247 if (AVR_HAVE_EIJMP_EICALL)
1248 fputc ('e', file);
1250 else if (REG_P (x))
1252 if (x == zero_reg_rtx)
1253 fprintf (file, "__zero_reg__");
1254 else
1255 fprintf (file, reg_names[true_regnum (x) + abcd]);
1257 else if (GET_CODE (x) == CONST_INT)
1258 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1259 else if (GET_CODE (x) == MEM)
1261 rtx addr = XEXP (x,0);
1263 if (CONSTANT_P (addr) && abcd)
1265 fputc ('(', file);
1266 output_address (addr);
1267 fprintf (file, ")+%d", abcd);
1269 else if (code == 'o')
1271 if (GET_CODE (addr) != PLUS)
1272 fatal_insn ("bad address, not (reg+disp):", addr);
1274 print_operand (file, XEXP (addr, 1), 0);
1276 else if (code == 'p' || code == 'r')
1278 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1279 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1281 if (code == 'p')
1282 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1283 else
1284 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1286 else if (GET_CODE (addr) == PLUS)
1288 print_operand_address (file, XEXP (addr,0));
1289 if (REGNO (XEXP (addr, 0)) == REG_X)
1290 fatal_insn ("internal compiler error. Bad address:"
1291 ,addr);
1292 fputc ('+', file);
1293 print_operand (file, XEXP (addr,1), code);
1295 else
1296 print_operand_address (file, addr);
1298 else if (GET_CODE (x) == CONST_DOUBLE)
1300 long val;
1301 REAL_VALUE_TYPE rv;
1302 if (GET_MODE (x) != SFmode)
1303 fatal_insn ("internal compiler error. Unknown mode:", x);
1304 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1305 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1306 fprintf (file, "0x%lx", val);
1308 else if (code == 'j')
1309 fputs (cond_string (GET_CODE (x)), file);
1310 else if (code == 'k')
1311 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1312 else
1313 print_operand_address (file, x);
1316 /* Update the condition code in the INSN. */
1318 void
1319 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1321 rtx set;
1323 switch (get_attr_cc (insn))
1325 case CC_NONE:
1326 /* Insn does not affect CC at all. */
1327 break;
1329 case CC_SET_N:
1330 CC_STATUS_INIT;
1331 break;
1333 case CC_SET_ZN:
1334 set = single_set (insn);
1335 CC_STATUS_INIT;
1336 if (set)
1338 cc_status.flags |= CC_NO_OVERFLOW;
1339 cc_status.value1 = SET_DEST (set);
1341 break;
1343 case CC_SET_CZN:
1344 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1345 The V flag may or may not be known but that's ok because
1346 alter_cond will change tests to use EQ/NE. */
1347 set = single_set (insn);
1348 CC_STATUS_INIT;
1349 if (set)
1351 cc_status.value1 = SET_DEST (set);
1352 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1354 break;
1356 case CC_COMPARE:
1357 set = single_set (insn);
1358 CC_STATUS_INIT;
1359 if (set)
1360 cc_status.value1 = SET_SRC (set);
1361 break;
1363 case CC_CLOBBER:
1364 /* Insn doesn't leave CC in a usable state. */
1365 CC_STATUS_INIT;
 1367 /* Correct CC for ashrqi3 when the shift count is a CONST_INT other than 6. */
1368 set = single_set (insn);
1369 if (set)
1371 rtx src = SET_SRC (set);
1373 if (GET_CODE (src) == ASHIFTRT
1374 && GET_MODE (src) == QImode)
1376 rtx x = XEXP (src, 1);
1378 if (GET_CODE (x) == CONST_INT
1379 && INTVAL (x) > 0
1380 && INTVAL (x) != 6)
1382 cc_status.value1 = SET_DEST (set);
1383 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1387 break;
1391 /* Return maximum number of consecutive registers of
1392 class CLASS needed to hold a value of mode MODE. */
1395 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1397 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1400 /* Choose mode for jump insn:
1401 1 - relative jump in range -63 <= x <= 62 ;
1402 2 - relative jump in range -2046 <= x <= 2045 ;
1403 3 - absolute jump (only for ATmega[16]03). */
1406 avr_jump_mode (rtx x, rtx insn)
1408 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1409 ? XEXP (x, 0) : x));
1410 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1411 int jump_distance = cur_addr - dest_addr;
1413 if (-63 <= jump_distance && jump_distance <= 62)
1414 return 1;
1415 else if (-2046 <= jump_distance && jump_distance <= 2045)
1416 return 2;
1417 else if (AVR_HAVE_JMP_CALL)
1418 return 3;
1420 return 2;
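/* The value returned here is interpreted by ret_cond_branch below:
   1 selects short conditional branches, 2 variants that skip around an
   rjmp, and 3 variants that skip around a jmp (devices with JMP/CALL).  */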
 1423 /* Return an AVR conditional branch sequence.
 1424 X is a comparison RTX.
 1425 LEN is a number returned by the avr_jump_mode function.
 1426 If REVERSE is nonzero, the condition code in X must be reversed. */
1428 const char *
1429 ret_cond_branch (rtx x, int len, int reverse)
1431 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1433 switch (cond)
1435 case GT:
1436 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1437 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1438 AS1 (brpl,%0)) :
1439 len == 2 ? (AS1 (breq,.+4) CR_TAB
1440 AS1 (brmi,.+2) CR_TAB
1441 AS1 (rjmp,%0)) :
1442 (AS1 (breq,.+6) CR_TAB
1443 AS1 (brmi,.+4) CR_TAB
1444 AS1 (jmp,%0)));
1446 else
1447 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1448 AS1 (brge,%0)) :
1449 len == 2 ? (AS1 (breq,.+4) CR_TAB
1450 AS1 (brlt,.+2) CR_TAB
1451 AS1 (rjmp,%0)) :
1452 (AS1 (breq,.+6) CR_TAB
1453 AS1 (brlt,.+4) CR_TAB
1454 AS1 (jmp,%0)));
1455 case GTU:
1456 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1457 AS1 (brsh,%0)) :
1458 len == 2 ? (AS1 (breq,.+4) CR_TAB
1459 AS1 (brlo,.+2) CR_TAB
1460 AS1 (rjmp,%0)) :
1461 (AS1 (breq,.+6) CR_TAB
1462 AS1 (brlo,.+4) CR_TAB
1463 AS1 (jmp,%0)));
1464 case LE:
1465 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1466 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1467 AS1 (brmi,%0)) :
1468 len == 2 ? (AS1 (breq,.+2) CR_TAB
1469 AS1 (brpl,.+2) CR_TAB
1470 AS1 (rjmp,%0)) :
1471 (AS1 (breq,.+2) CR_TAB
1472 AS1 (brpl,.+4) CR_TAB
1473 AS1 (jmp,%0)));
1474 else
1475 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1476 AS1 (brlt,%0)) :
1477 len == 2 ? (AS1 (breq,.+2) CR_TAB
1478 AS1 (brge,.+2) CR_TAB
1479 AS1 (rjmp,%0)) :
1480 (AS1 (breq,.+2) CR_TAB
1481 AS1 (brge,.+4) CR_TAB
1482 AS1 (jmp,%0)));
1483 case LEU:
1484 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1485 AS1 (brlo,%0)) :
1486 len == 2 ? (AS1 (breq,.+2) CR_TAB
1487 AS1 (brsh,.+2) CR_TAB
1488 AS1 (rjmp,%0)) :
1489 (AS1 (breq,.+2) CR_TAB
1490 AS1 (brsh,.+4) CR_TAB
1491 AS1 (jmp,%0)));
1492 default:
1493 if (reverse)
1495 switch (len)
1497 case 1:
1498 return AS1 (br%k1,%0);
1499 case 2:
1500 return (AS1 (br%j1,.+2) CR_TAB
1501 AS1 (rjmp,%0));
1502 default:
1503 return (AS1 (br%j1,.+4) CR_TAB
1504 AS1 (jmp,%0));
1507 else
1509 switch (len)
1511 case 1:
1512 return AS1 (br%j1,%0);
1513 case 2:
1514 return (AS1 (br%k1,.+2) CR_TAB
1515 AS1 (rjmp,%0));
1516 default:
1517 return (AS1 (br%k1,.+4) CR_TAB
1518 AS1 (jmp,%0));
1522 return "";
 1525 /* Predicate for an immediate operand that fits into a byte (8 bits). */
1528 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1530 return (GET_CODE (op) == CONST_INT
1531 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1534 /* Output all insn addresses and their sizes into the assembly language
1535 output file. This is helpful for debugging whether the length attributes
1536 in the md file are correct.
1537 Output insn cost for next insn. */
1539 void
1540 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1541 int num_operands ATTRIBUTE_UNUSED)
1543 int uid = INSN_UID (insn);
1545 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1547 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1548 INSN_ADDRESSES (uid),
1549 INSN_ADDRESSES (uid) - last_insn_address,
1550 rtx_cost (PATTERN (insn), INSN));
1552 last_insn_address = INSN_ADDRESSES (uid);
1555 /* Return 0 if undefined, 1 if always true or always false. */
1558 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1560 unsigned int max = (mode == QImode ? 0xff :
1561 mode == HImode ? 0xffff :
1562 mode == SImode ? 0xffffffff : 0);
1563 if (max && operator && GET_CODE (x) == CONST_INT)
1565 if (unsigned_condition (operator) != operator)
1566 max >>= 1;
1568 if (max != (INTVAL (x) & max)
1569 && INTVAL (x) != 0xff)
1570 return 1;
1572 return 0;
1576 /* Returns nonzero if REGNO is the number of a hard
1577 register in which function arguments are sometimes passed. */
1580 function_arg_regno_p(int r)
1582 return (r >= 8 && r <= 25);
 1585 /* Initialize the variable CUM to the state at the beginning
 1586 of the argument list. */
1588 void
1589 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1590 tree fndecl ATTRIBUTE_UNUSED)
1592 cum->nregs = 18;
1593 cum->regno = FIRST_CUM_REG;
1594 if (!libname && fntype)
1596 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1597 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1598 != void_type_node));
1599 if (stdarg)
1600 cum->nregs = 0;
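/* In other words, with a variadic prototype every argument is passed on the
   stack, while libcalls and functions with a fixed argument list keep using
   the register-passing scheme below.  */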
1604 /* Returns the number of registers to allocate for a function argument. */
1606 static int
1607 avr_num_arg_regs (enum machine_mode mode, tree type)
1609 int size;
1611 if (mode == BLKmode)
1612 size = int_size_in_bytes (type);
1613 else
1614 size = GET_MODE_SIZE (mode);
1616 /* Align all function arguments to start in even-numbered registers.
1617 Odd-sized arguments leave holes above them. */
1619 return (size + 1) & ~1;
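/* Example of the rounding above: a 1-byte char occupies 2 registers and a
   3-byte aggregate 4, so every argument starts at an even register.  */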
1622 /* Controls whether a function argument is passed
1623 in a register, and which register. */
1626 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1627 int named ATTRIBUTE_UNUSED)
1629 int bytes = avr_num_arg_regs (mode, type);
1631 if (cum->nregs && bytes <= cum->nregs)
1632 return gen_rtx_REG (mode, cum->regno - bytes);
1634 return NULL_RTX;
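/* Worked example (assuming the defaults set by init_cumulative_args,
   nregs == 18 and regno == 26): the first 2-byte argument is passed in
   r24/r25, the next in r22/r23, and so on downwards until the 18
   argument registers are exhausted.  */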
1637 /* Update the summarizer variable CUM to advance past an argument
1638 in the argument list. */
1640 void
1641 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1642 int named ATTRIBUTE_UNUSED)
1644 int bytes = avr_num_arg_regs (mode, type);
1646 cum->nregs -= bytes;
1647 cum->regno -= bytes;
1649 if (cum->nregs <= 0)
1651 cum->nregs = 0;
1652 cum->regno = FIRST_CUM_REG;
 1656 /***********************************************************************
 1657 Functions for outputting the various mov instructions for various modes
 1658 ************************************************************************/
1659 const char *
1660 output_movqi (rtx insn, rtx operands[], int *l)
1662 int dummy;
1663 rtx dest = operands[0];
1664 rtx src = operands[1];
1665 int *real_l = l;
1667 if (!l)
1668 l = &dummy;
1670 *l = 1;
1672 if (register_operand (dest, QImode))
1674 if (register_operand (src, QImode)) /* mov r,r */
1676 if (test_hard_reg_class (STACK_REG, dest))
1677 return AS2 (out,%0,%1);
1678 else if (test_hard_reg_class (STACK_REG, src))
1679 return AS2 (in,%0,%1);
1681 return AS2 (mov,%0,%1);
1683 else if (CONSTANT_P (src))
1685 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1686 return AS2 (ldi,%0,lo8(%1));
1688 if (GET_CODE (src) == CONST_INT)
1690 if (src == const0_rtx) /* mov r,L */
1691 return AS1 (clr,%0);
1692 else if (src == const1_rtx)
1694 *l = 2;
1695 return (AS1 (clr,%0) CR_TAB
1696 AS1 (inc,%0));
1698 else if (src == constm1_rtx)
1700 /* Immediate constants -1 to any register */
1701 *l = 2;
1702 return (AS1 (clr,%0) CR_TAB
1703 AS1 (dec,%0));
1705 else
1707 int bit_nr = exact_log2 (INTVAL (src));
1709 if (bit_nr >= 0)
1711 *l = 3;
1712 if (!real_l)
1713 output_asm_insn ((AS1 (clr,%0) CR_TAB
1714 "set"), operands);
1715 if (!real_l)
1716 avr_output_bld (operands, bit_nr);
1718 return "";
1723 /* Last resort, larger than loading from memory. */
1724 *l = 4;
1725 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1726 AS2 (ldi,r31,lo8(%1)) CR_TAB
1727 AS2 (mov,%0,r31) CR_TAB
1728 AS2 (mov,r31,__tmp_reg__));
1730 else if (GET_CODE (src) == MEM)
1731 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1733 else if (GET_CODE (dest) == MEM)
1735 const char *template;
1737 if (src == const0_rtx)
1738 operands[1] = zero_reg_rtx;
1740 template = out_movqi_mr_r (insn, operands, real_l);
1742 if (!real_l)
1743 output_asm_insn (template, operands);
1745 operands[1] = src;
1747 return "";
1751 const char *
1752 output_movhi (rtx insn, rtx operands[], int *l)
1754 int dummy;
1755 rtx dest = operands[0];
1756 rtx src = operands[1];
1757 int *real_l = l;
1759 if (!l)
1760 l = &dummy;
1762 if (register_operand (dest, HImode))
1764 if (register_operand (src, HImode)) /* mov r,r */
1766 if (test_hard_reg_class (STACK_REG, dest))
1768 if (TARGET_TINY_STACK)
1769 return *l = 1, AS2 (out,__SP_L__,%A1);
1770 /* Use simple load of stack pointer if no interrupts are
1771 used. */
1772 else if (TARGET_NO_INTERRUPTS)
1773 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1774 AS2 (out,__SP_L__,%A1));
1775 *l = 5;
1776 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1777 "cli" CR_TAB
1778 AS2 (out,__SP_H__,%B1) CR_TAB
1779 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1780 AS2 (out,__SP_L__,%A1));
1782 else if (test_hard_reg_class (STACK_REG, src))
1784 *l = 2;
1785 return (AS2 (in,%A0,__SP_L__) CR_TAB
1786 AS2 (in,%B0,__SP_H__));
1789 if (AVR_HAVE_MOVW)
1791 *l = 1;
1792 return (AS2 (movw,%0,%1));
1794 else
1796 *l = 2;
1797 return (AS2 (mov,%A0,%A1) CR_TAB
1798 AS2 (mov,%B0,%B1));
1801 else if (CONSTANT_P (src))
1803 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1805 *l = 2;
1806 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1807 AS2 (ldi,%B0,hi8(%1)));
1810 if (GET_CODE (src) == CONST_INT)
1812 if (src == const0_rtx) /* mov r,L */
1814 *l = 2;
1815 return (AS1 (clr,%A0) CR_TAB
1816 AS1 (clr,%B0));
1818 else if (src == const1_rtx)
1820 *l = 3;
1821 return (AS1 (clr,%A0) CR_TAB
1822 AS1 (clr,%B0) CR_TAB
1823 AS1 (inc,%A0));
1825 else if (src == constm1_rtx)
1827 /* Immediate constants -1 to any register */
1828 *l = 3;
1829 return (AS1 (clr,%0) CR_TAB
1830 AS1 (dec,%A0) CR_TAB
1831 AS2 (mov,%B0,%A0));
1833 else
1835 int bit_nr = exact_log2 (INTVAL (src));
1837 if (bit_nr >= 0)
1839 *l = 4;
1840 if (!real_l)
1841 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1842 AS1 (clr,%B0) CR_TAB
1843 "set"), operands);
1844 if (!real_l)
1845 avr_output_bld (operands, bit_nr);
1847 return "";
1851 if ((INTVAL (src) & 0xff) == 0)
1853 *l = 5;
1854 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1855 AS1 (clr,%A0) CR_TAB
1856 AS2 (ldi,r31,hi8(%1)) CR_TAB
1857 AS2 (mov,%B0,r31) CR_TAB
1858 AS2 (mov,r31,__tmp_reg__));
1860 else if ((INTVAL (src) & 0xff00) == 0)
1862 *l = 5;
1863 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1864 AS2 (ldi,r31,lo8(%1)) CR_TAB
1865 AS2 (mov,%A0,r31) CR_TAB
1866 AS1 (clr,%B0) CR_TAB
1867 AS2 (mov,r31,__tmp_reg__));
1871 /* Last resort, equal to loading from memory. */
1872 *l = 6;
1873 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1874 AS2 (ldi,r31,lo8(%1)) CR_TAB
1875 AS2 (mov,%A0,r31) CR_TAB
1876 AS2 (ldi,r31,hi8(%1)) CR_TAB
1877 AS2 (mov,%B0,r31) CR_TAB
1878 AS2 (mov,r31,__tmp_reg__));
1880 else if (GET_CODE (src) == MEM)
1881 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1883 else if (GET_CODE (dest) == MEM)
1885 const char *template;
1887 if (src == const0_rtx)
1888 operands[1] = zero_reg_rtx;
1890 template = out_movhi_mr_r (insn, operands, real_l);
1892 if (!real_l)
1893 output_asm_insn (template, operands);
1895 operands[1] = src;
1896 return "";
1898 fatal_insn ("invalid insn:", insn);
1899 return "";
1902 const char *
1903 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1905 rtx dest = op[0];
1906 rtx src = op[1];
1907 rtx x = XEXP (src, 0);
1908 int dummy;
1910 if (!l)
1911 l = &dummy;
1913 if (CONSTANT_ADDRESS_P (x))
1915 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1917 *l = 1;
1918 return AS2 (in,%0,__SREG__);
1920 if (optimize > 0 && io_address_operand (x, QImode))
1922 *l = 1;
1923 return AS2 (in,%0,%1-0x20);
1925 *l = 2;
1926 return AS2 (lds,%0,%1);
1928 /* memory access by reg+disp */
1929 else if (GET_CODE (x) == PLUS
1930 && REG_P (XEXP (x,0))
1931 && GET_CODE (XEXP (x,1)) == CONST_INT)
1933 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1935 int disp = INTVAL (XEXP (x,1));
1936 if (REGNO (XEXP (x,0)) != REG_Y)
1937 fatal_insn ("incorrect insn:",insn);
1939 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1940 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1941 AS2 (ldd,%0,Y+63) CR_TAB
1942 AS2 (sbiw,r28,%o1-63));
1944 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1945 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1946 AS2 (ld,%0,Y) CR_TAB
1947 AS2 (subi,r28,lo8(%o1)) CR_TAB
1948 AS2 (sbci,r29,hi8(%o1)));
1950 else if (REGNO (XEXP (x,0)) == REG_X)
 1952 /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS must exclude
 1953 it, but it can still occur with extreme optimization options. */
1954 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1955 || reg_unused_after (insn, XEXP (x,0)))
1956 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1957 AS2 (ld,%0,X));
1959 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1960 AS2 (ld,%0,X) CR_TAB
1961 AS2 (sbiw,r26,%o1));
1963 *l = 1;
1964 return AS2 (ldd,%0,%1);
1966 *l = 1;
1967 return AS2 (ld,%0,%1);
1970 const char *
1971 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1973 rtx dest = op[0];
1974 rtx src = op[1];
1975 rtx base = XEXP (src, 0);
1976 int reg_dest = true_regnum (dest);
1977 int reg_base = true_regnum (base);
1978 /* "volatile" forces reading low byte first, even if less efficient,
1979 for correct operation with 16-bit I/O registers. */
1980 int mem_volatile_p = MEM_VOLATILE_P (src);
1981 int tmp;
1983 if (!l)
1984 l = &tmp;
1986 if (reg_base > 0)
1988 if (reg_dest == reg_base) /* R = (R) */
1990 *l = 3;
1991 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1992 AS2 (ld,%B0,%1) CR_TAB
1993 AS2 (mov,%A0,__tmp_reg__));
1995 else if (reg_base == REG_X) /* (R26) */
1997 if (reg_unused_after (insn, base))
1999 *l = 2;
2000 return (AS2 (ld,%A0,X+) CR_TAB
2001 AS2 (ld,%B0,X));
2003 *l = 3;
2004 return (AS2 (ld,%A0,X+) CR_TAB
2005 AS2 (ld,%B0,X) CR_TAB
2006 AS2 (sbiw,r26,1));
2008 else /* (R) */
2010 *l = 2;
2011 return (AS2 (ld,%A0,%1) CR_TAB
2012 AS2 (ldd,%B0,%1+1));
2015 else if (GET_CODE (base) == PLUS) /* (R + i) */
2017 int disp = INTVAL (XEXP (base, 1));
2018 int reg_base = true_regnum (XEXP (base, 0));
2020 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2022 if (REGNO (XEXP (base, 0)) != REG_Y)
2023 fatal_insn ("incorrect insn:",insn);
2025 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2026 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2027 AS2 (ldd,%A0,Y+62) CR_TAB
2028 AS2 (ldd,%B0,Y+63) CR_TAB
2029 AS2 (sbiw,r28,%o1-62));
2031 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2032 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2033 AS2 (ld,%A0,Y) CR_TAB
2034 AS2 (ldd,%B0,Y+1) CR_TAB
2035 AS2 (subi,r28,lo8(%o1)) CR_TAB
2036 AS2 (sbci,r29,hi8(%o1)));
2038 if (reg_base == REG_X)
 2040 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
 2041 it, but it can still occur with extreme
 2042 optimization options. */
2044 *l = 4;
2045 if (reg_base == reg_dest)
2046 return (AS2 (adiw,r26,%o1) CR_TAB
2047 AS2 (ld,__tmp_reg__,X+) CR_TAB
2048 AS2 (ld,%B0,X) CR_TAB
2049 AS2 (mov,%A0,__tmp_reg__));
2051 return (AS2 (adiw,r26,%o1) CR_TAB
2052 AS2 (ld,%A0,X+) CR_TAB
2053 AS2 (ld,%B0,X) CR_TAB
2054 AS2 (sbiw,r26,%o1+1));
2057 if (reg_base == reg_dest)
2059 *l = 3;
2060 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2061 AS2 (ldd,%B0,%B1) CR_TAB
2062 AS2 (mov,%A0,__tmp_reg__));
2065 *l = 2;
2066 return (AS2 (ldd,%A0,%A1) CR_TAB
2067 AS2 (ldd,%B0,%B1));
2069 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2071 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2072 fatal_insn ("incorrect insn:", insn);
2074 if (mem_volatile_p)
2076 if (REGNO (XEXP (base, 0)) == REG_X)
2078 *l = 4;
2079 return (AS2 (sbiw,r26,2) CR_TAB
2080 AS2 (ld,%A0,X+) CR_TAB
2081 AS2 (ld,%B0,X) CR_TAB
2082 AS2 (sbiw,r26,1));
2084 else
2086 *l = 3;
2087 return (AS2 (sbiw,%r1,2) CR_TAB
2088 AS2 (ld,%A0,%p1) CR_TAB
2089 AS2 (ldd,%B0,%p1+1));
2093 *l = 2;
2094 return (AS2 (ld,%B0,%1) CR_TAB
2095 AS2 (ld,%A0,%1));
2097 else if (GET_CODE (base) == POST_INC) /* (R++) */
2099 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2100 fatal_insn ("incorrect insn:", insn);
2102 *l = 2;
2103 return (AS2 (ld,%A0,%1) CR_TAB
2104 AS2 (ld,%B0,%1));
2106 else if (CONSTANT_ADDRESS_P (base))
2108 if (optimize > 0 && io_address_operand (base, HImode))
2110 *l = 2;
2111 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2112 AS2 (in,%B0,%B1-0x20));
2114 *l = 4;
2115 return (AS2 (lds,%A0,%A1) CR_TAB
2116 AS2 (lds,%B0,%B1));
2119 fatal_insn ("unknown move insn:",insn);
2120 return "";
2123 const char *
2124 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2126 rtx dest = op[0];
2127 rtx src = op[1];
2128 rtx base = XEXP (src, 0);
2129 int reg_dest = true_regnum (dest);
2130 int reg_base = true_regnum (base);
2131 int tmp;
2133 if (!l)
2134 l = &tmp;
2136 if (reg_base > 0)
2138 if (reg_base == REG_X) /* (R26) */
2140 if (reg_dest == REG_X)
2141 /* "ld r26,-X" is undefined */
2142 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2143 AS2 (ld,r29,X) CR_TAB
2144 AS2 (ld,r28,-X) CR_TAB
2145 AS2 (ld,__tmp_reg__,-X) CR_TAB
2146 AS2 (sbiw,r26,1) CR_TAB
2147 AS2 (ld,r26,X) CR_TAB
2148 AS2 (mov,r27,__tmp_reg__));
2149 else if (reg_dest == REG_X - 2)
2150 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2151 AS2 (ld,%B0,X+) CR_TAB
2152 AS2 (ld,__tmp_reg__,X+) CR_TAB
2153 AS2 (ld,%D0,X) CR_TAB
2154 AS2 (mov,%C0,__tmp_reg__));
2155 else if (reg_unused_after (insn, base))
2156 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2157 AS2 (ld,%B0,X+) CR_TAB
2158 AS2 (ld,%C0,X+) CR_TAB
2159 AS2 (ld,%D0,X));
2160 else
2161 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2162 AS2 (ld,%B0,X+) CR_TAB
2163 AS2 (ld,%C0,X+) CR_TAB
2164 AS2 (ld,%D0,X) CR_TAB
2165 AS2 (sbiw,r26,3));
2167 else
2169 if (reg_dest == reg_base)
2170 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2171 AS2 (ldd,%C0,%1+2) CR_TAB
2172 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2173 AS2 (ld,%A0,%1) CR_TAB
2174 AS2 (mov,%B0,__tmp_reg__));
2175 else if (reg_base == reg_dest + 2)
2176 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2177 AS2 (ldd,%B0,%1+1) CR_TAB
2178 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2179 AS2 (ldd,%D0,%1+3) CR_TAB
2180 AS2 (mov,%C0,__tmp_reg__));
2181 else
2182 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2183 AS2 (ldd,%B0,%1+1) CR_TAB
2184 AS2 (ldd,%C0,%1+2) CR_TAB
2185 AS2 (ldd,%D0,%1+3));
2188 else if (GET_CODE (base) == PLUS) /* (R + i) */
2190 int disp = INTVAL (XEXP (base, 1));
2192 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2194 if (REGNO (XEXP (base, 0)) != REG_Y)
2195 fatal_insn ("incorrect insn:",insn);
2197 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2198 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2199 AS2 (ldd,%A0,Y+60) CR_TAB
2200 AS2 (ldd,%B0,Y+61) CR_TAB
2201 AS2 (ldd,%C0,Y+62) CR_TAB
2202 AS2 (ldd,%D0,Y+63) CR_TAB
2203 AS2 (sbiw,r28,%o1-60));
2205 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2206 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2207 AS2 (ld,%A0,Y) CR_TAB
2208 AS2 (ldd,%B0,Y+1) CR_TAB
2209 AS2 (ldd,%C0,Y+2) CR_TAB
2210 AS2 (ldd,%D0,Y+3) CR_TAB
2211 AS2 (subi,r28,lo8(%o1)) CR_TAB
2212 AS2 (sbci,r29,hi8(%o1)));
2215 reg_base = true_regnum (XEXP (base, 0));
2216 if (reg_base == REG_X)
2218 /* R = (X + d) */
2219 if (reg_dest == REG_X)
2221 *l = 7;
2222 /* "ld r26,-X" is undefined */
2223 return (AS2 (adiw,r26,%o1+3) CR_TAB
2224 AS2 (ld,r29,X) CR_TAB
2225 AS2 (ld,r28,-X) CR_TAB
2226 AS2 (ld,__tmp_reg__,-X) CR_TAB
2227 AS2 (sbiw,r26,1) CR_TAB
2228 AS2 (ld,r26,X) CR_TAB
2229 AS2 (mov,r27,__tmp_reg__));
2231 *l = 6;
2232 if (reg_dest == REG_X - 2)
2233 return (AS2 (adiw,r26,%o1) CR_TAB
2234 AS2 (ld,r24,X+) CR_TAB
2235 AS2 (ld,r25,X+) CR_TAB
2236 AS2 (ld,__tmp_reg__,X+) CR_TAB
2237 AS2 (ld,r27,X) CR_TAB
2238 AS2 (mov,r26,__tmp_reg__));
2240 return (AS2 (adiw,r26,%o1) CR_TAB
2241 AS2 (ld,%A0,X+) CR_TAB
2242 AS2 (ld,%B0,X+) CR_TAB
2243 AS2 (ld,%C0,X+) CR_TAB
2244 AS2 (ld,%D0,X) CR_TAB
2245 AS2 (sbiw,r26,%o1+3));
2247 if (reg_dest == reg_base)
2248 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2249 AS2 (ldd,%C0,%C1) CR_TAB
2250 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2251 AS2 (ldd,%A0,%A1) CR_TAB
2252 AS2 (mov,%B0,__tmp_reg__));
2253 else if (reg_dest == reg_base - 2)
2254 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2255 AS2 (ldd,%B0,%B1) CR_TAB
2256 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2257 AS2 (ldd,%D0,%D1) CR_TAB
2258 AS2 (mov,%C0,__tmp_reg__));
2259 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2260 AS2 (ldd,%B0,%B1) CR_TAB
2261 AS2 (ldd,%C0,%C1) CR_TAB
2262 AS2 (ldd,%D0,%D1));
2264 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2265 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2266 AS2 (ld,%C0,%1) CR_TAB
2267 AS2 (ld,%B0,%1) CR_TAB
2268 AS2 (ld,%A0,%1));
2269 else if (GET_CODE (base) == POST_INC) /* (R++) */
2270 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2271 AS2 (ld,%B0,%1) CR_TAB
2272 AS2 (ld,%C0,%1) CR_TAB
2273 AS2 (ld,%D0,%1));
2274 else if (CONSTANT_ADDRESS_P (base))
2275 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2276 AS2 (lds,%B0,%B1) CR_TAB
2277 AS2 (lds,%C0,%C1) CR_TAB
2278 AS2 (lds,%D0,%D1));
2280 fatal_insn ("unknown move insn:",insn);
2281 return "";
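/* Output a 4-byte (SImode) store of register operand op[1] to memory
   operand op[0]; the instruction count is returned through L.  */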
2284 const char *
2285 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2287 rtx dest = op[0];
2288 rtx src = op[1];
2289 rtx base = XEXP (dest, 0);
2290 int reg_base = true_regnum (base);
2291 int reg_src = true_regnum (src);
2292 int tmp;
2294 if (!l)
2295 l = &tmp;
2297 if (CONSTANT_ADDRESS_P (base))
2298 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2299 AS2 (sts,%B0,%B1) CR_TAB
2300 AS2 (sts,%C0,%C1) CR_TAB
2301 AS2 (sts,%D0,%D1));
2302 if (reg_base > 0) /* (r) */
2304 if (reg_base == REG_X) /* (R26) */
2306 if (reg_src == REG_X)
2308 /* "st X+,r26" is undefined */
2309 if (reg_unused_after (insn, base))
2310 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2311 AS2 (st,X,r26) CR_TAB
2312 AS2 (adiw,r26,1) CR_TAB
2313 AS2 (st,X+,__tmp_reg__) CR_TAB
2314 AS2 (st,X+,r28) CR_TAB
2315 AS2 (st,X,r29));
2316 else
2317 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2318 AS2 (st,X,r26) CR_TAB
2319 AS2 (adiw,r26,1) CR_TAB
2320 AS2 (st,X+,__tmp_reg__) CR_TAB
2321 AS2 (st,X+,r28) CR_TAB
2322 AS2 (st,X,r29) CR_TAB
2323 AS2 (sbiw,r26,3));
2325 else if (reg_base == reg_src + 2)
2327 if (reg_unused_after (insn, base))
2328 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2329 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2330 AS2 (st,%0+,%A1) CR_TAB
2331 AS2 (st,%0+,%B1) CR_TAB
2332 AS2 (st,%0+,__zero_reg__) CR_TAB
2333 AS2 (st,%0,__tmp_reg__) CR_TAB
2334 AS1 (clr,__zero_reg__));
2335 else
2336 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2337 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2338 AS2 (st,%0+,%A1) CR_TAB
2339 AS2 (st,%0+,%B1) CR_TAB
2340 AS2 (st,%0+,__zero_reg__) CR_TAB
2341 AS2 (st,%0,__tmp_reg__) CR_TAB
2342 AS1 (clr,__zero_reg__) CR_TAB
2343 AS2 (sbiw,r26,3));
2345 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2346 AS2 (st,%0+,%B1) CR_TAB
2347 AS2 (st,%0+,%C1) CR_TAB
2348 AS2 (st,%0,%D1) CR_TAB
2349 AS2 (sbiw,r26,3));
2351 else
2352 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2353 AS2 (std,%0+1,%B1) CR_TAB
2354 AS2 (std,%0+2,%C1) CR_TAB
2355 AS2 (std,%0+3,%D1));
2357 else if (GET_CODE (base) == PLUS) /* (R + i) */
2359 int disp = INTVAL (XEXP (base, 1));
2360 reg_base = REGNO (XEXP (base, 0));
2361 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2363 if (reg_base != REG_Y)
2364 fatal_insn ("incorrect insn:",insn);
2366 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2367 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2368 AS2 (std,Y+60,%A1) CR_TAB
2369 AS2 (std,Y+61,%B1) CR_TAB
2370 AS2 (std,Y+62,%C1) CR_TAB
2371 AS2 (std,Y+63,%D1) CR_TAB
2372 AS2 (sbiw,r28,%o0-60));
2374 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2375 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2376 AS2 (st,Y,%A1) CR_TAB
2377 AS2 (std,Y+1,%B1) CR_TAB
2378 AS2 (std,Y+2,%C1) CR_TAB
2379 AS2 (std,Y+3,%D1) CR_TAB
2380 AS2 (subi,r28,lo8(%o0)) CR_TAB
2381 AS2 (sbci,r29,hi8(%o0)));
2383 if (reg_base == REG_X)
2385 /* (X + d) = R */
2386 if (reg_src == REG_X)
2388 *l = 9;
2389 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2390 AS2 (mov,__zero_reg__,r27) CR_TAB
2391 AS2 (adiw,r26,%o0) CR_TAB
2392 AS2 (st,X+,__tmp_reg__) CR_TAB
2393 AS2 (st,X+,__zero_reg__) CR_TAB
2394 AS2 (st,X+,r28) CR_TAB
2395 AS2 (st,X,r29) CR_TAB
2396 AS1 (clr,__zero_reg__) CR_TAB
2397 AS2 (sbiw,r26,%o0+3));
2399 else if (reg_src == REG_X - 2)
2401 *l = 9;
2402 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2403 AS2 (mov,__zero_reg__,r27) CR_TAB
2404 AS2 (adiw,r26,%o0) CR_TAB
2405 AS2 (st,X+,r24) CR_TAB
2406 AS2 (st,X+,r25) CR_TAB
2407 AS2 (st,X+,__tmp_reg__) CR_TAB
2408 AS2 (st,X,__zero_reg__) CR_TAB
2409 AS1 (clr,__zero_reg__) CR_TAB
2410 AS2 (sbiw,r26,%o0+3));
2412 *l = 6;
2413 return (AS2 (adiw,r26,%o0) CR_TAB
2414 AS2 (st,X+,%A1) CR_TAB
2415 AS2 (st,X+,%B1) CR_TAB
2416 AS2 (st,X+,%C1) CR_TAB
2417 AS2 (st,X,%D1) CR_TAB
2418 AS2 (sbiw,r26,%o0+3));
2420 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2421 AS2 (std,%B0,%B1) CR_TAB
2422 AS2 (std,%C0,%C1) CR_TAB
2423 AS2 (std,%D0,%D1));
2425 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2426 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2427 AS2 (st,%0,%C1) CR_TAB
2428 AS2 (st,%0,%B1) CR_TAB
2429 AS2 (st,%0,%A1));
2430 else if (GET_CODE (base) == POST_INC) /* (R++) */
2431 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2432 AS2 (st,%0,%B1) CR_TAB
2433 AS2 (st,%0,%C1) CR_TAB
2434 AS2 (st,%0,%D1));
2435 fatal_insn ("unknown move insn:",insn);
2436 return "";
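/* Output an SImode/SFmode move for OPERANDS[0] = OPERANDS[1]:
   register-register copies, constant loads, and memory accesses via
   out_movsi_r_mr/out_movsi_mr_r above.  */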
2439 const char *
2440 output_movsisf(rtx insn, rtx operands[], int *l)
2442 int dummy;
2443 rtx dest = operands[0];
2444 rtx src = operands[1];
2445 int *real_l = l;
2447 if (!l)
2448 l = &dummy;
2450 if (register_operand (dest, VOIDmode))
2452 if (register_operand (src, VOIDmode)) /* mov r,r */
2454 if (true_regnum (dest) > true_regnum (src))
2456 if (AVR_HAVE_MOVW)
2458 *l = 2;
2459 return (AS2 (movw,%C0,%C1) CR_TAB
2460 AS2 (movw,%A0,%A1));
2462 *l = 4;
2463 return (AS2 (mov,%D0,%D1) CR_TAB
2464 AS2 (mov,%C0,%C1) CR_TAB
2465 AS2 (mov,%B0,%B1) CR_TAB
2466 AS2 (mov,%A0,%A1));
2468 else
2470 if (AVR_HAVE_MOVW)
2472 *l = 2;
2473 return (AS2 (movw,%A0,%A1) CR_TAB
2474 AS2 (movw,%C0,%C1));
2476 *l = 4;
2477 return (AS2 (mov,%A0,%A1) CR_TAB
2478 AS2 (mov,%B0,%B1) CR_TAB
2479 AS2 (mov,%C0,%C1) CR_TAB
2480 AS2 (mov,%D0,%D1));
2483 else if (CONSTANT_P (src))
2485 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2487 *l = 4;
2488 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2489 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2490 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2491 AS2 (ldi,%D0,hhi8(%1)));
2494 if (GET_CODE (src) == CONST_INT)
2496 const char *const clr_op0 =
2497 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2498 AS1 (clr,%B0) CR_TAB
2499 AS2 (movw,%C0,%A0))
2500 : (AS1 (clr,%A0) CR_TAB
2501 AS1 (clr,%B0) CR_TAB
2502 AS1 (clr,%C0) CR_TAB
2503 AS1 (clr,%D0));
2505 if (src == const0_rtx) /* mov r,L */
2507 *l = AVR_HAVE_MOVW ? 3 : 4;
2508 return clr_op0;
2510 else if (src == const1_rtx)
2512 if (!real_l)
2513 output_asm_insn (clr_op0, operands);
2514 *l = AVR_HAVE_MOVW ? 4 : 5;
2515 return AS1 (inc,%A0);
2517 else if (src == constm1_rtx)
2519 /* Load the immediate constant -1 into any register. */
2520 if (AVR_HAVE_MOVW)
2522 *l = 4;
2523 return (AS1 (clr,%A0) CR_TAB
2524 AS1 (dec,%A0) CR_TAB
2525 AS2 (mov,%B0,%A0) CR_TAB
2526 AS2 (movw,%C0,%A0));
2528 *l = 5;
2529 return (AS1 (clr,%A0) CR_TAB
2530 AS1 (dec,%A0) CR_TAB
2531 AS2 (mov,%B0,%A0) CR_TAB
2532 AS2 (mov,%C0,%A0) CR_TAB
2533 AS2 (mov,%D0,%A0));
2535 else
2537 int bit_nr = exact_log2 (INTVAL (src));
2539 if (bit_nr >= 0)
2541 *l = AVR_HAVE_MOVW ? 5 : 6;
2542 if (!real_l)
2544 output_asm_insn (clr_op0, operands);
2545 output_asm_insn ("set", operands);
2547 if (!real_l)
2548 avr_output_bld (operands, bit_nr);
2550 return "";
2555 /* Last resort, better than loading from memory. */
2556 *l = 10;
2557 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2558 AS2 (ldi,r31,lo8(%1)) CR_TAB
2559 AS2 (mov,%A0,r31) CR_TAB
2560 AS2 (ldi,r31,hi8(%1)) CR_TAB
2561 AS2 (mov,%B0,r31) CR_TAB
2562 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2563 AS2 (mov,%C0,r31) CR_TAB
2564 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2565 AS2 (mov,%D0,r31) CR_TAB
2566 AS2 (mov,r31,__tmp_reg__));
2568 else if (GET_CODE (src) == MEM)
2569 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2571 else if (GET_CODE (dest) == MEM)
2573 const char *template;
2575 if (src == const0_rtx)
2576 operands[1] = zero_reg_rtx;
2578 template = out_movsi_mr_r (insn, operands, real_l);
2580 if (!real_l)
2581 output_asm_insn (template, operands);
2583 operands[1] = src;
2584 return "";
2586 fatal_insn ("invalid insn:", insn);
2587 return "";
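/* Output a 1-byte (QImode) store of op[1] to memory operand op[0].
   SREG and other I/O addresses use OUT instead of STS where possible.  */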
2590 const char *
2591 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2593 rtx dest = op[0];
2594 rtx src = op[1];
2595 rtx x = XEXP (dest, 0);
2596 int dummy;
2598 if (!l)
2599 l = &dummy;
2601 if (CONSTANT_ADDRESS_P (x))
2603 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2605 *l = 1;
2606 return AS2 (out,__SREG__,%1);
2608 if (optimize > 0 && io_address_operand (x, QImode))
2610 *l = 1;
2611 return AS2 (out,%0-0x20,%1);
2613 *l = 2;
2614 return AS2 (sts,%0,%1);
2616 /* memory access by reg+disp */
2617 else if (GET_CODE (x) == PLUS
2618 && REG_P (XEXP (x,0))
2619 && GET_CODE (XEXP (x,1)) == CONST_INT)
2621 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2623 int disp = INTVAL (XEXP (x,1));
2624 if (REGNO (XEXP (x,0)) != REG_Y)
2625 fatal_insn ("incorrect insn:",insn);
2627 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2628 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2629 AS2 (std,Y+63,%1) CR_TAB
2630 AS2 (sbiw,r28,%o0-63));
2632 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2633 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2634 AS2 (st,Y,%1) CR_TAB
2635 AS2 (subi,r28,lo8(%o0)) CR_TAB
2636 AS2 (sbci,r29,hi8(%o0)));
2638 else if (REGNO (XEXP (x,0)) == REG_X)
2640 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2642 if (reg_unused_after (insn, XEXP (x,0)))
2643 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2644 AS2 (adiw,r26,%o0) CR_TAB
2645 AS2 (st,X,__tmp_reg__));
2647 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2648 AS2 (adiw,r26,%o0) CR_TAB
2649 AS2 (st,X,__tmp_reg__) CR_TAB
2650 AS2 (sbiw,r26,%o0));
2652 else
2654 if (reg_unused_after (insn, XEXP (x,0)))
2655 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2656 AS2 (st,X,%1));
2658 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2659 AS2 (st,X,%1) CR_TAB
2660 AS2 (sbiw,r26,%o0));
2663 *l = 1;
2664 return AS2 (std,%0,%1);
2666 *l = 1;
2667 return AS2 (st,%0,%1);
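/* Output a 2-byte (HImode) store of op[1] to memory operand op[0].
   Volatile destinations are written high byte first (see below).  */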
2670 const char *
2671 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2673 rtx dest = op[0];
2674 rtx src = op[1];
2675 rtx base = XEXP (dest, 0);
2676 int reg_base = true_regnum (base);
2677 int reg_src = true_regnum (src);
2678 /* "volatile" forces writing high byte first, even if less efficient,
2679 for correct operation with 16-bit I/O registers. */
2680 int mem_volatile_p = MEM_VOLATILE_P (dest);
2681 int tmp;
2683 if (!l)
2684 l = &tmp;
2685 if (CONSTANT_ADDRESS_P (base))
2687 if (optimize > 0 && io_address_operand (base, HImode))
2689 *l = 2;
2690 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2691 AS2 (out,%A0-0x20,%A1));
2693 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2694 AS2 (sts,%A0,%A1));
2696 if (reg_base > 0)
2698 if (reg_base == REG_X)
2700 if (reg_src == REG_X)
2702 /* "st X+,r26" and "st -X,r26" are undefined. */
2703 if (!mem_volatile_p && reg_unused_after (insn, src))
2704 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2705 AS2 (st,X,r26) CR_TAB
2706 AS2 (adiw,r26,1) CR_TAB
2707 AS2 (st,X,__tmp_reg__));
2708 else
2709 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2710 AS2 (adiw,r26,1) CR_TAB
2711 AS2 (st,X,__tmp_reg__) CR_TAB
2712 AS2 (sbiw,r26,1) CR_TAB
2713 AS2 (st,X,r26));
2715 else
2717 if (!mem_volatile_p && reg_unused_after (insn, base))
2718 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2719 AS2 (st,X,%B1));
2720 else
2721 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2722 AS2 (st,X,%B1) CR_TAB
2723 AS2 (st,-X,%A1));
2726 else
2727 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2728 AS2 (st,%0,%A1));
2730 else if (GET_CODE (base) == PLUS)
2732 int disp = INTVAL (XEXP (base, 1));
2733 reg_base = REGNO (XEXP (base, 0));
2734 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2736 if (reg_base != REG_Y)
2737 fatal_insn ("incorrect insn:",insn);
2739 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2740 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2741 AS2 (std,Y+63,%B1) CR_TAB
2742 AS2 (std,Y+62,%A1) CR_TAB
2743 AS2 (sbiw,r28,%o0-62));
2745 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2746 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2747 AS2 (std,Y+1,%B1) CR_TAB
2748 AS2 (st,Y,%A1) CR_TAB
2749 AS2 (subi,r28,lo8(%o0)) CR_TAB
2750 AS2 (sbci,r29,hi8(%o0)));
2752 if (reg_base == REG_X)
2754 /* (X + d) = R */
2755 if (reg_src == REG_X)
2757 *l = 7;
2758 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2759 AS2 (mov,__zero_reg__,r27) CR_TAB
2760 AS2 (adiw,r26,%o0+1) CR_TAB
2761 AS2 (st,X,__zero_reg__) CR_TAB
2762 AS2 (st,-X,__tmp_reg__) CR_TAB
2763 AS1 (clr,__zero_reg__) CR_TAB
2764 AS2 (sbiw,r26,%o0));
2766 *l = 4;
2767 return (AS2 (adiw,r26,%o0+1) CR_TAB
2768 AS2 (st,X,%B1) CR_TAB
2769 AS2 (st,-X,%A1) CR_TAB
2770 AS2 (sbiw,r26,%o0));
2772 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2773 AS2 (std,%A0,%A1));
2775 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2776 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2777 AS2 (st,%0,%A1));
2778 else if (GET_CODE (base) == POST_INC) /* (R++) */
2780 if (mem_volatile_p)
2782 if (REGNO (XEXP (base, 0)) == REG_X)
2784 *l = 4;
2785 return (AS2 (adiw,r26,1) CR_TAB
2786 AS2 (st,X,%B1) CR_TAB
2787 AS2 (st,-X,%A1) CR_TAB
2788 AS2 (adiw,r26,2));
2790 else
2792 *l = 3;
2793 return (AS2 (std,%p0+1,%B1) CR_TAB
2794 AS2 (st,%p0,%A1) CR_TAB
2795 AS2 (adiw,%r0,2));
2799 *l = 2;
2800 return (AS2 (st,%0,%A1) CR_TAB
2801 AS2 (st,%0,%B1));
2803 fatal_insn ("unknown move insn:",insn);
2804 return "";
2807 /* Return 1 if the frame pointer is required for the current function. */
2810 frame_pointer_required_p (void)
2812 return (cfun->calls_alloca
2813 || crtl->args.info.nregs == 0
2814 || get_frame_size () > 0);
2817 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2819 static RTX_CODE
2820 compare_condition (rtx insn)
2822 rtx next = next_real_insn (insn);
2823 RTX_CODE cond = UNKNOWN;
2824 if (next && GET_CODE (next) == JUMP_INSN)
2826 rtx pat = PATTERN (next);
2827 rtx src = SET_SRC (pat);
2828 rtx t = XEXP (src, 0);
2829 cond = GET_CODE (t);
2831 return cond;
2834 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2836 static int
2837 compare_sign_p (rtx insn)
2839 RTX_CODE cond = compare_condition (insn);
2840 return (cond == GE || cond == LT);
2843 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2844 that needs to be swapped (GT, GTU, LE, LEU). */
2847 compare_diff_p (rtx insn)
2849 RTX_CODE cond = compare_condition (insn);
2850 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2853 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2856 compare_eq_p (rtx insn)
2858 RTX_CODE cond = compare_condition (insn);
2859 return (cond == EQ || cond == NE);
2863 /* Output test instruction for HImode. */
2865 const char *
2866 out_tsthi (rtx insn, int *l)
2868 if (compare_sign_p (insn))
2870 if (l) *l = 1;
2871 return AS1 (tst,%B0);
2873 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2874 && compare_eq_p (insn))
2876 /* Faster than sbiw if we can clobber the operand. */
2877 if (l) *l = 1;
2878 return AS2 (or,%A0,%B0);
2880 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2882 if (l) *l = 1;
2883 return AS2 (sbiw,%0,0);
2885 if (l) *l = 2;
2886 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2887 AS2 (cpc,%B0,__zero_reg__));
2891 /* Output test instruction for SImode. */
2893 const char *
2894 out_tstsi (rtx insn, int *l)
2896 if (compare_sign_p (insn))
2898 if (l) *l = 1;
2899 return AS1 (tst,%D0);
2901 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2903 if (l) *l = 3;
2904 return (AS2 (sbiw,%A0,0) CR_TAB
2905 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2906 AS2 (cpc,%D0,__zero_reg__));
2908 if (l) *l = 4;
2909 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2910 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2911 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2912 AS2 (cpc,%D0,__zero_reg__));
2916 /* Generate asm equivalent for various shifts.
2917 Shift count is a CONST_INT, MEM or REG.
2918 This only handles cases that are not already
2919 carefully hand-optimized in ?sh??i3_out. */
2921 void
2922 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2923 int *len, int t_len)
2925 rtx op[10];
2926 char str[500];
2927 int second_label = 1;
2928 int saved_in_tmp = 0;
2929 int use_zero_reg = 0;
2931 op[0] = operands[0];
2932 op[1] = operands[1];
2933 op[2] = operands[2];
2934 op[3] = operands[3];
2935 str[0] = 0;
2937 if (len)
2938 *len = 1;
2940 if (GET_CODE (operands[2]) == CONST_INT)
2942 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2943 int count = INTVAL (operands[2]);
2944 int max_len = 10; /* If larger than this, always use a loop. */
2946 if (count <= 0)
2948 if (len)
2949 *len = 0;
2950 return;
2953 if (count < 8 && !scratch)
2954 use_zero_reg = 1;
2956 if (optimize_size)
2957 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2959 if (t_len * count <= max_len)
2961 /* Output shifts inline with no loop - faster. */
2962 if (len)
2963 *len = t_len * count;
2964 else
2966 while (count-- > 0)
2967 output_asm_insn (template, op);
2970 return;
2973 if (scratch)
2975 if (!len)
2976 strcat (str, AS2 (ldi,%3,%2));
2978 else if (use_zero_reg)
2980 /* Hack to save one word: use __zero_reg__ as loop counter.
2981 Set one bit, then shift in a loop until it is 0 again. */
2983 op[3] = zero_reg_rtx;
2984 if (len)
2985 *len = 2;
2986 else
2987 strcat (str, ("set" CR_TAB
2988 AS2 (bld,%3,%2-1)));
2990 else
2992 /* No scratch register available, use one from LD_REGS (saved in
2993 __tmp_reg__) that doesn't overlap with registers to shift. */
2995 op[3] = gen_rtx_REG (QImode,
2996 ((true_regnum (operands[0]) - 1) & 15) + 16);
2997 op[4] = tmp_reg_rtx;
2998 saved_in_tmp = 1;
3000 if (len)
3001 *len = 3; /* Includes "mov %3,%4" after the loop. */
3002 else
3003 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3004 AS2 (ldi,%3,%2)));
3007 second_label = 0;
3009 else if (GET_CODE (operands[2]) == MEM)
3011 rtx op_mov[10];
3013 op[3] = op_mov[0] = tmp_reg_rtx;
3014 op_mov[1] = op[2];
3016 if (len)
3017 out_movqi_r_mr (insn, op_mov, len);
3018 else
3019 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3021 else if (register_operand (operands[2], QImode))
3023 if (reg_unused_after (insn, operands[2]))
3024 op[3] = op[2];
3025 else
3027 op[3] = tmp_reg_rtx;
3028 if (!len)
3029 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3032 else
3033 fatal_insn ("bad shift insn:", insn);
3035 if (second_label)
3037 if (len)
3038 ++*len;
3039 else
3040 strcat (str, AS1 (rjmp,2f));
3043 if (len)
3044 *len += t_len + 2; /* template + dec + brXX */
3045 else
3047 strcat (str, "\n1:\t");
3048 strcat (str, template);
3049 strcat (str, second_label ? "\n2:\t" : "\n\t");
3050 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3051 strcat (str, CR_TAB);
3052 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3053 if (saved_in_tmp)
3054 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3055 output_asm_insn (str, op);
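/* As an illustration (not emitted verbatim), a variable 8-bit shift with
   template "lsr %0" and the count still live in a register expands to
   roughly:
       mov __tmp_reg__,%2
       rjmp 2f
     1: lsr %0
     2: dec __tmp_reg__
       brpl 1b
   so a zero count falls straight through the loop without shifting.  */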
3060 /* 8bit shift left ((char)x << i) */
3062 const char *
3063 ashlqi3_out (rtx insn, rtx operands[], int *len)
3065 if (GET_CODE (operands[2]) == CONST_INT)
3067 int k;
3069 if (!len)
3070 len = &k;
3072 switch (INTVAL (operands[2]))
3074 default:
3075 if (INTVAL (operands[2]) < 8)
3076 break;
3078 *len = 1;
3079 return AS1 (clr,%0);
3081 case 1:
3082 *len = 1;
3083 return AS1 (lsl,%0);
3085 case 2:
3086 *len = 2;
3087 return (AS1 (lsl,%0) CR_TAB
3088 AS1 (lsl,%0));
3090 case 3:
3091 *len = 3;
3092 return (AS1 (lsl,%0) CR_TAB
3093 AS1 (lsl,%0) CR_TAB
3094 AS1 (lsl,%0));
3096 case 4:
3097 if (test_hard_reg_class (LD_REGS, operands[0]))
3099 *len = 2;
3100 return (AS1 (swap,%0) CR_TAB
3101 AS2 (andi,%0,0xf0));
3103 *len = 4;
3104 return (AS1 (lsl,%0) CR_TAB
3105 AS1 (lsl,%0) CR_TAB
3106 AS1 (lsl,%0) CR_TAB
3107 AS1 (lsl,%0));
3109 case 5:
3110 if (test_hard_reg_class (LD_REGS, operands[0]))
3112 *len = 3;
3113 return (AS1 (swap,%0) CR_TAB
3114 AS1 (lsl,%0) CR_TAB
3115 AS2 (andi,%0,0xe0));
3117 *len = 5;
3118 return (AS1 (lsl,%0) CR_TAB
3119 AS1 (lsl,%0) CR_TAB
3120 AS1 (lsl,%0) CR_TAB
3121 AS1 (lsl,%0) CR_TAB
3122 AS1 (lsl,%0));
3124 case 6:
3125 if (test_hard_reg_class (LD_REGS, operands[0]))
3127 *len = 4;
3128 return (AS1 (swap,%0) CR_TAB
3129 AS1 (lsl,%0) CR_TAB
3130 AS1 (lsl,%0) CR_TAB
3131 AS2 (andi,%0,0xc0));
3133 *len = 6;
3134 return (AS1 (lsl,%0) CR_TAB
3135 AS1 (lsl,%0) CR_TAB
3136 AS1 (lsl,%0) CR_TAB
3137 AS1 (lsl,%0) CR_TAB
3138 AS1 (lsl,%0) CR_TAB
3139 AS1 (lsl,%0));
3141 case 7:
3142 *len = 3;
3143 return (AS1 (ror,%0) CR_TAB
3144 AS1 (clr,%0) CR_TAB
3145 AS1 (ror,%0));
3148 else if (CONSTANT_P (operands[2]))
3149 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3151 out_shift_with_cnt (AS1 (lsl,%0),
3152 insn, operands, len, 1);
3153 return "";
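/* The shift-by-4 cases above rely on SWAP (nibble exchange) followed by
   ANDI to clear the bits shifted out, which is shorter than four
   single-bit shifts when the operand is in an upper (LD_REGS) register.  */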
3157 /* 16bit shift left ((short)x << i) */
3159 const char *
3160 ashlhi3_out (rtx insn, rtx operands[], int *len)
3162 if (GET_CODE (operands[2]) == CONST_INT)
3164 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3165 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3166 int k;
3167 int *t = len;
3169 if (!len)
3170 len = &k;
3172 switch (INTVAL (operands[2]))
3174 default:
3175 if (INTVAL (operands[2]) < 16)
3176 break;
3178 *len = 2;
3179 return (AS1 (clr,%B0) CR_TAB
3180 AS1 (clr,%A0));
3182 case 4:
3183 if (optimize_size && scratch)
3184 break; /* 5 */
3185 if (ldi_ok)
3187 *len = 6;
3188 return (AS1 (swap,%A0) CR_TAB
3189 AS1 (swap,%B0) CR_TAB
3190 AS2 (andi,%B0,0xf0) CR_TAB
3191 AS2 (eor,%B0,%A0) CR_TAB
3192 AS2 (andi,%A0,0xf0) CR_TAB
3193 AS2 (eor,%B0,%A0));
3195 if (scratch)
3197 *len = 7;
3198 return (AS1 (swap,%A0) CR_TAB
3199 AS1 (swap,%B0) CR_TAB
3200 AS2 (ldi,%3,0xf0) CR_TAB
3201 AS2 (and,%B0,%3) CR_TAB
3202 AS2 (eor,%B0,%A0) CR_TAB
3203 AS2 (and,%A0,%3) CR_TAB
3204 AS2 (eor,%B0,%A0));
3206 break; /* optimize_size ? 6 : 8 */
3208 case 5:
3209 if (optimize_size)
3210 break; /* scratch ? 5 : 6 */
3211 if (ldi_ok)
3213 *len = 8;
3214 return (AS1 (lsl,%A0) CR_TAB
3215 AS1 (rol,%B0) CR_TAB
3216 AS1 (swap,%A0) CR_TAB
3217 AS1 (swap,%B0) CR_TAB
3218 AS2 (andi,%B0,0xf0) CR_TAB
3219 AS2 (eor,%B0,%A0) CR_TAB
3220 AS2 (andi,%A0,0xf0) CR_TAB
3221 AS2 (eor,%B0,%A0));
3223 if (scratch)
3225 *len = 9;
3226 return (AS1 (lsl,%A0) CR_TAB
3227 AS1 (rol,%B0) CR_TAB
3228 AS1 (swap,%A0) CR_TAB
3229 AS1 (swap,%B0) CR_TAB
3230 AS2 (ldi,%3,0xf0) CR_TAB
3231 AS2 (and,%B0,%3) CR_TAB
3232 AS2 (eor,%B0,%A0) CR_TAB
3233 AS2 (and,%A0,%3) CR_TAB
3234 AS2 (eor,%B0,%A0));
3236 break; /* 10 */
3238 case 6:
3239 if (optimize_size)
3240 break; /* scratch ? 5 : 6 */
3241 *len = 9;
3242 return (AS1 (clr,__tmp_reg__) CR_TAB
3243 AS1 (lsr,%B0) CR_TAB
3244 AS1 (ror,%A0) CR_TAB
3245 AS1 (ror,__tmp_reg__) CR_TAB
3246 AS1 (lsr,%B0) CR_TAB
3247 AS1 (ror,%A0) CR_TAB
3248 AS1 (ror,__tmp_reg__) CR_TAB
3249 AS2 (mov,%B0,%A0) CR_TAB
3250 AS2 (mov,%A0,__tmp_reg__));
3252 case 7:
3253 *len = 5;
3254 return (AS1 (lsr,%B0) CR_TAB
3255 AS2 (mov,%B0,%A0) CR_TAB
3256 AS1 (clr,%A0) CR_TAB
3257 AS1 (ror,%B0) CR_TAB
3258 AS1 (ror,%A0));
3260 case 8:
3261 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3262 AS1 (clr,%A0));
3264 case 9:
3265 *len = 3;
3266 return (AS2 (mov,%B0,%A0) CR_TAB
3267 AS1 (clr,%A0) CR_TAB
3268 AS1 (lsl,%B0));
3270 case 10:
3271 *len = 4;
3272 return (AS2 (mov,%B0,%A0) CR_TAB
3273 AS1 (clr,%A0) CR_TAB
3274 AS1 (lsl,%B0) CR_TAB
3275 AS1 (lsl,%B0));
3277 case 11:
3278 *len = 5;
3279 return (AS2 (mov,%B0,%A0) CR_TAB
3280 AS1 (clr,%A0) CR_TAB
3281 AS1 (lsl,%B0) CR_TAB
3282 AS1 (lsl,%B0) CR_TAB
3283 AS1 (lsl,%B0));
3285 case 12:
3286 if (ldi_ok)
3288 *len = 4;
3289 return (AS2 (mov,%B0,%A0) CR_TAB
3290 AS1 (clr,%A0) CR_TAB
3291 AS1 (swap,%B0) CR_TAB
3292 AS2 (andi,%B0,0xf0));
3294 if (scratch)
3296 *len = 5;
3297 return (AS2 (mov,%B0,%A0) CR_TAB
3298 AS1 (clr,%A0) CR_TAB
3299 AS1 (swap,%B0) CR_TAB
3300 AS2 (ldi,%3,0xf0) CR_TAB
3301 AS2 (and,%B0,%3));
3303 *len = 6;
3304 return (AS2 (mov,%B0,%A0) CR_TAB
3305 AS1 (clr,%A0) CR_TAB
3306 AS1 (lsl,%B0) CR_TAB
3307 AS1 (lsl,%B0) CR_TAB
3308 AS1 (lsl,%B0) CR_TAB
3309 AS1 (lsl,%B0));
3311 case 13:
3312 if (ldi_ok)
3314 *len = 5;
3315 return (AS2 (mov,%B0,%A0) CR_TAB
3316 AS1 (clr,%A0) CR_TAB
3317 AS1 (swap,%B0) CR_TAB
3318 AS1 (lsl,%B0) CR_TAB
3319 AS2 (andi,%B0,0xe0));
3321 if (AVR_HAVE_MUL && scratch)
3323 *len = 5;
3324 return (AS2 (ldi,%3,0x20) CR_TAB
3325 AS2 (mul,%A0,%3) CR_TAB
3326 AS2 (mov,%B0,r0) CR_TAB
3327 AS1 (clr,%A0) CR_TAB
3328 AS1 (clr,__zero_reg__));
3330 if (optimize_size && scratch)
3331 break; /* 5 */
3332 if (scratch)
3334 *len = 6;
3335 return (AS2 (mov,%B0,%A0) CR_TAB
3336 AS1 (clr,%A0) CR_TAB
3337 AS1 (swap,%B0) CR_TAB
3338 AS1 (lsl,%B0) CR_TAB
3339 AS2 (ldi,%3,0xe0) CR_TAB
3340 AS2 (and,%B0,%3));
3342 if (AVR_HAVE_MUL)
3344 *len = 6;
3345 return ("set" CR_TAB
3346 AS2 (bld,r1,5) CR_TAB
3347 AS2 (mul,%A0,r1) CR_TAB
3348 AS2 (mov,%B0,r0) CR_TAB
3349 AS1 (clr,%A0) CR_TAB
3350 AS1 (clr,__zero_reg__));
3352 *len = 7;
3353 return (AS2 (mov,%B0,%A0) CR_TAB
3354 AS1 (clr,%A0) CR_TAB
3355 AS1 (lsl,%B0) CR_TAB
3356 AS1 (lsl,%B0) CR_TAB
3357 AS1 (lsl,%B0) CR_TAB
3358 AS1 (lsl,%B0) CR_TAB
3359 AS1 (lsl,%B0));
3361 case 14:
3362 if (AVR_HAVE_MUL && ldi_ok)
3364 *len = 5;
3365 return (AS2 (ldi,%B0,0x40) CR_TAB
3366 AS2 (mul,%A0,%B0) CR_TAB
3367 AS2 (mov,%B0,r0) CR_TAB
3368 AS1 (clr,%A0) CR_TAB
3369 AS1 (clr,__zero_reg__));
3371 if (AVR_HAVE_MUL && scratch)
3373 *len = 5;
3374 return (AS2 (ldi,%3,0x40) CR_TAB
3375 AS2 (mul,%A0,%3) CR_TAB
3376 AS2 (mov,%B0,r0) CR_TAB
3377 AS1 (clr,%A0) CR_TAB
3378 AS1 (clr,__zero_reg__));
3380 if (optimize_size && ldi_ok)
3382 *len = 5;
3383 return (AS2 (mov,%B0,%A0) CR_TAB
3384 AS2 (ldi,%A0,6) "\n1:\t"
3385 AS1 (lsl,%B0) CR_TAB
3386 AS1 (dec,%A0) CR_TAB
3387 AS1 (brne,1b));
3389 if (optimize_size && scratch)
3390 break; /* 5 */
3391 *len = 6;
3392 return (AS1 (clr,%B0) CR_TAB
3393 AS1 (lsr,%A0) CR_TAB
3394 AS1 (ror,%B0) CR_TAB
3395 AS1 (lsr,%A0) CR_TAB
3396 AS1 (ror,%B0) CR_TAB
3397 AS1 (clr,%A0));
3399 case 15:
3400 *len = 4;
3401 return (AS1 (clr,%B0) CR_TAB
3402 AS1 (lsr,%A0) CR_TAB
3403 AS1 (ror,%B0) CR_TAB
3404 AS1 (clr,%A0));
3406 len = t;
3408 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3409 AS1 (rol,%B0)),
3410 insn, operands, len, 2);
3411 return "";
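/* Several of the cases above use the hardware multiplier when available
   (AVR_HAVE_MUL): multiplying by a power of two leaves the shifted value
   in r1:r0, after which __zero_reg__ (r1) must be cleared again.  */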
3415 /* 32bit shift left ((long)x << i) */
3417 const char *
3418 ashlsi3_out (rtx insn, rtx operands[], int *len)
3420 if (GET_CODE (operands[2]) == CONST_INT)
3422 int k;
3423 int *t = len;
3425 if (!len)
3426 len = &k;
3428 switch (INTVAL (operands[2]))
3430 default:
3431 if (INTVAL (operands[2]) < 32)
3432 break;
3434 if (AVR_HAVE_MOVW)
3435 return *len = 3, (AS1 (clr,%D0) CR_TAB
3436 AS1 (clr,%C0) CR_TAB
3437 AS2 (movw,%A0,%C0));
3438 *len = 4;
3439 return (AS1 (clr,%D0) CR_TAB
3440 AS1 (clr,%C0) CR_TAB
3441 AS1 (clr,%B0) CR_TAB
3442 AS1 (clr,%A0));
3444 case 8:
3446 int reg0 = true_regnum (operands[0]);
3447 int reg1 = true_regnum (operands[1]);
3448 *len = 4;
3449 if (reg0 >= reg1)
3450 return (AS2 (mov,%D0,%C1) CR_TAB
3451 AS2 (mov,%C0,%B1) CR_TAB
3452 AS2 (mov,%B0,%A1) CR_TAB
3453 AS1 (clr,%A0));
3454 else
3455 return (AS1 (clr,%A0) CR_TAB
3456 AS2 (mov,%B0,%A1) CR_TAB
3457 AS2 (mov,%C0,%B1) CR_TAB
3458 AS2 (mov,%D0,%C1));
3461 case 16:
3463 int reg0 = true_regnum (operands[0]);
3464 int reg1 = true_regnum (operands[1]);
3465 if (reg0 + 2 == reg1)
3466 return *len = 2, (AS1 (clr,%B0) CR_TAB
3467 AS1 (clr,%A0));
3468 if (AVR_HAVE_MOVW)
3469 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3470 AS1 (clr,%B0) CR_TAB
3471 AS1 (clr,%A0));
3472 else
3473 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3474 AS2 (mov,%D0,%B1) CR_TAB
3475 AS1 (clr,%B0) CR_TAB
3476 AS1 (clr,%A0));
3479 case 24:
3480 *len = 4;
3481 return (AS2 (mov,%D0,%A1) CR_TAB
3482 AS1 (clr,%C0) CR_TAB
3483 AS1 (clr,%B0) CR_TAB
3484 AS1 (clr,%A0));
3486 case 31:
3487 *len = 6;
3488 return (AS1 (clr,%D0) CR_TAB
3489 AS1 (lsr,%A0) CR_TAB
3490 AS1 (ror,%D0) CR_TAB
3491 AS1 (clr,%C0) CR_TAB
3492 AS1 (clr,%B0) CR_TAB
3493 AS1 (clr,%A0));
3495 len = t;
3497 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3498 AS1 (rol,%B0) CR_TAB
3499 AS1 (rol,%C0) CR_TAB
3500 AS1 (rol,%D0)),
3501 insn, operands, len, 4);
3502 return "";
3505 /* 8bit arithmetic shift right ((signed char)x >> i) */
3507 const char *
3508 ashrqi3_out (rtx insn, rtx operands[], int *len)
3510 if (GET_CODE (operands[2]) == CONST_INT)
3512 int k;
3514 if (!len)
3515 len = &k;
3517 switch (INTVAL (operands[2]))
3519 case 1:
3520 *len = 1;
3521 return AS1 (asr,%0);
3523 case 2:
3524 *len = 2;
3525 return (AS1 (asr,%0) CR_TAB
3526 AS1 (asr,%0));
3528 case 3:
3529 *len = 3;
3530 return (AS1 (asr,%0) CR_TAB
3531 AS1 (asr,%0) CR_TAB
3532 AS1 (asr,%0));
3534 case 4:
3535 *len = 4;
3536 return (AS1 (asr,%0) CR_TAB
3537 AS1 (asr,%0) CR_TAB
3538 AS1 (asr,%0) CR_TAB
3539 AS1 (asr,%0));
3541 case 5:
3542 *len = 5;
3543 return (AS1 (asr,%0) CR_TAB
3544 AS1 (asr,%0) CR_TAB
3545 AS1 (asr,%0) CR_TAB
3546 AS1 (asr,%0) CR_TAB
3547 AS1 (asr,%0));
3549 case 6:
3550 *len = 4;
3551 return (AS2 (bst,%0,6) CR_TAB
3552 AS1 (lsl,%0) CR_TAB
3553 AS2 (sbc,%0,%0) CR_TAB
3554 AS2 (bld,%0,0));
3556 default:
3557 if (INTVAL (operands[2]) < 8)
3558 break;
3560 /* fall through */
3562 case 7:
3563 *len = 2;
3564 return (AS1 (lsl,%0) CR_TAB
3565 AS2 (sbc,%0,%0));
3568 else if (CONSTANT_P (operands[2]))
3569 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3571 out_shift_with_cnt (AS1 (asr,%0),
3572 insn, operands, len, 1);
3573 return "";
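/* The "lsl ... / sbc Rd,Rd" idiom above moves the sign bit into the carry
   flag and then subtracts with carry, filling Rd with 0x00 or 0xff, i.e. a
   two-instruction sign extension.  */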
3577 /* 16bit arithmetic shift right ((signed short)x >> i) */
3579 const char *
3580 ashrhi3_out (rtx insn, rtx operands[], int *len)
3582 if (GET_CODE (operands[2]) == CONST_INT)
3584 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3585 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3586 int k;
3587 int *t = len;
3589 if (!len)
3590 len = &k;
3592 switch (INTVAL (operands[2]))
3594 case 4:
3595 case 5:
3596 /* XXX try to optimize this too? */
3597 break;
3599 case 6:
3600 if (optimize_size)
3601 break; /* scratch ? 5 : 6 */
3602 *len = 8;
3603 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3604 AS2 (mov,%A0,%B0) CR_TAB
3605 AS1 (lsl,__tmp_reg__) CR_TAB
3606 AS1 (rol,%A0) CR_TAB
3607 AS2 (sbc,%B0,%B0) CR_TAB
3608 AS1 (lsl,__tmp_reg__) CR_TAB
3609 AS1 (rol,%A0) CR_TAB
3610 AS1 (rol,%B0));
3612 case 7:
3613 *len = 4;
3614 return (AS1 (lsl,%A0) CR_TAB
3615 AS2 (mov,%A0,%B0) CR_TAB
3616 AS1 (rol,%A0) CR_TAB
3617 AS2 (sbc,%B0,%B0));
3619 case 8:
3621 int reg0 = true_regnum (operands[0]);
3622 int reg1 = true_regnum (operands[1]);
3624 if (reg0 == reg1)
3625 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3626 AS1 (lsl,%B0) CR_TAB
3627 AS2 (sbc,%B0,%B0));
3628 else
3629 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3630 AS1 (clr,%B0) CR_TAB
3631 AS2 (sbrc,%A0,7) CR_TAB
3632 AS1 (dec,%B0));
3635 case 9:
3636 *len = 4;
3637 return (AS2 (mov,%A0,%B0) CR_TAB
3638 AS1 (lsl,%B0) CR_TAB
3639 AS2 (sbc,%B0,%B0) CR_TAB
3640 AS1 (asr,%A0));
3642 case 10:
3643 *len = 5;
3644 return (AS2 (mov,%A0,%B0) CR_TAB
3645 AS1 (lsl,%B0) CR_TAB
3646 AS2 (sbc,%B0,%B0) CR_TAB
3647 AS1 (asr,%A0) CR_TAB
3648 AS1 (asr,%A0));
3650 case 11:
3651 if (AVR_HAVE_MUL && ldi_ok)
3653 *len = 5;
3654 return (AS2 (ldi,%A0,0x20) CR_TAB
3655 AS2 (muls,%B0,%A0) CR_TAB
3656 AS2 (mov,%A0,r1) CR_TAB
3657 AS2 (sbc,%B0,%B0) CR_TAB
3658 AS1 (clr,__zero_reg__));
3660 if (optimize_size && scratch)
3661 break; /* 5 */
3662 *len = 6;
3663 return (AS2 (mov,%A0,%B0) CR_TAB
3664 AS1 (lsl,%B0) CR_TAB
3665 AS2 (sbc,%B0,%B0) CR_TAB
3666 AS1 (asr,%A0) CR_TAB
3667 AS1 (asr,%A0) CR_TAB
3668 AS1 (asr,%A0));
3670 case 12:
3671 if (AVR_HAVE_MUL && ldi_ok)
3673 *len = 5;
3674 return (AS2 (ldi,%A0,0x10) CR_TAB
3675 AS2 (muls,%B0,%A0) CR_TAB
3676 AS2 (mov,%A0,r1) CR_TAB
3677 AS2 (sbc,%B0,%B0) CR_TAB
3678 AS1 (clr,__zero_reg__));
3680 if (optimize_size && scratch)
3681 break; /* 5 */
3682 *len = 7;
3683 return (AS2 (mov,%A0,%B0) CR_TAB
3684 AS1 (lsl,%B0) CR_TAB
3685 AS2 (sbc,%B0,%B0) CR_TAB
3686 AS1 (asr,%A0) CR_TAB
3687 AS1 (asr,%A0) CR_TAB
3688 AS1 (asr,%A0) CR_TAB
3689 AS1 (asr,%A0));
3691 case 13:
3692 if (AVR_HAVE_MUL && ldi_ok)
3694 *len = 5;
3695 return (AS2 (ldi,%A0,0x08) CR_TAB
3696 AS2 (muls,%B0,%A0) CR_TAB
3697 AS2 (mov,%A0,r1) CR_TAB
3698 AS2 (sbc,%B0,%B0) CR_TAB
3699 AS1 (clr,__zero_reg__));
3701 if (optimize_size)
3702 break; /* scratch ? 5 : 7 */
3703 *len = 8;
3704 return (AS2 (mov,%A0,%B0) CR_TAB
3705 AS1 (lsl,%B0) CR_TAB
3706 AS2 (sbc,%B0,%B0) CR_TAB
3707 AS1 (asr,%A0) CR_TAB
3708 AS1 (asr,%A0) CR_TAB
3709 AS1 (asr,%A0) CR_TAB
3710 AS1 (asr,%A0) CR_TAB
3711 AS1 (asr,%A0));
3713 case 14:
3714 *len = 5;
3715 return (AS1 (lsl,%B0) CR_TAB
3716 AS2 (sbc,%A0,%A0) CR_TAB
3717 AS1 (lsl,%B0) CR_TAB
3718 AS2 (mov,%B0,%A0) CR_TAB
3719 AS1 (rol,%A0));
3721 default:
3722 if (INTVAL (operands[2]) < 16)
3723 break;
3725 /* fall through */
3727 case 15:
3728 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3729 AS2 (sbc,%A0,%A0) CR_TAB
3730 AS2 (mov,%B0,%A0));
3732 len = t;
3734 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3735 AS1 (ror,%A0)),
3736 insn, operands, len, 2);
3737 return "";
3741 /* 32bit arithmetic shift right ((signed long)x >> i) */
3743 const char *
3744 ashrsi3_out (rtx insn, rtx operands[], int *len)
3746 if (GET_CODE (operands[2]) == CONST_INT)
3748 int k;
3749 int *t = len;
3751 if (!len)
3752 len = &k;
3754 switch (INTVAL (operands[2]))
3756 case 8:
3758 int reg0 = true_regnum (operands[0]);
3759 int reg1 = true_regnum (operands[1]);
3760 *len=6;
3761 if (reg0 <= reg1)
3762 return (AS2 (mov,%A0,%B1) CR_TAB
3763 AS2 (mov,%B0,%C1) CR_TAB
3764 AS2 (mov,%C0,%D1) CR_TAB
3765 AS1 (clr,%D0) CR_TAB
3766 AS2 (sbrc,%C0,7) CR_TAB
3767 AS1 (dec,%D0));
3768 else
3769 return (AS1 (clr,%D0) CR_TAB
3770 AS2 (sbrc,%D1,7) CR_TAB
3771 AS1 (dec,%D0) CR_TAB
3772 AS2 (mov,%C0,%D1) CR_TAB
3773 AS2 (mov,%B0,%C1) CR_TAB
3774 AS2 (mov,%A0,%B1));
3777 case 16:
3779 int reg0 = true_regnum (operands[0]);
3780 int reg1 = true_regnum (operands[1]);
3782 if (reg0 == reg1 + 2)
3783 return *len = 4, (AS1 (clr,%D0) CR_TAB
3784 AS2 (sbrc,%B0,7) CR_TAB
3785 AS1 (com,%D0) CR_TAB
3786 AS2 (mov,%C0,%D0));
3787 if (AVR_HAVE_MOVW)
3788 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3789 AS1 (clr,%D0) CR_TAB
3790 AS2 (sbrc,%B0,7) CR_TAB
3791 AS1 (com,%D0) CR_TAB
3792 AS2 (mov,%C0,%D0));
3793 else
3794 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3795 AS2 (mov,%A0,%C1) CR_TAB
3796 AS1 (clr,%D0) CR_TAB
3797 AS2 (sbrc,%B0,7) CR_TAB
3798 AS1 (com,%D0) CR_TAB
3799 AS2 (mov,%C0,%D0));
3802 case 24:
3803 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3804 AS1 (clr,%D0) CR_TAB
3805 AS2 (sbrc,%A0,7) CR_TAB
3806 AS1 (com,%D0) CR_TAB
3807 AS2 (mov,%B0,%D0) CR_TAB
3808 AS2 (mov,%C0,%D0));
3810 default:
3811 if (INTVAL (operands[2]) < 32)
3812 break;
3814 /* fall through */
3816 case 31:
3817 if (AVR_HAVE_MOVW)
3818 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3819 AS2 (sbc,%A0,%A0) CR_TAB
3820 AS2 (mov,%B0,%A0) CR_TAB
3821 AS2 (movw,%C0,%A0));
3822 else
3823 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3824 AS2 (sbc,%A0,%A0) CR_TAB
3825 AS2 (mov,%B0,%A0) CR_TAB
3826 AS2 (mov,%C0,%A0) CR_TAB
3827 AS2 (mov,%D0,%A0));
3829 len = t;
3831 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3832 AS1 (ror,%C0) CR_TAB
3833 AS1 (ror,%B0) CR_TAB
3834 AS1 (ror,%A0)),
3835 insn, operands, len, 4);
3836 return "";
3839 /* 8bit logical shift right ((unsigned char)x >> i) */
3841 const char *
3842 lshrqi3_out (rtx insn, rtx operands[], int *len)
3844 if (GET_CODE (operands[2]) == CONST_INT)
3846 int k;
3848 if (!len)
3849 len = &k;
3851 switch (INTVAL (operands[2]))
3853 default:
3854 if (INTVAL (operands[2]) < 8)
3855 break;
3857 *len = 1;
3858 return AS1 (clr,%0);
3860 case 1:
3861 *len = 1;
3862 return AS1 (lsr,%0);
3864 case 2:
3865 *len = 2;
3866 return (AS1 (lsr,%0) CR_TAB
3867 AS1 (lsr,%0));
3868 case 3:
3869 *len = 3;
3870 return (AS1 (lsr,%0) CR_TAB
3871 AS1 (lsr,%0) CR_TAB
3872 AS1 (lsr,%0));
3874 case 4:
3875 if (test_hard_reg_class (LD_REGS, operands[0]))
3877 *len=2;
3878 return (AS1 (swap,%0) CR_TAB
3879 AS2 (andi,%0,0x0f));
3881 *len = 4;
3882 return (AS1 (lsr,%0) CR_TAB
3883 AS1 (lsr,%0) CR_TAB
3884 AS1 (lsr,%0) CR_TAB
3885 AS1 (lsr,%0));
3887 case 5:
3888 if (test_hard_reg_class (LD_REGS, operands[0]))
3890 *len = 3;
3891 return (AS1 (swap,%0) CR_TAB
3892 AS1 (lsr,%0) CR_TAB
3893 AS2 (andi,%0,0x7));
3895 *len = 5;
3896 return (AS1 (lsr,%0) CR_TAB
3897 AS1 (lsr,%0) CR_TAB
3898 AS1 (lsr,%0) CR_TAB
3899 AS1 (lsr,%0) CR_TAB
3900 AS1 (lsr,%0));
3902 case 6:
3903 if (test_hard_reg_class (LD_REGS, operands[0]))
3905 *len = 4;
3906 return (AS1 (swap,%0) CR_TAB
3907 AS1 (lsr,%0) CR_TAB
3908 AS1 (lsr,%0) CR_TAB
3909 AS2 (andi,%0,0x3));
3911 *len = 6;
3912 return (AS1 (lsr,%0) CR_TAB
3913 AS1 (lsr,%0) CR_TAB
3914 AS1 (lsr,%0) CR_TAB
3915 AS1 (lsr,%0) CR_TAB
3916 AS1 (lsr,%0) CR_TAB
3917 AS1 (lsr,%0));
3919 case 7:
3920 *len = 3;
3921 return (AS1 (rol,%0) CR_TAB
3922 AS1 (clr,%0) CR_TAB
3923 AS1 (rol,%0));
3926 else if (CONSTANT_P (operands[2]))
3927 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3929 out_shift_with_cnt (AS1 (lsr,%0),
3930 insn, operands, len, 1);
3931 return "";
3934 /* 16bit logical shift right ((unsigned short)x >> i) */
3936 const char *
3937 lshrhi3_out (rtx insn, rtx operands[], int *len)
3939 if (GET_CODE (operands[2]) == CONST_INT)
3941 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3942 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3943 int k;
3944 int *t = len;
3946 if (!len)
3947 len = &k;
3949 switch (INTVAL (operands[2]))
3951 default:
3952 if (INTVAL (operands[2]) < 16)
3953 break;
3955 *len = 2;
3956 return (AS1 (clr,%B0) CR_TAB
3957 AS1 (clr,%A0));
3959 case 4:
3960 if (optimize_size && scratch)
3961 break; /* 5 */
3962 if (ldi_ok)
3964 *len = 6;
3965 return (AS1 (swap,%B0) CR_TAB
3966 AS1 (swap,%A0) CR_TAB
3967 AS2 (andi,%A0,0x0f) CR_TAB
3968 AS2 (eor,%A0,%B0) CR_TAB
3969 AS2 (andi,%B0,0x0f) CR_TAB
3970 AS2 (eor,%A0,%B0));
3972 if (scratch)
3974 *len = 7;
3975 return (AS1 (swap,%B0) CR_TAB
3976 AS1 (swap,%A0) CR_TAB
3977 AS2 (ldi,%3,0x0f) CR_TAB
3978 AS2 (and,%A0,%3) CR_TAB
3979 AS2 (eor,%A0,%B0) CR_TAB
3980 AS2 (and,%B0,%3) CR_TAB
3981 AS2 (eor,%A0,%B0));
3983 break; /* optimize_size ? 6 : 8 */
3985 case 5:
3986 if (optimize_size)
3987 break; /* scratch ? 5 : 6 */
3988 if (ldi_ok)
3990 *len = 8;
3991 return (AS1 (lsr,%B0) CR_TAB
3992 AS1 (ror,%A0) CR_TAB
3993 AS1 (swap,%B0) CR_TAB
3994 AS1 (swap,%A0) CR_TAB
3995 AS2 (andi,%A0,0x0f) CR_TAB
3996 AS2 (eor,%A0,%B0) CR_TAB
3997 AS2 (andi,%B0,0x0f) CR_TAB
3998 AS2 (eor,%A0,%B0));
4000 if (scratch)
4002 *len = 9;
4003 return (AS1 (lsr,%B0) CR_TAB
4004 AS1 (ror,%A0) CR_TAB
4005 AS1 (swap,%B0) CR_TAB
4006 AS1 (swap,%A0) CR_TAB
4007 AS2 (ldi,%3,0x0f) CR_TAB
4008 AS2 (and,%A0,%3) CR_TAB
4009 AS2 (eor,%A0,%B0) CR_TAB
4010 AS2 (and,%B0,%3) CR_TAB
4011 AS2 (eor,%A0,%B0));
4013 break; /* 10 */
4015 case 6:
4016 if (optimize_size)
4017 break; /* scratch ? 5 : 6 */
4018 *len = 9;
4019 return (AS1 (clr,__tmp_reg__) CR_TAB
4020 AS1 (lsl,%A0) CR_TAB
4021 AS1 (rol,%B0) CR_TAB
4022 AS1 (rol,__tmp_reg__) CR_TAB
4023 AS1 (lsl,%A0) CR_TAB
4024 AS1 (rol,%B0) CR_TAB
4025 AS1 (rol,__tmp_reg__) CR_TAB
4026 AS2 (mov,%A0,%B0) CR_TAB
4027 AS2 (mov,%B0,__tmp_reg__));
4029 case 7:
4030 *len = 5;
4031 return (AS1 (lsl,%A0) CR_TAB
4032 AS2 (mov,%A0,%B0) CR_TAB
4033 AS1 (rol,%A0) CR_TAB
4034 AS2 (sbc,%B0,%B0) CR_TAB
4035 AS1 (neg,%B0));
4037 case 8:
4038 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4039 AS1 (clr,%B0));
4041 case 9:
4042 *len = 3;
4043 return (AS2 (mov,%A0,%B0) CR_TAB
4044 AS1 (clr,%B0) CR_TAB
4045 AS1 (lsr,%A0));
4047 case 10:
4048 *len = 4;
4049 return (AS2 (mov,%A0,%B0) CR_TAB
4050 AS1 (clr,%B0) CR_TAB
4051 AS1 (lsr,%A0) CR_TAB
4052 AS1 (lsr,%A0));
4054 case 11:
4055 *len = 5;
4056 return (AS2 (mov,%A0,%B0) CR_TAB
4057 AS1 (clr,%B0) CR_TAB
4058 AS1 (lsr,%A0) CR_TAB
4059 AS1 (lsr,%A0) CR_TAB
4060 AS1 (lsr,%A0));
4062 case 12:
4063 if (ldi_ok)
4065 *len = 4;
4066 return (AS2 (mov,%A0,%B0) CR_TAB
4067 AS1 (clr,%B0) CR_TAB
4068 AS1 (swap,%A0) CR_TAB
4069 AS2 (andi,%A0,0x0f));
4071 if (scratch)
4073 *len = 5;
4074 return (AS2 (mov,%A0,%B0) CR_TAB
4075 AS1 (clr,%B0) CR_TAB
4076 AS1 (swap,%A0) CR_TAB
4077 AS2 (ldi,%3,0x0f) CR_TAB
4078 AS2 (and,%A0,%3));
4080 *len = 6;
4081 return (AS2 (mov,%A0,%B0) CR_TAB
4082 AS1 (clr,%B0) CR_TAB
4083 AS1 (lsr,%A0) CR_TAB
4084 AS1 (lsr,%A0) CR_TAB
4085 AS1 (lsr,%A0) CR_TAB
4086 AS1 (lsr,%A0));
4088 case 13:
4089 if (ldi_ok)
4091 *len = 5;
4092 return (AS2 (mov,%A0,%B0) CR_TAB
4093 AS1 (clr,%B0) CR_TAB
4094 AS1 (swap,%A0) CR_TAB
4095 AS1 (lsr,%A0) CR_TAB
4096 AS2 (andi,%A0,0x07));
4098 if (AVR_HAVE_MUL && scratch)
4100 *len = 5;
4101 return (AS2 (ldi,%3,0x08) CR_TAB
4102 AS2 (mul,%B0,%3) CR_TAB
4103 AS2 (mov,%A0,r1) CR_TAB
4104 AS1 (clr,%B0) CR_TAB
4105 AS1 (clr,__zero_reg__));
4107 if (optimize_size && scratch)
4108 break; /* 5 */
4109 if (scratch)
4111 *len = 6;
4112 return (AS2 (mov,%A0,%B0) CR_TAB
4113 AS1 (clr,%B0) CR_TAB
4114 AS1 (swap,%A0) CR_TAB
4115 AS1 (lsr,%A0) CR_TAB
4116 AS2 (ldi,%3,0x07) CR_TAB
4117 AS2 (and,%A0,%3));
4119 if (AVR_HAVE_MUL)
4121 *len = 6;
4122 return ("set" CR_TAB
4123 AS2 (bld,r1,3) CR_TAB
4124 AS2 (mul,%B0,r1) CR_TAB
4125 AS2 (mov,%A0,r1) CR_TAB
4126 AS1 (clr,%B0) CR_TAB
4127 AS1 (clr,__zero_reg__));
4129 *len = 7;
4130 return (AS2 (mov,%A0,%B0) CR_TAB
4131 AS1 (clr,%B0) CR_TAB
4132 AS1 (lsr,%A0) CR_TAB
4133 AS1 (lsr,%A0) CR_TAB
4134 AS1 (lsr,%A0) CR_TAB
4135 AS1 (lsr,%A0) CR_TAB
4136 AS1 (lsr,%A0));
4138 case 14:
4139 if (AVR_HAVE_MUL && ldi_ok)
4141 *len = 5;
4142 return (AS2 (ldi,%A0,0x04) CR_TAB
4143 AS2 (mul,%B0,%A0) CR_TAB
4144 AS2 (mov,%A0,r1) CR_TAB
4145 AS1 (clr,%B0) CR_TAB
4146 AS1 (clr,__zero_reg__));
4148 if (AVR_HAVE_MUL && scratch)
4150 *len = 5;
4151 return (AS2 (ldi,%3,0x04) CR_TAB
4152 AS2 (mul,%B0,%3) CR_TAB
4153 AS2 (mov,%A0,r1) CR_TAB
4154 AS1 (clr,%B0) CR_TAB
4155 AS1 (clr,__zero_reg__));
4157 if (optimize_size && ldi_ok)
4159 *len = 5;
4160 return (AS2 (mov,%A0,%B0) CR_TAB
4161 AS2 (ldi,%B0,6) "\n1:\t"
4162 AS1 (lsr,%A0) CR_TAB
4163 AS1 (dec,%B0) CR_TAB
4164 AS1 (brne,1b));
4166 if (optimize_size && scratch)
4167 break; /* 5 */
4168 *len = 6;
4169 return (AS1 (clr,%A0) CR_TAB
4170 AS1 (lsl,%B0) CR_TAB
4171 AS1 (rol,%A0) CR_TAB
4172 AS1 (lsl,%B0) CR_TAB
4173 AS1 (rol,%A0) CR_TAB
4174 AS1 (clr,%B0));
4176 case 15:
4177 *len = 4;
4178 return (AS1 (clr,%A0) CR_TAB
4179 AS1 (lsl,%B0) CR_TAB
4180 AS1 (rol,%A0) CR_TAB
4181 AS1 (clr,%B0));
4183 len = t;
4185 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4186 AS1 (ror,%A0)),
4187 insn, operands, len, 2);
4188 return "";
4191 /* 32bit logical shift right ((unsigned long)x >> i) */
4193 const char *
4194 lshrsi3_out (rtx insn, rtx operands[], int *len)
4196 if (GET_CODE (operands[2]) == CONST_INT)
4198 int k;
4199 int *t = len;
4201 if (!len)
4202 len = &k;
4204 switch (INTVAL (operands[2]))
4206 default:
4207 if (INTVAL (operands[2]) < 32)
4208 break;
4210 if (AVR_HAVE_MOVW)
4211 return *len = 3, (AS1 (clr,%D0) CR_TAB
4212 AS1 (clr,%C0) CR_TAB
4213 AS2 (movw,%A0,%C0));
4214 *len = 4;
4215 return (AS1 (clr,%D0) CR_TAB
4216 AS1 (clr,%C0) CR_TAB
4217 AS1 (clr,%B0) CR_TAB
4218 AS1 (clr,%A0));
4220 case 8:
4222 int reg0 = true_regnum (operands[0]);
4223 int reg1 = true_regnum (operands[1]);
4224 *len = 4;
4225 if (reg0 <= reg1)
4226 return (AS2 (mov,%A0,%B1) CR_TAB
4227 AS2 (mov,%B0,%C1) CR_TAB
4228 AS2 (mov,%C0,%D1) CR_TAB
4229 AS1 (clr,%D0));
4230 else
4231 return (AS1 (clr,%D0) CR_TAB
4232 AS2 (mov,%C0,%D1) CR_TAB
4233 AS2 (mov,%B0,%C1) CR_TAB
4234 AS2 (mov,%A0,%B1));
4237 case 16:
4239 int reg0 = true_regnum (operands[0]);
4240 int reg1 = true_regnum (operands[1]);
4242 if (reg0 == reg1 + 2)
4243 return *len = 2, (AS1 (clr,%C0) CR_TAB
4244 AS1 (clr,%D0));
4245 if (AVR_HAVE_MOVW)
4246 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4247 AS1 (clr,%C0) CR_TAB
4248 AS1 (clr,%D0));
4249 else
4250 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4251 AS2 (mov,%A0,%C1) CR_TAB
4252 AS1 (clr,%C0) CR_TAB
4253 AS1 (clr,%D0));
4256 case 24:
4257 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4258 AS1 (clr,%B0) CR_TAB
4259 AS1 (clr,%C0) CR_TAB
4260 AS1 (clr,%D0));
4262 case 31:
4263 *len = 6;
4264 return (AS1 (clr,%A0) CR_TAB
4265 AS2 (sbrc,%D0,7) CR_TAB
4266 AS1 (inc,%A0) CR_TAB
4267 AS1 (clr,%B0) CR_TAB
4268 AS1 (clr,%C0) CR_TAB
4269 AS1 (clr,%D0));
4271 len = t;
4273 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4274 AS1 (ror,%C0) CR_TAB
4275 AS1 (ror,%B0) CR_TAB
4276 AS1 (ror,%A0)),
4277 insn, operands, len, 4);
4278 return "";
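/* adjust_insn_length below recomputes exact instruction counts for moves,
   tests, AND/IOR with constants, constant reloads and shifts; it is
   presumably wired up through the ADJUST_INSN_LENGTH macro in avr.h (an
   assumption, that macro is not shown in this file).  */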
4281 /* Modifies the length assigned to instruction INSN.
4282 LEN is the initially computed length of the insn. */
4285 adjust_insn_length (rtx insn, int len)
4287 rtx patt = PATTERN (insn);
4288 rtx set;
4290 if (GET_CODE (patt) == SET)
4292 rtx op[10];
4293 op[1] = SET_SRC (patt);
4294 op[0] = SET_DEST (patt);
4295 if (general_operand (op[1], VOIDmode)
4296 && general_operand (op[0], VOIDmode))
4298 switch (GET_MODE (op[0]))
4300 case QImode:
4301 output_movqi (insn, op, &len);
4302 break;
4303 case HImode:
4304 output_movhi (insn, op, &len);
4305 break;
4306 case SImode:
4307 case SFmode:
4308 output_movsisf (insn, op, &len);
4309 break;
4310 default:
4311 break;
4314 else if (op[0] == cc0_rtx && REG_P (op[1]))
4316 switch (GET_MODE (op[1]))
4318 case HImode: out_tsthi (insn,&len); break;
4319 case SImode: out_tstsi (insn,&len); break;
4320 default: break;
4323 else if (GET_CODE (op[1]) == AND)
4325 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4327 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4328 if (GET_MODE (op[1]) == SImode)
4329 len = (((mask & 0xff) != 0xff)
4330 + ((mask & 0xff00) != 0xff00)
4331 + ((mask & 0xff0000L) != 0xff0000L)
4332 + ((mask & 0xff000000L) != 0xff000000L));
4333 else if (GET_MODE (op[1]) == HImode)
4334 len = (((mask & 0xff) != 0xff)
4335 + ((mask & 0xff00) != 0xff00));
4338 else if (GET_CODE (op[1]) == IOR)
4340 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4342 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4343 if (GET_MODE (op[1]) == SImode)
4344 len = (((mask & 0xff) != 0)
4345 + ((mask & 0xff00) != 0)
4346 + ((mask & 0xff0000L) != 0)
4347 + ((mask & 0xff000000L) != 0));
4348 else if (GET_MODE (op[1]) == HImode)
4349 len = (((mask & 0xff) != 0)
4350 + ((mask & 0xff00) != 0));
4354 set = single_set (insn);
4355 if (set)
4357 rtx op[10];
4359 op[1] = SET_SRC (set);
4360 op[0] = SET_DEST (set);
4362 if (GET_CODE (patt) == PARALLEL
4363 && general_operand (op[1], VOIDmode)
4364 && general_operand (op[0], VOIDmode))
4366 if (XVECLEN (patt, 0) == 2)
4367 op[2] = XVECEXP (patt, 0, 1);
4369 switch (GET_MODE (op[0]))
4371 case QImode:
4372 len = 2;
4373 break;
4374 case HImode:
4375 output_reload_inhi (insn, op, &len);
4376 break;
4377 case SImode:
4378 case SFmode:
4379 output_reload_insisf (insn, op, &len);
4380 break;
4381 default:
4382 break;
4385 else if (GET_CODE (op[1]) == ASHIFT
4386 || GET_CODE (op[1]) == ASHIFTRT
4387 || GET_CODE (op[1]) == LSHIFTRT)
4389 rtx ops[10];
4390 ops[0] = op[0];
4391 ops[1] = XEXP (op[1],0);
4392 ops[2] = XEXP (op[1],1);
4393 switch (GET_CODE (op[1]))
4395 case ASHIFT:
4396 switch (GET_MODE (op[0]))
4398 case QImode: ashlqi3_out (insn,ops,&len); break;
4399 case HImode: ashlhi3_out (insn,ops,&len); break;
4400 case SImode: ashlsi3_out (insn,ops,&len); break;
4401 default: break;
4403 break;
4404 case ASHIFTRT:
4405 switch (GET_MODE (op[0]))
4407 case QImode: ashrqi3_out (insn,ops,&len); break;
4408 case HImode: ashrhi3_out (insn,ops,&len); break;
4409 case SImode: ashrsi3_out (insn,ops,&len); break;
4410 default: break;
4412 break;
4413 case LSHIFTRT:
4414 switch (GET_MODE (op[0]))
4416 case QImode: lshrqi3_out (insn,ops,&len); break;
4417 case HImode: lshrhi3_out (insn,ops,&len); break;
4418 case SImode: lshrsi3_out (insn,ops,&len); break;
4419 default: break;
4421 break;
4422 default:
4423 break;
4427 return len;
4430 /* Return nonzero if register REG is dead after INSN. */
4433 reg_unused_after (rtx insn, rtx reg)
4435 return (dead_or_set_p (insn, reg)
4436 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4439 /* Return nonzero if REG is not used after INSN.
4440 We assume REG is a reload reg, and therefore does
4441 not live past labels. It may live past calls or jumps though. */
4444 _reg_unused_after (rtx insn, rtx reg)
4446 enum rtx_code code;
4447 rtx set;
4449 /* If the reg is set by this instruction, then it is safe for our
4450 case. Disregard the case where this is a store to memory, since
4451 we are checking a register used in the store address. */
4452 set = single_set (insn);
4453 if (set && GET_CODE (SET_DEST (set)) != MEM
4454 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4455 return 1;
4457 while ((insn = NEXT_INSN (insn)))
4459 rtx set;
4460 code = GET_CODE (insn);
4462 #if 0
4463 /* If this is a label that existed before reload, then the register
4464 is dead here. However, if this is a label added by reorg, then
4465 the register may still be live here. We can't tell the difference,
4466 so we just ignore labels completely. */
4467 if (code == CODE_LABEL)
4468 return 1;
4469 /* else */
4470 #endif
4472 if (!INSN_P (insn))
4473 continue;
4475 if (code == JUMP_INSN)
4476 return 0;
4478 /* If this is a sequence, we must handle them all at once.
4479 We could have for instance a call that sets the target register,
4480 and an insn in a delay slot that uses the register. In this case,
4481 we must return 0. */
4482 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4484 int i;
4485 int retval = 0;
4487 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4489 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4490 rtx set = single_set (this_insn);
4492 if (GET_CODE (this_insn) == CALL_INSN)
4493 code = CALL_INSN;
4494 else if (GET_CODE (this_insn) == JUMP_INSN)
4496 if (INSN_ANNULLED_BRANCH_P (this_insn))
4497 return 0;
4498 code = JUMP_INSN;
4501 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4502 return 0;
4503 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4505 if (GET_CODE (SET_DEST (set)) != MEM)
4506 retval = 1;
4507 else
4508 return 0;
4510 if (set == 0
4511 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4512 return 0;
4514 if (retval == 1)
4515 return 1;
4516 else if (code == JUMP_INSN)
4517 return 0;
4520 if (code == CALL_INSN)
4522 rtx tem;
4523 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4524 if (GET_CODE (XEXP (tem, 0)) == USE
4525 && REG_P (XEXP (XEXP (tem, 0), 0))
4526 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4527 return 0;
4528 if (call_used_regs[REGNO (reg)])
4529 return 1;
4532 set = single_set (insn);
4534 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4535 return 0;
4536 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4537 return GET_CODE (SET_DEST (set)) != MEM;
4538 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4539 return 0;
4541 return 1;
4544 /* Target hook for assembling integer objects. The AVR version needs
4545 special handling for references to certain labels. */
4547 static bool
4548 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4550 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4551 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4552 || GET_CODE (x) == LABEL_REF))
4554 fputs ("\t.word\tgs(", asm_out_file);
4555 output_addr_const (asm_out_file, x);
4556 fputs (")\n", asm_out_file);
4557 return true;
4559 return default_assemble_integer (x, size, aligned_p);
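/* The gs() operator emitted above asks the assembler/linker for a word
   (code) address of the label, letting the linker insert a jump stub when
   the target is out of direct reach; this description of avr-binutils
   behaviour is an assumption and is not verified in this file.  */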
4562 /* The routine used to output NUL terminated strings. We use a special
4563 version of this for most svr4 targets because doing so makes the
4564 generated assembly code more compact (and thus faster to assemble)
4565 as well as more readable, especially for targets like the i386
4566 (where the only alternative is to output character sequences as
4567 comma separated lists of numbers). */
4569 void
4570 gas_output_limited_string(FILE *file, const char *str)
4572 const unsigned char *_limited_str = (const unsigned char *) str;
4573 unsigned ch;
4574 fprintf (file, "%s\"", STRING_ASM_OP);
4575 for (; (ch = *_limited_str); _limited_str++)
4577 int escape;
4578 switch (escape = ESCAPES[ch])
4580 case 0:
4581 putc (ch, file);
4582 break;
4583 case 1:
4584 fprintf (file, "\\%03o", ch);
4585 break;
4586 default:
4587 putc ('\\', file);
4588 putc (escape, file);
4589 break;
4592 fprintf (file, "\"\n");
4595 /* The routine used to output sequences of byte values. We use a special
4596 version of this for most svr4 targets because doing so makes the
4597 generated assembly code more compact (and thus faster to assemble)
4598 as well as more readable. Note that if we find subparts of the
4599 character sequence which end with NUL (and which are shorter than
4600 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4602 void
4603 gas_output_ascii(FILE *file, const char *str, size_t length)
4605 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4606 const unsigned char *limit = _ascii_bytes + length;
4607 unsigned bytes_in_chunk = 0;
4608 for (; _ascii_bytes < limit; _ascii_bytes++)
4610 const unsigned char *p;
4611 if (bytes_in_chunk >= 60)
4613 fprintf (file, "\"\n");
4614 bytes_in_chunk = 0;
4616 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4617 continue;
4618 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4620 if (bytes_in_chunk > 0)
4622 fprintf (file, "\"\n");
4623 bytes_in_chunk = 0;
4625 gas_output_limited_string (file, (const char*)_ascii_bytes);
4626 _ascii_bytes = p;
4628 else
4630 int escape;
4631 unsigned ch;
4632 if (bytes_in_chunk == 0)
4633 fprintf (file, "\t.ascii\t\"");
4634 switch (escape = ESCAPES[ch = *_ascii_bytes])
4636 case 0:
4637 putc (ch, file);
4638 bytes_in_chunk++;
4639 break;
4640 case 1:
4641 fprintf (file, "\\%03o", ch);
4642 bytes_in_chunk += 4;
4643 break;
4644 default:
4645 putc ('\\', file);
4646 putc (escape, file);
4647 bytes_in_chunk += 2;
4648 break;
4652 if (bytes_in_chunk > 0)
4653 fprintf (file, "\"\n");
4656 /* Return value is nonzero if pseudos that have been
4657 assigned to registers of class CLASS would likely be spilled
4658 because registers of CLASS are needed for spill registers. */
4660 enum reg_class
4661 class_likely_spilled_p (int c)
4663 return (c != ALL_REGS && c != ADDW_REGS);
4666 /* Valid attributes:
4667 progmem - put data into program memory;
4668 signal - make the function a hardware interrupt handler; interrupts
4669 remain disabled after the function prologue;
4670 interrupt - make the function a hardware interrupt handler; interrupts
4671 are enabled after the function prologue;
4672 naked - don't generate a function prologue/epilogue or a `ret' instruction.
4674 Only the `progmem' attribute is valid for a type. */
4676 const struct attribute_spec avr_attribute_table[] =
4678 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4679 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4680 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4681 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4682 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4683 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4684 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
4685 { NULL, 0, 0, false, false, false, NULL }
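/* A sketch of how these attributes are typically used (assuming avr-libc's
   <avr/pgmspace.h> and <avr/interrupt.h>, whose PROGMEM and ISR macros
   expand to them):

       const char msg[] __attribute__((__progmem__)) = "hello";

       void __vector_10 (void) __attribute__((__signal__));
       void __vector_10 (void)
       {
         // handler body; interrupts stay disabled (no "sei" in the prologue)
       }

       void boot (void) __attribute__((__naked__));  // no prologue/epilogue/ret
*/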
4688 /* Handle a "progmem" attribute; arguments as in
4689 struct attribute_spec.handler. */
4690 static tree
4691 avr_handle_progmem_attribute (tree *node, tree name,
4692 tree args ATTRIBUTE_UNUSED,
4693 int flags ATTRIBUTE_UNUSED,
4694 bool *no_add_attrs)
4696 if (DECL_P (*node))
4698 if (TREE_CODE (*node) == TYPE_DECL)
4700 /* This is really a decl attribute, not a type attribute,
4701 but try to handle it for GCC 3.0 backwards compatibility. */
4703 tree type = TREE_TYPE (*node);
4704 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4705 tree newtype = build_type_attribute_variant (type, attr);
4707 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4708 TREE_TYPE (*node) = newtype;
4709 *no_add_attrs = true;
4711 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4713 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4715 warning (0, "only initialized variables can be placed into "
4716 "program memory area");
4717 *no_add_attrs = true;
4720 else
4722 warning (OPT_Wattributes, "%qs attribute ignored",
4723 IDENTIFIER_POINTER (name));
4724 *no_add_attrs = true;
4728 return NULL_TREE;
4731 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4732 struct attribute_spec.handler. */
4734 static tree
4735 avr_handle_fndecl_attribute (tree *node, tree name,
4736 tree args ATTRIBUTE_UNUSED,
4737 int flags ATTRIBUTE_UNUSED,
4738 bool *no_add_attrs)
4740 if (TREE_CODE (*node) != FUNCTION_DECL)
4742 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4743 IDENTIFIER_POINTER (name));
4744 *no_add_attrs = true;
4746 else
4748 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4749 const char *attr = IDENTIFIER_POINTER (name);
4751 /* If the function has the 'signal' or 'interrupt' attribute, test to
4752 make sure that the name of the function is "__vector_NN" so as to
4753 catch when the user misspells the interrupt vector name. */
4755 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4757 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4759 warning (0, "%qs appears to be a misspelled interrupt handler",
4760 func_name);
4763 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4765 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4767 warning (0, "%qs appears to be a misspelled signal handler",
4768 func_name);
4773 return NULL_TREE;
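/* For example, "void __vector_5 (void) __attribute__((__signal__));" passes
   the check above, while a handler misnamed "__vektor_5" draws the
   "appears to be a misspelled signal handler" warning, because its
   assembler name does not start with "__vector".  */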
4776 static tree
4777 avr_handle_fntype_attribute (tree *node, tree name,
4778 tree args ATTRIBUTE_UNUSED,
4779 int flags ATTRIBUTE_UNUSED,
4780 bool *no_add_attrs)
4782 if (TREE_CODE (*node) != FUNCTION_TYPE)
4784 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4785 IDENTIFIER_POINTER (name));
4786 *no_add_attrs = true;
4789 return NULL_TREE;
4792 /* Look for the `progmem' attribute in DECL.
4793 If found, return 1, otherwise return 0.  */
4795 int
4796 avr_progmem_p (tree decl, tree attributes)
4797 {
4798 tree a;
4800 if (TREE_CODE (decl) != VAR_DECL)
4801 return 0;
4803 if (NULL_TREE
4804 != lookup_attribute ("progmem", attributes))
4805 return 1;
4807 a = decl;
4808 do
4809 a = TREE_TYPE (a);
4810 while (TREE_CODE (a) == ARRAY_TYPE);
4812 if (a == error_mark_node)
4813 return 0;
4815 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4816 return 1;
4818 return 0;
4819 }
4821 /* Add the section attribute if the variable is in progmem. */
4823 static void
4824 avr_insert_attributes (tree node, tree *attributes)
4826 if (TREE_CODE (node) == VAR_DECL
4827 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4828 && avr_progmem_p (node, *attributes))
4830 static const char dsec[] = ".progmem.data";
4831 *attributes = tree_cons (get_identifier ("section"),
4832 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4833 *attributes);
4835 /* ??? This seems sketchy. Why can't the user declare the
4836 thing const in the first place? */
4837 TREE_READONLY (node) = 1;
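/* The net effect is as if a progmem variable had been declared with an
   explicit section, e.g. (illustrative only):

       const char tbl[] __attribute__((__progmem__,
                                       __section__(".progmem.data"))) = {1, 2, 3};

   i.e. the ".progmem.data" section attribute is attached automatically and
   the declaration is forced read-only.  */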
4841 /* A get_unnamed_section callback for switching to progmem_section. */
4843 static void
4844 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4846 fprintf (asm_out_file,
4847 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4848 AVR_HAVE_JMP_CALL ? "a" : "ax");
4849 /* Should already be aligned, this is just to be safe if it isn't. */
4850 fprintf (asm_out_file, "\t.p2align 1\n");
4853 /* Implement TARGET_ASM_INIT_SECTIONS. */
4855 static void
4856 avr_asm_init_sections (void)
4858 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4859 avr_output_progmem_section_asm_op,
4860 NULL);
4861 readonly_data_section = data_section;
4864 static unsigned int
4865 avr_section_type_flags (tree decl, const char *name, int reloc)
4867 unsigned int flags = default_section_type_flags (decl, name, reloc);
4869 if (strncmp (name, ".noinit", 7) == 0)
4871 if (decl && TREE_CODE (decl) == VAR_DECL
4872 && DECL_INITIAL (decl) == NULL_TREE)
4873 flags |= SECTION_BSS; /* @nobits */
4874 else
4875 warning (0, "only uninitialized variables can be placed in the "
4876 ".noinit section");
4879 return flags;
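/* Illustrative use of the ".noinit" handling above:

       unsigned char boot_count __attribute__((__section__(".noinit")));

   is placed in a @nobits (SECTION_BSS) section because it has no
   initializer, whereas giving such a variable an initial value triggers
   the warning above.  */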
4882 /* Outputs some appropriate text to go at the start of an assembler
4883 file. */
4885 static void
4886 avr_file_start (void)
4888 if (avr_current_arch->asm_only)
4889 error ("MCU %qs supported for assembler only", avr_mcu_name);
4891 default_file_start ();
4893 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4894 fputs ("__SREG__ = 0x3f\n"
4895 "__SP_H__ = 0x3e\n"
4896 "__SP_L__ = 0x3d\n", asm_out_file);
4898 fputs ("__tmp_reg__ = 0\n"
4899 "__zero_reg__ = 1\n", asm_out_file);
4901 /* FIXME: output these only if there is anything in the .data / .bss
4902 sections - some code size could be saved by not linking in the
4903 initialization code from libgcc if one or both sections are empty. */
4904 fputs ("\t.global __do_copy_data\n", asm_out_file);
4905 fputs ("\t.global __do_clear_bss\n", asm_out_file);
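/* So, in addition to whatever default_file_start emits, every assembler
   file begins roughly with:

       __SREG__ = 0x3f
       __SP_H__ = 0x3e
       __SP_L__ = 0x3d
       __tmp_reg__ = 0
       __zero_reg__ = 1
               .global __do_copy_data
               .global __do_clear_bss
*/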
4908 /* Outputs to the stdio stream FILE some
4909 appropriate text to go at the end of an assembler file. */
4911 static void
4912 avr_file_end (void)
4916 /* Choose the order in which to allocate hard registers for
4917 pseudo-registers local to a basic block.
4919 Store the desired register order in the array `reg_alloc_order'.
4920 Element 0 should be the register to allocate first; element 1, the
4921 next register; and so on. */
4923 void
4924 order_regs_for_local_alloc (void)
4926 unsigned int i;
4927 static const int order_0[] = {
4928 24,25,
4929 18,19,
4930 20,21,
4931 22,23,
4932 30,31,
4933 26,27,
4934 28,29,
4935 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4936 0,1,
4937 32,33,34,35
4939 static const int order_1[] = {
4940 18,19,
4941 20,21,
4942 22,23,
4943 24,25,
4944 30,31,
4945 26,27,
4946 28,29,
4947 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4948 0,1,
4949 32,33,34,35
4951 static const int order_2[] = {
4952 25,24,
4953 23,22,
4954 21,20,
4955 19,18,
4956 30,31,
4957 26,27,
4958 28,29,
4959 17,16,
4960 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4961 1,0,
4962 32,33,34,35
4965 const int *order = (TARGET_ORDER_1 ? order_1 :
4966 TARGET_ORDER_2 ? order_2 :
4967 order_0);
4968 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4969 reg_alloc_order[i] = order[i];
4973 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4974 cost of an RTX operand given its context. X is the rtx of the
4975 operand, MODE is its mode, and OUTER is the rtx_code of this
4976 operand's parent operator. */
4978 static int
4979 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4981 enum rtx_code code = GET_CODE (x);
4982 int total;
4984 switch (code)
4986 case REG:
4987 case SUBREG:
4988 return 0;
4990 case CONST_INT:
4991 case CONST_DOUBLE:
4992 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4994 default:
4995 break;
4998 total = 0;
4999 avr_rtx_costs (x, code, outer, &total);
5000 return total;
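/* For example, a REG or SUBREG operand is costed as free here, while a
   CONST_INT used as an HImode operand is charged COSTS_N_INSNS (2) --
   roughly one instruction per byte needed to load the constant.  */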
5003 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5004 is to be calculated. Return true if the complete cost has been
5005 computed, and false if subexpressions should be scanned. In either
5006 case, *TOTAL contains the cost result. */
5008 static bool
5009 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
5011 enum machine_mode mode = GET_MODE (x);
5012 HOST_WIDE_INT val;
5014 switch (code)
5016 case CONST_INT:
5017 case CONST_DOUBLE:
5018 /* Immediate constants are as cheap as registers. */
5019 *total = 0;
5020 return true;
5022 case MEM:
5023 case CONST:
5024 case LABEL_REF:
5025 case SYMBOL_REF:
5026 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5027 return true;
5029 case NEG:
5030 switch (mode)
5032 case QImode:
5033 case SFmode:
5034 *total = COSTS_N_INSNS (1);
5035 break;
5037 case HImode:
5038 *total = COSTS_N_INSNS (3);
5039 break;
5041 case SImode:
5042 *total = COSTS_N_INSNS (7);
5043 break;
5045 default:
5046 return false;
5048 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5049 return true;
5051 case ABS:
5052 switch (mode)
5054 case QImode:
5055 case SFmode:
5056 *total = COSTS_N_INSNS (1);
5057 break;
5059 default:
5060 return false;
5062 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5063 return true;
5065 case NOT:
5066 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5067 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5068 return true;
5070 case ZERO_EXTEND:
5071 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5072 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5073 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5074 return true;
5076 case SIGN_EXTEND:
5077 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5078 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5079 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5080 return true;
5082 case PLUS:
5083 switch (mode)
5085 case QImode:
5086 *total = COSTS_N_INSNS (1);
5087 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5088 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5089 break;
5091 case HImode:
5092 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5094 *total = COSTS_N_INSNS (2);
5095 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5097 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5098 *total = COSTS_N_INSNS (1);
5099 else
5100 *total = COSTS_N_INSNS (2);
5101 break;
5103 case SImode:
5104 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5106 *total = COSTS_N_INSNS (4);
5107 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5109 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5110 *total = COSTS_N_INSNS (1);
5111 else
5112 *total = COSTS_N_INSNS (4);
5113 break;
5115 default:
5116 return false;
5118 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5119 return true;
5121 case MINUS:
5122 case AND:
5123 case IOR:
5124 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5125 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5126 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5127 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5128 return true;
5130 case XOR:
5131 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5132 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5133 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5134 return true;
5136 case MULT:
5137 switch (mode)
5139 case QImode:
5140 if (AVR_HAVE_MUL)
5141 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5142 else if (optimize_size)
5143 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5144 else
5145 return false;
5146 break;
5148 case HImode:
5149 if (AVR_HAVE_MUL)
5150 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5151 else if (optimize_size)
5152 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5153 else
5154 return false;
5155 break;
5157 default:
5158 return false;
5160 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5161 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5162 return true;
5164 case DIV:
5165 case MOD:
5166 case UDIV:
5167 case UMOD:
5168 if (optimize_size)
5169 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5170 else
5171 return false;
5172 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5173 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5174 return true;
5176 case ASHIFT:
5177 switch (mode)
5179 case QImode:
5180 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5182 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5183 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5185 else
5187 val = INTVAL (XEXP (x, 1));
5188 if (val == 7)
5189 *total = COSTS_N_INSNS (3);
5190 else if (val >= 0 && val <= 7)
5191 *total = COSTS_N_INSNS (val);
5192 else
5193 *total = COSTS_N_INSNS (1);
5195 break;
5197 case HImode:
5198 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5200 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5201 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5203 else
5204 switch (INTVAL (XEXP (x, 1)))
5206 case 0:
5207 *total = 0;
5208 break;
5209 case 1:
5210 case 8:
5211 *total = COSTS_N_INSNS (2);
5212 break;
5213 case 9:
5214 *total = COSTS_N_INSNS (3);
5215 break;
5216 case 2:
5217 case 3:
5218 case 10:
5219 case 15:
5220 *total = COSTS_N_INSNS (4);
5221 break;
5222 case 7:
5223 case 11:
5224 case 12:
5225 *total = COSTS_N_INSNS (5);
5226 break;
5227 case 4:
5228 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5229 break;
5230 case 6:
5231 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5232 break;
5233 case 5:
5234 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5235 break;
5236 default:
5237 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5238 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5240 break;
5242 case SImode:
5243 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5245 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5246 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5248 else
5249 switch (INTVAL (XEXP (x, 1)))
5251 case 0:
5252 *total = 0;
5253 break;
5254 case 24:
5255 *total = COSTS_N_INSNS (3);
5256 break;
5257 case 1:
5258 case 8:
5259 case 16:
5260 *total = COSTS_N_INSNS (4);
5261 break;
5262 case 31:
5263 *total = COSTS_N_INSNS (6);
5264 break;
5265 case 2:
5266 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5267 break;
5268 default:
5269 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5270 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5272 break;
5274 default:
5275 return false;
5277 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5278 return true;
5280 case ASHIFTRT:
5281 switch (mode)
5283 case QImode:
5284 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5286 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5287 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5289 else
5291 val = INTVAL (XEXP (x, 1));
5292 if (val == 6)
5293 *total = COSTS_N_INSNS (4);
5294 else if (val == 7)
5295 *total = COSTS_N_INSNS (2);
5296 else if (val >= 0 && val <= 7)
5297 *total = COSTS_N_INSNS (val);
5298 else
5299 *total = COSTS_N_INSNS (1);
5301 break;
5303 case HImode:
5304 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5306 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5307 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5309 else
5310 switch (INTVAL (XEXP (x, 1)))
5312 case 0:
5313 *total = 0;
5314 break;
5315 case 1:
5316 *total = COSTS_N_INSNS (2);
5317 break;
5318 case 15:
5319 *total = COSTS_N_INSNS (3);
5320 break;
5321 case 2:
5322 case 7:
5323 case 8:
5324 case 9:
5325 *total = COSTS_N_INSNS (4);
5326 break;
5327 case 10:
5328 case 14:
5329 *total = COSTS_N_INSNS (5);
5330 break;
5331 case 11:
5332 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5333 break;
5334 case 12:
5335 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5336 break;
5337 case 6:
5338 case 13:
5339 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5340 break;
5341 default:
5342 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5343 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5345 break;
5347 case SImode:
5348 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5350 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5351 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5353 else
5354 switch (INTVAL (XEXP (x, 1)))
5356 case 0:
5357 *total = 0;
5358 break;
5359 case 1:
5360 *total = COSTS_N_INSNS (4);
5361 break;
5362 case 8:
5363 case 16:
5364 case 24:
5365 *total = COSTS_N_INSNS (6);
5366 break;
5367 case 2:
5368 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5369 break;
5370 case 31:
5371 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5372 break;
5373 default:
5374 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5375 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5377 break;
5379 default:
5380 return false;
5382 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5383 return true;
5385 case LSHIFTRT:
5386 switch (mode)
5388 case QImode:
5389 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5391 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5392 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5394 else
5396 val = INTVAL (XEXP (x, 1));
5397 if (val == 7)
5398 *total = COSTS_N_INSNS (3);
5399 else if (val >= 0 && val <= 7)
5400 *total = COSTS_N_INSNS (val);
5401 else
5402 *total = COSTS_N_INSNS (1);
5404 break;
5406 case HImode:
5407 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5409 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5410 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5412 else
5413 switch (INTVAL (XEXP (x, 1)))
5415 case 0:
5416 *total = 0;
5417 break;
5418 case 1:
5419 case 8:
5420 *total = COSTS_N_INSNS (2);
5421 break;
5422 case 9:
5423 *total = COSTS_N_INSNS (3);
5424 break;
5425 case 2:
5426 case 10:
5427 case 15:
5428 *total = COSTS_N_INSNS (4);
5429 break;
5430 case 7:
5431 case 11:
5432 *total = COSTS_N_INSNS (5);
5433 break;
5434 case 3:
5435 case 12:
5436 case 13:
5437 case 14:
5438 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5439 break;
5440 case 4:
5441 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5442 break;
5443 case 5:
5444 case 6:
5445 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5446 break;
5447 default:
5448 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5449 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5451 break;
5453 case SImode:
5454 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5456 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5457 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5459 else
5460 switch (INTVAL (XEXP (x, 1)))
5462 case 0:
5463 *total = 0;
5464 break;
5465 case 1:
5466 *total = COSTS_N_INSNS (4);
5467 break;
5468 case 2:
5469 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5470 break;
5471 case 8:
5472 case 16:
5473 case 24:
5474 *total = COSTS_N_INSNS (4);
5475 break;
5476 case 31:
5477 *total = COSTS_N_INSNS (6);
5478 break;
5479 default:
5480 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5481 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5483 break;
5485 default:
5486 return false;
5488 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5489 return true;
5491 case COMPARE:
5492 switch (GET_MODE (XEXP (x, 0)))
5494 case QImode:
5495 *total = COSTS_N_INSNS (1);
5496 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5497 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5498 break;
5500 case HImode:
5501 *total = COSTS_N_INSNS (2);
5502 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5503 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5504 else if (INTVAL (XEXP (x, 1)) != 0)
5505 *total += COSTS_N_INSNS (1);
5506 break;
5508 case SImode:
5509 *total = COSTS_N_INSNS (4);
5510 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5511 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5512 else if (INTVAL (XEXP (x, 1)) != 0)
5513 *total += COSTS_N_INSNS (3);
5514 break;
5516 default:
5517 return false;
5519 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5520 return true;
5522 default:
5523 break;
5525 return false;
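/* A worked example of the PLUS case above: for (plus:HI (reg) (const_int 1))
   the constant lies in the -63..63 range handled by a single adiw/sbiw, so
   *total becomes COSTS_N_INSNS (1) plus the (free) cost of the register
   operand; for (plus:HI (reg) (const_int 1000)) it becomes
   COSTS_N_INSNS (2) instead.  */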
5528 /* Calculate the cost of a memory address. */
5530 static int
5531 avr_address_cost (rtx x)
5533 if (GET_CODE (x) == PLUS
5534 && GET_CODE (XEXP (x,1)) == CONST_INT
5535 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5536 && INTVAL (XEXP (x,1)) >= 61)
5537 return 18;
5538 if (CONSTANT_ADDRESS_P (x))
5540 if (optimize > 0 && io_address_operand (x, QImode))
5541 return 2;
5542 return 4;
5544 return 4;
5547 /* Test for the extra memory constraint 'Q'.
5548 It is a memory address based on the Y or Z pointer with a valid displacement.  */
5550 int
5551 extra_constraint_Q (rtx x)
5553 if (GET_CODE (XEXP (x,0)) == PLUS
5554 && REG_P (XEXP (XEXP (x,0), 0))
5555 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5556 && (INTVAL (XEXP (XEXP (x,0), 1))
5557 <= MAX_LD_OFFSET (GET_MODE (x))))
5559 rtx xx = XEXP (XEXP (x,0), 0);
5560 int regno = REGNO (xx);
5561 if (TARGET_ALL_DEBUG)
5563 fprintf (stderr, ("extra_constraint:\n"
5564 "reload_completed: %d\n"
5565 "reload_in_progress: %d\n"),
5566 reload_completed, reload_in_progress);
5567 debug_rtx (x);
5569 if (regno >= FIRST_PSEUDO_REGISTER)
5570 return 1; /* allocate pseudos */
5571 else if (regno == REG_Z || regno == REG_Y)
5572 return 1; /* strictly check */
5573 else if (xx == frame_pointer_rtx
5574 || xx == arg_pointer_rtx)
5575 return 1; /* XXX frame & arg pointer checks */
5577 return 0;
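/* E.g. (mem:QI (plus:HI (reg:HI REG_Z) (const_int 10))) satisfies 'Q',
   since 10 is within MAX_LD_OFFSET (QImode) == 63 and the base is the Z
   pointer; the same displacement off the X pointer would be rejected.  */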
5580 /* Convert condition code CONDITION to the valid AVR condition code. */
5582 RTX_CODE
5583 avr_normalize_condition (RTX_CODE condition)
5585 switch (condition)
5587 case GT:
5588 return GE;
5589 case GTU:
5590 return GEU;
5591 case LE:
5592 return LT;
5593 case LEU:
5594 return LTU;
5595 default:
5596 gcc_unreachable ();
5600 /* This function optimizes conditional jumps. */
5602 static void
5603 avr_reorg (void)
5605 rtx insn, pattern;
5607 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5609 if (! (GET_CODE (insn) == INSN
5610 || GET_CODE (insn) == CALL_INSN
5611 || GET_CODE (insn) == JUMP_INSN)
5612 || !single_set (insn))
5613 continue;
5615 pattern = PATTERN (insn);
5617 if (GET_CODE (pattern) == PARALLEL)
5618 pattern = XVECEXP (pattern, 0, 0);
5619 if (GET_CODE (pattern) == SET
5620 && SET_DEST (pattern) == cc0_rtx
5621 && compare_diff_p (insn))
5623 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5625 /* Now we are working on the compare insn itself.  */
5627 pattern = SET_SRC (pattern);
5628 if (true_regnum (XEXP (pattern,0)) >= 0
5629 && true_regnum (XEXP (pattern,1)) >= 0 )
5631 rtx x = XEXP (pattern,0);
5632 rtx next = next_real_insn (insn);
5633 rtx pat = PATTERN (next);
5634 rtx src = SET_SRC (pat);
5635 rtx t = XEXP (src,0);
5636 PUT_CODE (t, swap_condition (GET_CODE (t)));
5637 XEXP (pattern,0) = XEXP (pattern,1);
5638 XEXP (pattern,1) = x;
5639 INSN_CODE (next) = -1;
5641 else if (true_regnum (XEXP (pattern,0)) >= 0
5642 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5644 rtx x = XEXP (pattern,1);
5645 rtx next = next_real_insn (insn);
5646 rtx pat = PATTERN (next);
5647 rtx src = SET_SRC (pat);
5648 rtx t = XEXP (src,0);
5649 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5651 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5653 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5654 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5655 INSN_CODE (next) = -1;
5656 INSN_CODE (insn) = -1;
5660 else if (true_regnum (SET_SRC (pattern)) >= 0)
5662 /* This is a tst insn */
5663 rtx next = next_real_insn (insn);
5664 rtx pat = PATTERN (next);
5665 rtx src = SET_SRC (pat);
5666 rtx t = XEXP (src,0);
5668 PUT_CODE (t, swap_condition (GET_CODE (t)));
5669 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5670 SET_SRC (pattern));
5671 INSN_CODE (next) = -1;
5672 INSN_CODE (insn) = -1;
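/* The constant case above performs the classic rewrite of strict
   comparisons: a branch on "x > 42" (GT against const_int 42) becomes
   "x >= 43" (GE against const_int 43) whenever avr_simplify_comparison_p
   says the incremented constant still fits the mode, which the branch
   patterns can then test directly.  */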
5678 /* Returns the register number holding the function return value.  */
5680 int
5681 avr_ret_register (void)
5683 return 24;
5686 /* Create an RTX representing the place where a
5687 library function returns a value of mode MODE. */
5689 rtx
5690 avr_libcall_value (enum machine_mode mode)
5692 int offs = GET_MODE_SIZE (mode);
5693 if (offs < 2)
5694 offs = 2;
5695 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5698 /* Create an RTX representing the place where a
5699 function returns a value of data type VALTYPE. */
5701 rtx
5702 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
5704 unsigned int offs;
5706 if (TYPE_MODE (type) != BLKmode)
5707 return avr_libcall_value (TYPE_MODE (type));
5709 offs = int_size_in_bytes (type);
5710 if (offs < 2)
5711 offs = 2;
5712 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5713 offs = GET_MODE_SIZE (SImode);
5714 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5715 offs = GET_MODE_SIZE (DImode);
5717 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
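/* Concretely: a 1- or 2-byte value is returned in r24 (r25:r24 for HImode),
   a 3- or 4-byte value in r22..r25, and a 5- to 8-byte value in r18..r25;
   BLKmode aggregates are rounded up to the next of these sizes.  */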
5720 /* Places additional restrictions on the register class to
5721 use when it is necessary to copy value X into a register
5722 in class CLASS. */
5724 enum reg_class
5725 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
5727 return class;
5730 int
5731 test_hard_reg_class (enum reg_class class, rtx x)
5733 int regno = true_regnum (x);
5734 if (regno < 0)
5735 return 0;
5737 if (TEST_HARD_REG_CLASS (class, regno))
5738 return 1;
5740 return 0;
5744 int
5745 jump_over_one_insn_p (rtx insn, rtx dest)
5747 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5748 ? XEXP (dest, 0)
5749 : dest);
5750 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5751 int dest_addr = INSN_ADDRESSES (uid);
5752 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5755 /* Returns 1 if a value of mode MODE can be stored starting with hard
5756 register number REGNO.  On the enhanced core, anything larger than
5757 1 byte must start in an even-numbered register for "movw" to work
5758 (this way we don't have to check for odd registers everywhere).  */
5760 int
5761 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5763 /* Disallow QImode in stack pointer regs. */
5764 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5765 return 0;
5767 /* The only thing that can go into registers r28:r29 is a Pmode value.  */
5768 if (regno == REG_Y && mode == Pmode)
5769 return 1;
5771 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5772 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5773 return 0;
5775 if (mode == QImode)
5776 return 1;
5778 /* Modes larger than QImode occupy consecutive registers. */
5779 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5780 return 0;
5782 /* All modes larger than QImode should start in an even register. */
5783 return !(regno & 1);
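/* Examples: HImode may start in r24 (even, so "movw" works) but not in r25,
   and not in r27 either, since it would overlap r28:r29; QImode fits any
   register except the stack pointer bytes and r28/r29; only a Pmode value
   (typically the frame pointer itself) may start at r28.  */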
5786 const char *
5787 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5789 int tmp;
5790 if (!len)
5791 len = &tmp;
5793 if (GET_CODE (operands[1]) == CONST_INT)
5795 int val = INTVAL (operands[1]);
5796 if ((val & 0xff) == 0)
5798 *len = 3;
5799 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5800 AS2 (ldi,%2,hi8(%1)) CR_TAB
5801 AS2 (mov,%B0,%2));
5803 else if ((val & 0xff00) == 0)
5805 *len = 3;
5806 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5807 AS2 (mov,%A0,%2) CR_TAB
5808 AS2 (mov,%B0,__zero_reg__));
5810 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5812 *len = 3;
5813 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5814 AS2 (mov,%A0,%2) CR_TAB
5815 AS2 (mov,%B0,%2));
5818 *len = 4;
5819 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5820 AS2 (mov,%A0,%2) CR_TAB
5821 AS2 (ldi,%2,hi8(%1)) CR_TAB
5822 AS2 (mov,%B0,%2));
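/* For instance, reloading an HImode constant whose low byte is zero (the
   first special case above) expands to the three-insn sequence

        mov %A0,__zero_reg__
        ldi %2,hi8(%1)
        mov %B0,%2

   while a general 16-bit constant needs the full four-insn ldi/mov form.  */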
5826 const char *
5827 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5829 rtx src = operands[1];
5830 int cnst = (GET_CODE (src) == CONST_INT);
5832 if (len)
5834 if (cnst)
5835 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5836 + ((INTVAL (src) & 0xff00) != 0)
5837 + ((INTVAL (src) & 0xff0000) != 0)
5838 + ((INTVAL (src) & 0xff000000) != 0);
5839 else
5840 *len = 8;
5842 return "";
5845 if (cnst && ((INTVAL (src) & 0xff) == 0))
5846 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5847 else
5849 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5850 output_asm_insn (AS2 (mov, %A0, %2), operands);
5852 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5853 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5854 else
5856 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5857 output_asm_insn (AS2 (mov, %B0, %2), operands);
5859 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5860 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5861 else
5863 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5864 output_asm_insn (AS2 (mov, %C0, %2), operands);
5866 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5867 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5868 else
5870 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5871 output_asm_insn (AS2 (mov, %D0, %2), operands);
5873 return "";
5876 void
5877 avr_output_bld (rtx operands[], int bit_nr)
5879 static char s[] = "bld %A0,0";
5881 s[5] = 'A' + (bit_nr >> 3);
5882 s[8] = '0' + (bit_nr & 7);
5883 output_asm_insn (s, operands);
5886 void
5887 avr_output_addr_vec_elt (FILE *stream, int value)
5889 switch_to_section (progmem_section);
5890 if (AVR_HAVE_JMP_CALL)
5891 fprintf (stream, "\t.word gs(.L%d)\n", value);
5892 else
5893 fprintf (stream, "\trjmp .L%d\n", value);
5896 /* Returns 1 if SCRATCH is safe to be allocated as a scratch
5897 register (for a define_peephole2) in the current function.  */
5899 int
5900 avr_peep2_scratch_safe (rtx scratch)
5902 if ((interrupt_function_p (current_function_decl)
5903 || signal_function_p (current_function_decl))
5904 && leaf_function_p ())
5906 int first_reg = true_regnum (scratch);
5907 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5908 int reg;
5910 for (reg = first_reg; reg <= last_reg; reg++)
5912 if (!df_regs_ever_live_p (reg))
5913 return 0;
5916 return 1;
5919 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5921 int
5922 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5923 unsigned int new_reg)
5925 /* Interrupt functions can only use registers that have already been
5926 saved by the prologue, even if they would normally be
5927 call-clobbered. */
5929 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5930 && !df_regs_ever_live_p (new_reg))
5931 return 0;
5933 return 1;
5936 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5937 or memory location in the I/O space (QImode only).
5939 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5940 Operand 1: register operand to test, or CONST_INT memory address.
5941 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5942 Operand 3: label to jump to if the test is true. */
5944 const char *
5945 avr_out_sbxx_branch (rtx insn, rtx operands[])
5947 enum rtx_code comp = GET_CODE (operands[0]);
5948 int long_jump = (get_attr_length (insn) >= 4);
5949 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5951 if (comp == GE)
5952 comp = EQ;
5953 else if (comp == LT)
5954 comp = NE;
5956 if (reverse)
5957 comp = reverse_condition (comp);
5959 if (GET_CODE (operands[1]) == CONST_INT)
5961 if (INTVAL (operands[1]) < 0x40)
5963 if (comp == EQ)
5964 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5965 else
5966 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5968 else
5970 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5971 if (comp == EQ)
5972 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5973 else
5974 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5977 else /* GET_CODE (operands[1]) == REG */
5979 if (GET_MODE (operands[1]) == QImode)
5981 if (comp == EQ)
5982 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5983 else
5984 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5986 else /* HImode or SImode */
5988 static char buf[] = "sbrc %A1,0";
5989 int bit_nr = exact_log2 (INTVAL (operands[2])
5990 & GET_MODE_MASK (GET_MODE (operands[1])));
5992 buf[3] = (comp == EQ) ? 's' : 'c';
5993 buf[6] = 'A' + (bit_nr >> 3);
5994 buf[9] = '0' + (bit_nr & 7);
5995 output_asm_insn (buf, operands);
5999 if (long_jump)
6000 return (AS1 (rjmp,.+4) CR_TAB
6001 AS1 (jmp,%3));
6002 if (!reverse)
6003 return AS1 (rjmp,%3);
6004 return "";
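/* Rough shape of the output for an EQ test of bit 3 in a low I/O register
   (address below 0x40), short-branch case:

        sbis %1-0x20,3   ; skip the branch if the bit is set
        rjmp %3          ; taken when the bit is zero

   The long-jump form instead emits "rjmp .+4" over a "jmp %3".  */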
6007 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6009 static void
6010 avr_asm_out_ctor (rtx symbol, int priority)
6012 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6013 default_ctor_section_asm_out_constructor (symbol, priority);
6016 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6018 static void
6019 avr_asm_out_dtor (rtx symbol, int priority)
6021 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6022 default_dtor_section_asm_out_destructor (symbol, priority);
6025 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6027 static bool
6028 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6030 if (TYPE_MODE (type) == BLKmode)
6032 HOST_WIDE_INT size = int_size_in_bytes (type);
6033 return (size == -1 || size > 8);
6035 else
6036 return false;
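/* E.g. a 10-byte structure is returned in memory through a hidden pointer,
   while an aggregate of 8 bytes or less typically still comes back in
   registers (see avr_function_value above).  */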
6039 #include "gt-avr.h"