gcc/config/avr/avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "df.h"
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
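/* Note: the LDD/STD displacement is limited to 0..63, and an access of
   GET_MODE_SIZE bytes starting at displacement d touches bytes up to
   d + size - 1, so the largest usable base displacement is 64 - size. */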
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_OS_main_function_p (tree);
56 static int avr_regs_to_save (HARD_REG_SET *);
57 static int get_sequence_length (rtx insns);
58 static int sequent_regs_live (void);
59 static const char *ptrreg_to_str (int);
60 static const char *cond_string (enum rtx_code);
61 static int avr_num_arg_regs (enum machine_mode, tree);
63 static RTX_CODE compare_condition (rtx insn);
64 static int compare_sign_p (rtx insn);
65 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
66 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
68 const struct attribute_spec avr_attribute_table[];
69 static bool avr_assemble_integer (rtx, unsigned int, int);
70 static void avr_file_start (void);
71 static void avr_file_end (void);
72 static void avr_asm_function_end_prologue (FILE *);
73 static void avr_asm_function_begin_epilogue (FILE *);
74 static void avr_insert_attributes (tree, tree *);
75 static void avr_asm_init_sections (void);
76 static unsigned int avr_section_type_flags (tree, const char *, int);
78 static void avr_reorg (void);
79 static void avr_asm_out_ctor (rtx, int);
80 static void avr_asm_out_dtor (rtx, int);
81 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
82 static bool avr_rtx_costs (rtx, int, int, int *);
83 static int avr_address_cost (rtx);
84 static bool avr_return_in_memory (const_tree, const_tree);
85 static struct machine_function * avr_init_machine_status (void);
86 /* Allocate registers from r25 down to r8 for function call parameters. */
87 #define FIRST_CUM_REG 26
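/* Note: arguments are allocated downwards (function_arg below returns
   gen_rtx_REG (mode, cum->regno - bytes)), so with FIRST_CUM_REG == 26
   the first argument ends at r25. */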
89 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
90 static GTY(()) rtx tmp_reg_rtx;
92 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
93 static GTY(()) rtx zero_reg_rtx;
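/* TMP_REGNO and ZERO_REGNO are r0 and r1: r0 is the scratch byte and r1
   is assumed to contain zero; both are reserved by the compiler and are
   saved/restored by interrupt prologues (see expand_prologue). */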
95 /* AVR register names {"r0", "r1", ..., "r31"} */
96 static const char *const avr_regnames[] = REGISTER_NAMES;
98 /* This holds the last insn address. */
99 static int last_insn_address = 0;
101 /* Preprocessor macros to define depending on MCU type. */
102 const char *avr_extra_arch_macro;
104 /* Current architecture. */
105 const struct base_arch_s *avr_current_arch;
107 section *progmem_section;
109 static const struct base_arch_s avr_arch_types[] = {
110 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
111 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
112 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
113 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
114 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
115 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
116 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
117 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
118 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
119 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
120 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
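/* Each row sets the feature flags of struct base_arch_s (see avr.h for
   the exact field names); the columns are roughly: assembler-only core,
   enhanced core (MUL), >8K flash (JMP/CALL), MOVW/LPMX, ELPM, ELPMX,
   EIJMP/EICALL, a reserved field, and the predefined architecture macro. */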
123 /* These names are used as the index into the avr_arch_types[] table
124 above. */
126 enum avr_arch
128 ARCH_UNKNOWN,
129 ARCH_AVR1,
130 ARCH_AVR2,
131 ARCH_AVR25,
132 ARCH_AVR3,
133 ARCH_AVR31,
134 ARCH_AVR35,
135 ARCH_AVR4,
136 ARCH_AVR5,
137 ARCH_AVR51,
138 ARCH_AVR6
141 struct mcu_type_s {
142 const char *const name;
143 int arch; /* index in avr_arch_types[] */
144 /* Must lie outside user's namespace. NULL == no macro. */
145 const char *const macro;
148 /* List of all known AVR MCU types - if updated, it has to be kept
149 in sync in several places (FIXME: is there a better way?):
150 - here
151 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
152 - t-avr (MULTILIB_MATCHES)
153 - gas/config/tc-avr.c
154 - avr-libc */
156 static const struct mcu_type_s avr_mcu_types[] = {
157 /* Classic, <= 8K. */
158 { "avr2", ARCH_AVR2, NULL },
159 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
160 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
161 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
162 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
163 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
164 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
165 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
166 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
167 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
168 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
169 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
170 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
171 /* Classic + MOVW, <= 8K. */
172 { "avr25", ARCH_AVR25, NULL },
173 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
174 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
175 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
176 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
177 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
178 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
179 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
180 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
181 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
182 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
183 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
184 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
185 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
186 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
187 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
188 /* Classic, > 8K, <= 64K. */
189 { "avr3", ARCH_AVR3, NULL },
190 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
191 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
192 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
193 /* Classic, == 128K. */
194 { "avr31", ARCH_AVR31, NULL },
195 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
196 /* Classic + MOVW + JMP/CALL. */
197 { "avr35", ARCH_AVR35, NULL },
198 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
199 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
200 /* Enhanced, <= 8K. */
201 { "avr4", ARCH_AVR4, NULL },
202 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
203 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
204 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
205 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
206 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
207 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
208 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
209 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
210 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
211 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
212 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
213 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
214 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
215 /* Enhanced, > 8K, <= 64K. */
216 { "avr5", ARCH_AVR5, NULL },
217 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
218 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
219 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
220 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
221 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
222 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
223 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
224 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
225 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
226 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
227 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
228 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
229 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
230 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
231 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
232 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
233 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
234 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
235 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
236 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
237 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
238 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
239 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
240 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
241 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
242 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
243 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
244 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
245 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
246 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
247 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
248 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
249 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
250 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
251 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
252 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
253 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
254 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
255 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
256 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
257 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
258 /* Enhanced, == 128K. */
259 { "avr51", ARCH_AVR51, NULL },
260 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
261 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
262 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
263 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
264 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
265 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
266 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
267 /* 3-Byte PC. */
268 { "avr6", ARCH_AVR6, NULL },
269 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
270 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
271 /* Assembler only. */
272 { "avr1", ARCH_AVR1, NULL },
273 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
274 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
275 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
276 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
277 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
278 { NULL, ARCH_UNKNOWN, NULL }
281 int avr_case_values_threshold = 30000;
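/* Threshold (used via CASE_VALUES_THRESHOLD) for the number of switch
   cases at which a jump table is used instead of a chain of comparisons;
   avr_override_options lowers it when table jumps are enabled. */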
283 /* Initialize the GCC target structure. */
284 #undef TARGET_ASM_ALIGNED_HI_OP
285 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
286 #undef TARGET_ASM_ALIGNED_SI_OP
287 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
288 #undef TARGET_ASM_UNALIGNED_HI_OP
289 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
290 #undef TARGET_ASM_UNALIGNED_SI_OP
291 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
292 #undef TARGET_ASM_INTEGER
293 #define TARGET_ASM_INTEGER avr_assemble_integer
294 #undef TARGET_ASM_FILE_START
295 #define TARGET_ASM_FILE_START avr_file_start
296 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
297 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
298 #undef TARGET_ASM_FILE_END
299 #define TARGET_ASM_FILE_END avr_file_end
301 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
302 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
303 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
304 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
305 #undef TARGET_ATTRIBUTE_TABLE
306 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
307 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
308 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
309 #undef TARGET_INSERT_ATTRIBUTES
310 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
311 #undef TARGET_SECTION_TYPE_FLAGS
312 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
313 #undef TARGET_RTX_COSTS
314 #define TARGET_RTX_COSTS avr_rtx_costs
315 #undef TARGET_ADDRESS_COST
316 #define TARGET_ADDRESS_COST avr_address_cost
317 #undef TARGET_MACHINE_DEPENDENT_REORG
318 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
320 #undef TARGET_RETURN_IN_MEMORY
321 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
323 #undef TARGET_STRICT_ARGUMENT_NAMING
324 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
326 struct gcc_target targetm = TARGET_INITIALIZER;
328 void
329 avr_override_options (void)
331 const struct mcu_type_s *t;
333 flag_delete_null_pointer_checks = 0;
335 for (t = avr_mcu_types; t->name; t++)
336 if (strcmp (t->name, avr_mcu_name) == 0)
337 break;
339 if (!t->name)
341 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
342 avr_mcu_name);
343 for (t = avr_mcu_types; t->name; t++)
344 fprintf (stderr," %s\n", t->name);
347 avr_current_arch = &avr_arch_types[t->arch];
348 avr_extra_arch_macro = t->macro;
350 if (optimize && !TARGET_NO_TABLEJUMP)
351 avr_case_values_threshold =
352 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
354 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
355 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
357 init_machine_status = avr_init_machine_status;
360 /* Map a register number to its register class. */
362 static const int reg_class_tab[]={
363 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
364 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
365 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
366 GENERAL_REGS, /* r0 - r15 */
367 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
368 LD_REGS, /* r16 - 23 */
369 ADDW_REGS,ADDW_REGS, /* r24,r25 */
370 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
371 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
372 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
373 STACK_REG,STACK_REG /* SPL,SPH */
376 /* Function to set up the backend function structure. */
378 static struct machine_function *
379 avr_init_machine_status (void)
381 return ((struct machine_function *)
382 ggc_alloc_cleared (sizeof (struct machine_function)));
385 /* Return register class for register R. */
387 enum reg_class
388 avr_regno_reg_class (int r)
390 if (r <= 33)
391 return reg_class_tab[r];
392 return ALL_REGS;
395 /* Return nonzero if FUNC is a naked function. */
397 static int
398 avr_naked_function_p (tree func)
400 tree a;
402 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
404 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
405 return a != NULL_TREE;
408 /* Return nonzero if FUNC is an interrupt function as specified
409 by the "interrupt" attribute. */
411 static int
412 interrupt_function_p (tree func)
414 tree a;
416 if (TREE_CODE (func) != FUNCTION_DECL)
417 return 0;
419 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
420 return a != NULL_TREE;
423 /* Return nonzero if FUNC is a signal function as specified
424 by the "signal" attribute. */
426 static int
427 signal_function_p (tree func)
429 tree a;
431 if (TREE_CODE (func) != FUNCTION_DECL)
432 return 0;
434 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
435 return a != NULL_TREE;
438 /* Return nonzero if FUNC is an OS_task function. */
440 static int
441 avr_OS_task_function_p (tree func)
443 tree a;
445 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
447 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
448 return a != NULL_TREE;
451 /* Return nonzero if FUNC is an OS_main function. */
453 static int
454 avr_OS_main_function_p (tree func)
456 tree a;
458 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
460 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
461 return a != NULL_TREE;
464 /* Return the number of hard registers to push/pop in the prologue/epilogue
465 of the current function, and optionally store these registers in SET. */
467 static int
468 avr_regs_to_save (HARD_REG_SET *set)
470 int reg, count;
471 int int_or_sig_p = (interrupt_function_p (current_function_decl)
472 || signal_function_p (current_function_decl));
474 if (!reload_completed)
475 cfun->machine->is_leaf = leaf_function_p ();
477 if (set)
478 CLEAR_HARD_REG_SET (*set);
479 count = 0;
481 /* No need to save any registers if the function never returns or
482 has the "OS_task" or "OS_main" attribute. */
483 if (TREE_THIS_VOLATILE (current_function_decl)
484 || cfun->machine->is_OS_task
485 || cfun->machine->is_OS_main)
486 return 0;
488 for (reg = 0; reg < 32; reg++)
490 /* Do not push/pop __tmp_reg__, __zero_reg__, or
491 any global register variables. */
492 if (fixed_regs[reg])
493 continue;
495 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
496 || (df_regs_ever_live_p (reg)
497 && (int_or_sig_p || !call_used_regs[reg])
498 && !(frame_pointer_needed
499 && (reg == REG_Y || reg == (REG_Y+1)))))
501 if (set)
502 SET_HARD_REG_BIT (*set, reg);
503 count++;
506 return count;
509 /* Compute offset between arg_pointer and frame_pointer. */
511 int
512 initial_elimination_offset (int from, int to)
514 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
515 return 0;
516 else
518 int offset = frame_pointer_needed ? 2 : 0;
519 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
521 offset += avr_regs_to_save (NULL);
522 return get_frame_size () + (avr_pc_size) + 1 + offset;
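/* Note: avr_pc_size is the size of the return address pushed by a call
   (3 bytes on devices with EIJMP/EICALL, else 2); the extra 1 reflects
   the AVR stack pointer pointing at the first free byte below the last
   pushed one. */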
526 /* Return 1 if the function epilogue is just a single "ret". */
528 int
529 avr_simple_epilogue (void)
531 return (! frame_pointer_needed
532 && get_frame_size () == 0
533 && avr_regs_to_save (NULL) == 0
534 && ! interrupt_function_p (current_function_decl)
535 && ! signal_function_p (current_function_decl)
536 && ! avr_naked_function_p (current_function_decl)
537 && ! TREE_THIS_VOLATILE (current_function_decl));
540 /* Return the length of the unbroken sequence of live registers, or 0 if the live registers do not form one contiguous sequence. */
542 static int
543 sequent_regs_live (void)
545 int reg;
546 int live_seq=0;
547 int cur_seq=0;
549 for (reg = 0; reg < 18; ++reg)
551 if (!call_used_regs[reg])
553 if (df_regs_ever_live_p (reg))
555 ++live_seq;
556 ++cur_seq;
558 else
559 cur_seq = 0;
563 if (!frame_pointer_needed)
565 if (df_regs_ever_live_p (REG_Y))
567 ++live_seq;
568 ++cur_seq;
570 else
571 cur_seq = 0;
573 if (df_regs_ever_live_p (REG_Y+1))
575 ++live_seq;
576 ++cur_seq;
578 else
579 cur_seq = 0;
581 else
583 cur_seq += 2;
584 live_seq += 2;
586 return (cur_seq == live_seq) ? live_seq : 0;
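/* The result is used only by the -mcall-prologues code in
   expand_prologue/expand_epilogue: the out-of-line save and restore
   helpers can handle only one unbroken block of registers. */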
589 /* Return the combined length of the sequence of insns INSNS. */
591 static int
592 get_sequence_length (rtx insns)
594 rtx insn;
595 int length;
597 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
598 length += get_attr_length (insn);
600 return length;
603 /* Emit RTL for the function prologue. */
605 void
606 expand_prologue (void)
608 int live_seq;
609 HARD_REG_SET set;
610 int minimize;
611 HOST_WIDE_INT size = get_frame_size();
612 /* Define templates for push instructions. */
613 rtx pushbyte = gen_rtx_MEM (QImode,
614 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
615 rtx pushword = gen_rtx_MEM (HImode,
616 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
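/* A MEM with POST_DEC of the stack pointer models the AVR PUSH
   instruction, which stores the byte and then decrements SP. */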
617 rtx insn;
619 last_insn_address = 0;
621 /* Init cfun->machine. */
622 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
623 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
624 cfun->machine->is_signal = signal_function_p (current_function_decl);
625 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
626 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
628 /* Prologue: naked. */
629 if (cfun->machine->is_naked)
631 return;
634 avr_regs_to_save (&set);
635 live_seq = sequent_regs_live ();
636 minimize = (TARGET_CALL_PROLOGUES
637 && !cfun->machine->is_interrupt
638 && !cfun->machine->is_signal
639 && !cfun->machine->is_OS_task
640 && !cfun->machine->is_OS_main
641 && live_seq);
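/* Interrupt and signal handlers must preserve __zero_reg__, __tmp_reg__
   and SREG (and RAMPZ where present and Z is saved), since the
   interrupted code may depend on them; they are pushed here and
   restored again in expand_epilogue. */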
643 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
645 if (cfun->machine->is_interrupt)
647 /* Enable interrupts. */
648 insn = emit_insn (gen_enable_interrupt ());
649 RTX_FRAME_RELATED_P (insn) = 1;
652 /* Push zero reg. */
653 insn = emit_move_insn (pushbyte, zero_reg_rtx);
654 RTX_FRAME_RELATED_P (insn) = 1;
656 /* Push tmp reg. */
657 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
658 RTX_FRAME_RELATED_P (insn) = 1;
660 /* Push SREG. */
661 insn = emit_move_insn (tmp_reg_rtx,
662 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
663 RTX_FRAME_RELATED_P (insn) = 1;
664 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
665 RTX_FRAME_RELATED_P (insn) = 1;
667 /* Push RAMPZ. */
668 if(AVR_HAVE_RAMPZ
669 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
671 insn = emit_move_insn (tmp_reg_rtx,
672 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
673 RTX_FRAME_RELATED_P (insn) = 1;
674 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
675 RTX_FRAME_RELATED_P (insn) = 1;
678 /* Clear zero reg. */
679 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
680 RTX_FRAME_RELATED_P (insn) = 1;
682 /* Prevent any attempt to delete the setting of ZERO_REG! */
683 emit_use (zero_reg_rtx);
685 if (minimize && (frame_pointer_needed
686 || (AVR_2_BYTE_PC && live_seq > 6)
687 || live_seq > 7))
689 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
690 gen_int_mode (size, HImode));
691 RTX_FRAME_RELATED_P (insn) = 1;
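/* With -mcall-prologues the registers are saved by jumping into the
   __prologue_saves__ helper in libgcc; live_seq selects how much of the
   helper's push sequence is executed, and R26/R27 carry the frame size. */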
693 insn =
694 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
695 gen_int_mode (size + live_seq, HImode)));
696 RTX_FRAME_RELATED_P (insn) = 1;
698 else
700 int reg;
701 for (reg = 0; reg < 32; ++reg)
703 if (TEST_HARD_REG_BIT (set, reg))
705 /* Emit push of register to save. */
706 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
707 RTX_FRAME_RELATED_P (insn) = 1;
710 if (frame_pointer_needed)
712 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
714 /* Push frame pointer. */
715 insn = emit_move_insn (pushword, frame_pointer_rtx);
716 RTX_FRAME_RELATED_P (insn) = 1;
719 if (!size)
721 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
722 RTX_FRAME_RELATED_P (insn) = 1;
724 else
726 /* Creating a frame can be done by direct manipulation of the
727 stack or via the frame pointer. These two methods are:
728 fp = sp
729 fp -= size
730 sp = fp
731 or
732 sp -= size
733 fp = sp
734 The optimum method depends on function type, stack and frame size.
735 To avoid complex logic, both methods are tested and the shortest
736 sequence is selected. */
737 rtx myfp;
738 rtx fp_plus_insns;
739 rtx sp_plus_insns = NULL_RTX;
741 if (TARGET_TINY_STACK)
743 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
744 over 'sbiw' (2 cycles, same size). */
745 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
747 else
749 /* Normal sized addition. */
750 myfp = frame_pointer_rtx;
753 /* Method 1-Adjust frame pointer. */
754 start_sequence ();
756 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
757 RTX_FRAME_RELATED_P (insn) = 1;
759 insn =
760 emit_move_insn (myfp,
761 gen_rtx_PLUS (GET_MODE(myfp), myfp,
762 gen_int_mode (-size,
763 GET_MODE(myfp))));
764 RTX_FRAME_RELATED_P (insn) = 1;
766 /* Copy to stack pointer. */
767 if (TARGET_TINY_STACK)
769 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
770 RTX_FRAME_RELATED_P (insn) = 1;
772 else if (TARGET_NO_INTERRUPTS
773 || cfun->machine->is_signal
774 || cfun->machine->is_OS_main)
776 insn =
777 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
778 frame_pointer_rtx));
779 RTX_FRAME_RELATED_P (insn) = 1;
781 else if (cfun->machine->is_interrupt)
783 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
784 frame_pointer_rtx));
785 RTX_FRAME_RELATED_P (insn) = 1;
787 else
789 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
790 RTX_FRAME_RELATED_P (insn) = 1;
793 fp_plus_insns = get_insns ();
794 end_sequence ();
796 /* Method 2-Adjust Stack pointer. */
797 if (size <= 6)
799 start_sequence ();
801 insn =
802 emit_move_insn (stack_pointer_rtx,
803 gen_rtx_PLUS (HImode,
804 stack_pointer_rtx,
805 gen_int_mode (-size,
806 HImode)));
807 RTX_FRAME_RELATED_P (insn) = 1;
809 insn =
810 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
811 RTX_FRAME_RELATED_P (insn) = 1;
813 sp_plus_insns = get_insns ();
814 end_sequence ();
817 /* Use shortest method. */
818 if (size <= 6 && (get_sequence_length (sp_plus_insns)
819 < get_sequence_length (fp_plus_insns)))
820 emit_insn (sp_plus_insns);
821 else
822 emit_insn (fp_plus_insns);
828 /* Output summary at end of function prologue. */
830 static void
831 avr_asm_function_end_prologue (FILE *file)
833 if (cfun->machine->is_naked)
835 fputs ("/* prologue: naked */\n", file);
837 else
839 if (cfun->machine->is_interrupt)
841 fputs ("/* prologue: Interrupt */\n", file);
843 else if (cfun->machine->is_signal)
845 fputs ("/* prologue: Signal */\n", file);
847 else
848 fputs ("/* prologue: function */\n", file);
850 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
851 get_frame_size());
855 /* Implement EPILOGUE_USES. */
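/* For interrupt and signal handlers every hard register is treated as
   live at the end of the function, since the interrupted code may be
   using any of them. */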
857 int
858 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
860 if (reload_completed
861 && cfun->machine
862 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
863 return 1;
864 return 0;
867 /* Output RTL epilogue. */
869 void
870 expand_epilogue (void)
872 int reg;
873 int live_seq;
874 HARD_REG_SET set;
875 int minimize;
876 HOST_WIDE_INT size = get_frame_size();
878 /* epilogue: naked */
879 if (cfun->machine->is_naked)
881 emit_jump_insn (gen_return ());
882 return;
885 avr_regs_to_save (&set);
886 live_seq = sequent_regs_live ();
887 minimize = (TARGET_CALL_PROLOGUES
888 && !cfun->machine->is_interrupt
889 && !cfun->machine->is_signal
890 && !cfun->machine->is_OS_task
891 && !cfun->machine->is_OS_main
892 && live_seq);
894 if (minimize && (frame_pointer_needed || live_seq > 4))
896 if (frame_pointer_needed)
898 /* Get rid of frame. */
899 emit_move_insn(frame_pointer_rtx,
900 gen_rtx_PLUS (HImode, frame_pointer_rtx,
901 gen_int_mode (size, HImode)));
903 else
905 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
908 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
910 else
912 if (frame_pointer_needed)
914 if (size)
916 /* Try two methods to adjust the stack and select the shortest. */
917 rtx myfp;
918 rtx fp_plus_insns;
919 rtx sp_plus_insns = NULL_RTX;
921 if (TARGET_TINY_STACK)
923 /* The high byte (r29) doesn't change - prefer 'subi'
924 (1 cycle) over 'sbiw' (2 cycles, same size). */
925 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
927 else
929 /* Normal sized addition. */
930 myfp = frame_pointer_rtx;
933 /* Method 1-Adjust frame pointer. */
934 start_sequence ();
936 emit_move_insn (myfp,
937 gen_rtx_PLUS (HImode, myfp,
938 gen_int_mode (size,
939 GET_MODE(myfp))));
941 /* Copy to stack pointer. */
942 if (TARGET_TINY_STACK)
944 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
946 else if (TARGET_NO_INTERRUPTS
947 || cfun->machine->is_signal)
949 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
950 frame_pointer_rtx));
952 else if (cfun->machine->is_interrupt)
954 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
955 frame_pointer_rtx));
957 else
959 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
962 fp_plus_insns = get_insns ();
963 end_sequence ();
965 /* Method 2-Adjust Stack pointer. */
966 if (size <= 5)
968 start_sequence ();
970 emit_move_insn (stack_pointer_rtx,
971 gen_rtx_PLUS (HImode, stack_pointer_rtx,
972 gen_int_mode (size,
973 HImode)));
975 sp_plus_insns = get_insns ();
976 end_sequence ();
979 /* Use shortest method. */
980 if (size <= 5 && (get_sequence_length (sp_plus_insns)
981 < get_sequence_length (fp_plus_insns)))
982 emit_insn (sp_plus_insns);
983 else
984 emit_insn (fp_plus_insns);
986 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
988 /* Restore previous frame_pointer. */
989 emit_insn (gen_pophi (frame_pointer_rtx));
992 /* Restore used registers. */
993 for (reg = 31; reg >= 0; --reg)
995 if (TEST_HARD_REG_BIT (set, reg))
996 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
998 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1000 /* Restore RAMPZ using tmp reg as scratch. */
1001 if(AVR_HAVE_RAMPZ
1002 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
1004 emit_insn (gen_popqi (tmp_reg_rtx));
1005 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
1006 tmp_reg_rtx);
1009 /* Restore SREG using tmp reg as scratch. */
1010 emit_insn (gen_popqi (tmp_reg_rtx));
1012 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
1013 tmp_reg_rtx);
1015 /* Restore tmp REG. */
1016 emit_insn (gen_popqi (tmp_reg_rtx));
1018 /* Restore zero REG. */
1019 emit_insn (gen_popqi (zero_reg_rtx));
1022 emit_jump_insn (gen_return ());
1026 /* Output summary messages at beginning of function epilogue. */
1028 static void
1029 avr_asm_function_begin_epilogue (FILE *file)
1031 fprintf (file, "/* epilogue start */\n");
1034 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1035 machine for a memory operand of mode MODE. */
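/* The accepted forms are: a pointer register; a constant address; a base
   register plus a constant displacement fitting MAX_LD_OFFSET (only Y, Z
   or the frame/arg pointer in strict mode); and pre-decrement or
   post-increment of a pointer register. */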
1037 int
1038 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1040 enum reg_class r = NO_REGS;
1042 if (TARGET_ALL_DEBUG)
1044 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1045 GET_MODE_NAME(mode),
1046 strict ? "(strict)": "",
1047 reload_completed ? "(reload_completed)": "",
1048 reload_in_progress ? "(reload_in_progress)": "",
1049 reg_renumber ? "(reg_renumber)" : "");
1050 if (GET_CODE (x) == PLUS
1051 && REG_P (XEXP (x, 0))
1052 && GET_CODE (XEXP (x, 1)) == CONST_INT
1053 && INTVAL (XEXP (x, 1)) >= 0
1054 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1055 && reg_renumber
1057 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1058 true_regnum (XEXP (x, 0)));
1059 debug_rtx (x);
1061 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1062 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1063 r = POINTER_REGS;
1064 else if (CONSTANT_ADDRESS_P (x))
1065 r = ALL_REGS;
1066 else if (GET_CODE (x) == PLUS
1067 && REG_P (XEXP (x, 0))
1068 && GET_CODE (XEXP (x, 1)) == CONST_INT
1069 && INTVAL (XEXP (x, 1)) >= 0)
1071 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1072 if (fit)
1074 if (! strict
1075 || REGNO (XEXP (x,0)) == REG_Y
1076 || REGNO (XEXP (x,0)) == REG_Z)
1077 r = BASE_POINTER_REGS;
1078 if (XEXP (x,0) == frame_pointer_rtx
1079 || XEXP (x,0) == arg_pointer_rtx)
1080 r = BASE_POINTER_REGS;
1082 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1083 r = POINTER_Y_REGS;
1085 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1086 && REG_P (XEXP (x, 0))
1087 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1088 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1090 r = POINTER_REGS;
1092 if (TARGET_ALL_DEBUG)
1094 fprintf (stderr, " ret = %c\n", r + '0');
1096 return r == NO_REGS ? 0 : (int)r;
1099 /* Attempt to replace X with a valid
1100 memory address for an operand of mode MODE. */
1102 rtx
1103 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1105 x = oldx;
1106 if (TARGET_ALL_DEBUG)
1108 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1109 debug_rtx (oldx);
1112 if (GET_CODE (oldx) == PLUS
1113 && REG_P (XEXP (oldx,0)))
1115 if (REG_P (XEXP (oldx,1)))
1116 x = force_reg (GET_MODE (oldx), oldx);
1117 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1119 int offs = INTVAL (XEXP (oldx,1));
1120 if (frame_pointer_rtx != XEXP (oldx,0))
1121 if (offs > MAX_LD_OFFSET (mode))
1123 if (TARGET_ALL_DEBUG)
1124 fprintf (stderr, "force_reg (big offset)\n");
1125 x = force_reg (GET_MODE (oldx), oldx);
1129 return x;
1133 /* Return a pointer register name as a string. */
1135 static const char *
1136 ptrreg_to_str (int regno)
1138 switch (regno)
1140 case REG_X: return "X";
1141 case REG_Y: return "Y";
1142 case REG_Z: return "Z";
1143 default:
1144 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1146 return NULL;
1149 /* Return the condition name as a string.
1150 Used when constructing conditional jumps. */
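/* The returned strings are used with a "br" prefix, i.e. the AVR branch
   mnemonics brne, breq, brge, brlt, brpl, brmi, brsh (same or higher,
   unsigned) and brlo (lower, unsigned). */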
1152 static const char *
1153 cond_string (enum rtx_code code)
1155 switch (code)
1157 case NE:
1158 return "ne";
1159 case EQ:
1160 return "eq";
1161 case GE:
1162 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1163 return "pl";
1164 else
1165 return "ge";
1166 case LT:
1167 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1168 return "mi";
1169 else
1170 return "lt";
1171 case GEU:
1172 return "sh";
1173 case LTU:
1174 return "lo";
1175 default:
1176 gcc_unreachable ();
1180 /* Output ADDR to FILE as address. */
1182 void
1183 print_operand_address (FILE *file, rtx addr)
1185 switch (GET_CODE (addr))
1187 case REG:
1188 fprintf (file, ptrreg_to_str (REGNO (addr)));
1189 break;
1191 case PRE_DEC:
1192 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1193 break;
1195 case POST_INC:
1196 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1197 break;
1199 default:
1200 if (CONSTANT_ADDRESS_P (addr)
1201 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1202 || GET_CODE (addr) == LABEL_REF))
1204 fprintf (file, "gs(");
1205 output_addr_const (file,addr);
1206 fprintf (file ,")");
1208 else
1209 output_addr_const (file, addr);
1214 /* Output X as assembler operand to file FILE. */
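/* Code letters 'A'..'D' select byte 0..3 of a multi-byte operand; '~'
   prints an 'r' when the device lacks JMP/CALL (turning jmp/call into
   rjmp/rcall); '!' prints an 'e' when EIJMP/EICALL are available; 'j'
   and 'k' print the (reversed) condition of a comparison operand. */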
1216 void
1217 print_operand (FILE *file, rtx x, int code)
1219 int abcd = 0;
1221 if (code >= 'A' && code <= 'D')
1222 abcd = code - 'A';
1224 if (code == '~')
1226 if (!AVR_HAVE_JMP_CALL)
1227 fputc ('r', file);
1229 else if (code == '!')
1231 if (AVR_HAVE_EIJMP_EICALL)
1232 fputc ('e', file);
1234 else if (REG_P (x))
1236 if (x == zero_reg_rtx)
1237 fprintf (file, "__zero_reg__");
1238 else
1239 fprintf (file, reg_names[true_regnum (x) + abcd]);
1241 else if (GET_CODE (x) == CONST_INT)
1242 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1243 else if (GET_CODE (x) == MEM)
1245 rtx addr = XEXP (x,0);
1247 if (CONSTANT_P (addr) && abcd)
1249 fputc ('(', file);
1250 output_address (addr);
1251 fprintf (file, ")+%d", abcd);
1253 else if (code == 'o')
1255 if (GET_CODE (addr) != PLUS)
1256 fatal_insn ("bad address, not (reg+disp):", addr);
1258 print_operand (file, XEXP (addr, 1), 0);
1260 else if (code == 'p' || code == 'r')
1262 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1263 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1265 if (code == 'p')
1266 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1267 else
1268 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1270 else if (GET_CODE (addr) == PLUS)
1272 print_operand_address (file, XEXP (addr,0));
1273 if (REGNO (XEXP (addr, 0)) == REG_X)
1274 fatal_insn ("internal compiler error. Bad address:"
1275 ,addr);
1276 fputc ('+', file);
1277 print_operand (file, XEXP (addr,1), code);
1279 else
1280 print_operand_address (file, addr);
1282 else if (GET_CODE (x) == CONST_DOUBLE)
1284 long val;
1285 REAL_VALUE_TYPE rv;
1286 if (GET_MODE (x) != SFmode)
1287 fatal_insn ("internal compiler error. Unknown mode:", x);
1288 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1289 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1290 fprintf (file, "0x%lx", val);
1292 else if (code == 'j')
1293 fputs (cond_string (GET_CODE (x)), file);
1294 else if (code == 'k')
1295 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1296 else
1297 print_operand_address (file, x);
1300 /* Update the condition code in the INSN. */
1302 void
1303 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1305 rtx set;
1307 switch (get_attr_cc (insn))
1309 case CC_NONE:
1310 /* Insn does not affect CC at all. */
1311 break;
1313 case CC_SET_N:
1314 CC_STATUS_INIT;
1315 break;
1317 case CC_SET_ZN:
1318 set = single_set (insn);
1319 CC_STATUS_INIT;
1320 if (set)
1322 cc_status.flags |= CC_NO_OVERFLOW;
1323 cc_status.value1 = SET_DEST (set);
1325 break;
1327 case CC_SET_CZN:
1328 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1329 The V flag may or may not be known but that's ok because
1330 alter_cond will change tests to use EQ/NE. */
1331 set = single_set (insn);
1332 CC_STATUS_INIT;
1333 if (set)
1335 cc_status.value1 = SET_DEST (set);
1336 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1338 break;
1340 case CC_COMPARE:
1341 set = single_set (insn);
1342 CC_STATUS_INIT;
1343 if (set)
1344 cc_status.value1 = SET_SRC (set);
1345 break;
1347 case CC_CLOBBER:
1348 /* Insn doesn't leave CC in a usable state. */
1349 CC_STATUS_INIT;
1351 /* Correct CC for the ashrqi3 insn with a CONST_INT shift count != 6. */
1352 set = single_set (insn);
1353 if (set)
1355 rtx src = SET_SRC (set);
1357 if (GET_CODE (src) == ASHIFTRT
1358 && GET_MODE (src) == QImode)
1360 rtx x = XEXP (src, 1);
1362 if (GET_CODE (x) == CONST_INT
1363 && INTVAL (x) > 0
1364 && INTVAL (x) != 6)
1366 cc_status.value1 = SET_DEST (set);
1367 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1371 break;
1375 /* Return maximum number of consecutive registers of
1376 class CLASS needed to hold a value of mode MODE. */
1378 int
1379 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1381 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1384 /* Choose mode for jump insn:
1385 1 - relative jump in range -63 <= x <= 62 ;
1386 2 - relative jump in range -2046 <= x <= 2045 ;
1387 3 - absolute jump (only for ATmega[16]03). */
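/* Mode 1 fits a conditional branch (brCC), mode 2 requires an rjmp,
   mode 3 requires jmp (see ret_cond_branch below). */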
1389 int
1390 avr_jump_mode (rtx x, rtx insn)
1392 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1393 ? XEXP (x, 0) : x));
1394 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1395 int jump_distance = cur_addr - dest_addr;
1397 if (-63 <= jump_distance && jump_distance <= 62)
1398 return 1;
1399 else if (-2046 <= jump_distance && jump_distance <= 2045)
1400 return 2;
1401 else if (AVR_HAVE_JMP_CALL)
1402 return 3;
1404 return 2;
1407 /* Return an AVR conditional branch command sequence.
1408 X is a comparison RTX.
1409 LEN is the length code returned by the avr_jump_mode function.
1410 If REVERSE is nonzero, the condition code in X must be reversed. */
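/* AVR has no single branch instruction for GT/GTU/LE/LEU, so these are
   synthesized from breq plus a signed or unsigned branch, with the skip
   offsets (.+2, .+4, .+6) given in bytes. */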
1412 const char *
1413 ret_cond_branch (rtx x, int len, int reverse)
1415 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1417 switch (cond)
1419 case GT:
1420 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1421 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1422 AS1 (brpl,%0)) :
1423 len == 2 ? (AS1 (breq,.+4) CR_TAB
1424 AS1 (brmi,.+2) CR_TAB
1425 AS1 (rjmp,%0)) :
1426 (AS1 (breq,.+6) CR_TAB
1427 AS1 (brmi,.+4) CR_TAB
1428 AS1 (jmp,%0)));
1430 else
1431 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1432 AS1 (brge,%0)) :
1433 len == 2 ? (AS1 (breq,.+4) CR_TAB
1434 AS1 (brlt,.+2) CR_TAB
1435 AS1 (rjmp,%0)) :
1436 (AS1 (breq,.+6) CR_TAB
1437 AS1 (brlt,.+4) CR_TAB
1438 AS1 (jmp,%0)));
1439 case GTU:
1440 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1441 AS1 (brsh,%0)) :
1442 len == 2 ? (AS1 (breq,.+4) CR_TAB
1443 AS1 (brlo,.+2) CR_TAB
1444 AS1 (rjmp,%0)) :
1445 (AS1 (breq,.+6) CR_TAB
1446 AS1 (brlo,.+4) CR_TAB
1447 AS1 (jmp,%0)));
1448 case LE:
1449 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1450 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1451 AS1 (brmi,%0)) :
1452 len == 2 ? (AS1 (breq,.+2) CR_TAB
1453 AS1 (brpl,.+2) CR_TAB
1454 AS1 (rjmp,%0)) :
1455 (AS1 (breq,.+2) CR_TAB
1456 AS1 (brpl,.+4) CR_TAB
1457 AS1 (jmp,%0)));
1458 else
1459 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1460 AS1 (brlt,%0)) :
1461 len == 2 ? (AS1 (breq,.+2) CR_TAB
1462 AS1 (brge,.+2) CR_TAB
1463 AS1 (rjmp,%0)) :
1464 (AS1 (breq,.+2) CR_TAB
1465 AS1 (brge,.+4) CR_TAB
1466 AS1 (jmp,%0)));
1467 case LEU:
1468 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1469 AS1 (brlo,%0)) :
1470 len == 2 ? (AS1 (breq,.+2) CR_TAB
1471 AS1 (brsh,.+2) CR_TAB
1472 AS1 (rjmp,%0)) :
1473 (AS1 (breq,.+2) CR_TAB
1474 AS1 (brsh,.+4) CR_TAB
1475 AS1 (jmp,%0)));
1476 default:
1477 if (reverse)
1479 switch (len)
1481 case 1:
1482 return AS1 (br%k1,%0);
1483 case 2:
1484 return (AS1 (br%j1,.+2) CR_TAB
1485 AS1 (rjmp,%0));
1486 default:
1487 return (AS1 (br%j1,.+4) CR_TAB
1488 AS1 (jmp,%0));
1491 else
1493 switch (len)
1495 case 1:
1496 return AS1 (br%j1,%0);
1497 case 2:
1498 return (AS1 (br%k1,.+2) CR_TAB
1499 AS1 (rjmp,%0));
1500 default:
1501 return (AS1 (br%k1,.+4) CR_TAB
1502 AS1 (jmp,%0));
1506 return "";
1509 /* Predicate for an immediate operand that fits into a byte (8 bits). */
1511 int
1512 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1514 return (GET_CODE (op) == CONST_INT
1515 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1518 /* Output all insn addresses and their sizes into the assembly language
1519 output file. This is helpful for debugging whether the length attributes
1520 in the md file are correct.
1521 Output insn cost for next insn. */
1523 void
1524 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1525 int num_operands ATTRIBUTE_UNUSED)
1527 int uid = INSN_UID (insn);
1529 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1531 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1532 INSN_ADDRESSES (uid),
1533 INSN_ADDRESSES (uid) - last_insn_address,
1534 rtx_cost (PATTERN (insn), INSN));
1536 last_insn_address = INSN_ADDRESSES (uid);
1539 /* Return 0 if undefined, 1 if always true or always false. */
1541 int
1542 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1544 unsigned int max = (mode == QImode ? 0xff :
1545 mode == HImode ? 0xffff :
1546 mode == SImode ? 0xffffffff : 0);
1547 if (max && operator && GET_CODE (x) == CONST_INT)
1549 if (unsigned_condition (operator) != operator)
1550 max >>= 1;
1552 if (max != (INTVAL (x) & max)
1553 && INTVAL (x) != 0xff)
1554 return 1;
1556 return 0;
1560 /* Returns nonzero if REGNO is the number of a hard
1561 register in which function arguments are sometimes passed. */
1563 int
1564 function_arg_regno_p (int r)
1566 return (r >= 8 && r <= 25);
1569 /* Initialize the variable CUM to the state at the beginning
1570 of the argument list. */
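/* Up to 18 bytes of arguments are passed in registers, allocated
   downwards from r25; for variadic (stdarg) functions all arguments
   go on the stack (nregs is set to 0). */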
1572 void
1573 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1574 tree fndecl ATTRIBUTE_UNUSED)
1576 cum->nregs = 18;
1577 cum->regno = FIRST_CUM_REG;
1578 if (!libname && fntype)
1580 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1581 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1582 != void_type_node));
1583 if (stdarg)
1584 cum->nregs = 0;
1588 /* Returns the number of registers to allocate for a function argument. */
1590 static int
1591 avr_num_arg_regs (enum machine_mode mode, tree type)
1593 int size;
1595 if (mode == BLKmode)
1596 size = int_size_in_bytes (type);
1597 else
1598 size = GET_MODE_SIZE (mode);
1600 /* Align all function arguments to start in even-numbered registers.
1601 Odd-sized arguments leave holes above them. */
1603 return (size + 1) & ~1;
1606 /* Controls whether a function argument is passed
1607 in a register, and which register. */
1609 rtx
1610 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1611 int named ATTRIBUTE_UNUSED)
1613 int bytes = avr_num_arg_regs (mode, type);
1615 if (cum->nregs && bytes <= cum->nregs)
1616 return gen_rtx_REG (mode, cum->regno - bytes);
1618 return NULL_RTX;
1621 /* Update the summarizer variable CUM to advance past an argument
1622 in the argument list. */
1624 void
1625 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1626 int named ATTRIBUTE_UNUSED)
1628 int bytes = avr_num_arg_regs (mode, type);
1630 cum->nregs -= bytes;
1631 cum->regno -= bytes;
1633 if (cum->nregs <= 0)
1635 cum->nregs = 0;
1636 cum->regno = FIRST_CUM_REG;
1640 /***********************************************************************
1641 Functions for outputting various mov's for various modes
1642 ************************************************************************/
1643 const char *
1644 output_movqi (rtx insn, rtx operands[], int *l)
1646 int dummy;
1647 rtx dest = operands[0];
1648 rtx src = operands[1];
1649 int *real_l = l;
1651 if (!l)
1652 l = &dummy;
1654 *l = 1;
1656 if (register_operand (dest, QImode))
1658 if (register_operand (src, QImode)) /* mov r,r */
1660 if (test_hard_reg_class (STACK_REG, dest))
1661 return AS2 (out,%0,%1);
1662 else if (test_hard_reg_class (STACK_REG, src))
1663 return AS2 (in,%0,%1);
1665 return AS2 (mov,%0,%1);
1667 else if (CONSTANT_P (src))
1669 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1670 return AS2 (ldi,%0,lo8(%1));
1672 if (GET_CODE (src) == CONST_INT)
1674 if (src == const0_rtx) /* mov r,L */
1675 return AS1 (clr,%0);
1676 else if (src == const1_rtx)
1678 *l = 2;
1679 return (AS1 (clr,%0) CR_TAB
1680 AS1 (inc,%0));
1682 else if (src == constm1_rtx)
1684 /* Load the immediate constant -1 into any register. */
1685 *l = 2;
1686 return (AS1 (clr,%0) CR_TAB
1687 AS1 (dec,%0));
1689 else
1691 int bit_nr = exact_log2 (INTVAL (src));
1693 if (bit_nr >= 0)
1695 *l = 3;
1696 if (!real_l)
1697 output_asm_insn ((AS1 (clr,%0) CR_TAB
1698 "set"), operands);
1699 if (!real_l)
1700 avr_output_bld (operands, bit_nr);
1702 return "";
1707 /* Last resort, larger than loading from memory. */
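/* Only r16-r31 (LD_REGS) accept LDI, so for a low destination register
   the constant is loaded through r31, which is preserved in __tmp_reg__
   around the sequence. */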
1708 *l = 4;
1709 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1710 AS2 (ldi,r31,lo8(%1)) CR_TAB
1711 AS2 (mov,%0,r31) CR_TAB
1712 AS2 (mov,r31,__tmp_reg__));
1714 else if (GET_CODE (src) == MEM)
1715 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1717 else if (GET_CODE (dest) == MEM)
1719 const char *template;
1721 if (src == const0_rtx)
1722 operands[1] = zero_reg_rtx;
1724 template = out_movqi_mr_r (insn, operands, real_l);
1726 if (!real_l)
1727 output_asm_insn (template, operands);
1729 operands[1] = src;
1731 return "";
1735 const char *
1736 output_movhi (rtx insn, rtx operands[], int *l)
1738 int dummy;
1739 rtx dest = operands[0];
1740 rtx src = operands[1];
1741 int *real_l = l;
1743 if (!l)
1744 l = &dummy;
1746 if (register_operand (dest, HImode))
1748 if (register_operand (src, HImode)) /* mov r,r */
1750 if (test_hard_reg_class (STACK_REG, dest))
1752 if (TARGET_TINY_STACK)
1753 return *l = 1, AS2 (out,__SP_L__,%A1);
1754 /* Use a simple write to the stack pointer if no interrupts are
1755 used. */
1756 else if (TARGET_NO_INTERRUPTS)
1757 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1758 AS2 (out,__SP_L__,%A1));
1759 *l = 5;
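/* The two bytes of the stack pointer must be written without an
   interrupt in between: save SREG, disable interrupts, write SPH,
   restore SREG and then write SPL (relying on the one-instruction
   interrupt latency after the I bit is restored). */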
1760 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1761 "cli" CR_TAB
1762 AS2 (out,__SP_H__,%B1) CR_TAB
1763 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1764 AS2 (out,__SP_L__,%A1));
1766 else if (test_hard_reg_class (STACK_REG, src))
1768 *l = 2;
1769 return (AS2 (in,%A0,__SP_L__) CR_TAB
1770 AS2 (in,%B0,__SP_H__));
1773 if (AVR_HAVE_MOVW)
1775 *l = 1;
1776 return (AS2 (movw,%0,%1));
1778 else
1780 *l = 2;
1781 return (AS2 (mov,%A0,%A1) CR_TAB
1782 AS2 (mov,%B0,%B1));
1785 else if (CONSTANT_P (src))
1787 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1789 *l = 2;
1790 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1791 AS2 (ldi,%B0,hi8(%1)));
1794 if (GET_CODE (src) == CONST_INT)
1796 if (src == const0_rtx) /* mov r,L */
1798 *l = 2;
1799 return (AS1 (clr,%A0) CR_TAB
1800 AS1 (clr,%B0));
1802 else if (src == const1_rtx)
1804 *l = 3;
1805 return (AS1 (clr,%A0) CR_TAB
1806 AS1 (clr,%B0) CR_TAB
1807 AS1 (inc,%A0));
1809 else if (src == constm1_rtx)
1811 /* Load the immediate constant -1 into any register. */
1812 *l = 3;
1813 return (AS1 (clr,%0) CR_TAB
1814 AS1 (dec,%A0) CR_TAB
1815 AS2 (mov,%B0,%A0));
1817 else
1819 int bit_nr = exact_log2 (INTVAL (src));
1821 if (bit_nr >= 0)
1823 *l = 4;
1824 if (!real_l)
1825 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1826 AS1 (clr,%B0) CR_TAB
1827 "set"), operands);
1828 if (!real_l)
1829 avr_output_bld (operands, bit_nr);
1831 return "";
1835 if ((INTVAL (src) & 0xff) == 0)
1837 *l = 5;
1838 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1839 AS1 (clr,%A0) CR_TAB
1840 AS2 (ldi,r31,hi8(%1)) CR_TAB
1841 AS2 (mov,%B0,r31) CR_TAB
1842 AS2 (mov,r31,__tmp_reg__));
1844 else if ((INTVAL (src) & 0xff00) == 0)
1846 *l = 5;
1847 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1848 AS2 (ldi,r31,lo8(%1)) CR_TAB
1849 AS2 (mov,%A0,r31) CR_TAB
1850 AS1 (clr,%B0) CR_TAB
1851 AS2 (mov,r31,__tmp_reg__));
1855 /* Last resort, equal to loading from memory. */
1856 *l = 6;
1857 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1858 AS2 (ldi,r31,lo8(%1)) CR_TAB
1859 AS2 (mov,%A0,r31) CR_TAB
1860 AS2 (ldi,r31,hi8(%1)) CR_TAB
1861 AS2 (mov,%B0,r31) CR_TAB
1862 AS2 (mov,r31,__tmp_reg__));
1864 else if (GET_CODE (src) == MEM)
1865 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1867 else if (GET_CODE (dest) == MEM)
1869 const char *template;
1871 if (src == const0_rtx)
1872 operands[1] = zero_reg_rtx;
1874 template = out_movhi_mr_r (insn, operands, real_l);
1876 if (!real_l)
1877 output_asm_insn (template, operands);
1879 operands[1] = src;
1880 return "";
1882 fatal_insn ("invalid insn:", insn);
1883 return "";
1886 const char *
1887 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1889 rtx dest = op[0];
1890 rtx src = op[1];
1891 rtx x = XEXP (src, 0);
1892 int dummy;
1894 if (!l)
1895 l = &dummy;
1897 if (CONSTANT_ADDRESS_P (x))
1899 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1901 *l = 1;
1902 return AS2 (in,%0,__SREG__);
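/* Addresses in the I/O range can be read with IN; the data-space
   address is converted to an I/O address by subtracting 0x20. */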
1904 if (optimize > 0 && io_address_operand (x, QImode))
1906 *l = 1;
1907 return AS2 (in,%0,%1-0x20);
1909 *l = 2;
1910 return AS2 (lds,%0,%1);
1912 /* memory access by reg+disp */
1913 else if (GET_CODE (x) == PLUS
1914 && REG_P (XEXP (x,0))
1915 && GET_CODE (XEXP (x,1)) == CONST_INT)
1917 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1919 int disp = INTVAL (XEXP (x,1));
1920 if (REGNO (XEXP (x,0)) != REG_Y)
1921 fatal_insn ("incorrect insn:",insn);
1923 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1924 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1925 AS2 (ldd,%0,Y+63) CR_TAB
1926 AS2 (sbiw,r28,%o1-63));
1928 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1929 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1930 AS2 (ld,%0,Y) CR_TAB
1931 AS2 (subi,r28,lo8(%o1)) CR_TAB
1932 AS2 (sbci,r29,hi8(%o1)));
1934 else if (REGNO (XEXP (x,0)) == REG_X)
1936 /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS must exclude
1937 it, but it can still occur with extreme optimization options. */
1938 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1939 || reg_unused_after (insn, XEXP (x,0)))
1940 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1941 AS2 (ld,%0,X));
1943 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1944 AS2 (ld,%0,X) CR_TAB
1945 AS2 (sbiw,r26,%o1));
1947 *l = 1;
1948 return AS2 (ldd,%0,%1);
1950 *l = 1;
1951 return AS2 (ld,%0,%1);
1954 const char *
1955 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1957 rtx dest = op[0];
1958 rtx src = op[1];
1959 rtx base = XEXP (src, 0);
1960 int reg_dest = true_regnum (dest);
1961 int reg_base = true_regnum (base);
1962 /* "volatile" forces reading low byte first, even if less efficient,
1963 for correct operation with 16-bit I/O registers. */
1964 int mem_volatile_p = MEM_VOLATILE_P (src);
1965 int tmp;
1967 if (!l)
1968 l = &tmp;
1970 if (reg_base > 0)
1972 if (reg_dest == reg_base) /* R = (R) */
1974 *l = 3;
1975 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1976 AS2 (ld,%B0,%1) CR_TAB
1977 AS2 (mov,%A0,__tmp_reg__));
1979 else if (reg_base == REG_X) /* (R26) */
1981 if (reg_unused_after (insn, base))
1983 *l = 2;
1984 return (AS2 (ld,%A0,X+) CR_TAB
1985 AS2 (ld,%B0,X));
1987 *l = 3;
1988 return (AS2 (ld,%A0,X+) CR_TAB
1989 AS2 (ld,%B0,X) CR_TAB
1990 AS2 (sbiw,r26,1));
1992 else /* (R) */
1994 *l = 2;
1995 return (AS2 (ld,%A0,%1) CR_TAB
1996 AS2 (ldd,%B0,%1+1));
1999 else if (GET_CODE (base) == PLUS) /* (R + i) */
2001 int disp = INTVAL (XEXP (base, 1));
2002 int reg_base = true_regnum (XEXP (base, 0));
2004 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2006 if (REGNO (XEXP (base, 0)) != REG_Y)
2007 fatal_insn ("incorrect insn:",insn);
2009 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2010 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2011 AS2 (ldd,%A0,Y+62) CR_TAB
2012 AS2 (ldd,%B0,Y+63) CR_TAB
2013 AS2 (sbiw,r28,%o1-62));
2015 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2016 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2017 AS2 (ld,%A0,Y) CR_TAB
2018 AS2 (ldd,%B0,Y+1) CR_TAB
2019 AS2 (subi,r28,lo8(%o1)) CR_TAB
2020 AS2 (sbci,r29,hi8(%o1)));
2022 if (reg_base == REG_X)
2024 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2025 it, but it can still occur with extreme
2026 optimization options. */
2028 *l = 4;
2029 if (reg_base == reg_dest)
2030 return (AS2 (adiw,r26,%o1) CR_TAB
2031 AS2 (ld,__tmp_reg__,X+) CR_TAB
2032 AS2 (ld,%B0,X) CR_TAB
2033 AS2 (mov,%A0,__tmp_reg__));
2035 return (AS2 (adiw,r26,%o1) CR_TAB
2036 AS2 (ld,%A0,X+) CR_TAB
2037 AS2 (ld,%B0,X) CR_TAB
2038 AS2 (sbiw,r26,%o1+1));
2041 if (reg_base == reg_dest)
2043 *l = 3;
2044 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2045 AS2 (ldd,%B0,%B1) CR_TAB
2046 AS2 (mov,%A0,__tmp_reg__));
2049 *l = 2;
2050 return (AS2 (ldd,%A0,%A1) CR_TAB
2051 AS2 (ldd,%B0,%B1));
2053 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2055 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2056 fatal_insn ("incorrect insn:", insn);
2058 if (mem_volatile_p)
2060 if (REGNO (XEXP (base, 0)) == REG_X)
2062 *l = 4;
2063 return (AS2 (sbiw,r26,2) CR_TAB
2064 AS2 (ld,%A0,X+) CR_TAB
2065 AS2 (ld,%B0,X) CR_TAB
2066 AS2 (sbiw,r26,1));
2068 else
2070 *l = 3;
2071 return (AS2 (sbiw,%r1,2) CR_TAB
2072 AS2 (ld,%A0,%p1) CR_TAB
2073 AS2 (ldd,%B0,%p1+1));
2077 *l = 2;
2078 return (AS2 (ld,%B0,%1) CR_TAB
2079 AS2 (ld,%A0,%1));
2081 else if (GET_CODE (base) == POST_INC) /* (R++) */
2083 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2084 fatal_insn ("incorrect insn:", insn);
2086 *l = 2;
2087 return (AS2 (ld,%A0,%1) CR_TAB
2088 AS2 (ld,%B0,%1));
2090 else if (CONSTANT_ADDRESS_P (base))
2092 if (optimize > 0 && io_address_operand (base, HImode))
2094 *l = 2;
2095 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2096 AS2 (in,%B0,%B1-0x20));
2098 *l = 4;
2099 return (AS2 (lds,%A0,%A1) CR_TAB
2100 AS2 (lds,%B0,%B1));
2103 fatal_insn ("unknown move insn:",insn);
2104 return "";
2107 const char *
2108 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2110 rtx dest = op[0];
2111 rtx src = op[1];
2112 rtx base = XEXP (src, 0);
2113 int reg_dest = true_regnum (dest);
2114 int reg_base = true_regnum (base);
2115 int tmp;
2117 if (!l)
2118 l = &tmp;
2120 if (reg_base > 0)
2122 if (reg_base == REG_X) /* (R26) */
2124 if (reg_dest == REG_X)
2125 /* "ld r26,-X" is undefined */
2126 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2127 AS2 (ld,r29,X) CR_TAB
2128 AS2 (ld,r28,-X) CR_TAB
2129 AS2 (ld,__tmp_reg__,-X) CR_TAB
2130 AS2 (sbiw,r26,1) CR_TAB
2131 AS2 (ld,r26,X) CR_TAB
2132 AS2 (mov,r27,__tmp_reg__));
2133 else if (reg_dest == REG_X - 2)
2134 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2135 AS2 (ld,%B0,X+) CR_TAB
2136 AS2 (ld,__tmp_reg__,X+) CR_TAB
2137 AS2 (ld,%D0,X) CR_TAB
2138 AS2 (mov,%C0,__tmp_reg__));
2139 else if (reg_unused_after (insn, base))
2140 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2141 AS2 (ld,%B0,X+) CR_TAB
2142 AS2 (ld,%C0,X+) CR_TAB
2143 AS2 (ld,%D0,X));
2144 else
2145 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2146 AS2 (ld,%B0,X+) CR_TAB
2147 AS2 (ld,%C0,X+) CR_TAB
2148 AS2 (ld,%D0,X) CR_TAB
2149 AS2 (sbiw,r26,3));
2151 else
2153 if (reg_dest == reg_base)
2154 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2155 AS2 (ldd,%C0,%1+2) CR_TAB
2156 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2157 AS2 (ld,%A0,%1) CR_TAB
2158 AS2 (mov,%B0,__tmp_reg__));
2159 else if (reg_base == reg_dest + 2)
2160 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2161 AS2 (ldd,%B0,%1+1) CR_TAB
2162 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2163 AS2 (ldd,%D0,%1+3) CR_TAB
2164 AS2 (mov,%C0,__tmp_reg__));
2165 else
2166 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2167 AS2 (ldd,%B0,%1+1) CR_TAB
2168 AS2 (ldd,%C0,%1+2) CR_TAB
2169 AS2 (ldd,%D0,%1+3));
2172 else if (GET_CODE (base) == PLUS) /* (R + i) */
2174 int disp = INTVAL (XEXP (base, 1));
2176 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2178 if (REGNO (XEXP (base, 0)) != REG_Y)
2179 fatal_insn ("incorrect insn:",insn);
2181 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2182 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2183 AS2 (ldd,%A0,Y+60) CR_TAB
2184 AS2 (ldd,%B0,Y+61) CR_TAB
2185 AS2 (ldd,%C0,Y+62) CR_TAB
2186 AS2 (ldd,%D0,Y+63) CR_TAB
2187 AS2 (sbiw,r28,%o1-60));
2189 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2190 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2191 AS2 (ld,%A0,Y) CR_TAB
2192 AS2 (ldd,%B0,Y+1) CR_TAB
2193 AS2 (ldd,%C0,Y+2) CR_TAB
2194 AS2 (ldd,%D0,Y+3) CR_TAB
2195 AS2 (subi,r28,lo8(%o1)) CR_TAB
2196 AS2 (sbci,r29,hi8(%o1)));
2199 reg_base = true_regnum (XEXP (base, 0));
2200 if (reg_base == REG_X)
2202 /* R = (X + d) */
2203 if (reg_dest == REG_X)
2205 *l = 7;
2206 /* "ld r26,-X" is undefined */
2207 return (AS2 (adiw,r26,%o1+3) CR_TAB
2208 AS2 (ld,r29,X) CR_TAB
2209 AS2 (ld,r28,-X) CR_TAB
2210 AS2 (ld,__tmp_reg__,-X) CR_TAB
2211 AS2 (sbiw,r26,1) CR_TAB
2212 AS2 (ld,r26,X) CR_TAB
2213 AS2 (mov,r27,__tmp_reg__));
2215 *l = 6;
2216 if (reg_dest == REG_X - 2)
2217 return (AS2 (adiw,r26,%o1) CR_TAB
2218 AS2 (ld,r24,X+) CR_TAB
2219 AS2 (ld,r25,X+) CR_TAB
2220 AS2 (ld,__tmp_reg__,X+) CR_TAB
2221 AS2 (ld,r27,X) CR_TAB
2222 AS2 (mov,r26,__tmp_reg__));
2224 return (AS2 (adiw,r26,%o1) CR_TAB
2225 AS2 (ld,%A0,X+) CR_TAB
2226 AS2 (ld,%B0,X+) CR_TAB
2227 AS2 (ld,%C0,X+) CR_TAB
2228 AS2 (ld,%D0,X) CR_TAB
2229 AS2 (sbiw,r26,%o1+3));
2231 if (reg_dest == reg_base)
2232 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2233 AS2 (ldd,%C0,%C1) CR_TAB
2234 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2235 AS2 (ldd,%A0,%A1) CR_TAB
2236 AS2 (mov,%B0,__tmp_reg__));
2237 else if (reg_dest == reg_base - 2)
2238 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2239 AS2 (ldd,%B0,%B1) CR_TAB
2240 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2241 AS2 (ldd,%D0,%D1) CR_TAB
2242 AS2 (mov,%C0,__tmp_reg__));
2243 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2244 AS2 (ldd,%B0,%B1) CR_TAB
2245 AS2 (ldd,%C0,%C1) CR_TAB
2246 AS2 (ldd,%D0,%D1));
2248 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2249 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2250 AS2 (ld,%C0,%1) CR_TAB
2251 AS2 (ld,%B0,%1) CR_TAB
2252 AS2 (ld,%A0,%1));
2253 else if (GET_CODE (base) == POST_INC) /* (R++) */
2254 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2255 AS2 (ld,%B0,%1) CR_TAB
2256 AS2 (ld,%C0,%1) CR_TAB
2257 AS2 (ld,%D0,%1));
2258 else if (CONSTANT_ADDRESS_P (base))
2259 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2260 AS2 (lds,%B0,%B1) CR_TAB
2261 AS2 (lds,%C0,%C1) CR_TAB
2262 AS2 (lds,%D0,%D1));
2264 fatal_insn ("unknown move insn:",insn);
2265 return "";
2268 const char *
2269 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2271 rtx dest = op[0];
2272 rtx src = op[1];
2273 rtx base = XEXP (dest, 0);
2274 int reg_base = true_regnum (base);
2275 int reg_src = true_regnum (src);
2276 int tmp;
2278 if (!l)
2279 l = &tmp;
2281 if (CONSTANT_ADDRESS_P (base))
2282 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2283 AS2 (sts,%B0,%B1) CR_TAB
2284 AS2 (sts,%C0,%C1) CR_TAB
2285 AS2 (sts,%D0,%D1));
2286 if (reg_base > 0) /* (r) */
2288 if (reg_base == REG_X) /* (R26) */
2290 if (reg_src == REG_X)
2292 /* "st X+,r26" is undefined */
2293 if (reg_unused_after (insn, base))
2294 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2295 AS2 (st,X,r26) CR_TAB
2296 AS2 (adiw,r26,1) CR_TAB
2297 AS2 (st,X+,__tmp_reg__) CR_TAB
2298 AS2 (st,X+,r28) CR_TAB
2299 AS2 (st,X,r29));
2300 else
2301 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2302 AS2 (st,X,r26) CR_TAB
2303 AS2 (adiw,r26,1) CR_TAB
2304 AS2 (st,X+,__tmp_reg__) CR_TAB
2305 AS2 (st,X+,r28) CR_TAB
2306 AS2 (st,X,r29) CR_TAB
2307 AS2 (sbiw,r26,3));
2309 else if (reg_base == reg_src + 2)
2311 if (reg_unused_after (insn, base))
2312 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2313 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2314 AS2 (st,%0+,%A1) CR_TAB
2315 AS2 (st,%0+,%B1) CR_TAB
2316 AS2 (st,%0+,__zero_reg__) CR_TAB
2317 AS2 (st,%0,__tmp_reg__) CR_TAB
2318 AS1 (clr,__zero_reg__));
2319 else
2320 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2321 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2322 AS2 (st,%0+,%A1) CR_TAB
2323 AS2 (st,%0+,%B1) CR_TAB
2324 AS2 (st,%0+,__zero_reg__) CR_TAB
2325 AS2 (st,%0,__tmp_reg__) CR_TAB
2326 AS1 (clr,__zero_reg__) CR_TAB
2327 AS2 (sbiw,r26,3));
2329 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2330 AS2 (st,%0+,%B1) CR_TAB
2331 AS2 (st,%0+,%C1) CR_TAB
2332 AS2 (st,%0,%D1) CR_TAB
2333 AS2 (sbiw,r26,3));
2335 else
2336 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2337 AS2 (std,%0+1,%B1) CR_TAB
2338 AS2 (std,%0+2,%C1) CR_TAB
2339 AS2 (std,%0+3,%D1));
2341 else if (GET_CODE (base) == PLUS) /* (R + i) */
2343 int disp = INTVAL (XEXP (base, 1));
2344 reg_base = REGNO (XEXP (base, 0));
2345 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2347 if (reg_base != REG_Y)
2348 fatal_insn ("incorrect insn:",insn);
2350 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2351 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2352 AS2 (std,Y+60,%A1) CR_TAB
2353 AS2 (std,Y+61,%B1) CR_TAB
2354 AS2 (std,Y+62,%C1) CR_TAB
2355 AS2 (std,Y+63,%D1) CR_TAB
2356 AS2 (sbiw,r28,%o0-60));
2358 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2359 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2360 AS2 (st,Y,%A1) CR_TAB
2361 AS2 (std,Y+1,%B1) CR_TAB
2362 AS2 (std,Y+2,%C1) CR_TAB
2363 AS2 (std,Y+3,%D1) CR_TAB
2364 AS2 (subi,r28,lo8(%o0)) CR_TAB
2365 AS2 (sbci,r29,hi8(%o0)));
2367 if (reg_base == REG_X)
2369 /* (X + d) = R */
2370 if (reg_src == REG_X)
2372 *l = 9;
2373 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2374 AS2 (mov,__zero_reg__,r27) CR_TAB
2375 AS2 (adiw,r26,%o0) CR_TAB
2376 AS2 (st,X+,__tmp_reg__) CR_TAB
2377 AS2 (st,X+,__zero_reg__) CR_TAB
2378 AS2 (st,X+,r28) CR_TAB
2379 AS2 (st,X,r29) CR_TAB
2380 AS1 (clr,__zero_reg__) CR_TAB
2381 AS2 (sbiw,r26,%o0+3));
2383 else if (reg_src == REG_X - 2)
2385 *l = 9;
2386 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2387 AS2 (mov,__zero_reg__,r27) CR_TAB
2388 AS2 (adiw,r26,%o0) CR_TAB
2389 AS2 (st,X+,r24) CR_TAB
2390 AS2 (st,X+,r25) CR_TAB
2391 AS2 (st,X+,__tmp_reg__) CR_TAB
2392 AS2 (st,X,__zero_reg__) CR_TAB
2393 AS1 (clr,__zero_reg__) CR_TAB
2394 AS2 (sbiw,r26,%o0+3));
2396 *l = 6;
2397 return (AS2 (adiw,r26,%o0) CR_TAB
2398 AS2 (st,X+,%A1) CR_TAB
2399 AS2 (st,X+,%B1) CR_TAB
2400 AS2 (st,X+,%C1) CR_TAB
2401 AS2 (st,X,%D1) CR_TAB
2402 AS2 (sbiw,r26,%o0+3));
2404 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2405 AS2 (std,%B0,%B1) CR_TAB
2406 AS2 (std,%C0,%C1) CR_TAB
2407 AS2 (std,%D0,%D1));
2409 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2410 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2411 AS2 (st,%0,%C1) CR_TAB
2412 AS2 (st,%0,%B1) CR_TAB
2413 AS2 (st,%0,%A1));
2414 else if (GET_CODE (base) == POST_INC) /* (R++) */
2415 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2416 AS2 (st,%0,%B1) CR_TAB
2417 AS2 (st,%0,%C1) CR_TAB
2418 AS2 (st,%0,%D1));
2419 fatal_insn ("unknown move insn:",insn);
2420 return "";
2423 const char *
2424 output_movsisf(rtx insn, rtx operands[], int *l)
2426 int dummy;
2427 rtx dest = operands[0];
2428 rtx src = operands[1];
2429 int *real_l = l;
2431 if (!l)
2432 l = &dummy;
2434 if (register_operand (dest, VOIDmode))
2436 if (register_operand (src, VOIDmode)) /* mov r,r */
2438 if (true_regnum (dest) > true_regnum (src))
2440 if (AVR_HAVE_MOVW)
2442 *l = 2;
2443 return (AS2 (movw,%C0,%C1) CR_TAB
2444 AS2 (movw,%A0,%A1));
2446 *l = 4;
2447 return (AS2 (mov,%D0,%D1) CR_TAB
2448 AS2 (mov,%C0,%C1) CR_TAB
2449 AS2 (mov,%B0,%B1) CR_TAB
2450 AS2 (mov,%A0,%A1));
2452 else
2454 if (AVR_HAVE_MOVW)
2456 *l = 2;
2457 return (AS2 (movw,%A0,%A1) CR_TAB
2458 AS2 (movw,%C0,%C1));
2460 *l = 4;
2461 return (AS2 (mov,%A0,%A1) CR_TAB
2462 AS2 (mov,%B0,%B1) CR_TAB
2463 AS2 (mov,%C0,%C1) CR_TAB
2464 AS2 (mov,%D0,%D1));
2467 else if (CONSTANT_P (src))
2469 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2471 *l = 4;
2472 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2473 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2474 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2475 AS2 (ldi,%D0,hhi8(%1)));
2478 if (GET_CODE (src) == CONST_INT)
2480 const char *const clr_op0 =
2481 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2482 AS1 (clr,%B0) CR_TAB
2483 AS2 (movw,%C0,%A0))
2484 : (AS1 (clr,%A0) CR_TAB
2485 AS1 (clr,%B0) CR_TAB
2486 AS1 (clr,%C0) CR_TAB
2487 AS1 (clr,%D0));
2489 if (src == const0_rtx) /* mov r,L */
2491 *l = AVR_HAVE_MOVW ? 3 : 4;
2492 return clr_op0;
2494 else if (src == const1_rtx)
2496 if (!real_l)
2497 output_asm_insn (clr_op0, operands);
2498 *l = AVR_HAVE_MOVW ? 4 : 5;
2499 return AS1 (inc,%A0);
2501 else if (src == constm1_rtx)
2503 /* Load the immediate constant -1 into any register. */
2504 if (AVR_HAVE_MOVW)
2506 *l = 4;
2507 return (AS1 (clr,%A0) CR_TAB
2508 AS1 (dec,%A0) CR_TAB
2509 AS2 (mov,%B0,%A0) CR_TAB
2510 AS2 (movw,%C0,%A0));
2512 *l = 5;
2513 return (AS1 (clr,%A0) CR_TAB
2514 AS1 (dec,%A0) CR_TAB
2515 AS2 (mov,%B0,%A0) CR_TAB
2516 AS2 (mov,%C0,%A0) CR_TAB
2517 AS2 (mov,%D0,%A0));
2519 else
2521 int bit_nr = exact_log2 (INTVAL (src));
2523 if (bit_nr >= 0)
2525 *l = AVR_HAVE_MOVW ? 5 : 6;
2526 if (!real_l)
2528 output_asm_insn (clr_op0, operands);
2529 output_asm_insn ("set", operands);
2531 if (!real_l)
2532 avr_output_bld (operands, bit_nr);
2534 return "";
2539 /* Last resort, better than loading from memory. */
2540 *l = 10;
2541 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2542 AS2 (ldi,r31,lo8(%1)) CR_TAB
2543 AS2 (mov,%A0,r31) CR_TAB
2544 AS2 (ldi,r31,hi8(%1)) CR_TAB
2545 AS2 (mov,%B0,r31) CR_TAB
2546 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2547 AS2 (mov,%C0,r31) CR_TAB
2548 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2549 AS2 (mov,%D0,r31) CR_TAB
2550 AS2 (mov,r31,__tmp_reg__));
2552 else if (GET_CODE (src) == MEM)
2553 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2555 else if (GET_CODE (dest) == MEM)
2557 const char *template;
2559 if (src == const0_rtx)
2560 operands[1] = zero_reg_rtx;
2562 template = out_movsi_mr_r (insn, operands, real_l);
2564 if (!real_l)
2565 output_asm_insn (template, operands);
2567 operands[1] = src;
2568 return "";
2570 fatal_insn ("invalid insn:", insn);
2571 return "";
2574 const char *
2575 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2577 rtx dest = op[0];
2578 rtx src = op[1];
2579 rtx x = XEXP (dest, 0);
2580 int dummy;
2582 if (!l)
2583 l = &dummy;
2585 if (CONSTANT_ADDRESS_P (x))
2587 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2589 *l = 1;
2590 return AS2 (out,__SREG__,%1);
2592 if (optimize > 0 && io_address_operand (x, QImode))
2594 *l = 1;
2595 return AS2 (out,%0-0x20,%1);
2597 *l = 2;
2598 return AS2 (sts,%0,%1);
2600 /* memory access by reg+disp */
2601 else if (GET_CODE (x) == PLUS
2602 && REG_P (XEXP (x,0))
2603 && GET_CODE (XEXP (x,1)) == CONST_INT)
2605 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2607 int disp = INTVAL (XEXP (x,1));
2608 if (REGNO (XEXP (x,0)) != REG_Y)
2609 fatal_insn ("incorrect insn:",insn);
2611 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2612 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2613 AS2 (std,Y+63,%1) CR_TAB
2614 AS2 (sbiw,r28,%o0-63));
2616 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2617 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2618 AS2 (st,Y,%1) CR_TAB
2619 AS2 (subi,r28,lo8(%o0)) CR_TAB
2620 AS2 (sbci,r29,hi8(%o0)));
2622 else if (REGNO (XEXP (x,0)) == REG_X)
2624 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2626 if (reg_unused_after (insn, XEXP (x,0)))
2627 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2628 AS2 (adiw,r26,%o0) CR_TAB
2629 AS2 (st,X,__tmp_reg__));
2631 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2632 AS2 (adiw,r26,%o0) CR_TAB
2633 AS2 (st,X,__tmp_reg__) CR_TAB
2634 AS2 (sbiw,r26,%o0));
2636 else
2638 if (reg_unused_after (insn, XEXP (x,0)))
2639 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2640 AS2 (st,X,%1));
2642 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2643 AS2 (st,X,%1) CR_TAB
2644 AS2 (sbiw,r26,%o0));
2647 *l = 1;
2648 return AS2 (std,%0,%1);
2650 *l = 1;
2651 return AS2 (st,%0,%1);
2654 const char *
2655 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2657 rtx dest = op[0];
2658 rtx src = op[1];
2659 rtx base = XEXP (dest, 0);
2660 int reg_base = true_regnum (base);
2661 int reg_src = true_regnum (src);
2662 /* "volatile" forces writing high byte first, even if less efficient,
2663 for correct operation with 16-bit I/O registers. */
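/* (Added note, assuming standard AVR hardware behavior: 16-bit I/O
   registers such as the timer OCR/ICR pairs share a one-byte temporary
   latch, and a 16-bit write only takes effect atomically if the high byte
   is written before the low byte -- hence the ordering enforced here.) */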
2664 int mem_volatile_p = MEM_VOLATILE_P (dest);
2665 int tmp;
2667 if (!l)
2668 l = &tmp;
2669 if (CONSTANT_ADDRESS_P (base))
2671 if (optimize > 0 && io_address_operand (base, HImode))
2673 *l = 2;
2674 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2675 AS2 (out,%A0-0x20,%A1));
2677 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2678 AS2 (sts,%A0,%A1));
2680 if (reg_base > 0)
2682 if (reg_base == REG_X)
2684 if (reg_src == REG_X)
2686 /* "st X+,r26" and "st -X,r26" are undefined. */
2687 if (!mem_volatile_p && reg_unused_after (insn, src))
2688 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2689 AS2 (st,X,r26) CR_TAB
2690 AS2 (adiw,r26,1) CR_TAB
2691 AS2 (st,X,__tmp_reg__));
2692 else
2693 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2694 AS2 (adiw,r26,1) CR_TAB
2695 AS2 (st,X,__tmp_reg__) CR_TAB
2696 AS2 (sbiw,r26,1) CR_TAB
2697 AS2 (st,X,r26));
2699 else
2701 if (!mem_volatile_p && reg_unused_after (insn, base))
2702 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2703 AS2 (st,X,%B1));
2704 else
2705 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2706 AS2 (st,X,%B1) CR_TAB
2707 AS2 (st,-X,%A1));
2710 else
2711 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2712 AS2 (st,%0,%A1));
2714 else if (GET_CODE (base) == PLUS)
2716 int disp = INTVAL (XEXP (base, 1));
2717 reg_base = REGNO (XEXP (base, 0));
2718 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2720 if (reg_base != REG_Y)
2721 fatal_insn ("incorrect insn:",insn);
2723 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2724 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2725 AS2 (std,Y+63,%B1) CR_TAB
2726 AS2 (std,Y+62,%A1) CR_TAB
2727 AS2 (sbiw,r28,%o0-62));
2729 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2730 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2731 AS2 (std,Y+1,%B1) CR_TAB
2732 AS2 (st,Y,%A1) CR_TAB
2733 AS2 (subi,r28,lo8(%o0)) CR_TAB
2734 AS2 (sbci,r29,hi8(%o0)));
2736 if (reg_base == REG_X)
2738 /* (X + d) = R */
2739 if (reg_src == REG_X)
2741 *l = 7;
2742 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2743 AS2 (mov,__zero_reg__,r27) CR_TAB
2744 AS2 (adiw,r26,%o0+1) CR_TAB
2745 AS2 (st,X,__zero_reg__) CR_TAB
2746 AS2 (st,-X,__tmp_reg__) CR_TAB
2747 AS1 (clr,__zero_reg__) CR_TAB
2748 AS2 (sbiw,r26,%o0));
2750 *l = 4;
2751 return (AS2 (adiw,r26,%o0+1) CR_TAB
2752 AS2 (st,X,%B1) CR_TAB
2753 AS2 (st,-X,%A1) CR_TAB
2754 AS2 (sbiw,r26,%o0));
2756 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2757 AS2 (std,%A0,%A1));
2759 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2760 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2761 AS2 (st,%0,%A1));
2762 else if (GET_CODE (base) == POST_INC) /* (R++) */
2764 if (mem_volatile_p)
2766 if (REGNO (XEXP (base, 0)) == REG_X)
2768 *l = 4;
2769 return (AS2 (adiw,r26,1) CR_TAB
2770 AS2 (st,X,%B1) CR_TAB
2771 AS2 (st,-X,%A1) CR_TAB
2772 AS2 (adiw,r26,2));
2774 else
2776 *l = 3;
2777 return (AS2 (std,%p0+1,%B1) CR_TAB
2778 AS2 (st,%p0,%A1) CR_TAB
2779 AS2 (adiw,%r0,2));
2783 *l = 2;
2784 return (AS2 (st,%0,%A1) CR_TAB
2785 AS2 (st,%0,%B1));
2787 fatal_insn ("unknown move insn:",insn);
2788 return "";
2791 /* Return 1 if a frame pointer is required for the current function. */
2794 frame_pointer_required_p (void)
2796 return (cfun->calls_alloca
2797 || crtl->args.info.nregs == 0
2798 || get_frame_size () > 0);
2801 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2803 static RTX_CODE
2804 compare_condition (rtx insn)
2806 rtx next = next_real_insn (insn);
2807 RTX_CODE cond = UNKNOWN;
2808 if (next && GET_CODE (next) == JUMP_INSN)
2810 rtx pat = PATTERN (next);
2811 rtx src = SET_SRC (pat);
2812 rtx t = XEXP (src, 0);
2813 cond = GET_CODE (t);
2815 return cond;
2818 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2820 static int
2821 compare_sign_p (rtx insn)
2823 RTX_CODE cond = compare_condition (insn);
2824 return (cond == GE || cond == LT);
2827 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2828 that needs to be swapped (GT, GTU, LE, LEU). */
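/* (Added note: the AVR branch instructions cover EQ/NE, LT/GE and LTU/GEU
   but have no "branch if greater" forms, so these conditions are normally
   implemented by swapping the comparison operands.) */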
2831 compare_diff_p (rtx insn)
2833 RTX_CODE cond = compare_condition (insn);
2834 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2837 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2840 compare_eq_p (rtx insn)
2842 RTX_CODE cond = compare_condition (insn);
2843 return (cond == EQ || cond == NE);
2847 /* Output test instruction for HImode. */
2849 const char *
2850 out_tsthi (rtx insn, int *l)
2852 if (compare_sign_p (insn))
2854 if (l) *l = 1;
2855 return AS1 (tst,%B0);
2857 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2858 && compare_eq_p (insn))
2860 /* Faster than sbiw if we can clobber the operand. */
2861 if (l) *l = 1;
2862 return AS2 (or,%A0,%B0);
2864 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2866 if (l) *l = 1;
2867 return AS2 (sbiw,%0,0);
2869 if (l) *l = 2;
2870 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2871 AS2 (cpc,%B0,__zero_reg__));
2875 /* Output test instruction for SImode. */
2877 const char *
2878 out_tstsi (rtx insn, int *l)
2880 if (compare_sign_p (insn))
2882 if (l) *l = 1;
2883 return AS1 (tst,%D0);
2885 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2887 if (l) *l = 3;
2888 return (AS2 (sbiw,%A0,0) CR_TAB
2889 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2890 AS2 (cpc,%D0,__zero_reg__));
2892 if (l) *l = 4;
2893 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2894 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2895 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2896 AS2 (cpc,%D0,__zero_reg__));
2900 /* Generate asm equivalent for various shifts.
2901 Shift count is a CONST_INT, MEM or REG.
2902 This only handles cases that are not already
2903 carefully hand-optimized in ?sh??i3_out. */
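/* (Added illustration, a sketch of the non-constant case rather than
   verbatim output: with the count in a register the emitted sequence is
   roughly

        mov  __tmp_reg__,%2
        rjmp 2f
     1: <shift template>
     2: dec  __tmp_reg__
        brpl 1b

   i.e. the one-bit shift template is repeated in a count-down loop, so a
   count of zero performs no shift at all.) */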
2905 void
2906 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2907 int *len, int t_len)
2909 rtx op[10];
2910 char str[500];
2911 int second_label = 1;
2912 int saved_in_tmp = 0;
2913 int use_zero_reg = 0;
2915 op[0] = operands[0];
2916 op[1] = operands[1];
2917 op[2] = operands[2];
2918 op[3] = operands[3];
2919 str[0] = 0;
2921 if (len)
2922 *len = 1;
2924 if (GET_CODE (operands[2]) == CONST_INT)
2926 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2927 int count = INTVAL (operands[2]);
2928 int max_len = 10; /* If larger than this, always use a loop. */
2930 if (count <= 0)
2932 if (len)
2933 *len = 0;
2934 return;
2937 if (count < 8 && !scratch)
2938 use_zero_reg = 1;
2940 if (optimize_size)
2941 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2943 if (t_len * count <= max_len)
2945 /* Output shifts inline with no loop - faster. */
2946 if (len)
2947 *len = t_len * count;
2948 else
2950 while (count-- > 0)
2951 output_asm_insn (template, op);
2954 return;
2957 if (scratch)
2959 if (!len)
2960 strcat (str, AS2 (ldi,%3,%2));
2962 else if (use_zero_reg)
2964 /* Hack to save one word: use __zero_reg__ as loop counter.
2965 Set one bit, then shift in a loop until it is 0 again. */
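/* (Added example: for a count of 3 this emits "set" followed by a bld of
   bit %2-1, so __zero_reg__ starts as 0b100; each loop pass runs the shift
   template and then "lsr %3", and the loop exits after the third pass when
   the bit has been shifted out and the register is zero again.) */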
2967 op[3] = zero_reg_rtx;
2968 if (len)
2969 *len = 2;
2970 else
2971 strcat (str, ("set" CR_TAB
2972 AS2 (bld,%3,%2-1)));
2974 else
2976 /* No scratch register available, use one from LD_REGS (saved in
2977 __tmp_reg__) that doesn't overlap with registers to shift. */
2979 op[3] = gen_rtx_REG (QImode,
2980 ((true_regnum (operands[0]) - 1) & 15) + 16);
2981 op[4] = tmp_reg_rtx;
2982 saved_in_tmp = 1;
2984 if (len)
2985 *len = 3; /* Includes "mov %3,%4" after the loop. */
2986 else
2987 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2988 AS2 (ldi,%3,%2)));
2991 second_label = 0;
2993 else if (GET_CODE (operands[2]) == MEM)
2995 rtx op_mov[10];
2997 op[3] = op_mov[0] = tmp_reg_rtx;
2998 op_mov[1] = op[2];
3000 if (len)
3001 out_movqi_r_mr (insn, op_mov, len);
3002 else
3003 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3005 else if (register_operand (operands[2], QImode))
3007 if (reg_unused_after (insn, operands[2]))
3008 op[3] = op[2];
3009 else
3011 op[3] = tmp_reg_rtx;
3012 if (!len)
3013 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3016 else
3017 fatal_insn ("bad shift insn:", insn);
3019 if (second_label)
3021 if (len)
3022 ++*len;
3023 else
3024 strcat (str, AS1 (rjmp,2f));
3027 if (len)
3028 *len += t_len + 2; /* template + dec + brXX */
3029 else
3031 strcat (str, "\n1:\t");
3032 strcat (str, template);
3033 strcat (str, second_label ? "\n2:\t" : "\n\t");
3034 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3035 strcat (str, CR_TAB);
3036 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3037 if (saved_in_tmp)
3038 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3039 output_asm_insn (str, op);
3044 /* 8bit shift left ((char)x << i) */
3046 const char *
3047 ashlqi3_out (rtx insn, rtx operands[], int *len)
3049 if (GET_CODE (operands[2]) == CONST_INT)
3051 int k;
3053 if (!len)
3054 len = &k;
3056 switch (INTVAL (operands[2]))
3058 default:
3059 if (INTVAL (operands[2]) < 8)
3060 break;
3062 *len = 1;
3063 return AS1 (clr,%0);
3065 case 1:
3066 *len = 1;
3067 return AS1 (lsl,%0);
3069 case 2:
3070 *len = 2;
3071 return (AS1 (lsl,%0) CR_TAB
3072 AS1 (lsl,%0));
3074 case 3:
3075 *len = 3;
3076 return (AS1 (lsl,%0) CR_TAB
3077 AS1 (lsl,%0) CR_TAB
3078 AS1 (lsl,%0));
3080 case 4:
3081 if (test_hard_reg_class (LD_REGS, operands[0]))
3083 *len = 2;
3084 return (AS1 (swap,%0) CR_TAB
3085 AS2 (andi,%0,0xf0));
3087 *len = 4;
3088 return (AS1 (lsl,%0) CR_TAB
3089 AS1 (lsl,%0) CR_TAB
3090 AS1 (lsl,%0) CR_TAB
3091 AS1 (lsl,%0));
3093 case 5:
3094 if (test_hard_reg_class (LD_REGS, operands[0]))
3096 *len = 3;
3097 return (AS1 (swap,%0) CR_TAB
3098 AS1 (lsl,%0) CR_TAB
3099 AS2 (andi,%0,0xe0));
3101 *len = 5;
3102 return (AS1 (lsl,%0) CR_TAB
3103 AS1 (lsl,%0) CR_TAB
3104 AS1 (lsl,%0) CR_TAB
3105 AS1 (lsl,%0) CR_TAB
3106 AS1 (lsl,%0));
3108 case 6:
3109 if (test_hard_reg_class (LD_REGS, operands[0]))
3111 *len = 4;
3112 return (AS1 (swap,%0) CR_TAB
3113 AS1 (lsl,%0) CR_TAB
3114 AS1 (lsl,%0) CR_TAB
3115 AS2 (andi,%0,0xc0));
3117 *len = 6;
3118 return (AS1 (lsl,%0) CR_TAB
3119 AS1 (lsl,%0) CR_TAB
3120 AS1 (lsl,%0) CR_TAB
3121 AS1 (lsl,%0) CR_TAB
3122 AS1 (lsl,%0) CR_TAB
3123 AS1 (lsl,%0));
3125 case 7:
3126 *len = 3;
3127 return (AS1 (ror,%0) CR_TAB
3128 AS1 (clr,%0) CR_TAB
3129 AS1 (ror,%0));
3132 else if (CONSTANT_P (operands[2]))
3133 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3135 out_shift_with_cnt (AS1 (lsl,%0),
3136 insn, operands, len, 1);
3137 return "";
3141 /* 16bit shift left ((short)x << i) */
3143 const char *
3144 ashlhi3_out (rtx insn, rtx operands[], int *len)
3146 if (GET_CODE (operands[2]) == CONST_INT)
3148 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3149 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3150 int k;
3151 int *t = len;
3153 if (!len)
3154 len = &k;
3156 switch (INTVAL (operands[2]))
3158 default:
3159 if (INTVAL (operands[2]) < 16)
3160 break;
3162 *len = 2;
3163 return (AS1 (clr,%B0) CR_TAB
3164 AS1 (clr,%A0));
3166 case 4:
3167 if (optimize_size && scratch)
3168 break; /* 5 */
3169 if (ldi_ok)
3171 *len = 6;
3172 return (AS1 (swap,%A0) CR_TAB
3173 AS1 (swap,%B0) CR_TAB
3174 AS2 (andi,%B0,0xf0) CR_TAB
3175 AS2 (eor,%B0,%A0) CR_TAB
3176 AS2 (andi,%A0,0xf0) CR_TAB
3177 AS2 (eor,%B0,%A0));
3179 if (scratch)
3181 *len = 7;
3182 return (AS1 (swap,%A0) CR_TAB
3183 AS1 (swap,%B0) CR_TAB
3184 AS2 (ldi,%3,0xf0) CR_TAB
3185 AS2 (and,%B0,%3) CR_TAB
3186 AS2 (eor,%B0,%A0) CR_TAB
3187 AS2 (and,%A0,%3) CR_TAB
3188 AS2 (eor,%B0,%A0));
3190 break; /* optimize_size ? 6 : 8 */
3192 case 5:
3193 if (optimize_size)
3194 break; /* scratch ? 5 : 6 */
3195 if (ldi_ok)
3197 *len = 8;
3198 return (AS1 (lsl,%A0) CR_TAB
3199 AS1 (rol,%B0) CR_TAB
3200 AS1 (swap,%A0) CR_TAB
3201 AS1 (swap,%B0) CR_TAB
3202 AS2 (andi,%B0,0xf0) CR_TAB
3203 AS2 (eor,%B0,%A0) CR_TAB
3204 AS2 (andi,%A0,0xf0) CR_TAB
3205 AS2 (eor,%B0,%A0));
3207 if (scratch)
3209 *len = 9;
3210 return (AS1 (lsl,%A0) CR_TAB
3211 AS1 (rol,%B0) CR_TAB
3212 AS1 (swap,%A0) CR_TAB
3213 AS1 (swap,%B0) CR_TAB
3214 AS2 (ldi,%3,0xf0) CR_TAB
3215 AS2 (and,%B0,%3) CR_TAB
3216 AS2 (eor,%B0,%A0) CR_TAB
3217 AS2 (and,%A0,%3) CR_TAB
3218 AS2 (eor,%B0,%A0));
3220 break; /* 10 */
3222 case 6:
3223 if (optimize_size)
3224 break; /* scratch ? 5 : 6 */
3225 *len = 9;
3226 return (AS1 (clr,__tmp_reg__) CR_TAB
3227 AS1 (lsr,%B0) CR_TAB
3228 AS1 (ror,%A0) CR_TAB
3229 AS1 (ror,__tmp_reg__) CR_TAB
3230 AS1 (lsr,%B0) CR_TAB
3231 AS1 (ror,%A0) CR_TAB
3232 AS1 (ror,__tmp_reg__) CR_TAB
3233 AS2 (mov,%B0,%A0) CR_TAB
3234 AS2 (mov,%A0,__tmp_reg__));
3236 case 7:
3237 *len = 5;
3238 return (AS1 (lsr,%B0) CR_TAB
3239 AS2 (mov,%B0,%A0) CR_TAB
3240 AS1 (clr,%A0) CR_TAB
3241 AS1 (ror,%B0) CR_TAB
3242 AS1 (ror,%A0));
3244 case 8:
3245 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3246 AS1 (clr,%A0));
3248 case 9:
3249 *len = 3;
3250 return (AS2 (mov,%B0,%A0) CR_TAB
3251 AS1 (clr,%A0) CR_TAB
3252 AS1 (lsl,%B0));
3254 case 10:
3255 *len = 4;
3256 return (AS2 (mov,%B0,%A0) CR_TAB
3257 AS1 (clr,%A0) CR_TAB
3258 AS1 (lsl,%B0) CR_TAB
3259 AS1 (lsl,%B0));
3261 case 11:
3262 *len = 5;
3263 return (AS2 (mov,%B0,%A0) CR_TAB
3264 AS1 (clr,%A0) CR_TAB
3265 AS1 (lsl,%B0) CR_TAB
3266 AS1 (lsl,%B0) CR_TAB
3267 AS1 (lsl,%B0));
3269 case 12:
3270 if (ldi_ok)
3272 *len = 4;
3273 return (AS2 (mov,%B0,%A0) CR_TAB
3274 AS1 (clr,%A0) CR_TAB
3275 AS1 (swap,%B0) CR_TAB
3276 AS2 (andi,%B0,0xf0));
3278 if (scratch)
3280 *len = 5;
3281 return (AS2 (mov,%B0,%A0) CR_TAB
3282 AS1 (clr,%A0) CR_TAB
3283 AS1 (swap,%B0) CR_TAB
3284 AS2 (ldi,%3,0xf0) CR_TAB
3285 AS2 (and,%B0,%3));
3287 *len = 6;
3288 return (AS2 (mov,%B0,%A0) CR_TAB
3289 AS1 (clr,%A0) CR_TAB
3290 AS1 (lsl,%B0) CR_TAB
3291 AS1 (lsl,%B0) CR_TAB
3292 AS1 (lsl,%B0) CR_TAB
3293 AS1 (lsl,%B0));
3295 case 13:
3296 if (ldi_ok)
3298 *len = 5;
3299 return (AS2 (mov,%B0,%A0) CR_TAB
3300 AS1 (clr,%A0) CR_TAB
3301 AS1 (swap,%B0) CR_TAB
3302 AS1 (lsl,%B0) CR_TAB
3303 AS2 (andi,%B0,0xe0));
3305 if (AVR_HAVE_MUL && scratch)
3307 *len = 5;
3308 return (AS2 (ldi,%3,0x20) CR_TAB
3309 AS2 (mul,%A0,%3) CR_TAB
3310 AS2 (mov,%B0,r0) CR_TAB
3311 AS1 (clr,%A0) CR_TAB
3312 AS1 (clr,__zero_reg__));
3314 if (optimize_size && scratch)
3315 break; /* 5 */
3316 if (scratch)
3318 *len = 6;
3319 return (AS2 (mov,%B0,%A0) CR_TAB
3320 AS1 (clr,%A0) CR_TAB
3321 AS1 (swap,%B0) CR_TAB
3322 AS1 (lsl,%B0) CR_TAB
3323 AS2 (ldi,%3,0xe0) CR_TAB
3324 AS2 (and,%B0,%3));
3326 if (AVR_HAVE_MUL)
3328 *len = 6;
3329 return ("set" CR_TAB
3330 AS2 (bld,r1,5) CR_TAB
3331 AS2 (mul,%A0,r1) CR_TAB
3332 AS2 (mov,%B0,r0) CR_TAB
3333 AS1 (clr,%A0) CR_TAB
3334 AS1 (clr,__zero_reg__));
3336 *len = 7;
3337 return (AS2 (mov,%B0,%A0) CR_TAB
3338 AS1 (clr,%A0) CR_TAB
3339 AS1 (lsl,%B0) CR_TAB
3340 AS1 (lsl,%B0) CR_TAB
3341 AS1 (lsl,%B0) CR_TAB
3342 AS1 (lsl,%B0) CR_TAB
3343 AS1 (lsl,%B0));
3345 case 14:
3346 if (AVR_HAVE_MUL && ldi_ok)
3348 *len = 5;
3349 return (AS2 (ldi,%B0,0x40) CR_TAB
3350 AS2 (mul,%A0,%B0) CR_TAB
3351 AS2 (mov,%B0,r0) CR_TAB
3352 AS1 (clr,%A0) CR_TAB
3353 AS1 (clr,__zero_reg__));
3355 if (AVR_HAVE_MUL && scratch)
3357 *len = 5;
3358 return (AS2 (ldi,%3,0x40) CR_TAB
3359 AS2 (mul,%A0,%3) CR_TAB
3360 AS2 (mov,%B0,r0) CR_TAB
3361 AS1 (clr,%A0) CR_TAB
3362 AS1 (clr,__zero_reg__));
3364 if (optimize_size && ldi_ok)
3366 *len = 5;
3367 return (AS2 (mov,%B0,%A0) CR_TAB
3368 AS2 (ldi,%A0,6) "\n1:\t"
3369 AS1 (lsl,%B0) CR_TAB
3370 AS1 (dec,%A0) CR_TAB
3371 AS1 (brne,1b));
3373 if (optimize_size && scratch)
3374 break; /* 5 */
3375 *len = 6;
3376 return (AS1 (clr,%B0) CR_TAB
3377 AS1 (lsr,%A0) CR_TAB
3378 AS1 (ror,%B0) CR_TAB
3379 AS1 (lsr,%A0) CR_TAB
3380 AS1 (ror,%B0) CR_TAB
3381 AS1 (clr,%A0));
3383 case 15:
3384 *len = 4;
3385 return (AS1 (clr,%B0) CR_TAB
3386 AS1 (lsr,%A0) CR_TAB
3387 AS1 (ror,%B0) CR_TAB
3388 AS1 (clr,%A0));
3390 len = t;
3392 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3393 AS1 (rol,%B0)),
3394 insn, operands, len, 2);
3395 return "";
3399 /* 32bit shift left ((long)x << i) */
3401 const char *
3402 ashlsi3_out (rtx insn, rtx operands[], int *len)
3404 if (GET_CODE (operands[2]) == CONST_INT)
3406 int k;
3407 int *t = len;
3409 if (!len)
3410 len = &k;
3412 switch (INTVAL (operands[2]))
3414 default:
3415 if (INTVAL (operands[2]) < 32)
3416 break;
3418 if (AVR_HAVE_MOVW)
3419 return *len = 3, (AS1 (clr,%D0) CR_TAB
3420 AS1 (clr,%C0) CR_TAB
3421 AS2 (movw,%A0,%C0));
3422 *len = 4;
3423 return (AS1 (clr,%D0) CR_TAB
3424 AS1 (clr,%C0) CR_TAB
3425 AS1 (clr,%B0) CR_TAB
3426 AS1 (clr,%A0));
3428 case 8:
3430 int reg0 = true_regnum (operands[0]);
3431 int reg1 = true_regnum (operands[1]);
3432 *len = 4;
3433 if (reg0 >= reg1)
3434 return (AS2 (mov,%D0,%C1) CR_TAB
3435 AS2 (mov,%C0,%B1) CR_TAB
3436 AS2 (mov,%B0,%A1) CR_TAB
3437 AS1 (clr,%A0));
3438 else
3439 return (AS1 (clr,%A0) CR_TAB
3440 AS2 (mov,%B0,%A1) CR_TAB
3441 AS2 (mov,%C0,%B1) CR_TAB
3442 AS2 (mov,%D0,%C1));
3445 case 16:
3447 int reg0 = true_regnum (operands[0]);
3448 int reg1 = true_regnum (operands[1]);
3449 if (reg0 + 2 == reg1)
3450 return *len = 2, (AS1 (clr,%B0) CR_TAB
3451 AS1 (clr,%A0));
3452 if (AVR_HAVE_MOVW)
3453 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3454 AS1 (clr,%B0) CR_TAB
3455 AS1 (clr,%A0));
3456 else
3457 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3458 AS2 (mov,%D0,%B1) CR_TAB
3459 AS1 (clr,%B0) CR_TAB
3460 AS1 (clr,%A0));
3463 case 24:
3464 *len = 4;
3465 return (AS2 (mov,%D0,%A1) CR_TAB
3466 AS1 (clr,%C0) CR_TAB
3467 AS1 (clr,%B0) CR_TAB
3468 AS1 (clr,%A0));
3470 case 31:
3471 *len = 6;
3472 return (AS1 (clr,%D0) CR_TAB
3473 AS1 (lsr,%A0) CR_TAB
3474 AS1 (ror,%D0) CR_TAB
3475 AS1 (clr,%C0) CR_TAB
3476 AS1 (clr,%B0) CR_TAB
3477 AS1 (clr,%A0));
3479 len = t;
3481 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3482 AS1 (rol,%B0) CR_TAB
3483 AS1 (rol,%C0) CR_TAB
3484 AS1 (rol,%D0)),
3485 insn, operands, len, 4);
3486 return "";
3489 /* 8bit arithmetic shift right ((signed char)x >> i) */
3491 const char *
3492 ashrqi3_out (rtx insn, rtx operands[], int *len)
3494 if (GET_CODE (operands[2]) == CONST_INT)
3496 int k;
3498 if (!len)
3499 len = &k;
3501 switch (INTVAL (operands[2]))
3503 case 1:
3504 *len = 1;
3505 return AS1 (asr,%0);
3507 case 2:
3508 *len = 2;
3509 return (AS1 (asr,%0) CR_TAB
3510 AS1 (asr,%0));
3512 case 3:
3513 *len = 3;
3514 return (AS1 (asr,%0) CR_TAB
3515 AS1 (asr,%0) CR_TAB
3516 AS1 (asr,%0));
3518 case 4:
3519 *len = 4;
3520 return (AS1 (asr,%0) CR_TAB
3521 AS1 (asr,%0) CR_TAB
3522 AS1 (asr,%0) CR_TAB
3523 AS1 (asr,%0));
3525 case 5:
3526 *len = 5;
3527 return (AS1 (asr,%0) CR_TAB
3528 AS1 (asr,%0) CR_TAB
3529 AS1 (asr,%0) CR_TAB
3530 AS1 (asr,%0) CR_TAB
3531 AS1 (asr,%0));
3533 case 6:
3534 *len = 4;
3535 return (AS2 (bst,%0,6) CR_TAB
3536 AS1 (lsl,%0) CR_TAB
3537 AS2 (sbc,%0,%0) CR_TAB
3538 AS2 (bld,%0,0));
3540 default:
3541 if (INTVAL (operands[2]) < 8)
3542 break;
3544 /* fall through */
3546 case 7:
3547 *len = 2;
3548 return (AS1 (lsl,%0) CR_TAB
3549 AS2 (sbc,%0,%0));
3552 else if (CONSTANT_P (operands[2]))
3553 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3555 out_shift_with_cnt (AS1 (asr,%0),
3556 insn, operands, len, 1);
3557 return "";
3561 /* 16bit arithmetic shift right ((signed short)x >> i) */
3563 const char *
3564 ashrhi3_out (rtx insn, rtx operands[], int *len)
3566 if (GET_CODE (operands[2]) == CONST_INT)
3568 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3569 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3570 int k;
3571 int *t = len;
3573 if (!len)
3574 len = &k;
3576 switch (INTVAL (operands[2]))
3578 case 4:
3579 case 5:
3580 /* XXX try to optimize this too? */
3581 break;
3583 case 6:
3584 if (optimize_size)
3585 break; /* scratch ? 5 : 6 */
3586 *len = 8;
3587 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3588 AS2 (mov,%A0,%B0) CR_TAB
3589 AS1 (lsl,__tmp_reg__) CR_TAB
3590 AS1 (rol,%A0) CR_TAB
3591 AS2 (sbc,%B0,%B0) CR_TAB
3592 AS1 (lsl,__tmp_reg__) CR_TAB
3593 AS1 (rol,%A0) CR_TAB
3594 AS1 (rol,%B0));
3596 case 7:
3597 *len = 4;
3598 return (AS1 (lsl,%A0) CR_TAB
3599 AS2 (mov,%A0,%B0) CR_TAB
3600 AS1 (rol,%A0) CR_TAB
3601 AS2 (sbc,%B0,%B0));
3603 case 8:
3605 int reg0 = true_regnum (operands[0]);
3606 int reg1 = true_regnum (operands[1]);
3608 if (reg0 == reg1)
3609 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3610 AS1 (lsl,%B0) CR_TAB
3611 AS2 (sbc,%B0,%B0));
3612 else
3613 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3614 AS1 (clr,%B0) CR_TAB
3615 AS2 (sbrc,%A0,7) CR_TAB
3616 AS1 (dec,%B0));
3619 case 9:
3620 *len = 4;
3621 return (AS2 (mov,%A0,%B0) CR_TAB
3622 AS1 (lsl,%B0) CR_TAB
3623 AS2 (sbc,%B0,%B0) CR_TAB
3624 AS1 (asr,%A0));
3626 case 10:
3627 *len = 5;
3628 return (AS2 (mov,%A0,%B0) CR_TAB
3629 AS1 (lsl,%B0) CR_TAB
3630 AS2 (sbc,%B0,%B0) CR_TAB
3631 AS1 (asr,%A0) CR_TAB
3632 AS1 (asr,%A0));
3634 case 11:
3635 if (AVR_HAVE_MUL && ldi_ok)
3637 *len = 5;
3638 return (AS2 (ldi,%A0,0x20) CR_TAB
3639 AS2 (muls,%B0,%A0) CR_TAB
3640 AS2 (mov,%A0,r1) CR_TAB
3641 AS2 (sbc,%B0,%B0) CR_TAB
3642 AS1 (clr,__zero_reg__));
3644 if (optimize_size && scratch)
3645 break; /* 5 */
3646 *len = 6;
3647 return (AS2 (mov,%A0,%B0) CR_TAB
3648 AS1 (lsl,%B0) CR_TAB
3649 AS2 (sbc,%B0,%B0) CR_TAB
3650 AS1 (asr,%A0) CR_TAB
3651 AS1 (asr,%A0) CR_TAB
3652 AS1 (asr,%A0));
3654 case 12:
3655 if (AVR_HAVE_MUL && ldi_ok)
3657 *len = 5;
3658 return (AS2 (ldi,%A0,0x10) CR_TAB
3659 AS2 (muls,%B0,%A0) CR_TAB
3660 AS2 (mov,%A0,r1) CR_TAB
3661 AS2 (sbc,%B0,%B0) CR_TAB
3662 AS1 (clr,__zero_reg__));
3664 if (optimize_size && scratch)
3665 break; /* 5 */
3666 *len = 7;
3667 return (AS2 (mov,%A0,%B0) CR_TAB
3668 AS1 (lsl,%B0) CR_TAB
3669 AS2 (sbc,%B0,%B0) CR_TAB
3670 AS1 (asr,%A0) CR_TAB
3671 AS1 (asr,%A0) CR_TAB
3672 AS1 (asr,%A0) CR_TAB
3673 AS1 (asr,%A0));
3675 case 13:
3676 if (AVR_HAVE_MUL && ldi_ok)
3678 *len = 5;
3679 return (AS2 (ldi,%A0,0x08) CR_TAB
3680 AS2 (muls,%B0,%A0) CR_TAB
3681 AS2 (mov,%A0,r1) CR_TAB
3682 AS2 (sbc,%B0,%B0) CR_TAB
3683 AS1 (clr,__zero_reg__));
3685 if (optimize_size)
3686 break; /* scratch ? 5 : 7 */
3687 *len = 8;
3688 return (AS2 (mov,%A0,%B0) CR_TAB
3689 AS1 (lsl,%B0) CR_TAB
3690 AS2 (sbc,%B0,%B0) CR_TAB
3691 AS1 (asr,%A0) CR_TAB
3692 AS1 (asr,%A0) CR_TAB
3693 AS1 (asr,%A0) CR_TAB
3694 AS1 (asr,%A0) CR_TAB
3695 AS1 (asr,%A0));
3697 case 14:
3698 *len = 5;
3699 return (AS1 (lsl,%B0) CR_TAB
3700 AS2 (sbc,%A0,%A0) CR_TAB
3701 AS1 (lsl,%B0) CR_TAB
3702 AS2 (mov,%B0,%A0) CR_TAB
3703 AS1 (rol,%A0));
3705 default:
3706 if (INTVAL (operands[2]) < 16)
3707 break;
3709 /* fall through */
3711 case 15:
3712 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3713 AS2 (sbc,%A0,%A0) CR_TAB
3714 AS2 (mov,%B0,%A0));
3716 len = t;
3718 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3719 AS1 (ror,%A0)),
3720 insn, operands, len, 2);
3721 return "";
3725 /* 32bit arithmetic shift right ((signed long)x >> i) */
3727 const char *
3728 ashrsi3_out (rtx insn, rtx operands[], int *len)
3730 if (GET_CODE (operands[2]) == CONST_INT)
3732 int k;
3733 int *t = len;
3735 if (!len)
3736 len = &k;
3738 switch (INTVAL (operands[2]))
3740 case 8:
3742 int reg0 = true_regnum (operands[0]);
3743 int reg1 = true_regnum (operands[1]);
3744 *len=6;
3745 if (reg0 <= reg1)
3746 return (AS2 (mov,%A0,%B1) CR_TAB
3747 AS2 (mov,%B0,%C1) CR_TAB
3748 AS2 (mov,%C0,%D1) CR_TAB
3749 AS1 (clr,%D0) CR_TAB
3750 AS2 (sbrc,%C0,7) CR_TAB
3751 AS1 (dec,%D0));
3752 else
3753 return (AS1 (clr,%D0) CR_TAB
3754 AS2 (sbrc,%D1,7) CR_TAB
3755 AS1 (dec,%D0) CR_TAB
3756 AS2 (mov,%C0,%D1) CR_TAB
3757 AS2 (mov,%B0,%C1) CR_TAB
3758 AS2 (mov,%A0,%B1));
3761 case 16:
3763 int reg0 = true_regnum (operands[0]);
3764 int reg1 = true_regnum (operands[1]);
3766 if (reg0 == reg1 + 2)
3767 return *len = 4, (AS1 (clr,%D0) CR_TAB
3768 AS2 (sbrc,%B0,7) CR_TAB
3769 AS1 (com,%D0) CR_TAB
3770 AS2 (mov,%C0,%D0));
3771 if (AVR_HAVE_MOVW)
3772 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3773 AS1 (clr,%D0) CR_TAB
3774 AS2 (sbrc,%B0,7) CR_TAB
3775 AS1 (com,%D0) CR_TAB
3776 AS2 (mov,%C0,%D0));
3777 else
3778 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3779 AS2 (mov,%A0,%C1) CR_TAB
3780 AS1 (clr,%D0) CR_TAB
3781 AS2 (sbrc,%B0,7) CR_TAB
3782 AS1 (com,%D0) CR_TAB
3783 AS2 (mov,%C0,%D0));
3786 case 24:
3787 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3788 AS1 (clr,%D0) CR_TAB
3789 AS2 (sbrc,%A0,7) CR_TAB
3790 AS1 (com,%D0) CR_TAB
3791 AS2 (mov,%B0,%D0) CR_TAB
3792 AS2 (mov,%C0,%D0));
3794 default:
3795 if (INTVAL (operands[2]) < 32)
3796 break;
3798 /* fall through */
3800 case 31:
3801 if (AVR_HAVE_MOVW)
3802 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3803 AS2 (sbc,%A0,%A0) CR_TAB
3804 AS2 (mov,%B0,%A0) CR_TAB
3805 AS2 (movw,%C0,%A0));
3806 else
3807 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3808 AS2 (sbc,%A0,%A0) CR_TAB
3809 AS2 (mov,%B0,%A0) CR_TAB
3810 AS2 (mov,%C0,%A0) CR_TAB
3811 AS2 (mov,%D0,%A0));
3813 len = t;
3815 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3816 AS1 (ror,%C0) CR_TAB
3817 AS1 (ror,%B0) CR_TAB
3818 AS1 (ror,%A0)),
3819 insn, operands, len, 4);
3820 return "";
3823 /* 8bit logic shift right ((unsigned char)x >> i) */
3825 const char *
3826 lshrqi3_out (rtx insn, rtx operands[], int *len)
3828 if (GET_CODE (operands[2]) == CONST_INT)
3830 int k;
3832 if (!len)
3833 len = &k;
3835 switch (INTVAL (operands[2]))
3837 default:
3838 if (INTVAL (operands[2]) < 8)
3839 break;
3841 *len = 1;
3842 return AS1 (clr,%0);
3844 case 1:
3845 *len = 1;
3846 return AS1 (lsr,%0);
3848 case 2:
3849 *len = 2;
3850 return (AS1 (lsr,%0) CR_TAB
3851 AS1 (lsr,%0));
3852 case 3:
3853 *len = 3;
3854 return (AS1 (lsr,%0) CR_TAB
3855 AS1 (lsr,%0) CR_TAB
3856 AS1 (lsr,%0));
3858 case 4:
3859 if (test_hard_reg_class (LD_REGS, operands[0]))
3861 *len=2;
3862 return (AS1 (swap,%0) CR_TAB
3863 AS2 (andi,%0,0x0f));
3865 *len = 4;
3866 return (AS1 (lsr,%0) CR_TAB
3867 AS1 (lsr,%0) CR_TAB
3868 AS1 (lsr,%0) CR_TAB
3869 AS1 (lsr,%0));
3871 case 5:
3872 if (test_hard_reg_class (LD_REGS, operands[0]))
3874 *len = 3;
3875 return (AS1 (swap,%0) CR_TAB
3876 AS1 (lsr,%0) CR_TAB
3877 AS2 (andi,%0,0x7));
3879 *len = 5;
3880 return (AS1 (lsr,%0) CR_TAB
3881 AS1 (lsr,%0) CR_TAB
3882 AS1 (lsr,%0) CR_TAB
3883 AS1 (lsr,%0) CR_TAB
3884 AS1 (lsr,%0));
3886 case 6:
3887 if (test_hard_reg_class (LD_REGS, operands[0]))
3889 *len = 4;
3890 return (AS1 (swap,%0) CR_TAB
3891 AS1 (lsr,%0) CR_TAB
3892 AS1 (lsr,%0) CR_TAB
3893 AS2 (andi,%0,0x3));
3895 *len = 6;
3896 return (AS1 (lsr,%0) CR_TAB
3897 AS1 (lsr,%0) CR_TAB
3898 AS1 (lsr,%0) CR_TAB
3899 AS1 (lsr,%0) CR_TAB
3900 AS1 (lsr,%0) CR_TAB
3901 AS1 (lsr,%0));
3903 case 7:
3904 *len = 3;
3905 return (AS1 (rol,%0) CR_TAB
3906 AS1 (clr,%0) CR_TAB
3907 AS1 (rol,%0));
3910 else if (CONSTANT_P (operands[2]))
3911 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3913 out_shift_with_cnt (AS1 (lsr,%0),
3914 insn, operands, len, 1);
3915 return "";
3918 /* 16bit logic shift right ((unsigned short)x >> i) */
3920 const char *
3921 lshrhi3_out (rtx insn, rtx operands[], int *len)
3923 if (GET_CODE (operands[2]) == CONST_INT)
3925 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3926 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3927 int k;
3928 int *t = len;
3930 if (!len)
3931 len = &k;
3933 switch (INTVAL (operands[2]))
3935 default:
3936 if (INTVAL (operands[2]) < 16)
3937 break;
3939 *len = 2;
3940 return (AS1 (clr,%B0) CR_TAB
3941 AS1 (clr,%A0));
3943 case 4:
3944 if (optimize_size && scratch)
3945 break; /* 5 */
3946 if (ldi_ok)
3948 *len = 6;
3949 return (AS1 (swap,%B0) CR_TAB
3950 AS1 (swap,%A0) CR_TAB
3951 AS2 (andi,%A0,0x0f) CR_TAB
3952 AS2 (eor,%A0,%B0) CR_TAB
3953 AS2 (andi,%B0,0x0f) CR_TAB
3954 AS2 (eor,%A0,%B0));
3956 if (scratch)
3958 *len = 7;
3959 return (AS1 (swap,%B0) CR_TAB
3960 AS1 (swap,%A0) CR_TAB
3961 AS2 (ldi,%3,0x0f) CR_TAB
3962 AS2 (and,%A0,%3) CR_TAB
3963 AS2 (eor,%A0,%B0) CR_TAB
3964 AS2 (and,%B0,%3) CR_TAB
3965 AS2 (eor,%A0,%B0));
3967 break; /* optimize_size ? 6 : 8 */
3969 case 5:
3970 if (optimize_size)
3971 break; /* scratch ? 5 : 6 */
3972 if (ldi_ok)
3974 *len = 8;
3975 return (AS1 (lsr,%B0) CR_TAB
3976 AS1 (ror,%A0) CR_TAB
3977 AS1 (swap,%B0) CR_TAB
3978 AS1 (swap,%A0) CR_TAB
3979 AS2 (andi,%A0,0x0f) CR_TAB
3980 AS2 (eor,%A0,%B0) CR_TAB
3981 AS2 (andi,%B0,0x0f) CR_TAB
3982 AS2 (eor,%A0,%B0));
3984 if (scratch)
3986 *len = 9;
3987 return (AS1 (lsr,%B0) CR_TAB
3988 AS1 (ror,%A0) CR_TAB
3989 AS1 (swap,%B0) CR_TAB
3990 AS1 (swap,%A0) CR_TAB
3991 AS2 (ldi,%3,0x0f) CR_TAB
3992 AS2 (and,%A0,%3) CR_TAB
3993 AS2 (eor,%A0,%B0) CR_TAB
3994 AS2 (and,%B0,%3) CR_TAB
3995 AS2 (eor,%A0,%B0));
3997 break; /* 10 */
3999 case 6:
4000 if (optimize_size)
4001 break; /* scratch ? 5 : 6 */
4002 *len = 9;
4003 return (AS1 (clr,__tmp_reg__) CR_TAB
4004 AS1 (lsl,%A0) CR_TAB
4005 AS1 (rol,%B0) CR_TAB
4006 AS1 (rol,__tmp_reg__) CR_TAB
4007 AS1 (lsl,%A0) CR_TAB
4008 AS1 (rol,%B0) CR_TAB
4009 AS1 (rol,__tmp_reg__) CR_TAB
4010 AS2 (mov,%A0,%B0) CR_TAB
4011 AS2 (mov,%B0,__tmp_reg__));
4013 case 7:
4014 *len = 5;
4015 return (AS1 (lsl,%A0) CR_TAB
4016 AS2 (mov,%A0,%B0) CR_TAB
4017 AS1 (rol,%A0) CR_TAB
4018 AS2 (sbc,%B0,%B0) CR_TAB
4019 AS1 (neg,%B0));
4021 case 8:
4022 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4023 AS1 (clr,%B0));
4025 case 9:
4026 *len = 3;
4027 return (AS2 (mov,%A0,%B0) CR_TAB
4028 AS1 (clr,%B0) CR_TAB
4029 AS1 (lsr,%A0));
4031 case 10:
4032 *len = 4;
4033 return (AS2 (mov,%A0,%B0) CR_TAB
4034 AS1 (clr,%B0) CR_TAB
4035 AS1 (lsr,%A0) CR_TAB
4036 AS1 (lsr,%A0));
4038 case 11:
4039 *len = 5;
4040 return (AS2 (mov,%A0,%B0) CR_TAB
4041 AS1 (clr,%B0) CR_TAB
4042 AS1 (lsr,%A0) CR_TAB
4043 AS1 (lsr,%A0) CR_TAB
4044 AS1 (lsr,%A0));
4046 case 12:
4047 if (ldi_ok)
4049 *len = 4;
4050 return (AS2 (mov,%A0,%B0) CR_TAB
4051 AS1 (clr,%B0) CR_TAB
4052 AS1 (swap,%A0) CR_TAB
4053 AS2 (andi,%A0,0x0f));
4055 if (scratch)
4057 *len = 5;
4058 return (AS2 (mov,%A0,%B0) CR_TAB
4059 AS1 (clr,%B0) CR_TAB
4060 AS1 (swap,%A0) CR_TAB
4061 AS2 (ldi,%3,0x0f) CR_TAB
4062 AS2 (and,%A0,%3));
4064 *len = 6;
4065 return (AS2 (mov,%A0,%B0) CR_TAB
4066 AS1 (clr,%B0) CR_TAB
4067 AS1 (lsr,%A0) CR_TAB
4068 AS1 (lsr,%A0) CR_TAB
4069 AS1 (lsr,%A0) CR_TAB
4070 AS1 (lsr,%A0));
4072 case 13:
4073 if (ldi_ok)
4075 *len = 5;
4076 return (AS2 (mov,%A0,%B0) CR_TAB
4077 AS1 (clr,%B0) CR_TAB
4078 AS1 (swap,%A0) CR_TAB
4079 AS1 (lsr,%A0) CR_TAB
4080 AS2 (andi,%A0,0x07));
4082 if (AVR_HAVE_MUL && scratch)
4084 *len = 5;
4085 return (AS2 (ldi,%3,0x08) CR_TAB
4086 AS2 (mul,%B0,%3) CR_TAB
4087 AS2 (mov,%A0,r1) CR_TAB
4088 AS1 (clr,%B0) CR_TAB
4089 AS1 (clr,__zero_reg__));
4091 if (optimize_size && scratch)
4092 break; /* 5 */
4093 if (scratch)
4095 *len = 6;
4096 return (AS2 (mov,%A0,%B0) CR_TAB
4097 AS1 (clr,%B0) CR_TAB
4098 AS1 (swap,%A0) CR_TAB
4099 AS1 (lsr,%A0) CR_TAB
4100 AS2 (ldi,%3,0x07) CR_TAB
4101 AS2 (and,%A0,%3));
4103 if (AVR_HAVE_MUL)
4105 *len = 6;
4106 return ("set" CR_TAB
4107 AS2 (bld,r1,3) CR_TAB
4108 AS2 (mul,%B0,r1) CR_TAB
4109 AS2 (mov,%A0,r1) CR_TAB
4110 AS1 (clr,%B0) CR_TAB
4111 AS1 (clr,__zero_reg__));
4113 *len = 7;
4114 return (AS2 (mov,%A0,%B0) CR_TAB
4115 AS1 (clr,%B0) CR_TAB
4116 AS1 (lsr,%A0) CR_TAB
4117 AS1 (lsr,%A0) CR_TAB
4118 AS1 (lsr,%A0) CR_TAB
4119 AS1 (lsr,%A0) CR_TAB
4120 AS1 (lsr,%A0));
4122 case 14:
4123 if (AVR_HAVE_MUL && ldi_ok)
4125 *len = 5;
4126 return (AS2 (ldi,%A0,0x04) CR_TAB
4127 AS2 (mul,%B0,%A0) CR_TAB
4128 AS2 (mov,%A0,r1) CR_TAB
4129 AS1 (clr,%B0) CR_TAB
4130 AS1 (clr,__zero_reg__));
4132 if (AVR_HAVE_MUL && scratch)
4134 *len = 5;
4135 return (AS2 (ldi,%3,0x04) CR_TAB
4136 AS2 (mul,%B0,%3) CR_TAB
4137 AS2 (mov,%A0,r1) CR_TAB
4138 AS1 (clr,%B0) CR_TAB
4139 AS1 (clr,__zero_reg__));
4141 if (optimize_size && ldi_ok)
4143 *len = 5;
4144 return (AS2 (mov,%A0,%B0) CR_TAB
4145 AS2 (ldi,%B0,6) "\n1:\t"
4146 AS1 (lsr,%A0) CR_TAB
4147 AS1 (dec,%B0) CR_TAB
4148 AS1 (brne,1b));
4150 if (optimize_size && scratch)
4151 break; /* 5 */
4152 *len = 6;
4153 return (AS1 (clr,%A0) CR_TAB
4154 AS1 (lsl,%B0) CR_TAB
4155 AS1 (rol,%A0) CR_TAB
4156 AS1 (lsl,%B0) CR_TAB
4157 AS1 (rol,%A0) CR_TAB
4158 AS1 (clr,%B0));
4160 case 15:
4161 *len = 4;
4162 return (AS1 (clr,%A0) CR_TAB
4163 AS1 (lsl,%B0) CR_TAB
4164 AS1 (rol,%A0) CR_TAB
4165 AS1 (clr,%B0));
4167 len = t;
4169 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4170 AS1 (ror,%A0)),
4171 insn, operands, len, 2);
4172 return "";
4175 /* 32bit logic shift right ((unsigned long)x >> i) */
4177 const char *
4178 lshrsi3_out (rtx insn, rtx operands[], int *len)
4180 if (GET_CODE (operands[2]) == CONST_INT)
4182 int k;
4183 int *t = len;
4185 if (!len)
4186 len = &k;
4188 switch (INTVAL (operands[2]))
4190 default:
4191 if (INTVAL (operands[2]) < 32)
4192 break;
4194 if (AVR_HAVE_MOVW)
4195 return *len = 3, (AS1 (clr,%D0) CR_TAB
4196 AS1 (clr,%C0) CR_TAB
4197 AS2 (movw,%A0,%C0));
4198 *len = 4;
4199 return (AS1 (clr,%D0) CR_TAB
4200 AS1 (clr,%C0) CR_TAB
4201 AS1 (clr,%B0) CR_TAB
4202 AS1 (clr,%A0));
4204 case 8:
4206 int reg0 = true_regnum (operands[0]);
4207 int reg1 = true_regnum (operands[1]);
4208 *len = 4;
4209 if (reg0 <= reg1)
4210 return (AS2 (mov,%A0,%B1) CR_TAB
4211 AS2 (mov,%B0,%C1) CR_TAB
4212 AS2 (mov,%C0,%D1) CR_TAB
4213 AS1 (clr,%D0));
4214 else
4215 return (AS1 (clr,%D0) CR_TAB
4216 AS2 (mov,%C0,%D1) CR_TAB
4217 AS2 (mov,%B0,%C1) CR_TAB
4218 AS2 (mov,%A0,%B1));
4221 case 16:
4223 int reg0 = true_regnum (operands[0]);
4224 int reg1 = true_regnum (operands[1]);
4226 if (reg0 == reg1 + 2)
4227 return *len = 2, (AS1 (clr,%C0) CR_TAB
4228 AS1 (clr,%D0));
4229 if (AVR_HAVE_MOVW)
4230 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4231 AS1 (clr,%C0) CR_TAB
4232 AS1 (clr,%D0));
4233 else
4234 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4235 AS2 (mov,%A0,%C1) CR_TAB
4236 AS1 (clr,%C0) CR_TAB
4237 AS1 (clr,%D0));
4240 case 24:
4241 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4242 AS1 (clr,%B0) CR_TAB
4243 AS1 (clr,%C0) CR_TAB
4244 AS1 (clr,%D0));
4246 case 31:
4247 *len = 6;
4248 return (AS1 (clr,%A0) CR_TAB
4249 AS2 (sbrc,%D0,7) CR_TAB
4250 AS1 (inc,%A0) CR_TAB
4251 AS1 (clr,%B0) CR_TAB
4252 AS1 (clr,%C0) CR_TAB
4253 AS1 (clr,%D0));
4255 len = t;
4257 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4258 AS1 (ror,%C0) CR_TAB
4259 AS1 (ror,%B0) CR_TAB
4260 AS1 (ror,%A0)),
4261 insn, operands, len, 4);
4262 return "";
4265 /* Modifies the length assigned to instruction INSN.
4266 LEN is the initially computed length of the insn. */
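/* (Added note: the output routines invoked below accept an optional length
   pointer; when called with a non-NULL pointer, as here, they only compute
   the exact instruction count for the actual operands instead of printing
   assembler text, and that count replaces the generic length estimate.) */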
4269 adjust_insn_length (rtx insn, int len)
4271 rtx patt = PATTERN (insn);
4272 rtx set;
4274 if (GET_CODE (patt) == SET)
4276 rtx op[10];
4277 op[1] = SET_SRC (patt);
4278 op[0] = SET_DEST (patt);
4279 if (general_operand (op[1], VOIDmode)
4280 && general_operand (op[0], VOIDmode))
4282 switch (GET_MODE (op[0]))
4284 case QImode:
4285 output_movqi (insn, op, &len);
4286 break;
4287 case HImode:
4288 output_movhi (insn, op, &len);
4289 break;
4290 case SImode:
4291 case SFmode:
4292 output_movsisf (insn, op, &len);
4293 break;
4294 default:
4295 break;
4298 else if (op[0] == cc0_rtx && REG_P (op[1]))
4300 switch (GET_MODE (op[1]))
4302 case HImode: out_tsthi (insn,&len); break;
4303 case SImode: out_tstsi (insn,&len); break;
4304 default: break;
4307 else if (GET_CODE (op[1]) == AND)
4309 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4311 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4312 if (GET_MODE (op[1]) == SImode)
4313 len = (((mask & 0xff) != 0xff)
4314 + ((mask & 0xff00) != 0xff00)
4315 + ((mask & 0xff0000L) != 0xff0000L)
4316 + ((mask & 0xff000000L) != 0xff000000L));
4317 else if (GET_MODE (op[1]) == HImode)
4318 len = (((mask & 0xff) != 0xff)
4319 + ((mask & 0xff00) != 0xff00));
4322 else if (GET_CODE (op[1]) == IOR)
4324 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4326 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4327 if (GET_MODE (op[1]) == SImode)
4328 len = (((mask & 0xff) != 0)
4329 + ((mask & 0xff00) != 0)
4330 + ((mask & 0xff0000L) != 0)
4331 + ((mask & 0xff000000L) != 0));
4332 else if (GET_MODE (op[1]) == HImode)
4333 len = (((mask & 0xff) != 0)
4334 + ((mask & 0xff00) != 0));
4338 set = single_set (insn);
4339 if (set)
4341 rtx op[10];
4343 op[1] = SET_SRC (set);
4344 op[0] = SET_DEST (set);
4346 if (GET_CODE (patt) == PARALLEL
4347 && general_operand (op[1], VOIDmode)
4348 && general_operand (op[0], VOIDmode))
4350 if (XVECLEN (patt, 0) == 2)
4351 op[2] = XVECEXP (patt, 0, 1);
4353 switch (GET_MODE (op[0]))
4355 case QImode:
4356 len = 2;
4357 break;
4358 case HImode:
4359 output_reload_inhi (insn, op, &len);
4360 break;
4361 case SImode:
4362 case SFmode:
4363 output_reload_insisf (insn, op, &len);
4364 break;
4365 default:
4366 break;
4369 else if (GET_CODE (op[1]) == ASHIFT
4370 || GET_CODE (op[1]) == ASHIFTRT
4371 || GET_CODE (op[1]) == LSHIFTRT)
4373 rtx ops[10];
4374 ops[0] = op[0];
4375 ops[1] = XEXP (op[1],0);
4376 ops[2] = XEXP (op[1],1);
4377 switch (GET_CODE (op[1]))
4379 case ASHIFT:
4380 switch (GET_MODE (op[0]))
4382 case QImode: ashlqi3_out (insn,ops,&len); break;
4383 case HImode: ashlhi3_out (insn,ops,&len); break;
4384 case SImode: ashlsi3_out (insn,ops,&len); break;
4385 default: break;
4387 break;
4388 case ASHIFTRT:
4389 switch (GET_MODE (op[0]))
4391 case QImode: ashrqi3_out (insn,ops,&len); break;
4392 case HImode: ashrhi3_out (insn,ops,&len); break;
4393 case SImode: ashrsi3_out (insn,ops,&len); break;
4394 default: break;
4396 break;
4397 case LSHIFTRT:
4398 switch (GET_MODE (op[0]))
4400 case QImode: lshrqi3_out (insn,ops,&len); break;
4401 case HImode: lshrhi3_out (insn,ops,&len); break;
4402 case SImode: lshrsi3_out (insn,ops,&len); break;
4403 default: break;
4405 break;
4406 default:
4407 break;
4411 return len;
4414 /* Return nonzero if register REG is dead after INSN. */
4417 reg_unused_after (rtx insn, rtx reg)
4419 return (dead_or_set_p (insn, reg)
4420 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4423 /* Return nonzero if REG is not used after INSN.
4424 We assume REG is a reload reg, and therefore does
4425 not live past labels. It may live past calls or jumps though. */
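/* (Added note: the loop below scans forward from INSN, returning 0 at any
   jump or at a later use of REG, treating a SEQUENCE -- for example a call
   together with a delay-slot insn -- as one unit, and returning 1 once REG
   is overwritten or the scan runs off the end of the insn stream.) */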
4428 _reg_unused_after (rtx insn, rtx reg)
4430 enum rtx_code code;
4431 rtx set;
4433 /* If the reg is set by this instruction, then it is safe for our
4434 case. Disregard the case where this is a store to memory, since
4435 we are checking a register used in the store address. */
4436 set = single_set (insn);
4437 if (set && GET_CODE (SET_DEST (set)) != MEM
4438 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4439 return 1;
4441 while ((insn = NEXT_INSN (insn)))
4443 rtx set;
4444 code = GET_CODE (insn);
4446 #if 0
4447 /* If this is a label that existed before reload, then the register
4448 is dead here. However, if this is a label added by reorg, then
4449 the register may still be live here. We can't tell the difference,
4450 so we just ignore labels completely. */
4451 if (code == CODE_LABEL)
4452 return 1;
4453 /* else */
4454 #endif
4456 if (!INSN_P (insn))
4457 continue;
4459 if (code == JUMP_INSN)
4460 return 0;
4462 /* If this is a sequence, we must handle them all at once.
4463 We could have for instance a call that sets the target register,
4464 and an insn in a delay slot that uses the register. In this case,
4465 we must return 0. */
4466 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4468 int i;
4469 int retval = 0;
4471 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4473 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4474 rtx set = single_set (this_insn);
4476 if (GET_CODE (this_insn) == CALL_INSN)
4477 code = CALL_INSN;
4478 else if (GET_CODE (this_insn) == JUMP_INSN)
4480 if (INSN_ANNULLED_BRANCH_P (this_insn))
4481 return 0;
4482 code = JUMP_INSN;
4485 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4486 return 0;
4487 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4489 if (GET_CODE (SET_DEST (set)) != MEM)
4490 retval = 1;
4491 else
4492 return 0;
4494 if (set == 0
4495 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4496 return 0;
4498 if (retval == 1)
4499 return 1;
4500 else if (code == JUMP_INSN)
4501 return 0;
4504 if (code == CALL_INSN)
4506 rtx tem;
4507 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4508 if (GET_CODE (XEXP (tem, 0)) == USE
4509 && REG_P (XEXP (XEXP (tem, 0), 0))
4510 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4511 return 0;
4512 if (call_used_regs[REGNO (reg)])
4513 return 1;
4516 set = single_set (insn);
4518 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4519 return 0;
4520 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4521 return GET_CODE (SET_DEST (set)) != MEM;
4522 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4523 return 0;
4525 return 1;
4528 /* Target hook for assembling integer objects. The AVR version needs
4529 special handling for references to certain labels. */
4531 static bool
4532 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4534 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4535 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4536 || GET_CODE (x) == LABEL_REF))
4538 fputs ("\t.word\tgs(", asm_out_file);
4539 output_addr_const (asm_out_file, x);
4540 fputs (")\n", asm_out_file);
4541 return true;
4543 return default_assemble_integer (x, size, aligned_p);
4546 /* The routine used to output NUL terminated strings. We use a special
4547 version of this for most svr4 targets because doing so makes the
4548 generated assembly code more compact (and thus faster to assemble)
4549 as well as more readable, especially for targets like the i386
4550 (where the only alternative is to output character sequences as
4551 comma separated lists of numbers). */
4553 void
4554 gas_output_limited_string(FILE *file, const char *str)
4556 const unsigned char *_limited_str = (const unsigned char *) str;
4557 unsigned ch;
4558 fprintf (file, "%s\"", STRING_ASM_OP);
4559 for (; (ch = *_limited_str); _limited_str++)
4561 int escape;
4562 switch (escape = ESCAPES[ch])
4564 case 0:
4565 putc (ch, file);
4566 break;
4567 case 1:
4568 fprintf (file, "\\%03o", ch);
4569 break;
4570 default:
4571 putc ('\\', file);
4572 putc (escape, file);
4573 break;
4576 fprintf (file, "\"\n");
4579 /* The routine used to output sequences of byte values. We use a special
4580 version of this for most svr4 targets because doing so makes the
4581 generated assembly code more compact (and thus faster to assemble)
4582 as well as more readable. Note that if we find subparts of the
4583 character sequence which end with NUL (and which are shorter than
4584 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4586 void
4587 gas_output_ascii(FILE *file, const char *str, size_t length)
4589 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4590 const unsigned char *limit = _ascii_bytes + length;
4591 unsigned bytes_in_chunk = 0;
4592 for (; _ascii_bytes < limit; _ascii_bytes++)
4594 const unsigned char *p;
4595 if (bytes_in_chunk >= 60)
4597 fprintf (file, "\"\n");
4598 bytes_in_chunk = 0;
4600 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4601 continue;
4602 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4604 if (bytes_in_chunk > 0)
4606 fprintf (file, "\"\n");
4607 bytes_in_chunk = 0;
4609 gas_output_limited_string (file, (const char*)_ascii_bytes);
4610 _ascii_bytes = p;
4612 else
4614 int escape;
4615 unsigned ch;
4616 if (bytes_in_chunk == 0)
4617 fprintf (file, "\t.ascii\t\"");
4618 switch (escape = ESCAPES[ch = *_ascii_bytes])
4620 case 0:
4621 putc (ch, file);
4622 bytes_in_chunk++;
4623 break;
4624 case 1:
4625 fprintf (file, "\\%03o", ch);
4626 bytes_in_chunk += 4;
4627 break;
4628 default:
4629 putc ('\\', file);
4630 putc (escape, file);
4631 bytes_in_chunk += 2;
4632 break;
4636 if (bytes_in_chunk > 0)
4637 fprintf (file, "\"\n");
4640 /* Return value is nonzero if pseudos that have been
4641 assigned to registers of class CLASS would likely be spilled
4642 because registers of CLASS are needed for spill registers. */
4644 enum reg_class
4645 class_likely_spilled_p (int c)
4647 return (c != ALL_REGS && c != ADDW_REGS);
4650 /* Valid attributes:
4651 progmem - place data in program memory;
4652 signal - make the function a hardware interrupt handler; after the
4653 function prologue, interrupts stay disabled;
4654 interrupt - make the function a hardware interrupt handler; after the
4655 function prologue, interrupts are enabled;
4656 naked - don't generate a function prologue/epilogue or a `ret' instruction.
4658 Only the `progmem' attribute is valid for a type. */
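/* (Added usage sketch; the declarations below are illustrative, following
   the usual avr-gcc conventions rather than anything defined in this file:

       const char msg[] __attribute__ ((progmem)) = "hello";
       void __vector_10 (void) __attribute__ ((signal));
       int main (void) __attribute__ ((OS_main));

   The handler functions that follow check such uses, e.g. that `signal'
   and `interrupt' functions are named "__vector_NN".) */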
4660 const struct attribute_spec avr_attribute_table[] =
4662 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4663 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4664 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4665 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4666 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4667 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4668 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
4669 { NULL, 0, 0, false, false, false, NULL }
4672 /* Handle a "progmem" attribute; arguments as in
4673 struct attribute_spec.handler. */
4674 static tree
4675 avr_handle_progmem_attribute (tree *node, tree name,
4676 tree args ATTRIBUTE_UNUSED,
4677 int flags ATTRIBUTE_UNUSED,
4678 bool *no_add_attrs)
4680 if (DECL_P (*node))
4682 if (TREE_CODE (*node) == TYPE_DECL)
4684 /* This is really a decl attribute, not a type attribute,
4685 but try to handle it for GCC 3.0 backwards compatibility. */
4687 tree type = TREE_TYPE (*node);
4688 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4689 tree newtype = build_type_attribute_variant (type, attr);
4691 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4692 TREE_TYPE (*node) = newtype;
4693 *no_add_attrs = true;
4695 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4697 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4699 warning (0, "only initialized variables can be placed into "
4700 "program memory area");
4701 *no_add_attrs = true;
4704 else
4706 warning (OPT_Wattributes, "%qs attribute ignored",
4707 IDENTIFIER_POINTER (name));
4708 *no_add_attrs = true;
4712 return NULL_TREE;
4715 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4716 struct attribute_spec.handler. */
4718 static tree
4719 avr_handle_fndecl_attribute (tree *node, tree name,
4720 tree args ATTRIBUTE_UNUSED,
4721 int flags ATTRIBUTE_UNUSED,
4722 bool *no_add_attrs)
4724 if (TREE_CODE (*node) != FUNCTION_DECL)
4726 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4727 IDENTIFIER_POINTER (name));
4728 *no_add_attrs = true;
4730 else
4732 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4733 const char *attr = IDENTIFIER_POINTER (name);
4735 /* If the function has the 'signal' or 'interrupt' attribute, test to
4736 make sure that the name of the function is "__vector_NN" so as to
4737 catch when the user misspells the interrupt vector name. */
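/* For example (illustrative): a handler named "__vector_12" passes the
   check below, while a misspelling such as "_vector_12" would trigger
   the warning.  */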
4739 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4741 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4743 warning (0, "%qs appears to be a misspelled interrupt handler",
4744 func_name);
4747 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4749 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4751 warning (0, "%qs appears to be a misspelled signal handler",
4752 func_name);
4757 return NULL_TREE;
4760 static tree
4761 avr_handle_fntype_attribute (tree *node, tree name,
4762 tree args ATTRIBUTE_UNUSED,
4763 int flags ATTRIBUTE_UNUSED,
4764 bool *no_add_attrs)
4766 if (TREE_CODE (*node) != FUNCTION_TYPE)
4768 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4769 IDENTIFIER_POINTER (name));
4770 *no_add_attrs = true;
4773 return NULL_TREE;
4776 /* Look for the attribute `progmem' in DECL.
4777 If found, return 1, otherwise return 0. */
4780 avr_progmem_p (tree decl, tree attributes)
4782 tree a;
4784 if (TREE_CODE (decl) != VAR_DECL)
4785 return 0;
4787 if (NULL_TREE
4788 != lookup_attribute ("progmem", attributes))
4789 return 1;
4791 a = decl;
4792 do
4793 a = TREE_TYPE (a);
4794 while (TREE_CODE (a) == ARRAY_TYPE);
4796 if (a == error_mark_node)
4797 return 0;
4799 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4800 return 1;
4802 return 0;
4805 /* Add the section attribute if the variable is in progmem. */
4807 static void
4808 avr_insert_attributes (tree node, tree *attributes)
4810 if (TREE_CODE (node) == VAR_DECL
4811 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4812 && avr_progmem_p (node, *attributes))
4814 static const char dsec[] = ".progmem.data";
4815 *attributes = tree_cons (get_identifier ("section"),
4816 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4817 *attributes);
4819 /* ??? This seems sketchy. Why can't the user declare the
4820 thing const in the first place? */
4821 TREE_READONLY (node) = 1;
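/* Sketch of the intended effect (an assumption based on the "section"
   attribute added above): a declaration such as

     const int crc_table[8] __attribute__ ((progmem)) = { 0, 1, 2, 3, 4, 5, 6, 7 };

   is emitted into .progmem.data instead of .data and is forced read-only.  */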
4825 /* A get_unnamed_section callback for switching to progmem_section. */
4827 static void
4828 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4830 fprintf (asm_out_file,
4831 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4832 AVR_HAVE_JMP_CALL ? "a" : "ax");
4833 /* Should already be aligned, this is just to be safe if it isn't. */
4834 fprintf (asm_out_file, "\t.p2align 1\n");
4837 /* Implement TARGET_ASM_INIT_SECTIONS. */
4839 static void
4840 avr_asm_init_sections (void)
4842 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4843 avr_output_progmem_section_asm_op,
4844 NULL);
4845 readonly_data_section = data_section;
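/* Presumably readonly_data_section is redirected to the data section above
   because the AVR core cannot read flash with ordinary load instructions;
   read-only data therefore lives in RAM unless the user places it in
   progmem explicitly.  */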
4848 static unsigned int
4849 avr_section_type_flags (tree decl, const char *name, int reloc)
4851 unsigned int flags = default_section_type_flags (decl, name, reloc);
4853 if (strncmp (name, ".noinit", 7) == 0)
4855 if (decl && TREE_CODE (decl) == VAR_DECL
4856 && DECL_INITIAL (decl) == NULL_TREE)
4857 flags |= SECTION_BSS; /* @nobits */
4858 else
4859 warning (0, "only uninitialized variables can be placed in the "
4860 ".noinit section");
4863 return flags;
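/* Illustrative use of the .noinit handling above (assumed typical user code):

     int reset_count __attribute__ ((section (".noinit")));

   The variable goes into a SECTION_BSS-style section that the startup code
   does not zero, so its previous value is preserved across a reset; giving
   it an initializer would trigger the warning emitted above.  */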
4866 /* Outputs some appropriate text to go at the start of an assembler
4867 file. */
4869 static void
4870 avr_file_start (void)
4872 if (avr_current_arch->asm_only)
4873 error ("MCU %qs supported for assembler only", avr_mcu_name);
4875 default_file_start ();
4877 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4878 fputs ("__SREG__ = 0x3f\n"
4879 "__SP_H__ = 0x3e\n"
4880 "__SP_L__ = 0x3d\n", asm_out_file);
4882 fputs ("__tmp_reg__ = 0\n"
4883 "__zero_reg__ = 1\n", asm_out_file);
4885 /* FIXME: output these only if there is anything in the .data / .bss
4886 sections - some code size could be saved by not linking in the
4887 initialization code from libgcc if one or both sections are empty. */
4888 fputs ("\t.global __do_copy_data\n", asm_out_file);
4889 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4892 /* Outputs some appropriate text to go at the end of an
4893 assembler file. */
4895 static void
4896 avr_file_end (void)
4900 /* Choose the order in which to allocate hard registers for
4901 pseudo-registers local to a basic block.
4903 Store the desired register order in the array `reg_alloc_order'.
4904 Element 0 should be the register to allocate first; element 1, the
4905 next register; and so on. */
4907 void
4908 order_regs_for_local_alloc (void)
4910 unsigned int i;
4911 static const int order_0[] = {
4912 24,25,
4913 18,19,
4914 20,21,
4915 22,23,
4916 30,31,
4917 26,27,
4918 28,29,
4919 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4920 0,1,
4921 32,33,34,35
4923 static const int order_1[] = {
4924 18,19,
4925 20,21,
4926 22,23,
4927 24,25,
4928 30,31,
4929 26,27,
4930 28,29,
4931 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4932 0,1,
4933 32,33,34,35
4935 static const int order_2[] = {
4936 25,24,
4937 23,22,
4938 21,20,
4939 19,18,
4940 30,31,
4941 26,27,
4942 28,29,
4943 17,16,
4944 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4945 1,0,
4946 32,33,34,35
4949 const int *order = (TARGET_ORDER_1 ? order_1 :
4950 TARGET_ORDER_2 ? order_2 :
4951 order_0);
4952 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4953 reg_alloc_order[i] = order[i];
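/* Assumed reading of the tables above: each order hands out the upper
   registers (r18-r25 and the pointer registers) before the call-saved low
   registers, keeping r0/r1 (__tmp_reg__/__zero_reg__) and the special
   registers 32-35 for last.  TARGET_ORDER_1 and TARGET_ORDER_2 appear to
   correspond to the -morder1/-morder2 options.  */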
4957 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
4958 cost of an RTX operand given its context. X is the rtx of the
4959 operand, MODE is its mode, and OUTER is the rtx_code of this
4960 operand's parent operator. */
4962 static int
4963 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4965 enum rtx_code code = GET_CODE (x);
4966 int total;
4968 switch (code)
4970 case REG:
4971 case SUBREG:
4972 return 0;
4974 case CONST_INT:
4975 case CONST_DOUBLE:
4976 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4978 default:
4979 break;
4982 total = 0;
4983 avr_rtx_costs (x, code, outer, &total);
4984 return total;
4987 /* The AVR backend's rtx_costs function. X is the rtx expression whose cost
4988 is to be calculated. Return true if the complete cost has been
4989 computed, and false if subexpressions should be scanned. In either
4990 case, *TOTAL contains the cost result. */
4992 static bool
4993 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4995 enum machine_mode mode = GET_MODE (x);
4996 HOST_WIDE_INT val;
4998 switch (code)
5000 case CONST_INT:
5001 case CONST_DOUBLE:
5002 /* Immediate constants are as cheap as registers. */
5003 *total = 0;
5004 return true;
5006 case MEM:
5007 case CONST:
5008 case LABEL_REF:
5009 case SYMBOL_REF:
5010 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5011 return true;
5013 case NEG:
5014 switch (mode)
5016 case QImode:
5017 case SFmode:
5018 *total = COSTS_N_INSNS (1);
5019 break;
5021 case HImode:
5022 *total = COSTS_N_INSNS (3);
5023 break;
5025 case SImode:
5026 *total = COSTS_N_INSNS (7);
5027 break;
5029 default:
5030 return false;
5032 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5033 return true;
5035 case ABS:
5036 switch (mode)
5038 case QImode:
5039 case SFmode:
5040 *total = COSTS_N_INSNS (1);
5041 break;
5043 default:
5044 return false;
5046 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5047 return true;
5049 case NOT:
5050 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5051 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5052 return true;
5054 case ZERO_EXTEND:
5055 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5056 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5057 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5058 return true;
5060 case SIGN_EXTEND:
5061 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5062 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5063 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5064 return true;
5066 case PLUS:
5067 switch (mode)
5069 case QImode:
5070 *total = COSTS_N_INSNS (1);
5071 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5072 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5073 break;
5075 case HImode:
5076 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5078 *total = COSTS_N_INSNS (2);
5079 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5081 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5082 *total = COSTS_N_INSNS (1);
5083 else
5084 *total = COSTS_N_INSNS (2);
5085 break;
5087 case SImode:
5088 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5090 *total = COSTS_N_INSNS (4);
5091 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5093 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5094 *total = COSTS_N_INSNS (1);
5095 else
5096 *total = COSTS_N_INSNS (4);
5097 break;
5099 default:
5100 return false;
5102 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5103 return true;
5105 case MINUS:
5106 case AND:
5107 case IOR:
5108 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5109 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5110 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5111 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5112 return true;
5114 case XOR:
5115 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5116 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5117 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5118 return true;
5120 case MULT:
5121 switch (mode)
5123 case QImode:
5124 if (AVR_HAVE_MUL)
5125 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5126 else if (optimize_size)
5127 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5128 else
5129 return false;
5130 break;
5132 case HImode:
5133 if (AVR_HAVE_MUL)
5134 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5135 else if (optimize_size)
5136 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5137 else
5138 return false;
5139 break;
5141 default:
5142 return false;
5144 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5145 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5146 return true;
5148 case DIV:
5149 case MOD:
5150 case UDIV:
5151 case UMOD:
5152 if (optimize_size)
5153 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5154 else
5155 return false;
5156 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5157 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5158 return true;
5160 case ASHIFT:
5161 switch (mode)
5163 case QImode:
5164 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5166 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5167 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5169 else
5171 val = INTVAL (XEXP (x, 1));
5172 if (val == 7)
5173 *total = COSTS_N_INSNS (3);
5174 else if (val >= 0 && val <= 7)
5175 *total = COSTS_N_INSNS (val);
5176 else
5177 *total = COSTS_N_INSNS (1);
5179 break;
5181 case HImode:
5182 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5184 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5185 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5187 else
5188 switch (INTVAL (XEXP (x, 1)))
5190 case 0:
5191 *total = 0;
5192 break;
5193 case 1:
5194 case 8:
5195 *total = COSTS_N_INSNS (2);
5196 break;
5197 case 9:
5198 *total = COSTS_N_INSNS (3);
5199 break;
5200 case 2:
5201 case 3:
5202 case 10:
5203 case 15:
5204 *total = COSTS_N_INSNS (4);
5205 break;
5206 case 7:
5207 case 11:
5208 case 12:
5209 *total = COSTS_N_INSNS (5);
5210 break;
5211 case 4:
5212 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5213 break;
5214 case 6:
5215 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5216 break;
5217 case 5:
5218 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5219 break;
5220 default:
5221 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5222 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5224 break;
5226 case SImode:
5227 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5229 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5230 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5232 else
5233 switch (INTVAL (XEXP (x, 1)))
5235 case 0:
5236 *total = 0;
5237 break;
5238 case 24:
5239 *total = COSTS_N_INSNS (3);
5240 break;
5241 case 1:
5242 case 8:
5243 case 16:
5244 *total = COSTS_N_INSNS (4);
5245 break;
5246 case 31:
5247 *total = COSTS_N_INSNS (6);
5248 break;
5249 case 2:
5250 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5251 break;
5252 default:
5253 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5254 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5256 break;
5258 default:
5259 return false;
5261 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5262 return true;
5264 case ASHIFTRT:
5265 switch (mode)
5267 case QImode:
5268 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5270 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5271 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5273 else
5275 val = INTVAL (XEXP (x, 1));
5276 if (val == 6)
5277 *total = COSTS_N_INSNS (4);
5278 else if (val == 7)
5279 *total = COSTS_N_INSNS (2);
5280 else if (val >= 0 && val <= 7)
5281 *total = COSTS_N_INSNS (val);
5282 else
5283 *total = COSTS_N_INSNS (1);
5285 break;
5287 case HImode:
5288 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5290 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5291 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5293 else
5294 switch (INTVAL (XEXP (x, 1)))
5296 case 0:
5297 *total = 0;
5298 break;
5299 case 1:
5300 *total = COSTS_N_INSNS (2);
5301 break;
5302 case 15:
5303 *total = COSTS_N_INSNS (3);
5304 break;
5305 case 2:
5306 case 7:
5307 case 8:
5308 case 9:
5309 *total = COSTS_N_INSNS (4);
5310 break;
5311 case 10:
5312 case 14:
5313 *total = COSTS_N_INSNS (5);
5314 break;
5315 case 11:
5316 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5317 break;
5318 case 12:
5319 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5320 break;
5321 case 6:
5322 case 13:
5323 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5324 break;
5325 default:
5326 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5327 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5329 break;
5331 case SImode:
5332 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5334 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5335 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5337 else
5338 switch (INTVAL (XEXP (x, 1)))
5340 case 0:
5341 *total = 0;
5342 break;
5343 case 1:
5344 *total = COSTS_N_INSNS (4);
5345 break;
5346 case 8:
5347 case 16:
5348 case 24:
5349 *total = COSTS_N_INSNS (6);
5350 break;
5351 case 2:
5352 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5353 break;
5354 case 31:
5355 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5356 break;
5357 default:
5358 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5359 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5361 break;
5363 default:
5364 return false;
5366 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5367 return true;
5369 case LSHIFTRT:
5370 switch (mode)
5372 case QImode:
5373 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5375 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5376 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5378 else
5380 val = INTVAL (XEXP (x, 1));
5381 if (val == 7)
5382 *total = COSTS_N_INSNS (3);
5383 else if (val >= 0 && val <= 7)
5384 *total = COSTS_N_INSNS (val);
5385 else
5386 *total = COSTS_N_INSNS (1);
5388 break;
5390 case HImode:
5391 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5393 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5394 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5396 else
5397 switch (INTVAL (XEXP (x, 1)))
5399 case 0:
5400 *total = 0;
5401 break;
5402 case 1:
5403 case 8:
5404 *total = COSTS_N_INSNS (2);
5405 break;
5406 case 9:
5407 *total = COSTS_N_INSNS (3);
5408 break;
5409 case 2:
5410 case 10:
5411 case 15:
5412 *total = COSTS_N_INSNS (4);
5413 break;
5414 case 7:
5415 case 11:
5416 *total = COSTS_N_INSNS (5);
5417 break;
5418 case 3:
5419 case 12:
5420 case 13:
5421 case 14:
5422 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5423 break;
5424 case 4:
5425 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5426 break;
5427 case 5:
5428 case 6:
5429 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5430 break;
5431 default:
5432 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5433 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5435 break;
5437 case SImode:
5438 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5440 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5441 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5443 else
5444 switch (INTVAL (XEXP (x, 1)))
5446 case 0:
5447 *total = 0;
5448 break;
5449 case 1:
5450 *total = COSTS_N_INSNS (4);
5451 break;
5452 case 2:
5453 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5454 break;
5455 case 8:
5456 case 16:
5457 case 24:
5458 *total = COSTS_N_INSNS (4);
5459 break;
5460 case 31:
5461 *total = COSTS_N_INSNS (6);
5462 break;
5463 default:
5464 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5465 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5467 break;
5469 default:
5470 return false;
5472 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5473 return true;
5475 case COMPARE:
5476 switch (GET_MODE (XEXP (x, 0)))
5478 case QImode:
5479 *total = COSTS_N_INSNS (1);
5480 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5481 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5482 break;
5484 case HImode:
5485 *total = COSTS_N_INSNS (2);
5486 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5487 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5488 else if (INTVAL (XEXP (x, 1)) != 0)
5489 *total += COSTS_N_INSNS (1);
5490 break;
5492 case SImode:
5493 *total = COSTS_N_INSNS (4);
5494 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5495 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5496 else if (INTVAL (XEXP (x, 1)) != 0)
5497 *total += COSTS_N_INSNS (3);
5498 break;
5500 default:
5501 return false;
5503 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5504 return true;
5506 default:
5507 break;
5509 return false;
5512 /* Calculate the cost of a memory address. */
5514 static int
5515 avr_address_cost (rtx x)
5517 if (GET_CODE (x) == PLUS
5518 && GET_CODE (XEXP (x,1)) == CONST_INT
5519 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5520 && INTVAL (XEXP (x,1)) >= 61)
5521 return 18;
5522 if (CONSTANT_ADDRESS_P (x))
5524 if (optimize > 0 && io_address_operand (x, QImode))
5525 return 2;
5526 return 4;
5528 return 4;
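/* Worked examples of the costs above (the 18 presumably reflects that such a
   displacement exceeds the ldd range given by MAX_LD_OFFSET and forces a
   pointer adjustment): (plus (reg Y) (const_int 62)) -> 18, an I/O-mapped
   constant address -> 2 when optimizing, any other constant address -> 4.  */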
5531 /* Test for the extra memory constraint 'Q':
5532 a memory address based on the Y or Z pointer with a valid displacement. */
5535 extra_constraint_Q (rtx x)
5537 if (GET_CODE (XEXP (x,0)) == PLUS
5538 && REG_P (XEXP (XEXP (x,0), 0))
5539 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5540 && (INTVAL (XEXP (XEXP (x,0), 1))
5541 <= MAX_LD_OFFSET (GET_MODE (x))))
5543 rtx xx = XEXP (XEXP (x,0), 0);
5544 int regno = REGNO (xx);
5545 if (TARGET_ALL_DEBUG)
5547 fprintf (stderr, ("extra_constraint:\n"
5548 "reload_completed: %d\n"
5549 "reload_in_progress: %d\n"),
5550 reload_completed, reload_in_progress);
5551 debug_rtx (x);
5553 if (regno >= FIRST_PSEUDO_REGISTER)
5554 return 1; /* allocate pseudos */
5555 else if (regno == REG_Z || regno == REG_Y)
5556 return 1; /* strictly check */
5557 else if (xx == frame_pointer_rtx
5558 || xx == arg_pointer_rtx)
5559 return 1; /* XXX frame & arg pointer checks */
5561 return 0;
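/* A schematic example of an operand accepted by the 'Q' constraint above
   (illustrative RTL, not taken from a compiler dump):

     (mem:HI (plus:HI (reg:HI REG_Y) (const_int 10)))

   i.e. a Y- or Z-based address whose displacement fits within
   MAX_LD_OFFSET for the access mode.  */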
5564 /* Convert condition code CONDITION to the valid AVR condition code. */
5566 RTX_CODE
5567 avr_normalize_condition (RTX_CODE condition)
5569 switch (condition)
5571 case GT:
5572 return GE;
5573 case GTU:
5574 return GEU;
5575 case LE:
5576 return LT;
5577 case LEU:
5578 return LTU;
5579 default:
5580 gcc_unreachable ();
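/* Worked example (assuming the reason for this mapping is that AVR branches
   support GE/GEU/LT/LTU but not GT/GTU/LE/LEU directly): a comparison
   "x > 5" becomes "x >= 6", i.e. GT is normalized to GE here while
   avr_reorg below bumps the constant via INTVAL (x) + 1.  */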
5584 /* Machine-dependent reorg pass: optimize conditional jumps by rewriting compare/branch pairs. */
5586 static void
5587 avr_reorg (void)
5589 rtx insn, pattern;
5591 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5593 if (! (GET_CODE (insn) == INSN
5594 || GET_CODE (insn) == CALL_INSN
5595 || GET_CODE (insn) == JUMP_INSN)
5596 || !single_set (insn))
5597 continue;
5599 pattern = PATTERN (insn);
5601 if (GET_CODE (pattern) == PARALLEL)
5602 pattern = XVECEXP (pattern, 0, 0);
5603 if (GET_CODE (pattern) == SET
5604 && SET_DEST (pattern) == cc0_rtx
5605 && compare_diff_p (insn))
5607 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5609 /* We are now looking at a compare insn. */
5611 pattern = SET_SRC (pattern);
5612 if (true_regnum (XEXP (pattern,0)) >= 0
5613 && true_regnum (XEXP (pattern,1)) >= 0 )
5615 rtx x = XEXP (pattern,0);
5616 rtx next = next_real_insn (insn);
5617 rtx pat = PATTERN (next);
5618 rtx src = SET_SRC (pat);
5619 rtx t = XEXP (src,0);
5620 PUT_CODE (t, swap_condition (GET_CODE (t)));
5621 XEXP (pattern,0) = XEXP (pattern,1);
5622 XEXP (pattern,1) = x;
5623 INSN_CODE (next) = -1;
5625 else if (true_regnum (XEXP (pattern,0)) >= 0
5626 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5628 rtx x = XEXP (pattern,1);
5629 rtx next = next_real_insn (insn);
5630 rtx pat = PATTERN (next);
5631 rtx src = SET_SRC (pat);
5632 rtx t = XEXP (src,0);
5633 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5635 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5637 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5638 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5639 INSN_CODE (next) = -1;
5640 INSN_CODE (insn) = -1;
5644 else if (true_regnum (SET_SRC (pattern)) >= 0)
5646 /* This is a tst insn */
5647 rtx next = next_real_insn (insn);
5648 rtx pat = PATTERN (next);
5649 rtx src = SET_SRC (pat);
5650 rtx t = XEXP (src,0);
5652 PUT_CODE (t, swap_condition (GET_CODE (t)));
5653 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5654 SET_SRC (pattern));
5655 INSN_CODE (next) = -1;
5656 INSN_CODE (insn) = -1;
5662 /* Return the register number used for the function return value. */
5665 avr_ret_register (void)
5667 return 24;
5670 /* Create an RTX representing the place where a
5671 library function returns a value of mode MODE. */
5674 avr_libcall_value (enum machine_mode mode)
5676 int offs = GET_MODE_SIZE (mode);
5677 if (offs < 2)
5678 offs = 2;
5679 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
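/* Worked examples of the computation above, assuming RET_REGISTER expands
   to avr_ret_register () == 24: a 2-byte HImode value is returned in
   r24/r25, a 4-byte SImode or SFmode value in r22..r25, and a 1-byte value
   is widened to the 2-byte slot starting at r24.  */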
5682 /* Create an RTX representing the place where a
5683 function returns a value of data type TYPE. */
5686 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
5688 unsigned int offs;
5690 if (TYPE_MODE (type) != BLKmode)
5691 return avr_libcall_value (TYPE_MODE (type));
5693 offs = int_size_in_bytes (type);
5694 if (offs < 2)
5695 offs = 2;
5696 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5697 offs = GET_MODE_SIZE (SImode);
5698 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5699 offs = GET_MODE_SIZE (DImode);
5701 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5704 /* Places additional restrictions on the register class to
5705 use when it is necessary to copy value X into a register
5706 in class CLASS. */
5708 enum reg_class
5709 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
5711 return class;
5715 test_hard_reg_class (enum reg_class class, rtx x)
5717 int regno = true_regnum (x);
5718 if (regno < 0)
5719 return 0;
5721 if (TEST_HARD_REG_CLASS (class, regno))
5722 return 1;
5724 return 0;
5729 jump_over_one_insn_p (rtx insn, rtx dest)
5731 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5732 ? XEXP (dest, 0)
5733 : dest);
5734 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5735 int dest_addr = INSN_ADDRESSES (uid);
5736 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5739 /* Returns 1 if a value of mode MODE can be stored starting with hard
5740 register number REGNO. On the enhanced core, anything larger than
5741 1 byte must start in an even-numbered register for "movw" to work
5742 (this way we don't have to check for odd registers everywhere). */
5745 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5747 /* Disallow QImode in stack pointer regs. */
5748 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5749 return 0;
5751 /* The only thing that can go into registers r28:r29 is a Pmode. */
5752 if (regno == REG_Y && mode == Pmode)
5753 return 1;
5755 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5756 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5757 return 0;
5759 if (mode == QImode)
5760 return 1;
5762 /* Modes larger than QImode occupy consecutive registers. */
5763 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5764 return 0;
5766 /* All modes larger than QImode should start in an even register. */
5767 return !(regno & 1);
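/* Examples of the checks above: HImode may start in r24 (even) but not in
   r25 (odd); QImode is rejected in the stack-pointer bytes; and only a
   Pmode value starting at r28 may overlap the r28:r29 frame-pointer pair.  */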
5770 const char *
5771 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5773 int tmp;
5774 if (!len)
5775 len = &tmp;
5777 if (GET_CODE (operands[1]) == CONST_INT)
5779 int val = INTVAL (operands[1]);
5780 if ((val & 0xff) == 0)
5782 *len = 3;
5783 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5784 AS2 (ldi,%2,hi8(%1)) CR_TAB
5785 AS2 (mov,%B0,%2));
5787 else if ((val & 0xff00) == 0)
5789 *len = 3;
5790 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5791 AS2 (mov,%A0,%2) CR_TAB
5792 AS2 (mov,%B0,__zero_reg__));
5794 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5796 *len = 3;
5797 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5798 AS2 (mov,%A0,%2) CR_TAB
5799 AS2 (mov,%B0,%2));
5802 *len = 4;
5803 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5804 AS2 (mov,%A0,%2) CR_TAB
5805 AS2 (ldi,%2,hi8(%1)) CR_TAB
5806 AS2 (mov,%B0,%2));
5810 const char *
5811 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5813 rtx src = operands[1];
5814 int cnst = (GET_CODE (src) == CONST_INT);
5816 if (len)
5818 if (cnst)
5819 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5820 + ((INTVAL (src) & 0xff00) != 0)
5821 + ((INTVAL (src) & 0xff0000) != 0)
5822 + ((INTVAL (src) & 0xff000000) != 0);
5823 else
5824 *len = 8;
5826 return "";
5829 if (cnst && ((INTVAL (src) & 0xff) == 0))
5830 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5831 else
5833 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5834 output_asm_insn (AS2 (mov, %A0, %2), operands);
5836 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5837 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5838 else
5840 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5841 output_asm_insn (AS2 (mov, %B0, %2), operands);
5843 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5844 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5845 else
5847 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5848 output_asm_insn (AS2 (mov, %C0, %2), operands);
5850 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5851 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5852 else
5854 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5855 output_asm_insn (AS2 (mov, %D0, %2), operands);
5857 return "";
5860 void
5861 avr_output_bld (rtx operands[], int bit_nr)
5863 static char s[] = "bld %A0,0";
5865 s[5] = 'A' + (bit_nr >> 3);
5866 s[8] = '0' + (bit_nr & 7);
5867 output_asm_insn (s, operands);
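/* Worked example of the template patching above: for bit_nr == 10 the
   string becomes "bld %B0,2", since 'A' + (10 >> 3) == 'B' and
   '0' + (10 & 7) == '2'.  */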
5870 void
5871 avr_output_addr_vec_elt (FILE *stream, int value)
5873 switch_to_section (progmem_section);
5874 if (AVR_HAVE_JMP_CALL)
5875 fprintf (stream, "\t.word gs(.L%d)\n", value);
5876 else
5877 fprintf (stream, "\trjmp .L%d\n", value);
5880 /* Returns 1 if SCRATCH is safe to be allocated as a scratch
5881 register (for a define_peephole2) in the current function. */
5884 avr_peep2_scratch_safe (rtx scratch)
5886 if ((interrupt_function_p (current_function_decl)
5887 || signal_function_p (current_function_decl))
5888 && leaf_function_p ())
5890 int first_reg = true_regnum (scratch);
5891 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5892 int reg;
5894 for (reg = first_reg; reg <= last_reg; reg++)
5896 if (!df_regs_ever_live_p (reg))
5897 return 0;
5900 return 1;
5903 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5904 or memory location in the I/O space (QImode only).
5906 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5907 Operand 1: register operand to test, or CONST_INT memory address.
5908 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5909 Operand 3: label to jump to if the test is true. */
5911 const char *
5912 avr_out_sbxx_branch (rtx insn, rtx operands[])
5914 enum rtx_code comp = GET_CODE (operands[0]);
5915 int long_jump = (get_attr_length (insn) >= 4);
5916 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5918 if (comp == GE)
5919 comp = EQ;
5920 else if (comp == LT)
5921 comp = NE;
5923 if (reverse)
5924 comp = reverse_condition (comp);
5926 if (GET_CODE (operands[1]) == CONST_INT)
5928 if (INTVAL (operands[1]) < 0x40)
5930 if (comp == EQ)
5931 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5932 else
5933 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5935 else
5937 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5938 if (comp == EQ)
5939 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5940 else
5941 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5944 else /* GET_CODE (operands[1]) == REG */
5946 if (GET_MODE (operands[1]) == QImode)
5948 if (comp == EQ)
5949 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5950 else
5951 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5953 else /* HImode or SImode */
5955 static char buf[] = "sbrc %A1,0";
5956 int bit_nr = exact_log2 (INTVAL (operands[2])
5957 & GET_MODE_MASK (GET_MODE (operands[1])));
5959 buf[3] = (comp == EQ) ? 's' : 'c';
5960 buf[6] = 'A' + (bit_nr >> 3);
5961 buf[9] = '0' + (bit_nr & 7);
5962 output_asm_insn (buf, operands);
5966 if (long_jump)
5967 return (AS1 (rjmp,.+4) CR_TAB
5968 AS1 (jmp,%3));
5969 if (!reverse)
5970 return AS1 (rjmp,%3);
5971 return "";
5974 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5976 static void
5977 avr_asm_out_ctor (rtx symbol, int priority)
5979 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5980 default_ctor_section_asm_out_constructor (symbol, priority);
5983 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5985 static void
5986 avr_asm_out_dtor (rtx symbol, int priority)
5988 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5989 default_dtor_section_asm_out_destructor (symbol, priority);
5992 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5994 static bool
5995 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5997 if (TYPE_MODE (type) == BLKmode)
5999 HOST_WIDE_INT size = int_size_in_bytes (type);
6000 return (size == -1 || size > 8);
6002 else
6003 return false;
6006 #include "gt-avr.h"