[official-gcc.git] / gcc / config / avr / avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR microcontrollers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "df.h"
48 /* Maximum allowed offset for an address in the LD instruction.  */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
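/* Illustrative note: GET_MODE_SIZE is 1, 2 and 4 for QImode, HImode and
   SImode, so MAX_LD_OFFSET yields 63, 62 and 60 respectively -- which is
   why the move output routines below use displacements such as Y+63,
   Y+62/Y+63 and Y+60..Y+63 when going through the Y pointer.  */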
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_OS_main_function_p (tree);
56 static int avr_regs_to_save (HARD_REG_SET *);
57 static int get_sequence_length (rtx insns);
58 static int sequent_regs_live (void);
59 static const char *ptrreg_to_str (int);
60 static const char *cond_string (enum rtx_code);
61 static int avr_num_arg_regs (enum machine_mode, tree);
63 static RTX_CODE compare_condition (rtx insn);
64 static int compare_sign_p (rtx insn);
65 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
66 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
68 const struct attribute_spec avr_attribute_table[];
69 static bool avr_assemble_integer (rtx, unsigned int, int);
70 static void avr_file_start (void);
71 static void avr_file_end (void);
72 static void avr_asm_function_end_prologue (FILE *);
73 static void avr_asm_function_begin_epilogue (FILE *);
74 static void avr_insert_attributes (tree, tree *);
75 static void avr_asm_init_sections (void);
76 static unsigned int avr_section_type_flags (tree, const char *, int);
78 static void avr_reorg (void);
79 static void avr_asm_out_ctor (rtx, int);
80 static void avr_asm_out_dtor (rtx, int);
81 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
82 static bool avr_rtx_costs (rtx, int, int, int *);
83 static int avr_address_cost (rtx);
84 static bool avr_return_in_memory (const_tree, const_tree);
85 static struct machine_function * avr_init_machine_status (void);
86 /* Allocate registers from r25 down to r8 for function call parameters.  */
87 #define FIRST_CUM_REG 26
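/* Illustrative note: argument registers are assigned downward from here.
   Since function_arg () returns gen_rtx_REG (mode, cum->regno - bytes) and
   cum->regno starts at 26, a first two-byte argument lands in r25:r24, a
   following one-byte argument in r22 (r23 is left as a hole, because
   avr_num_arg_regs rounds odd sizes up), and so on down to r8.  */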
89 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
90 static GTY(()) rtx tmp_reg_rtx;
92 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
93 static GTY(()) rtx zero_reg_rtx;
95 /* AVR register names {"r0", "r1", ..., "r31"} */
96 static const char *const avr_regnames[] = REGISTER_NAMES;
98 /* This holds the last insn address. */
99 static int last_insn_address = 0;
101 /* Preprocessor macros to define depending on MCU type. */
102 const char *avr_extra_arch_macro;
104 /* Current architecture. */
105 const struct base_arch_s *avr_current_arch;
107 section *progmem_section;
109 static const struct base_arch_s avr_arch_types[] = {
110 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
111 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
112 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
113 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
114 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
115 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
116 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
117 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
118 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
119 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
120 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
123 /* These names are used as the index into the avr_arch_types[] table
124 above. */
126 enum avr_arch
128 ARCH_UNKNOWN,
129 ARCH_AVR1,
130 ARCH_AVR2,
131 ARCH_AVR25,
132 ARCH_AVR3,
133 ARCH_AVR31,
134 ARCH_AVR35,
135 ARCH_AVR4,
136 ARCH_AVR5,
137 ARCH_AVR51,
138 ARCH_AVR6
141 struct mcu_type_s {
142 const char *const name;
143 int arch; /* index in avr_arch_types[] */
144 /* Must lie outside user's namespace. NULL == no macro. */
145 const char *const macro;
148 /* List of all known AVR MCU types - if updated, it has to be kept
149 in sync in several places (FIXME: is there a better way?):
150 - here
151 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
152 - t-avr (MULTILIB_MATCHES)
153 - gas/config/tc-avr.c
154 - avr-libc */
156 static const struct mcu_type_s avr_mcu_types[] = {
157 /* Classic, <= 8K. */
158 { "avr2", ARCH_AVR2, NULL },
159 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
160 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
161 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
162 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
163 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
164 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
165 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
166 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
167 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
168 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
169 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
170 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
171 /* Classic + MOVW, <= 8K. */
172 { "avr25", ARCH_AVR25, NULL },
173 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
174 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
175 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
176 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
177 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
178 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
179 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
180 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
181 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
182 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
183 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
184 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
185 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
186 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
187 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
188 /* Classic, > 8K, <= 64K. */
189 { "avr3", ARCH_AVR3, NULL },
190 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
191 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
192 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
193 /* Classic, == 128K. */
194 { "avr31", ARCH_AVR31, NULL },
195 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
196 /* Classic + MOVW + JMP/CALL. */
197 { "avr35", ARCH_AVR35, NULL },
198 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
199 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
200 /* Enhanced, <= 8K. */
201 { "avr4", ARCH_AVR4, NULL },
202 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
203 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
204 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
205 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
206 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
207 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
208 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
209 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
210 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
211 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
212 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
213 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
214 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
215 /* Enhanced, > 8K, <= 64K. */
216 { "avr5", ARCH_AVR5, NULL },
217 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
218 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
219 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
220 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
221 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
222 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
223 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
224 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
225 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
226 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
227 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
228 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
229 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
230 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
231 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
232 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
233 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
234 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
235 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
236 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
237 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
238 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
239 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
240 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
241 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
242 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
243 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
244 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
245 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
246 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
247 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
248 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
249 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
250 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
251 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
252 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
253 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
254 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
255 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
256 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
257 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
258 /* Enhanced, == 128K. */
259 { "avr51", ARCH_AVR51, NULL },
260 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
261 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
262 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
263 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
264 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
265 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
266 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
267 /* 3-Byte PC. */
268 { "avr6", ARCH_AVR6, NULL },
269 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
270 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
271 /* Assembler only. */
272 { "avr1", ARCH_AVR1, NULL },
273 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
274 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
275 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
276 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
277 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
278 { NULL, ARCH_UNKNOWN, NULL }
281 int avr_case_values_threshold = 30000;
283 /* Initialize the GCC target structure. */
284 #undef TARGET_ASM_ALIGNED_HI_OP
285 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
286 #undef TARGET_ASM_ALIGNED_SI_OP
287 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
288 #undef TARGET_ASM_UNALIGNED_HI_OP
289 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
290 #undef TARGET_ASM_UNALIGNED_SI_OP
291 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
292 #undef TARGET_ASM_INTEGER
293 #define TARGET_ASM_INTEGER avr_assemble_integer
294 #undef TARGET_ASM_FILE_START
295 #define TARGET_ASM_FILE_START avr_file_start
296 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
297 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
298 #undef TARGET_ASM_FILE_END
299 #define TARGET_ASM_FILE_END avr_file_end
301 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
302 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
303 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
304 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
305 #undef TARGET_ATTRIBUTE_TABLE
306 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
307 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
308 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
309 #undef TARGET_INSERT_ATTRIBUTES
310 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
311 #undef TARGET_SECTION_TYPE_FLAGS
312 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
313 #undef TARGET_RTX_COSTS
314 #define TARGET_RTX_COSTS avr_rtx_costs
315 #undef TARGET_ADDRESS_COST
316 #define TARGET_ADDRESS_COST avr_address_cost
317 #undef TARGET_MACHINE_DEPENDENT_REORG
318 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
320 #undef TARGET_RETURN_IN_MEMORY
321 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
323 #undef TARGET_STRICT_ARGUMENT_NAMING
324 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
326 struct gcc_target targetm = TARGET_INITIALIZER;
328 void
329 avr_override_options (void)
331 const struct mcu_type_s *t;
333 flag_delete_null_pointer_checks = 0;
335 for (t = avr_mcu_types; t->name; t++)
336 if (strcmp (t->name, avr_mcu_name) == 0)
337 break;
339 if (!t->name)
341 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
342 avr_mcu_name);
343 for (t = avr_mcu_types; t->name; t++)
344 fprintf (stderr," %s\n", t->name);
347 avr_current_arch = &avr_arch_types[t->arch];
348 avr_extra_arch_macro = t->macro;
350 if (optimize && !TARGET_NO_TABLEJUMP)
351 avr_case_values_threshold =
352 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
354 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
355 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
357 init_machine_status = avr_init_machine_status;
360 /* Return the register class from the register number.  */
362 static const int reg_class_tab[]={
363 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
364 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
365 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
366 GENERAL_REGS, /* r0 - r15 */
367 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
368 LD_REGS, /* r16 - 23 */
369 ADDW_REGS,ADDW_REGS, /* r24,r25 */
370 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
371 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
372 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
373 STACK_REG,STACK_REG /* SPL,SPH */
376 /* Function to set up the backend function structure. */
378 static struct machine_function *
379 avr_init_machine_status (void)
381 return ((struct machine_function *)
382 ggc_alloc_cleared (sizeof (struct machine_function)));
385 /* Return register class for register R. */
387 enum reg_class
388 avr_regno_reg_class (int r)
390 if (r <= 33)
391 return reg_class_tab[r];
392 return ALL_REGS;
395 /* Return nonzero if FUNC is a naked function. */
397 static int
398 avr_naked_function_p (tree func)
400 tree a;
402 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
404 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
405 return a != NULL_TREE;
408 /* Return nonzero if FUNC is an interrupt function as specified
409 by the "interrupt" attribute. */
411 static int
412 interrupt_function_p (tree func)
414 tree a;
416 if (TREE_CODE (func) != FUNCTION_DECL)
417 return 0;
419 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
420 return a != NULL_TREE;
423 /* Return nonzero if FUNC is a signal function as specified
424 by the "signal" attribute. */
426 static int
427 signal_function_p (tree func)
429 tree a;
431 if (TREE_CODE (func) != FUNCTION_DECL)
432 return 0;
434 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
435 return a != NULL_TREE;
438 /* Return nonzero if FUNC is an OS_task function.  */
440 static int
441 avr_OS_task_function_p (tree func)
443 tree a;
445 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
447 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
448 return a != NULL_TREE;
451 /* Return nonzero if FUNC is an OS_main function.  */
453 static int
454 avr_OS_main_function_p (tree func)
456 tree a;
458 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
460 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
461 return a != NULL_TREE;
464 /* Return the number of hard registers to push/pop in the prologue/epilogue
465 of the current function, and optionally store these registers in SET. */
467 static int
468 avr_regs_to_save (HARD_REG_SET *set)
470 int reg, count;
471 int int_or_sig_p = (interrupt_function_p (current_function_decl)
472 || signal_function_p (current_function_decl));
474 if (!reload_completed)
475 cfun->machine->is_leaf = leaf_function_p ();
477 if (set)
478 CLEAR_HARD_REG_SET (*set);
479 count = 0;
481 /* No need to save any registers if the function never returns or
482 has the "OS_task" or "OS_main" attribute. */
483 if (TREE_THIS_VOLATILE (current_function_decl)
484 || cfun->machine->is_OS_task
485 || cfun->machine->is_OS_main)
486 return 0;
488 for (reg = 0; reg < 32; reg++)
490 /* Do not push/pop __tmp_reg__, __zero_reg__, or
491 any global register variables. */
492 if (fixed_regs[reg])
493 continue;
495 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
496 || (df_regs_ever_live_p (reg)
497 && (int_or_sig_p || !call_used_regs[reg])
498 && !(frame_pointer_needed
499 && (reg == REG_Y || reg == (REG_Y+1)))))
501 if (set)
502 SET_HARD_REG_BIT (*set, reg);
503 count++;
506 return count;
509 /* Compute offset between arg_pointer and frame_pointer. */
512 initial_elimination_offset (int from, int to)
514 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
515 return 0;
516 else
518 int offset = frame_pointer_needed ? 2 : 0;
519 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
521 offset += avr_regs_to_save (NULL);
522 return get_frame_size () + (avr_pc_size) + 1 + offset;
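/* Illustrative example: with a 4-byte frame, a 2-byte program counter,
   three registers to save and the frame pointer in use, the value computed
   above is 4 + 2 + 1 + (3 + 2) = 12 bytes between the arg pointer and the
   frame pointer.  */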
526 /* Return 1 if the function epilogue is just a single "ret". */
529 avr_simple_epilogue (void)
531 return (! frame_pointer_needed
532 && get_frame_size () == 0
533 && avr_regs_to_save (NULL) == 0
534 && ! interrupt_function_p (current_function_decl)
535 && ! signal_function_p (current_function_decl)
536 && ! avr_naked_function_p (current_function_decl)
537 && ! TREE_THIS_VOLATILE (current_function_decl));
540 /* Check whether the live call-saved registers form one contiguous sequence; return its length, or 0 otherwise. */
542 static int
543 sequent_regs_live (void)
545 int reg;
546 int live_seq=0;
547 int cur_seq=0;
549 for (reg = 0; reg < 18; ++reg)
551 if (!call_used_regs[reg])
553 if (df_regs_ever_live_p (reg))
555 ++live_seq;
556 ++cur_seq;
558 else
559 cur_seq = 0;
563 if (!frame_pointer_needed)
565 if (df_regs_ever_live_p (REG_Y))
567 ++live_seq;
568 ++cur_seq;
570 else
571 cur_seq = 0;
573 if (df_regs_ever_live_p (REG_Y+1))
575 ++live_seq;
576 ++cur_seq;
578 else
579 cur_seq = 0;
581 else
583 cur_seq += 2;
584 live_seq += 2;
586 return (cur_seq == live_seq) ? live_seq : 0;
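/* Illustrative example: with no frame pointer needed, if r14..r17 and the
   Y pair (r28/r29) are the only live call-saved registers, the run is
   contiguous and the result is 6; if instead r15 is dead while r14 and r16
   are live, cur_seq ends up smaller than live_seq and the result is 0.  */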
589 /* Return the combined length of the insns in the sequence INSNS. */
592 get_sequence_length (rtx insns)
594 rtx insn;
595 int length;
597 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
598 length += get_attr_length (insn);
600 return length;
603 /* Output function prologue. */
605 void
606 expand_prologue (void)
608 int live_seq;
609 HARD_REG_SET set;
610 int minimize;
611 HOST_WIDE_INT size = get_frame_size();
612 /* Define templates for push instructions. */
613 rtx pushbyte = gen_rtx_MEM (QImode,
614 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
615 rtx pushword = gen_rtx_MEM (HImode,
616 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
617 rtx insn;
619 last_insn_address = 0;
621 /* Init cfun->machine. */
622 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
623 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
624 cfun->machine->is_signal = signal_function_p (current_function_decl);
625 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
626 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
628 /* Prologue: naked. */
629 if (cfun->machine->is_naked)
631 return;
634 avr_regs_to_save (&set);
635 live_seq = sequent_regs_live ();
636 minimize = (TARGET_CALL_PROLOGUES
637 && !cfun->machine->is_interrupt
638 && !cfun->machine->is_signal
639 && !cfun->machine->is_OS_task
640 && !cfun->machine->is_OS_main
641 && live_seq);
643 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
645 if (cfun->machine->is_interrupt)
647 /* Enable interrupts. */
648 insn = emit_insn (gen_enable_interrupt ());
649 RTX_FRAME_RELATED_P (insn) = 1;
652 /* Push zero reg. */
653 insn = emit_move_insn (pushbyte, zero_reg_rtx);
654 RTX_FRAME_RELATED_P (insn) = 1;
656 /* Push tmp reg. */
657 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
658 RTX_FRAME_RELATED_P (insn) = 1;
660 /* Push SREG. */
661 insn = emit_move_insn (tmp_reg_rtx,
662 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
663 RTX_FRAME_RELATED_P (insn) = 1;
664 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
665 RTX_FRAME_RELATED_P (insn) = 1;
667 /* Push RAMPZ. */
668 if(AVR_HAVE_RAMPZ
669 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
671 insn = emit_move_insn (tmp_reg_rtx,
672 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
673 RTX_FRAME_RELATED_P (insn) = 1;
674 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
675 RTX_FRAME_RELATED_P (insn) = 1;
678 /* Clear zero reg. */
679 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
680 RTX_FRAME_RELATED_P (insn) = 1;
682 /* Prevent any attempt to delete the setting of ZERO_REG! */
683 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
685 if (minimize && (frame_pointer_needed
686 || (AVR_2_BYTE_PC && live_seq > 6)
687 || live_seq > 7))
689 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
690 gen_int_mode (size, HImode));
691 RTX_FRAME_RELATED_P (insn) = 1;
693 insn =
694 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
695 gen_int_mode (size + live_seq, HImode)));
696 RTX_FRAME_RELATED_P (insn) = 1;
698 else
700 int reg;
701 for (reg = 0; reg < 32; ++reg)
703 if (TEST_HARD_REG_BIT (set, reg))
705 /* Emit push of register to save. */
706 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
707 RTX_FRAME_RELATED_P (insn) = 1;
710 if (frame_pointer_needed)
712 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
714 /* Push frame pointer. */
715 insn = emit_move_insn (pushword, frame_pointer_rtx);
716 RTX_FRAME_RELATED_P (insn) = 1;
719 if (!size)
721 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
722 RTX_FRAME_RELATED_P (insn) = 1;
724 else
726 /* Creating a frame can be done by direct manipulation of the
727 stack or via the frame pointer. These two methods are:
728 fp=sp
729 fp-=size
730 sp=fp
731 OR
732 sp-=size
733 fp=sp
734 the optimum method depends on function type, stack and frame size.
735 To avoid complex logic, both methods are tested and the shortest
736 is selected. */
737 rtx myfp;
738 rtx fp_plus_insns;
739 rtx sp_plus_insns = NULL_RTX;
741 if (TARGET_TINY_STACK)
743 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
744 over 'sbiw' (2 cycles, same size). */
745 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
747 else
749 /* Normal sized addition. */
750 myfp = frame_pointer_rtx;
753 /* Method 1-Adjust frame pointer. */
754 start_sequence ();
756 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
757 RTX_FRAME_RELATED_P (insn) = 1;
759 insn =
760 emit_move_insn (myfp,
761 gen_rtx_PLUS (GET_MODE(myfp), myfp,
762 gen_int_mode (-size,
763 GET_MODE(myfp))));
764 RTX_FRAME_RELATED_P (insn) = 1;
766 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
767 RTX_FRAME_RELATED_P (insn) = 1;
769 fp_plus_insns = get_insns ();
770 end_sequence ();
772 /* Method 2-Adjust Stack pointer. */
773 if (size <= 6)
775 start_sequence ();
777 insn =
778 emit_move_insn (stack_pointer_rtx,
779 gen_rtx_PLUS (HImode,
780 stack_pointer_rtx,
781 gen_int_mode (-size,
782 HImode)));
783 RTX_FRAME_RELATED_P (insn) = 1;
785 insn =
786 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
787 RTX_FRAME_RELATED_P (insn) = 1;
789 sp_plus_insns = get_insns ();
790 end_sequence ();
793 /* Use shortest method. */
794 if (size <= 6 && (get_sequence_length (sp_plus_insns)
795 < get_sequence_length (fp_plus_insns)))
796 emit_insn (sp_plus_insns);
797 else
798 emit_insn (fp_plus_insns);
804 /* Output summary at end of function prologue. */
806 static void
807 avr_asm_function_end_prologue (FILE *file)
809 if (cfun->machine->is_naked)
811 fputs ("/* prologue: naked */\n", file);
813 else
815 if (cfun->machine->is_interrupt)
817 fputs ("/* prologue: Interrupt */\n", file);
819 else if (cfun->machine->is_signal)
821 fputs ("/* prologue: Signal */\n", file);
823 else
824 fputs ("/* prologue: function */\n", file);
826 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
827 get_frame_size());
831 /* Implement EPILOGUE_USES. */
834 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
836 if (reload_completed
837 && cfun->machine
838 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
839 return 1;
840 return 0;
843 /* Output RTL epilogue. */
845 void
846 expand_epilogue (void)
848 int reg;
849 int live_seq;
850 HARD_REG_SET set;
851 int minimize;
852 HOST_WIDE_INT size = get_frame_size();
854 /* epilogue: naked */
855 if (cfun->machine->is_naked)
857 emit_jump_insn (gen_return ());
858 return;
861 avr_regs_to_save (&set);
862 live_seq = sequent_regs_live ();
863 minimize = (TARGET_CALL_PROLOGUES
864 && !cfun->machine->is_interrupt
865 && !cfun->machine->is_signal
866 && !cfun->machine->is_OS_task
867 && !cfun->machine->is_OS_main
868 && live_seq);
870 if (minimize && (frame_pointer_needed || live_seq > 4))
872 if (frame_pointer_needed)
874 /* Get rid of frame. */
875 emit_move_insn(frame_pointer_rtx,
876 gen_rtx_PLUS (HImode, frame_pointer_rtx,
877 gen_int_mode (size, HImode)));
879 else
881 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
884 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
886 else
888 if (frame_pointer_needed)
890 if (size)
892 /* Try two methods to adjust stack and select shortest. */
893 rtx myfp;
894 rtx fp_plus_insns;
895 rtx sp_plus_insns = NULL_RTX;
897 if (TARGET_TINY_STACK)
899 /* The high byte (r29) doesn't change - prefer 'subi'
900 (1 cycle) over 'sbiw' (2 cycles, same size). */
901 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
903 else
905 /* Normal sized addition. */
906 myfp = frame_pointer_rtx;
909 /* Method 1-Adjust frame pointer. */
910 start_sequence ();
912 emit_move_insn (myfp,
913 gen_rtx_PLUS (HImode, myfp,
914 gen_int_mode (size,
915 GET_MODE(myfp))));
917 /* Copy to stack pointer. */
918 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
920 fp_plus_insns = get_insns ();
921 end_sequence ();
923 /* Method 2-Adjust Stack pointer. */
924 if (size <= 5)
926 start_sequence ();
928 emit_move_insn (stack_pointer_rtx,
929 gen_rtx_PLUS (HImode, stack_pointer_rtx,
930 gen_int_mode (size,
931 HImode)));
933 sp_plus_insns = get_insns ();
934 end_sequence ();
937 /* Use shortest method. */
938 if (size <= 5 && (get_sequence_length (sp_plus_insns)
939 < get_sequence_length (fp_plus_insns)))
940 emit_insn (sp_plus_insns);
941 else
942 emit_insn (fp_plus_insns);
944 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
946 /* Restore previous frame_pointer. */
947 emit_insn (gen_pophi (frame_pointer_rtx));
950 /* Restore used registers. */
951 for (reg = 31; reg >= 0; --reg)
953 if (TEST_HARD_REG_BIT (set, reg))
954 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
956 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
958 /* Restore RAMPZ using tmp reg as scratch. */
959 if(AVR_HAVE_RAMPZ
960 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
962 emit_insn (gen_popqi (tmp_reg_rtx));
963 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
964 tmp_reg_rtx);
967 /* Restore SREG using tmp reg as scratch. */
968 emit_insn (gen_popqi (tmp_reg_rtx));
970 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
971 tmp_reg_rtx);
973 /* Restore tmp REG. */
974 emit_insn (gen_popqi (tmp_reg_rtx));
976 /* Restore zero REG. */
977 emit_insn (gen_popqi (zero_reg_rtx));
980 emit_jump_insn (gen_return ());
984 /* Output summary messages at beginning of function epilogue. */
986 static void
987 avr_asm_function_begin_epilogue (FILE *file)
989 fprintf (file, "/* epilogue start */\n");
992 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
993 machine for a memory operand of mode MODE. */
996 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
998 enum reg_class r = NO_REGS;
1000 if (TARGET_ALL_DEBUG)
1002 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1003 GET_MODE_NAME(mode),
1004 strict ? "(strict)": "",
1005 reload_completed ? "(reload_completed)": "",
1006 reload_in_progress ? "(reload_in_progress)": "",
1007 reg_renumber ? "(reg_renumber)" : "");
1008 if (GET_CODE (x) == PLUS
1009 && REG_P (XEXP (x, 0))
1010 && GET_CODE (XEXP (x, 1)) == CONST_INT
1011 && INTVAL (XEXP (x, 1)) >= 0
1012 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1013 && reg_renumber
1015 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1016 true_regnum (XEXP (x, 0)));
1017 debug_rtx (x);
1019 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1020 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1021 r = POINTER_REGS;
1022 else if (CONSTANT_ADDRESS_P (x))
1023 r = ALL_REGS;
1024 else if (GET_CODE (x) == PLUS
1025 && REG_P (XEXP (x, 0))
1026 && GET_CODE (XEXP (x, 1)) == CONST_INT
1027 && INTVAL (XEXP (x, 1)) >= 0)
1029 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1030 if (fit)
1032 if (! strict
1033 || REGNO (XEXP (x,0)) == REG_Y
1034 || REGNO (XEXP (x,0)) == REG_Z)
1035 r = BASE_POINTER_REGS;
1036 if (XEXP (x,0) == frame_pointer_rtx
1037 || XEXP (x,0) == arg_pointer_rtx)
1038 r = BASE_POINTER_REGS;
1040 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1041 r = POINTER_Y_REGS;
1043 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1044 && REG_P (XEXP (x, 0))
1045 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1046 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1048 r = POINTER_REGS;
1050 if (TARGET_ALL_DEBUG)
1052 fprintf (stderr, " ret = %c\n", r + '0');
1054 return r == NO_REGS ? 0 : (int)r;
1057 /* Attempt to replace X with a valid
1058 memory address for an operand of mode MODE. */
1061 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1063 x = oldx;
1064 if (TARGET_ALL_DEBUG)
1066 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1067 debug_rtx (oldx);
1070 if (GET_CODE (oldx) == PLUS
1071 && REG_P (XEXP (oldx,0)))
1073 if (REG_P (XEXP (oldx,1)))
1074 x = force_reg (GET_MODE (oldx), oldx);
1075 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1077 int offs = INTVAL (XEXP (oldx,1));
1078 if (frame_pointer_rtx != XEXP (oldx,0))
1079 if (offs > MAX_LD_OFFSET (mode))
1081 if (TARGET_ALL_DEBUG)
1082 fprintf (stderr, "force_reg (big offset)\n");
1083 x = force_reg (GET_MODE (oldx), oldx);
1087 return x;
1091 /* Return a pointer register name as a string. */
1093 static const char *
1094 ptrreg_to_str (int regno)
1096 switch (regno)
1098 case REG_X: return "X";
1099 case REG_Y: return "Y";
1100 case REG_Z: return "Z";
1101 default:
1102 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1104 return NULL;
1107 /* Return the condition name as a string.
1108 Used when constructing conditional jumps. */
1110 static const char *
1111 cond_string (enum rtx_code code)
1113 switch (code)
1115 case NE:
1116 return "ne";
1117 case EQ:
1118 return "eq";
1119 case GE:
1120 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1121 return "pl";
1122 else
1123 return "ge";
1124 case LT:
1125 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1126 return "mi";
1127 else
1128 return "lt";
1129 case GEU:
1130 return "sh";
1131 case LTU:
1132 return "lo";
1133 default:
1134 gcc_unreachable ();
1138 /* Output ADDR to FILE as address. */
1140 void
1141 print_operand_address (FILE *file, rtx addr)
1143 switch (GET_CODE (addr))
1145 case REG:
1146 fprintf (file, ptrreg_to_str (REGNO (addr)));
1147 break;
1149 case PRE_DEC:
1150 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1151 break;
1153 case POST_INC:
1154 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1155 break;
1157 default:
1158 if (CONSTANT_ADDRESS_P (addr)
1159 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1160 || GET_CODE (addr) == LABEL_REF))
1162 fprintf (file, "gs(");
1163 output_addr_const (file,addr);
1164 fprintf (file ,")");
1166 else
1167 output_addr_const (file, addr);
1172 /* Output X as assembler operand to file FILE. */
1174 void
1175 print_operand (FILE *file, rtx x, int code)
1177 int abcd = 0;
1179 if (code >= 'A' && code <= 'D')
1180 abcd = code - 'A';
1182 if (code == '~')
1184 if (!AVR_HAVE_JMP_CALL)
1185 fputc ('r', file);
1187 else if (code == '!')
1189 if (AVR_HAVE_EIJMP_EICALL)
1190 fputc ('e', file);
1192 else if (REG_P (x))
1194 if (x == zero_reg_rtx)
1195 fprintf (file, "__zero_reg__");
1196 else
1197 fprintf (file, reg_names[true_regnum (x) + abcd]);
1199 else if (GET_CODE (x) == CONST_INT)
1200 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1201 else if (GET_CODE (x) == MEM)
1203 rtx addr = XEXP (x,0);
1205 if (CONSTANT_P (addr) && abcd)
1207 fputc ('(', file);
1208 output_address (addr);
1209 fprintf (file, ")+%d", abcd);
1211 else if (code == 'o')
1213 if (GET_CODE (addr) != PLUS)
1214 fatal_insn ("bad address, not (reg+disp):", addr);
1216 print_operand (file, XEXP (addr, 1), 0);
1218 else if (code == 'p' || code == 'r')
1220 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1221 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1223 if (code == 'p')
1224 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1225 else
1226 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1228 else if (GET_CODE (addr) == PLUS)
1230 print_operand_address (file, XEXP (addr,0));
1231 if (REGNO (XEXP (addr, 0)) == REG_X)
1232 fatal_insn ("internal compiler error. Bad address:"
1233 ,addr);
1234 fputc ('+', file);
1235 print_operand (file, XEXP (addr,1), code);
1237 else
1238 print_operand_address (file, addr);
1240 else if (GET_CODE (x) == CONST_DOUBLE)
1242 long val;
1243 REAL_VALUE_TYPE rv;
1244 if (GET_MODE (x) != SFmode)
1245 fatal_insn ("internal compiler error. Unknown mode:", x);
1246 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1247 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1248 fprintf (file, "0x%lx", val);
1250 else if (code == 'j')
1251 fputs (cond_string (GET_CODE (x)), file);
1252 else if (code == 'k')
1253 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1254 else
1255 print_operand_address (file, x);
1258 /* Update the condition code in the INSN. */
1260 void
1261 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1263 rtx set;
1265 switch (get_attr_cc (insn))
1267 case CC_NONE:
1268 /* Insn does not affect CC at all. */
1269 break;
1271 case CC_SET_N:
1272 CC_STATUS_INIT;
1273 break;
1275 case CC_SET_ZN:
1276 set = single_set (insn);
1277 CC_STATUS_INIT;
1278 if (set)
1280 cc_status.flags |= CC_NO_OVERFLOW;
1281 cc_status.value1 = SET_DEST (set);
1283 break;
1285 case CC_SET_CZN:
1286 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1287 The V flag may or may not be known but that's ok because
1288 alter_cond will change tests to use EQ/NE. */
1289 set = single_set (insn);
1290 CC_STATUS_INIT;
1291 if (set)
1293 cc_status.value1 = SET_DEST (set);
1294 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1296 break;
1298 case CC_COMPARE:
1299 set = single_set (insn);
1300 CC_STATUS_INIT;
1301 if (set)
1302 cc_status.value1 = SET_SRC (set);
1303 break;
1305 case CC_CLOBBER:
1306 /* Insn doesn't leave CC in a usable state. */
1307 CC_STATUS_INIT;
1309 /* Correct CC for the ashrqi3 insn whose shift count is a CONST_INT other than 6. */
1310 set = single_set (insn);
1311 if (set)
1313 rtx src = SET_SRC (set);
1315 if (GET_CODE (src) == ASHIFTRT
1316 && GET_MODE (src) == QImode)
1318 rtx x = XEXP (src, 1);
1320 if (GET_CODE (x) == CONST_INT
1321 && INTVAL (x) > 0
1322 && INTVAL (x) != 6)
1324 cc_status.value1 = SET_DEST (set);
1325 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1329 break;
1333 /* Return maximum number of consecutive registers of
1334 class CLASS needed to hold a value of mode MODE. */
1337 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1339 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1342 /* Choose mode for jump insn:
1343 1 - relative jump in range -63 <= x <= 62 ;
1344 2 - relative jump in range -2046 <= x <= 2045 ;
1345 3 - absolute jump (only for ATmega[16]03). */
1348 avr_jump_mode (rtx x, rtx insn)
1350 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1351 ? XEXP (x, 0) : x));
1352 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1353 int jump_distance = cur_addr - dest_addr;
1355 if (-63 <= jump_distance && jump_distance <= 62)
1356 return 1;
1357 else if (-2046 <= jump_distance && jump_distance <= 2045)
1358 return 2;
1359 else if (AVR_HAVE_JMP_CALL)
1360 return 3;
1362 return 2;
1365 /* Return an AVR conditional branch sequence.
1366 X is a comparison RTX.
1367 LEN is a number returned by the avr_jump_mode function.
1368 If REVERSE is nonzero, the condition code in X must be reversed. */
1370 const char *
1371 ret_cond_branch (rtx x, int len, int reverse)
1373 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1375 switch (cond)
1377 case GT:
1378 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1379 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1380 AS1 (brpl,%0)) :
1381 len == 2 ? (AS1 (breq,.+4) CR_TAB
1382 AS1 (brmi,.+2) CR_TAB
1383 AS1 (rjmp,%0)) :
1384 (AS1 (breq,.+6) CR_TAB
1385 AS1 (brmi,.+4) CR_TAB
1386 AS1 (jmp,%0)));
1388 else
1389 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1390 AS1 (brge,%0)) :
1391 len == 2 ? (AS1 (breq,.+4) CR_TAB
1392 AS1 (brlt,.+2) CR_TAB
1393 AS1 (rjmp,%0)) :
1394 (AS1 (breq,.+6) CR_TAB
1395 AS1 (brlt,.+4) CR_TAB
1396 AS1 (jmp,%0)));
1397 case GTU:
1398 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1399 AS1 (brsh,%0)) :
1400 len == 2 ? (AS1 (breq,.+4) CR_TAB
1401 AS1 (brlo,.+2) CR_TAB
1402 AS1 (rjmp,%0)) :
1403 (AS1 (breq,.+6) CR_TAB
1404 AS1 (brlo,.+4) CR_TAB
1405 AS1 (jmp,%0)));
1406 case LE:
1407 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1408 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1409 AS1 (brmi,%0)) :
1410 len == 2 ? (AS1 (breq,.+2) CR_TAB
1411 AS1 (brpl,.+2) CR_TAB
1412 AS1 (rjmp,%0)) :
1413 (AS1 (breq,.+2) CR_TAB
1414 AS1 (brpl,.+4) CR_TAB
1415 AS1 (jmp,%0)));
1416 else
1417 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1418 AS1 (brlt,%0)) :
1419 len == 2 ? (AS1 (breq,.+2) CR_TAB
1420 AS1 (brge,.+2) CR_TAB
1421 AS1 (rjmp,%0)) :
1422 (AS1 (breq,.+2) CR_TAB
1423 AS1 (brge,.+4) CR_TAB
1424 AS1 (jmp,%0)));
1425 case LEU:
1426 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1427 AS1 (brlo,%0)) :
1428 len == 2 ? (AS1 (breq,.+2) CR_TAB
1429 AS1 (brsh,.+2) CR_TAB
1430 AS1 (rjmp,%0)) :
1431 (AS1 (breq,.+2) CR_TAB
1432 AS1 (brsh,.+4) CR_TAB
1433 AS1 (jmp,%0)));
1434 default:
1435 if (reverse)
1437 switch (len)
1439 case 1:
1440 return AS1 (br%k1,%0);
1441 case 2:
1442 return (AS1 (br%j1,.+2) CR_TAB
1443 AS1 (rjmp,%0));
1444 default:
1445 return (AS1 (br%j1,.+4) CR_TAB
1446 AS1 (jmp,%0));
1449 else
1451 switch (len)
1453 case 1:
1454 return AS1 (br%j1,%0);
1455 case 2:
1456 return (AS1 (br%k1,.+2) CR_TAB
1457 AS1 (rjmp,%0));
1458 default:
1459 return (AS1 (br%k1,.+4) CR_TAB
1460 AS1 (jmp,%0));
1464 return "";
1467 /* Predicate function for an immediate operand that fits into a byte (8 bits). */
1470 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1472 return (GET_CODE (op) == CONST_INT
1473 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1476 /* Output all insn addresses and their sizes into the assembly language
1477 output file. This is helpful for debugging whether the length attributes
1478 in the md file are correct.
1479 Output insn cost for next insn. */
1481 void
1482 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1483 int num_operands ATTRIBUTE_UNUSED)
1485 int uid = INSN_UID (insn);
1487 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1489 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1490 INSN_ADDRESSES (uid),
1491 INSN_ADDRESSES (uid) - last_insn_address,
1492 rtx_cost (PATTERN (insn), INSN));
1494 last_insn_address = INSN_ADDRESSES (uid);
1497 /* Return 0 if undefined, 1 if always true or always false. */
1500 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1502 unsigned int max = (mode == QImode ? 0xff :
1503 mode == HImode ? 0xffff :
1504 mode == SImode ? 0xffffffff : 0);
1505 if (max && operator && GET_CODE (x) == CONST_INT)
1507 if (unsigned_condition (operator) != operator)
1508 max >>= 1;
1510 if (max != (INTVAL (x) & max)
1511 && INTVAL (x) != 0xff)
1512 return 1;
1514 return 0;
1518 /* Returns nonzero if REGNO is the number of a hard
1519 register in which function arguments are sometimes passed. */
1522 function_arg_regno_p(int r)
1524 return (r >= 8 && r <= 25);
1527 /* Initialize the variable CUM to the state at the beginning
1528 of the argument list. */
1530 void
1531 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1532 tree fndecl ATTRIBUTE_UNUSED)
1534 cum->nregs = 18;
1535 cum->regno = FIRST_CUM_REG;
1536 if (!libname && fntype)
1538 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1539 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1540 != void_type_node));
1541 if (stdarg)
1542 cum->nregs = 0;
1546 /* Returns the number of registers to allocate for a function argument. */
1548 static int
1549 avr_num_arg_regs (enum machine_mode mode, tree type)
1551 int size;
1553 if (mode == BLKmode)
1554 size = int_size_in_bytes (type);
1555 else
1556 size = GET_MODE_SIZE (mode);
1558 /* Align all function arguments to start in even-numbered registers.
1559 Odd-sized arguments leave holes above them. */
1561 return (size + 1) & ~1;
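/* Illustrative example: (size + 1) & ~1 rounds the size up to an even
   number of bytes, so a 3-byte argument occupies 4 registers; the unused
   register is the "hole" mentioned above.  */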
1564 /* Controls whether a function argument is passed
1565 in a register, and which register. */
1568 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1569 int named ATTRIBUTE_UNUSED)
1571 int bytes = avr_num_arg_regs (mode, type);
1573 if (cum->nregs && bytes <= cum->nregs)
1574 return gen_rtx_REG (mode, cum->regno - bytes);
1576 return NULL_RTX;
1579 /* Update the summarizer variable CUM to advance past an argument
1580 in the argument list. */
1582 void
1583 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1584 int named ATTRIBUTE_UNUSED)
1586 int bytes = avr_num_arg_regs (mode, type);
1588 cum->nregs -= bytes;
1589 cum->regno -= bytes;
1591 if (cum->nregs <= 0)
1593 cum->nregs = 0;
1594 cum->regno = FIRST_CUM_REG;
1598 /***********************************************************************
1599 Functions for outputting various mov's for various modes
1600 ************************************************************************/
1601 const char *
1602 output_movqi (rtx insn, rtx operands[], int *l)
1604 int dummy;
1605 rtx dest = operands[0];
1606 rtx src = operands[1];
1607 int *real_l = l;
1609 if (!l)
1610 l = &dummy;
1612 *l = 1;
1614 if (register_operand (dest, QImode))
1616 if (register_operand (src, QImode)) /* mov r,r */
1618 if (test_hard_reg_class (STACK_REG, dest))
1619 return AS2 (out,%0,%1);
1620 else if (test_hard_reg_class (STACK_REG, src))
1621 return AS2 (in,%0,%1);
1623 return AS2 (mov,%0,%1);
1625 else if (CONSTANT_P (src))
1627 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1628 return AS2 (ldi,%0,lo8(%1));
1630 if (GET_CODE (src) == CONST_INT)
1632 if (src == const0_rtx) /* mov r,L */
1633 return AS1 (clr,%0);
1634 else if (src == const1_rtx)
1636 *l = 2;
1637 return (AS1 (clr,%0) CR_TAB
1638 AS1 (inc,%0));
1640 else if (src == constm1_rtx)
1642 /* Immediate constant -1 can be loaded into any register. */
1643 *l = 2;
1644 return (AS1 (clr,%0) CR_TAB
1645 AS1 (dec,%0));
1647 else
1649 int bit_nr = exact_log2 (INTVAL (src));
1651 if (bit_nr >= 0)
1653 *l = 3;
1654 if (!real_l)
1655 output_asm_insn ((AS1 (clr,%0) CR_TAB
1656 "set"), operands);
1657 if (!real_l)
1658 avr_output_bld (operands, bit_nr);
1660 return "";
1665 /* Last resort, larger than loading from memory. */
1666 *l = 4;
1667 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1668 AS2 (ldi,r31,lo8(%1)) CR_TAB
1669 AS2 (mov,%0,r31) CR_TAB
1670 AS2 (mov,r31,__tmp_reg__));
1672 else if (GET_CODE (src) == MEM)
1673 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1675 else if (GET_CODE (dest) == MEM)
1677 const char *template;
1679 if (src == const0_rtx)
1680 operands[1] = zero_reg_rtx;
1682 template = out_movqi_mr_r (insn, operands, real_l);
1684 if (!real_l)
1685 output_asm_insn (template, operands);
1687 operands[1] = src;
1689 return "";
1693 const char *
1694 output_movhi (rtx insn, rtx operands[], int *l)
1696 int dummy;
1697 rtx dest = operands[0];
1698 rtx src = operands[1];
1699 int *real_l = l;
1701 if (!l)
1702 l = &dummy;
1704 if (register_operand (dest, HImode))
1706 if (register_operand (src, HImode)) /* mov r,r */
1708 if (test_hard_reg_class (STACK_REG, dest))
1710 if (TARGET_TINY_STACK)
1712 *l = 1;
1713 return AS2 (out,__SP_L__,%A1);
1715 /* Use a simple load of the stack pointer if no interrupts are used,
1716 or inside a main or signal function prologue where they are disabled. */
1717 else if (TARGET_NO_INTERRUPTS
1718 || (reload_completed
1719 && cfun->machine->is_signal
1720 && prologue_epilogue_contains (insn)))
1722 *l = 2;
1723 return (AS2 (out,__SP_H__,%B1) CR_TAB
1724 AS2 (out,__SP_L__,%A1));
1726 /* In an interrupt prologue we know interrupts are enabled. */
1727 else if (reload_completed
1728 && cfun->machine->is_interrupt
1729 && prologue_epilogue_contains (insn))
1731 *l = 4;
1732 return ("cli" CR_TAB
1733 AS2 (out,__SP_H__,%B1) CR_TAB
1734 "sei" CR_TAB
1735 AS2 (out,__SP_L__,%A1));
1737 *l = 5;
1738 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1739 "cli" CR_TAB
1740 AS2 (out,__SP_H__,%B1) CR_TAB
1741 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1742 AS2 (out,__SP_L__,%A1));
1744 else if (test_hard_reg_class (STACK_REG, src))
1746 *l = 2;
1747 return (AS2 (in,%A0,__SP_L__) CR_TAB
1748 AS2 (in,%B0,__SP_H__));
1751 if (AVR_HAVE_MOVW)
1753 *l = 1;
1754 return (AS2 (movw,%0,%1));
1756 else
1758 *l = 2;
1759 return (AS2 (mov,%A0,%A1) CR_TAB
1760 AS2 (mov,%B0,%B1));
1763 else if (CONSTANT_P (src))
1765 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1767 *l = 2;
1768 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1769 AS2 (ldi,%B0,hi8(%1)));
1772 if (GET_CODE (src) == CONST_INT)
1774 if (src == const0_rtx) /* mov r,L */
1776 *l = 2;
1777 return (AS1 (clr,%A0) CR_TAB
1778 AS1 (clr,%B0));
1780 else if (src == const1_rtx)
1782 *l = 3;
1783 return (AS1 (clr,%A0) CR_TAB
1784 AS1 (clr,%B0) CR_TAB
1785 AS1 (inc,%A0));
1787 else if (src == constm1_rtx)
1789 /* Immediate constant -1 can be loaded into any register. */
1790 *l = 3;
1791 return (AS1 (clr,%0) CR_TAB
1792 AS1 (dec,%A0) CR_TAB
1793 AS2 (mov,%B0,%A0));
1795 else
1797 int bit_nr = exact_log2 (INTVAL (src));
1799 if (bit_nr >= 0)
1801 *l = 4;
1802 if (!real_l)
1803 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1804 AS1 (clr,%B0) CR_TAB
1805 "set"), operands);
1806 if (!real_l)
1807 avr_output_bld (operands, bit_nr);
1809 return "";
1813 if ((INTVAL (src) & 0xff) == 0)
1815 *l = 5;
1816 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1817 AS1 (clr,%A0) CR_TAB
1818 AS2 (ldi,r31,hi8(%1)) CR_TAB
1819 AS2 (mov,%B0,r31) CR_TAB
1820 AS2 (mov,r31,__tmp_reg__));
1822 else if ((INTVAL (src) & 0xff00) == 0)
1824 *l = 5;
1825 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1826 AS2 (ldi,r31,lo8(%1)) CR_TAB
1827 AS2 (mov,%A0,r31) CR_TAB
1828 AS1 (clr,%B0) CR_TAB
1829 AS2 (mov,r31,__tmp_reg__));
1833 /* Last resort, equal to loading from memory. */
1834 *l = 6;
1835 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1836 AS2 (ldi,r31,lo8(%1)) CR_TAB
1837 AS2 (mov,%A0,r31) CR_TAB
1838 AS2 (ldi,r31,hi8(%1)) CR_TAB
1839 AS2 (mov,%B0,r31) CR_TAB
1840 AS2 (mov,r31,__tmp_reg__));
1842 else if (GET_CODE (src) == MEM)
1843 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1845 else if (GET_CODE (dest) == MEM)
1847 const char *template;
1849 if (src == const0_rtx)
1850 operands[1] = zero_reg_rtx;
1852 template = out_movhi_mr_r (insn, operands, real_l);
1854 if (!real_l)
1855 output_asm_insn (template, operands);
1857 operands[1] = src;
1858 return "";
1860 fatal_insn ("invalid insn:", insn);
1861 return "";
1864 const char *
1865 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1867 rtx dest = op[0];
1868 rtx src = op[1];
1869 rtx x = XEXP (src, 0);
1870 int dummy;
1872 if (!l)
1873 l = &dummy;
1875 if (CONSTANT_ADDRESS_P (x))
1877 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1879 *l = 1;
1880 return AS2 (in,%0,__SREG__);
1882 if (optimize > 0 && io_address_operand (x, QImode))
1884 *l = 1;
1885 return AS2 (in,%0,%1-0x20);
1887 *l = 2;
1888 return AS2 (lds,%0,%1);
1890 /* Memory access via reg+disp. */
1891 else if (GET_CODE (x) == PLUS
1892 && REG_P (XEXP (x,0))
1893 && GET_CODE (XEXP (x,1)) == CONST_INT)
1895 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1897 int disp = INTVAL (XEXP (x,1));
1898 if (REGNO (XEXP (x,0)) != REG_Y)
1899 fatal_insn ("incorrect insn:",insn);
1901 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1902 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1903 AS2 (ldd,%0,Y+63) CR_TAB
1904 AS2 (sbiw,r28,%o1-63));
1906 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1907 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1908 AS2 (ld,%0,Y) CR_TAB
1909 AS2 (subi,r28,lo8(%o1)) CR_TAB
1910 AS2 (sbci,r29,hi8(%o1)));
1912 else if (REGNO (XEXP (x,0)) == REG_X)
1914 /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS must exclude
1915 it, but this situation can arise with extreme optimization options. */
1916 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1917 || reg_unused_after (insn, XEXP (x,0)))
1918 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1919 AS2 (ld,%0,X));
1921 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1922 AS2 (ld,%0,X) CR_TAB
1923 AS2 (sbiw,r26,%o1));
1925 *l = 1;
1926 return AS2 (ldd,%0,%1);
1928 *l = 1;
1929 return AS2 (ld,%0,%1);
1932 const char *
1933 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1935 rtx dest = op[0];
1936 rtx src = op[1];
1937 rtx base = XEXP (src, 0);
1938 int reg_dest = true_regnum (dest);
1939 int reg_base = true_regnum (base);
1940 /* "volatile" forces reading low byte first, even if less efficient,
1941 for correct operation with 16-bit I/O registers. */
1942 int mem_volatile_p = MEM_VOLATILE_P (src);
1943 int tmp;
1945 if (!l)
1946 l = &tmp;
1948 if (reg_base > 0)
1950 if (reg_dest == reg_base) /* R = (R) */
1952 *l = 3;
1953 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1954 AS2 (ld,%B0,%1) CR_TAB
1955 AS2 (mov,%A0,__tmp_reg__));
1957 else if (reg_base == REG_X) /* (R26) */
1959 if (reg_unused_after (insn, base))
1961 *l = 2;
1962 return (AS2 (ld,%A0,X+) CR_TAB
1963 AS2 (ld,%B0,X));
1965 *l = 3;
1966 return (AS2 (ld,%A0,X+) CR_TAB
1967 AS2 (ld,%B0,X) CR_TAB
1968 AS2 (sbiw,r26,1));
1970 else /* (R) */
1972 *l = 2;
1973 return (AS2 (ld,%A0,%1) CR_TAB
1974 AS2 (ldd,%B0,%1+1));
1977 else if (GET_CODE (base) == PLUS) /* (R + i) */
1979 int disp = INTVAL (XEXP (base, 1));
1980 int reg_base = true_regnum (XEXP (base, 0));
1982 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1984 if (REGNO (XEXP (base, 0)) != REG_Y)
1985 fatal_insn ("incorrect insn:",insn);
1987 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1988 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1989 AS2 (ldd,%A0,Y+62) CR_TAB
1990 AS2 (ldd,%B0,Y+63) CR_TAB
1991 AS2 (sbiw,r28,%o1-62));
1993 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1994 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1995 AS2 (ld,%A0,Y) CR_TAB
1996 AS2 (ldd,%B0,Y+1) CR_TAB
1997 AS2 (subi,r28,lo8(%o1)) CR_TAB
1998 AS2 (sbci,r29,hi8(%o1)));
2000 if (reg_base == REG_X)
2002 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2003 it, but this situation can arise with extreme
2004 optimization options. */
2006 *l = 4;
2007 if (reg_base == reg_dest)
2008 return (AS2 (adiw,r26,%o1) CR_TAB
2009 AS2 (ld,__tmp_reg__,X+) CR_TAB
2010 AS2 (ld,%B0,X) CR_TAB
2011 AS2 (mov,%A0,__tmp_reg__));
2013 return (AS2 (adiw,r26,%o1) CR_TAB
2014 AS2 (ld,%A0,X+) CR_TAB
2015 AS2 (ld,%B0,X) CR_TAB
2016 AS2 (sbiw,r26,%o1+1));
2019 if (reg_base == reg_dest)
2021 *l = 3;
2022 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2023 AS2 (ldd,%B0,%B1) CR_TAB
2024 AS2 (mov,%A0,__tmp_reg__));
2027 *l = 2;
2028 return (AS2 (ldd,%A0,%A1) CR_TAB
2029 AS2 (ldd,%B0,%B1));
2031 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2033 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2034 fatal_insn ("incorrect insn:", insn);
2036 if (mem_volatile_p)
2038 if (REGNO (XEXP (base, 0)) == REG_X)
2040 *l = 4;
2041 return (AS2 (sbiw,r26,2) CR_TAB
2042 AS2 (ld,%A0,X+) CR_TAB
2043 AS2 (ld,%B0,X) CR_TAB
2044 AS2 (sbiw,r26,1));
2046 else
2048 *l = 3;
2049 return (AS2 (sbiw,%r1,2) CR_TAB
2050 AS2 (ld,%A0,%p1) CR_TAB
2051 AS2 (ldd,%B0,%p1+1));
2055 *l = 2;
2056 return (AS2 (ld,%B0,%1) CR_TAB
2057 AS2 (ld,%A0,%1));
2059 else if (GET_CODE (base) == POST_INC) /* (R++) */
2061 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2062 fatal_insn ("incorrect insn:", insn);
2064 *l = 2;
2065 return (AS2 (ld,%A0,%1) CR_TAB
2066 AS2 (ld,%B0,%1));
2068 else if (CONSTANT_ADDRESS_P (base))
2070 if (optimize > 0 && io_address_operand (base, HImode))
2072 *l = 2;
2073 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2074 AS2 (in,%B0,%B1-0x20));
2076 *l = 4;
2077 return (AS2 (lds,%A0,%A1) CR_TAB
2078 AS2 (lds,%B0,%B1));
2081 fatal_insn ("unknown move insn:",insn);
2082 return "";
2085 const char *
2086 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2088 rtx dest = op[0];
2089 rtx src = op[1];
2090 rtx base = XEXP (src, 0);
2091 int reg_dest = true_regnum (dest);
2092 int reg_base = true_regnum (base);
2093 int tmp;
2095 if (!l)
2096 l = &tmp;
2098 if (reg_base > 0)
2100 if (reg_base == REG_X) /* (R26) */
2102 if (reg_dest == REG_X)
2103 /* "ld r26,-X" is undefined */
2104 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2105 AS2 (ld,r29,X) CR_TAB
2106 AS2 (ld,r28,-X) CR_TAB
2107 AS2 (ld,__tmp_reg__,-X) CR_TAB
2108 AS2 (sbiw,r26,1) CR_TAB
2109 AS2 (ld,r26,X) CR_TAB
2110 AS2 (mov,r27,__tmp_reg__));
2111 else if (reg_dest == REG_X - 2)
2112 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2113 AS2 (ld,%B0,X+) CR_TAB
2114 AS2 (ld,__tmp_reg__,X+) CR_TAB
2115 AS2 (ld,%D0,X) CR_TAB
2116 AS2 (mov,%C0,__tmp_reg__));
2117 else if (reg_unused_after (insn, base))
2118 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2119 AS2 (ld,%B0,X+) CR_TAB
2120 AS2 (ld,%C0,X+) CR_TAB
2121 AS2 (ld,%D0,X));
2122 else
2123 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2124 AS2 (ld,%B0,X+) CR_TAB
2125 AS2 (ld,%C0,X+) CR_TAB
2126 AS2 (ld,%D0,X) CR_TAB
2127 AS2 (sbiw,r26,3));
2129 else
2131 if (reg_dest == reg_base)
2132 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2133 AS2 (ldd,%C0,%1+2) CR_TAB
2134 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2135 AS2 (ld,%A0,%1) CR_TAB
2136 AS2 (mov,%B0,__tmp_reg__));
2137 else if (reg_base == reg_dest + 2)
2138 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2139 AS2 (ldd,%B0,%1+1) CR_TAB
2140 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2141 AS2 (ldd,%D0,%1+3) CR_TAB
2142 AS2 (mov,%C0,__tmp_reg__));
2143 else
2144 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2145 AS2 (ldd,%B0,%1+1) CR_TAB
2146 AS2 (ldd,%C0,%1+2) CR_TAB
2147 AS2 (ldd,%D0,%1+3));
2150 else if (GET_CODE (base) == PLUS) /* (R + i) */
2152 int disp = INTVAL (XEXP (base, 1));
2154 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2156 if (REGNO (XEXP (base, 0)) != REG_Y)
2157 fatal_insn ("incorrect insn:",insn);
2159 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2160 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2161 AS2 (ldd,%A0,Y+60) CR_TAB
2162 AS2 (ldd,%B0,Y+61) CR_TAB
2163 AS2 (ldd,%C0,Y+62) CR_TAB
2164 AS2 (ldd,%D0,Y+63) CR_TAB
2165 AS2 (sbiw,r28,%o1-60));
2167 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2168 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2169 AS2 (ld,%A0,Y) CR_TAB
2170 AS2 (ldd,%B0,Y+1) CR_TAB
2171 AS2 (ldd,%C0,Y+2) CR_TAB
2172 AS2 (ldd,%D0,Y+3) CR_TAB
2173 AS2 (subi,r28,lo8(%o1)) CR_TAB
2174 AS2 (sbci,r29,hi8(%o1)));
2177 reg_base = true_regnum (XEXP (base, 0));
2178 if (reg_base == REG_X)
2180 /* R = (X + d) */
2181 if (reg_dest == REG_X)
2183 *l = 7;
2184 /* "ld r26,-X" is undefined */
2185 return (AS2 (adiw,r26,%o1+3) CR_TAB
2186 AS2 (ld,r29,X) CR_TAB
2187 AS2 (ld,r28,-X) CR_TAB
2188 AS2 (ld,__tmp_reg__,-X) CR_TAB
2189 AS2 (sbiw,r26,1) CR_TAB
2190 AS2 (ld,r26,X) CR_TAB
2191 AS2 (mov,r27,__tmp_reg__));
2193 *l = 6;
2194 if (reg_dest == REG_X - 2)
2195 return (AS2 (adiw,r26,%o1) CR_TAB
2196 AS2 (ld,r24,X+) CR_TAB
2197 AS2 (ld,r25,X+) CR_TAB
2198 AS2 (ld,__tmp_reg__,X+) CR_TAB
2199 AS2 (ld,r27,X) CR_TAB
2200 AS2 (mov,r26,__tmp_reg__));
2202 return (AS2 (adiw,r26,%o1) CR_TAB
2203 AS2 (ld,%A0,X+) CR_TAB
2204 AS2 (ld,%B0,X+) CR_TAB
2205 AS2 (ld,%C0,X+) CR_TAB
2206 AS2 (ld,%D0,X) CR_TAB
2207 AS2 (sbiw,r26,%o1+3));
2209 if (reg_dest == reg_base)
2210 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2211 AS2 (ldd,%C0,%C1) CR_TAB
2212 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2213 AS2 (ldd,%A0,%A1) CR_TAB
2214 AS2 (mov,%B0,__tmp_reg__));
2215 else if (reg_dest == reg_base - 2)
2216 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2217 AS2 (ldd,%B0,%B1) CR_TAB
2218 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2219 AS2 (ldd,%D0,%D1) CR_TAB
2220 AS2 (mov,%C0,__tmp_reg__));
2221 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2222 AS2 (ldd,%B0,%B1) CR_TAB
2223 AS2 (ldd,%C0,%C1) CR_TAB
2224 AS2 (ldd,%D0,%D1));
2226 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2227 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2228 AS2 (ld,%C0,%1) CR_TAB
2229 AS2 (ld,%B0,%1) CR_TAB
2230 AS2 (ld,%A0,%1));
2231 else if (GET_CODE (base) == POST_INC) /* (R++) */
2232 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2233 AS2 (ld,%B0,%1) CR_TAB
2234 AS2 (ld,%C0,%1) CR_TAB
2235 AS2 (ld,%D0,%1));
2236 else if (CONSTANT_ADDRESS_P (base))
2237 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2238 AS2 (lds,%B0,%B1) CR_TAB
2239 AS2 (lds,%C0,%C1) CR_TAB
2240 AS2 (lds,%D0,%D1));
2242 fatal_insn ("unknown move insn:",insn);
2243 return "";
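/* Output asm to store the four-byte register OP[1] to memory operand
   OP[0].  The instruction count is stored in *L when L is nonzero.  */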
2246 const char *
2247 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2249 rtx dest = op[0];
2250 rtx src = op[1];
2251 rtx base = XEXP (dest, 0);
2252 int reg_base = true_regnum (base);
2253 int reg_src = true_regnum (src);
2254 int tmp;
2256 if (!l)
2257 l = &tmp;
2259 if (CONSTANT_ADDRESS_P (base))
2260 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2261 AS2 (sts,%B0,%B1) CR_TAB
2262 AS2 (sts,%C0,%C1) CR_TAB
2263 AS2 (sts,%D0,%D1));
2264 if (reg_base > 0) /* (r) */
2266 if (reg_base == REG_X) /* (R26) */
2268 if (reg_src == REG_X)
2270 /* "st X+,r26" is undefined */
2271 if (reg_unused_after (insn, base))
2272 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2273 AS2 (st,X,r26) CR_TAB
2274 AS2 (adiw,r26,1) CR_TAB
2275 AS2 (st,X+,__tmp_reg__) CR_TAB
2276 AS2 (st,X+,r28) CR_TAB
2277 AS2 (st,X,r29));
2278 else
2279 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2280 AS2 (st,X,r26) CR_TAB
2281 AS2 (adiw,r26,1) CR_TAB
2282 AS2 (st,X+,__tmp_reg__) CR_TAB
2283 AS2 (st,X+,r28) CR_TAB
2284 AS2 (st,X,r29) CR_TAB
2285 AS2 (sbiw,r26,3));
2287 else if (reg_base == reg_src + 2)
2289 if (reg_unused_after (insn, base))
2290 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2291 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2292 AS2 (st,%0+,%A1) CR_TAB
2293 AS2 (st,%0+,%B1) CR_TAB
2294 AS2 (st,%0+,__zero_reg__) CR_TAB
2295 AS2 (st,%0,__tmp_reg__) CR_TAB
2296 AS1 (clr,__zero_reg__));
2297 else
2298 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2299 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2300 AS2 (st,%0+,%A1) CR_TAB
2301 AS2 (st,%0+,%B1) CR_TAB
2302 AS2 (st,%0+,__zero_reg__) CR_TAB
2303 AS2 (st,%0,__tmp_reg__) CR_TAB
2304 AS1 (clr,__zero_reg__) CR_TAB
2305 AS2 (sbiw,r26,3));
2307 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2308 AS2 (st,%0+,%B1) CR_TAB
2309 AS2 (st,%0+,%C1) CR_TAB
2310 AS2 (st,%0,%D1) CR_TAB
2311 AS2 (sbiw,r26,3));
2313 else
2314 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2315 AS2 (std,%0+1,%B1) CR_TAB
2316 AS2 (std,%0+2,%C1) CR_TAB
2317 AS2 (std,%0+3,%D1));
2319 else if (GET_CODE (base) == PLUS) /* (R + i) */
2321 int disp = INTVAL (XEXP (base, 1));
2322 reg_base = REGNO (XEXP (base, 0));
2323 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2325 if (reg_base != REG_Y)
2326 fatal_insn ("incorrect insn:",insn);
2328 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2329 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2330 AS2 (std,Y+60,%A1) CR_TAB
2331 AS2 (std,Y+61,%B1) CR_TAB
2332 AS2 (std,Y+62,%C1) CR_TAB
2333 AS2 (std,Y+63,%D1) CR_TAB
2334 AS2 (sbiw,r28,%o0-60));
2336 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2337 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2338 AS2 (st,Y,%A1) CR_TAB
2339 AS2 (std,Y+1,%B1) CR_TAB
2340 AS2 (std,Y+2,%C1) CR_TAB
2341 AS2 (std,Y+3,%D1) CR_TAB
2342 AS2 (subi,r28,lo8(%o0)) CR_TAB
2343 AS2 (sbci,r29,hi8(%o0)));
2345 if (reg_base == REG_X)
2347 /* (X + d) = R */
2348 if (reg_src == REG_X)
2350 *l = 9;
2351 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2352 AS2 (mov,__zero_reg__,r27) CR_TAB
2353 AS2 (adiw,r26,%o0) CR_TAB
2354 AS2 (st,X+,__tmp_reg__) CR_TAB
2355 AS2 (st,X+,__zero_reg__) CR_TAB
2356 AS2 (st,X+,r28) CR_TAB
2357 AS2 (st,X,r29) CR_TAB
2358 AS1 (clr,__zero_reg__) CR_TAB
2359 AS2 (sbiw,r26,%o0+3));
2361 else if (reg_src == REG_X - 2)
2363 *l = 9;
2364 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2365 AS2 (mov,__zero_reg__,r27) CR_TAB
2366 AS2 (adiw,r26,%o0) CR_TAB
2367 AS2 (st,X+,r24) CR_TAB
2368 AS2 (st,X+,r25) CR_TAB
2369 AS2 (st,X+,__tmp_reg__) CR_TAB
2370 AS2 (st,X,__zero_reg__) CR_TAB
2371 AS1 (clr,__zero_reg__) CR_TAB
2372 AS2 (sbiw,r26,%o0+3));
2374 *l = 6;
2375 return (AS2 (adiw,r26,%o0) CR_TAB
2376 AS2 (st,X+,%A1) CR_TAB
2377 AS2 (st,X+,%B1) CR_TAB
2378 AS2 (st,X+,%C1) CR_TAB
2379 AS2 (st,X,%D1) CR_TAB
2380 AS2 (sbiw,r26,%o0+3));
2382 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2383 AS2 (std,%B0,%B1) CR_TAB
2384 AS2 (std,%C0,%C1) CR_TAB
2385 AS2 (std,%D0,%D1));
2387 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2388 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2389 AS2 (st,%0,%C1) CR_TAB
2390 AS2 (st,%0,%B1) CR_TAB
2391 AS2 (st,%0,%A1));
2392 else if (GET_CODE (base) == POST_INC) /* (R++) */
2393 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2394 AS2 (st,%0,%B1) CR_TAB
2395 AS2 (st,%0,%C1) CR_TAB
2396 AS2 (st,%0,%D1));
2397 fatal_insn ("unknown move insn:",insn);
2398 return "";
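/* Output a four-byte (SImode/SFmode) move between OPERANDS[0] and
   OPERANDS[1]: register-register copies, constant loads, and memory
   accesses (the latter via the two helpers above).  */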
2401 const char *
2402 output_movsisf(rtx insn, rtx operands[], int *l)
2404 int dummy;
2405 rtx dest = operands[0];
2406 rtx src = operands[1];
2407 int *real_l = l;
2409 if (!l)
2410 l = &dummy;
2412 if (register_operand (dest, VOIDmode))
2414 if (register_operand (src, VOIDmode)) /* mov r,r */
2416 if (true_regnum (dest) > true_regnum (src))
2418 if (AVR_HAVE_MOVW)
2420 *l = 2;
2421 return (AS2 (movw,%C0,%C1) CR_TAB
2422 AS2 (movw,%A0,%A1));
2424 *l = 4;
2425 return (AS2 (mov,%D0,%D1) CR_TAB
2426 AS2 (mov,%C0,%C1) CR_TAB
2427 AS2 (mov,%B0,%B1) CR_TAB
2428 AS2 (mov,%A0,%A1));
2430 else
2432 if (AVR_HAVE_MOVW)
2434 *l = 2;
2435 return (AS2 (movw,%A0,%A1) CR_TAB
2436 AS2 (movw,%C0,%C1));
2438 *l = 4;
2439 return (AS2 (mov,%A0,%A1) CR_TAB
2440 AS2 (mov,%B0,%B1) CR_TAB
2441 AS2 (mov,%C0,%C1) CR_TAB
2442 AS2 (mov,%D0,%D1));
2445 else if (CONSTANT_P (src))
2447 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2449 *l = 4;
2450 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2451 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2452 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2453 AS2 (ldi,%D0,hhi8(%1)));
2456 if (GET_CODE (src) == CONST_INT)
2458 const char *const clr_op0 =
2459 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2460 AS1 (clr,%B0) CR_TAB
2461 AS2 (movw,%C0,%A0))
2462 : (AS1 (clr,%A0) CR_TAB
2463 AS1 (clr,%B0) CR_TAB
2464 AS1 (clr,%C0) CR_TAB
2465 AS1 (clr,%D0));
2467 if (src == const0_rtx) /* mov r,L */
2469 *l = AVR_HAVE_MOVW ? 3 : 4;
2470 return clr_op0;
2472 else if (src == const1_rtx)
2474 if (!real_l)
2475 output_asm_insn (clr_op0, operands);
2476 *l = AVR_HAVE_MOVW ? 4 : 5;
2477 return AS1 (inc,%A0);
2479 else if (src == constm1_rtx)
2481 /* Load the immediate constant -1 into any register. */
2482 if (AVR_HAVE_MOVW)
2484 *l = 4;
2485 return (AS1 (clr,%A0) CR_TAB
2486 AS1 (dec,%A0) CR_TAB
2487 AS2 (mov,%B0,%A0) CR_TAB
2488 AS2 (movw,%C0,%A0));
2490 *l = 5;
2491 return (AS1 (clr,%A0) CR_TAB
2492 AS1 (dec,%A0) CR_TAB
2493 AS2 (mov,%B0,%A0) CR_TAB
2494 AS2 (mov,%C0,%A0) CR_TAB
2495 AS2 (mov,%D0,%A0));
2497 else
2499 int bit_nr = exact_log2 (INTVAL (src));
2501 if (bit_nr >= 0)
2503 *l = AVR_HAVE_MOVW ? 5 : 6;
2504 if (!real_l)
2506 output_asm_insn (clr_op0, operands);
2507 output_asm_insn ("set", operands);
2509 if (!real_l)
2510 avr_output_bld (operands, bit_nr);
2512 return "";
2517 /* Last resort, better than loading from memory. */
2518 *l = 10;
2519 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2520 AS2 (ldi,r31,lo8(%1)) CR_TAB
2521 AS2 (mov,%A0,r31) CR_TAB
2522 AS2 (ldi,r31,hi8(%1)) CR_TAB
2523 AS2 (mov,%B0,r31) CR_TAB
2524 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2525 AS2 (mov,%C0,r31) CR_TAB
2526 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2527 AS2 (mov,%D0,r31) CR_TAB
2528 AS2 (mov,r31,__tmp_reg__));
2530 else if (GET_CODE (src) == MEM)
2531 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2533 else if (GET_CODE (dest) == MEM)
2535 const char *template;
2537 if (src == const0_rtx)
2538 operands[1] = zero_reg_rtx;
2540 template = out_movsi_mr_r (insn, operands, real_l);
2542 if (!real_l)
2543 output_asm_insn (template, operands);
2545 operands[1] = src;
2546 return "";
2548 fatal_insn ("invalid insn:", insn);
2549 return "";
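/* Output asm to store the one-byte register OP[1] to memory operand
   OP[0], using OUT for I/O addresses and STS/ST/STD otherwise.  */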
2552 const char *
2553 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2555 rtx dest = op[0];
2556 rtx src = op[1];
2557 rtx x = XEXP (dest, 0);
2558 int dummy;
2560 if (!l)
2561 l = &dummy;
2563 if (CONSTANT_ADDRESS_P (x))
2565 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2567 *l = 1;
2568 return AS2 (out,__SREG__,%1);
2570 if (optimize > 0 && io_address_operand (x, QImode))
2572 *l = 1;
2573 return AS2 (out,%0-0x20,%1);
2575 *l = 2;
2576 return AS2 (sts,%0,%1);
2578 /* memory access by reg+disp */
2579 else if (GET_CODE (x) == PLUS
2580 && REG_P (XEXP (x,0))
2581 && GET_CODE (XEXP (x,1)) == CONST_INT)
2583 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2585 int disp = INTVAL (XEXP (x,1));
2586 if (REGNO (XEXP (x,0)) != REG_Y)
2587 fatal_insn ("incorrect insn:",insn);
2589 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2590 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2591 AS2 (std,Y+63,%1) CR_TAB
2592 AS2 (sbiw,r28,%o0-63));
2594 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2595 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2596 AS2 (st,Y,%1) CR_TAB
2597 AS2 (subi,r28,lo8(%o0)) CR_TAB
2598 AS2 (sbci,r29,hi8(%o0)));
2600 else if (REGNO (XEXP (x,0)) == REG_X)
2602 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2604 if (reg_unused_after (insn, XEXP (x,0)))
2605 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2606 AS2 (adiw,r26,%o0) CR_TAB
2607 AS2 (st,X,__tmp_reg__));
2609 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2610 AS2 (adiw,r26,%o0) CR_TAB
2611 AS2 (st,X,__tmp_reg__) CR_TAB
2612 AS2 (sbiw,r26,%o0));
2614 else
2616 if (reg_unused_after (insn, XEXP (x,0)))
2617 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2618 AS2 (st,X,%1));
2620 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2621 AS2 (st,X,%1) CR_TAB
2622 AS2 (sbiw,r26,%o0));
2625 *l = 1;
2626 return AS2 (std,%0,%1);
2628 *l = 1;
2629 return AS2 (st,%0,%1);
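/* Output asm to store the two-byte register OP[1] to memory operand
   OP[0].  For volatile memory the high byte is written first (see the
   comment on mem_volatile_p below).  */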
2632 const char *
2633 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2635 rtx dest = op[0];
2636 rtx src = op[1];
2637 rtx base = XEXP (dest, 0);
2638 int reg_base = true_regnum (base);
2639 int reg_src = true_regnum (src);
2640 /* "volatile" forces writing high byte first, even if less efficient,
2641 for correct operation with 16-bit I/O registers. */
2642 int mem_volatile_p = MEM_VOLATILE_P (dest);
2643 int tmp;
2645 if (!l)
2646 l = &tmp;
2647 if (CONSTANT_ADDRESS_P (base))
2649 if (optimize > 0 && io_address_operand (base, HImode))
2651 *l = 2;
2652 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2653 AS2 (out,%A0-0x20,%A1));
2655 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2656 AS2 (sts,%A0,%A1));
2658 if (reg_base > 0)
2660 if (reg_base == REG_X)
2662 if (reg_src == REG_X)
2664 /* "st X+,r26" and "st -X,r26" are undefined. */
2665 if (!mem_volatile_p && reg_unused_after (insn, src))
2666 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2667 AS2 (st,X,r26) CR_TAB
2668 AS2 (adiw,r26,1) CR_TAB
2669 AS2 (st,X,__tmp_reg__));
2670 else
2671 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2672 AS2 (adiw,r26,1) CR_TAB
2673 AS2 (st,X,__tmp_reg__) CR_TAB
2674 AS2 (sbiw,r26,1) CR_TAB
2675 AS2 (st,X,r26));
2677 else
2679 if (!mem_volatile_p && reg_unused_after (insn, base))
2680 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2681 AS2 (st,X,%B1));
2682 else
2683 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2684 AS2 (st,X,%B1) CR_TAB
2685 AS2 (st,-X,%A1));
2688 else
2689 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2690 AS2 (st,%0,%A1));
2692 else if (GET_CODE (base) == PLUS)
2694 int disp = INTVAL (XEXP (base, 1));
2695 reg_base = REGNO (XEXP (base, 0));
2696 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2698 if (reg_base != REG_Y)
2699 fatal_insn ("incorrect insn:",insn);
2701 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2702 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2703 AS2 (std,Y+63,%B1) CR_TAB
2704 AS2 (std,Y+62,%A1) CR_TAB
2705 AS2 (sbiw,r28,%o0-62));
2707 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2708 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2709 AS2 (std,Y+1,%B1) CR_TAB
2710 AS2 (st,Y,%A1) CR_TAB
2711 AS2 (subi,r28,lo8(%o0)) CR_TAB
2712 AS2 (sbci,r29,hi8(%o0)));
2714 if (reg_base == REG_X)
2716 /* (X + d) = R */
2717 if (reg_src == REG_X)
2719 *l = 7;
2720 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2721 AS2 (mov,__zero_reg__,r27) CR_TAB
2722 AS2 (adiw,r26,%o0+1) CR_TAB
2723 AS2 (st,X,__zero_reg__) CR_TAB
2724 AS2 (st,-X,__tmp_reg__) CR_TAB
2725 AS1 (clr,__zero_reg__) CR_TAB
2726 AS2 (sbiw,r26,%o0));
2728 *l = 4;
2729 return (AS2 (adiw,r26,%o0+1) CR_TAB
2730 AS2 (st,X,%B1) CR_TAB
2731 AS2 (st,-X,%A1) CR_TAB
2732 AS2 (sbiw,r26,%o0));
2734 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2735 AS2 (std,%A0,%A1));
2737 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2738 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2739 AS2 (st,%0,%A1));
2740 else if (GET_CODE (base) == POST_INC) /* (R++) */
2742 if (mem_volatile_p)
2744 if (REGNO (XEXP (base, 0)) == REG_X)
2746 *l = 4;
2747 return (AS2 (adiw,r26,1) CR_TAB
2748 AS2 (st,X,%B1) CR_TAB
2749 AS2 (st,-X,%A1) CR_TAB
2750 AS2 (adiw,r26,2));
2752 else
2754 *l = 3;
2755 return (AS2 (std,%p0+1,%B1) CR_TAB
2756 AS2 (st,%p0,%A1) CR_TAB
2757 AS2 (adiw,%r0,2));
2761 *l = 2;
2762 return (AS2 (st,%0,%A1) CR_TAB
2763 AS2 (st,%0,%B1));
2765 fatal_insn ("unknown move insn:",insn);
2766 return "";
2769 /* Return 1 if a frame pointer is required for the current function. */
2772 frame_pointer_required_p (void)
2774 return (cfun->calls_alloca
2775 || crtl->args.info.nregs == 0
2776 || get_frame_size () > 0);
2779 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2781 static RTX_CODE
2782 compare_condition (rtx insn)
2784 rtx next = next_real_insn (insn);
2785 RTX_CODE cond = UNKNOWN;
2786 if (next && GET_CODE (next) == JUMP_INSN)
2788 rtx pat = PATTERN (next);
2789 rtx src = SET_SRC (pat);
2790 rtx t = XEXP (src, 0);
2791 cond = GET_CODE (t);
2793 return cond;
2796 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2798 static int
2799 compare_sign_p (rtx insn)
2801 RTX_CODE cond = compare_condition (insn);
2802 return (cond == GE || cond == LT);
2805 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2806 that needs to be swapped (GT, GTU, LE, LEU). */
2809 compare_diff_p (rtx insn)
2811 RTX_CODE cond = compare_condition (insn);
2812 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2815 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2818 compare_eq_p (rtx insn)
2820 RTX_CODE cond = compare_condition (insn);
2821 return (cond == EQ || cond == NE);
2825 /* Output test instruction for HImode. */
2827 const char *
2828 out_tsthi (rtx insn, int *l)
2830 if (compare_sign_p (insn))
2832 if (l) *l = 1;
2833 return AS1 (tst,%B0);
2835 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2836 && compare_eq_p (insn))
2838 /* Faster than sbiw if we can clobber the operand. */
2839 if (l) *l = 1;
2840 return AS2 (or,%A0,%B0);
2842 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2844 if (l) *l = 1;
2845 return AS2 (sbiw,%0,0);
2847 if (l) *l = 2;
2848 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2849 AS2 (cpc,%B0,__zero_reg__));
2853 /* Output test instruction for SImode. */
2855 const char *
2856 out_tstsi (rtx insn, int *l)
2858 if (compare_sign_p (insn))
2860 if (l) *l = 1;
2861 return AS1 (tst,%D0);
2863 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2865 if (l) *l = 3;
2866 return (AS2 (sbiw,%A0,0) CR_TAB
2867 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2868 AS2 (cpc,%D0,__zero_reg__));
2870 if (l) *l = 4;
2871 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2872 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2873 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2874 AS2 (cpc,%D0,__zero_reg__));
2878 /* Generate asm equivalent for various shifts.
2879 Shift count is a CONST_INT, MEM or REG.
2880 This only handles cases that are not already
2881 carefully hand-optimized in ?sh??i3_out. */
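/* Illustrative sketch (a reading of the code below, not an extra code
   path): for a run-time shift count the emitted sequence has the shape

           rjmp 2f
       1:  <template>       ; one single-bit shift step
       2:  dec  %3          ; %3 holds the count
           brpl 1b

   so <template> executes exactly "count" times; small constant counts
   are instead expanded inline without a loop.  */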
2883 void
2884 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2885 int *len, int t_len)
2887 rtx op[10];
2888 char str[500];
2889 int second_label = 1;
2890 int saved_in_tmp = 0;
2891 int use_zero_reg = 0;
2893 op[0] = operands[0];
2894 op[1] = operands[1];
2895 op[2] = operands[2];
2896 op[3] = operands[3];
2897 str[0] = 0;
2899 if (len)
2900 *len = 1;
2902 if (GET_CODE (operands[2]) == CONST_INT)
2904 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2905 int count = INTVAL (operands[2]);
2906 int max_len = 10; /* If larger than this, always use a loop. */
2908 if (count <= 0)
2910 if (len)
2911 *len = 0;
2912 return;
2915 if (count < 8 && !scratch)
2916 use_zero_reg = 1;
2918 if (optimize_size)
2919 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2921 if (t_len * count <= max_len)
2923 /* Output shifts inline with no loop - faster. */
2924 if (len)
2925 *len = t_len * count;
2926 else
2928 while (count-- > 0)
2929 output_asm_insn (template, op);
2932 return;
2935 if (scratch)
2937 if (!len)
2938 strcat (str, AS2 (ldi,%3,%2));
2940 else if (use_zero_reg)
2942 /* Hack to save one word: use __zero_reg__ as loop counter.
2943 Set one bit, then shift in a loop until it is 0 again. */
2945 op[3] = zero_reg_rtx;
2946 if (len)
2947 *len = 2;
2948 else
2949 strcat (str, ("set" CR_TAB
2950 AS2 (bld,%3,%2-1)));
2952 else
2954 /* No scratch register available, use one from LD_REGS (saved in
2955 __tmp_reg__) that doesn't overlap with registers to shift. */
2957 op[3] = gen_rtx_REG (QImode,
2958 ((true_regnum (operands[0]) - 1) & 15) + 16);
2959 op[4] = tmp_reg_rtx;
2960 saved_in_tmp = 1;
2962 if (len)
2963 *len = 3; /* Includes "mov %3,%4" after the loop. */
2964 else
2965 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2966 AS2 (ldi,%3,%2)));
2969 second_label = 0;
2971 else if (GET_CODE (operands[2]) == MEM)
2973 rtx op_mov[10];
2975 op[3] = op_mov[0] = tmp_reg_rtx;
2976 op_mov[1] = op[2];
2978 if (len)
2979 out_movqi_r_mr (insn, op_mov, len);
2980 else
2981 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2983 else if (register_operand (operands[2], QImode))
2985 if (reg_unused_after (insn, operands[2]))
2986 op[3] = op[2];
2987 else
2989 op[3] = tmp_reg_rtx;
2990 if (!len)
2991 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2994 else
2995 fatal_insn ("bad shift insn:", insn);
2997 if (second_label)
2999 if (len)
3000 ++*len;
3001 else
3002 strcat (str, AS1 (rjmp,2f));
3005 if (len)
3006 *len += t_len + 2; /* template + dec + brXX */
3007 else
3009 strcat (str, "\n1:\t");
3010 strcat (str, template);
3011 strcat (str, second_label ? "\n2:\t" : "\n\t");
3012 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3013 strcat (str, CR_TAB);
3014 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3015 if (saved_in_tmp)
3016 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3017 output_asm_insn (str, op);
3022 /* 8bit shift left ((char)x << i) */
3024 const char *
3025 ashlqi3_out (rtx insn, rtx operands[], int *len)
3027 if (GET_CODE (operands[2]) == CONST_INT)
3029 int k;
3031 if (!len)
3032 len = &k;
3034 switch (INTVAL (operands[2]))
3036 default:
3037 if (INTVAL (operands[2]) < 8)
3038 break;
3040 *len = 1;
3041 return AS1 (clr,%0);
3043 case 1:
3044 *len = 1;
3045 return AS1 (lsl,%0);
3047 case 2:
3048 *len = 2;
3049 return (AS1 (lsl,%0) CR_TAB
3050 AS1 (lsl,%0));
3052 case 3:
3053 *len = 3;
3054 return (AS1 (lsl,%0) CR_TAB
3055 AS1 (lsl,%0) CR_TAB
3056 AS1 (lsl,%0));
3058 case 4:
3059 if (test_hard_reg_class (LD_REGS, operands[0]))
3061 *len = 2;
3062 return (AS1 (swap,%0) CR_TAB
3063 AS2 (andi,%0,0xf0));
3065 *len = 4;
3066 return (AS1 (lsl,%0) CR_TAB
3067 AS1 (lsl,%0) CR_TAB
3068 AS1 (lsl,%0) CR_TAB
3069 AS1 (lsl,%0));
3071 case 5:
3072 if (test_hard_reg_class (LD_REGS, operands[0]))
3074 *len = 3;
3075 return (AS1 (swap,%0) CR_TAB
3076 AS1 (lsl,%0) CR_TAB
3077 AS2 (andi,%0,0xe0));
3079 *len = 5;
3080 return (AS1 (lsl,%0) CR_TAB
3081 AS1 (lsl,%0) CR_TAB
3082 AS1 (lsl,%0) CR_TAB
3083 AS1 (lsl,%0) CR_TAB
3084 AS1 (lsl,%0));
3086 case 6:
3087 if (test_hard_reg_class (LD_REGS, operands[0]))
3089 *len = 4;
3090 return (AS1 (swap,%0) CR_TAB
3091 AS1 (lsl,%0) CR_TAB
3092 AS1 (lsl,%0) CR_TAB
3093 AS2 (andi,%0,0xc0));
3095 *len = 6;
3096 return (AS1 (lsl,%0) CR_TAB
3097 AS1 (lsl,%0) CR_TAB
3098 AS1 (lsl,%0) CR_TAB
3099 AS1 (lsl,%0) CR_TAB
3100 AS1 (lsl,%0) CR_TAB
3101 AS1 (lsl,%0));
3103 case 7:
3104 *len = 3;
3105 return (AS1 (ror,%0) CR_TAB
3106 AS1 (clr,%0) CR_TAB
3107 AS1 (ror,%0));
3110 else if (CONSTANT_P (operands[2]))
3111 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3113 out_shift_with_cnt (AS1 (lsl,%0),
3114 insn, operands, len, 1);
3115 return "";
3119 /* 16bit shift left ((short)x << i) */
3121 const char *
3122 ashlhi3_out (rtx insn, rtx operands[], int *len)
3124 if (GET_CODE (operands[2]) == CONST_INT)
3126 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3127 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3128 int k;
3129 int *t = len;
3131 if (!len)
3132 len = &k;
3134 switch (INTVAL (operands[2]))
3136 default:
3137 if (INTVAL (operands[2]) < 16)
3138 break;
3140 *len = 2;
3141 return (AS1 (clr,%B0) CR_TAB
3142 AS1 (clr,%A0));
3144 case 4:
3145 if (optimize_size && scratch)
3146 break; /* 5 */
3147 if (ldi_ok)
3149 *len = 6;
3150 return (AS1 (swap,%A0) CR_TAB
3151 AS1 (swap,%B0) CR_TAB
3152 AS2 (andi,%B0,0xf0) CR_TAB
3153 AS2 (eor,%B0,%A0) CR_TAB
3154 AS2 (andi,%A0,0xf0) CR_TAB
3155 AS2 (eor,%B0,%A0));
3157 if (scratch)
3159 *len = 7;
3160 return (AS1 (swap,%A0) CR_TAB
3161 AS1 (swap,%B0) CR_TAB
3162 AS2 (ldi,%3,0xf0) CR_TAB
3163 AS2 (and,%B0,%3) CR_TAB
3164 AS2 (eor,%B0,%A0) CR_TAB
3165 AS2 (and,%A0,%3) CR_TAB
3166 AS2 (eor,%B0,%A0));
3168 break; /* optimize_size ? 6 : 8 */
3170 case 5:
3171 if (optimize_size)
3172 break; /* scratch ? 5 : 6 */
3173 if (ldi_ok)
3175 *len = 8;
3176 return (AS1 (lsl,%A0) CR_TAB
3177 AS1 (rol,%B0) CR_TAB
3178 AS1 (swap,%A0) CR_TAB
3179 AS1 (swap,%B0) CR_TAB
3180 AS2 (andi,%B0,0xf0) CR_TAB
3181 AS2 (eor,%B0,%A0) CR_TAB
3182 AS2 (andi,%A0,0xf0) CR_TAB
3183 AS2 (eor,%B0,%A0));
3185 if (scratch)
3187 *len = 9;
3188 return (AS1 (lsl,%A0) CR_TAB
3189 AS1 (rol,%B0) CR_TAB
3190 AS1 (swap,%A0) CR_TAB
3191 AS1 (swap,%B0) CR_TAB
3192 AS2 (ldi,%3,0xf0) CR_TAB
3193 AS2 (and,%B0,%3) CR_TAB
3194 AS2 (eor,%B0,%A0) CR_TAB
3195 AS2 (and,%A0,%3) CR_TAB
3196 AS2 (eor,%B0,%A0));
3198 break; /* 10 */
3200 case 6:
3201 if (optimize_size)
3202 break; /* scratch ? 5 : 6 */
3203 *len = 9;
3204 return (AS1 (clr,__tmp_reg__) CR_TAB
3205 AS1 (lsr,%B0) CR_TAB
3206 AS1 (ror,%A0) CR_TAB
3207 AS1 (ror,__tmp_reg__) CR_TAB
3208 AS1 (lsr,%B0) CR_TAB
3209 AS1 (ror,%A0) CR_TAB
3210 AS1 (ror,__tmp_reg__) CR_TAB
3211 AS2 (mov,%B0,%A0) CR_TAB
3212 AS2 (mov,%A0,__tmp_reg__));
3214 case 7:
3215 *len = 5;
3216 return (AS1 (lsr,%B0) CR_TAB
3217 AS2 (mov,%B0,%A0) CR_TAB
3218 AS1 (clr,%A0) CR_TAB
3219 AS1 (ror,%B0) CR_TAB
3220 AS1 (ror,%A0));
3222 case 8:
3223 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3224 AS1 (clr,%A0));
3226 case 9:
3227 *len = 3;
3228 return (AS2 (mov,%B0,%A0) CR_TAB
3229 AS1 (clr,%A0) CR_TAB
3230 AS1 (lsl,%B0));
3232 case 10:
3233 *len = 4;
3234 return (AS2 (mov,%B0,%A0) CR_TAB
3235 AS1 (clr,%A0) CR_TAB
3236 AS1 (lsl,%B0) CR_TAB
3237 AS1 (lsl,%B0));
3239 case 11:
3240 *len = 5;
3241 return (AS2 (mov,%B0,%A0) CR_TAB
3242 AS1 (clr,%A0) CR_TAB
3243 AS1 (lsl,%B0) CR_TAB
3244 AS1 (lsl,%B0) CR_TAB
3245 AS1 (lsl,%B0));
3247 case 12:
3248 if (ldi_ok)
3250 *len = 4;
3251 return (AS2 (mov,%B0,%A0) CR_TAB
3252 AS1 (clr,%A0) CR_TAB
3253 AS1 (swap,%B0) CR_TAB
3254 AS2 (andi,%B0,0xf0));
3256 if (scratch)
3258 *len = 5;
3259 return (AS2 (mov,%B0,%A0) CR_TAB
3260 AS1 (clr,%A0) CR_TAB
3261 AS1 (swap,%B0) CR_TAB
3262 AS2 (ldi,%3,0xf0) CR_TAB
3263 AS2 (and,%B0,%3));
3265 *len = 6;
3266 return (AS2 (mov,%B0,%A0) CR_TAB
3267 AS1 (clr,%A0) CR_TAB
3268 AS1 (lsl,%B0) CR_TAB
3269 AS1 (lsl,%B0) CR_TAB
3270 AS1 (lsl,%B0) CR_TAB
3271 AS1 (lsl,%B0));
3273 case 13:
3274 if (ldi_ok)
3276 *len = 5;
3277 return (AS2 (mov,%B0,%A0) CR_TAB
3278 AS1 (clr,%A0) CR_TAB
3279 AS1 (swap,%B0) CR_TAB
3280 AS1 (lsl,%B0) CR_TAB
3281 AS2 (andi,%B0,0xe0));
3283 if (AVR_HAVE_MUL && scratch)
3285 *len = 5;
3286 return (AS2 (ldi,%3,0x20) CR_TAB
3287 AS2 (mul,%A0,%3) CR_TAB
3288 AS2 (mov,%B0,r0) CR_TAB
3289 AS1 (clr,%A0) CR_TAB
3290 AS1 (clr,__zero_reg__));
3292 if (optimize_size && scratch)
3293 break; /* 5 */
3294 if (scratch)
3296 *len = 6;
3297 return (AS2 (mov,%B0,%A0) CR_TAB
3298 AS1 (clr,%A0) CR_TAB
3299 AS1 (swap,%B0) CR_TAB
3300 AS1 (lsl,%B0) CR_TAB
3301 AS2 (ldi,%3,0xe0) CR_TAB
3302 AS2 (and,%B0,%3));
3304 if (AVR_HAVE_MUL)
3306 *len = 6;
3307 return ("set" CR_TAB
3308 AS2 (bld,r1,5) CR_TAB
3309 AS2 (mul,%A0,r1) CR_TAB
3310 AS2 (mov,%B0,r0) CR_TAB
3311 AS1 (clr,%A0) CR_TAB
3312 AS1 (clr,__zero_reg__));
3314 *len = 7;
3315 return (AS2 (mov,%B0,%A0) CR_TAB
3316 AS1 (clr,%A0) CR_TAB
3317 AS1 (lsl,%B0) CR_TAB
3318 AS1 (lsl,%B0) CR_TAB
3319 AS1 (lsl,%B0) CR_TAB
3320 AS1 (lsl,%B0) CR_TAB
3321 AS1 (lsl,%B0));
3323 case 14:
3324 if (AVR_HAVE_MUL && ldi_ok)
3326 *len = 5;
3327 return (AS2 (ldi,%B0,0x40) CR_TAB
3328 AS2 (mul,%A0,%B0) CR_TAB
3329 AS2 (mov,%B0,r0) CR_TAB
3330 AS1 (clr,%A0) CR_TAB
3331 AS1 (clr,__zero_reg__));
3333 if (AVR_HAVE_MUL && scratch)
3335 *len = 5;
3336 return (AS2 (ldi,%3,0x40) CR_TAB
3337 AS2 (mul,%A0,%3) CR_TAB
3338 AS2 (mov,%B0,r0) CR_TAB
3339 AS1 (clr,%A0) CR_TAB
3340 AS1 (clr,__zero_reg__));
3342 if (optimize_size && ldi_ok)
3344 *len = 5;
3345 return (AS2 (mov,%B0,%A0) CR_TAB
3346 AS2 (ldi,%A0,6) "\n1:\t"
3347 AS1 (lsl,%B0) CR_TAB
3348 AS1 (dec,%A0) CR_TAB
3349 AS1 (brne,1b));
3351 if (optimize_size && scratch)
3352 break; /* 5 */
3353 *len = 6;
3354 return (AS1 (clr,%B0) CR_TAB
3355 AS1 (lsr,%A0) CR_TAB
3356 AS1 (ror,%B0) CR_TAB
3357 AS1 (lsr,%A0) CR_TAB
3358 AS1 (ror,%B0) CR_TAB
3359 AS1 (clr,%A0));
3361 case 15:
3362 *len = 4;
3363 return (AS1 (clr,%B0) CR_TAB
3364 AS1 (lsr,%A0) CR_TAB
3365 AS1 (ror,%B0) CR_TAB
3366 AS1 (clr,%A0));
3368 len = t;
3370 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3371 AS1 (rol,%B0)),
3372 insn, operands, len, 2);
3373 return "";
3377 /* 32bit shift left ((long)x << i) */
3379 const char *
3380 ashlsi3_out (rtx insn, rtx operands[], int *len)
3382 if (GET_CODE (operands[2]) == CONST_INT)
3384 int k;
3385 int *t = len;
3387 if (!len)
3388 len = &k;
3390 switch (INTVAL (operands[2]))
3392 default:
3393 if (INTVAL (operands[2]) < 32)
3394 break;
3396 if (AVR_HAVE_MOVW)
3397 return *len = 3, (AS1 (clr,%D0) CR_TAB
3398 AS1 (clr,%C0) CR_TAB
3399 AS2 (movw,%A0,%C0));
3400 *len = 4;
3401 return (AS1 (clr,%D0) CR_TAB
3402 AS1 (clr,%C0) CR_TAB
3403 AS1 (clr,%B0) CR_TAB
3404 AS1 (clr,%A0));
3406 case 8:
3408 int reg0 = true_regnum (operands[0]);
3409 int reg1 = true_regnum (operands[1]);
3410 *len = 4;
3411 if (reg0 >= reg1)
3412 return (AS2 (mov,%D0,%C1) CR_TAB
3413 AS2 (mov,%C0,%B1) CR_TAB
3414 AS2 (mov,%B0,%A1) CR_TAB
3415 AS1 (clr,%A0));
3416 else
3417 return (AS1 (clr,%A0) CR_TAB
3418 AS2 (mov,%B0,%A1) CR_TAB
3419 AS2 (mov,%C0,%B1) CR_TAB
3420 AS2 (mov,%D0,%C1));
3423 case 16:
3425 int reg0 = true_regnum (operands[0]);
3426 int reg1 = true_regnum (operands[1]);
3427 if (reg0 + 2 == reg1)
3428 return *len = 2, (AS1 (clr,%B0) CR_TAB
3429 AS1 (clr,%A0));
3430 if (AVR_HAVE_MOVW)
3431 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3432 AS1 (clr,%B0) CR_TAB
3433 AS1 (clr,%A0));
3434 else
3435 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3436 AS2 (mov,%D0,%B1) CR_TAB
3437 AS1 (clr,%B0) CR_TAB
3438 AS1 (clr,%A0));
3441 case 24:
3442 *len = 4;
3443 return (AS2 (mov,%D0,%A1) CR_TAB
3444 AS1 (clr,%C0) CR_TAB
3445 AS1 (clr,%B0) CR_TAB
3446 AS1 (clr,%A0));
3448 case 31:
3449 *len = 6;
3450 return (AS1 (clr,%D0) CR_TAB
3451 AS1 (lsr,%A0) CR_TAB
3452 AS1 (ror,%D0) CR_TAB
3453 AS1 (clr,%C0) CR_TAB
3454 AS1 (clr,%B0) CR_TAB
3455 AS1 (clr,%A0));
3457 len = t;
3459 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3460 AS1 (rol,%B0) CR_TAB
3461 AS1 (rol,%C0) CR_TAB
3462 AS1 (rol,%D0)),
3463 insn, operands, len, 4);
3464 return "";
3467 /* 8bit arithmetic shift right ((signed char)x >> i) */
3469 const char *
3470 ashrqi3_out (rtx insn, rtx operands[], int *len)
3472 if (GET_CODE (operands[2]) == CONST_INT)
3474 int k;
3476 if (!len)
3477 len = &k;
3479 switch (INTVAL (operands[2]))
3481 case 1:
3482 *len = 1;
3483 return AS1 (asr,%0);
3485 case 2:
3486 *len = 2;
3487 return (AS1 (asr,%0) CR_TAB
3488 AS1 (asr,%0));
3490 case 3:
3491 *len = 3;
3492 return (AS1 (asr,%0) CR_TAB
3493 AS1 (asr,%0) CR_TAB
3494 AS1 (asr,%0));
3496 case 4:
3497 *len = 4;
3498 return (AS1 (asr,%0) CR_TAB
3499 AS1 (asr,%0) CR_TAB
3500 AS1 (asr,%0) CR_TAB
3501 AS1 (asr,%0));
3503 case 5:
3504 *len = 5;
3505 return (AS1 (asr,%0) CR_TAB
3506 AS1 (asr,%0) CR_TAB
3507 AS1 (asr,%0) CR_TAB
3508 AS1 (asr,%0) CR_TAB
3509 AS1 (asr,%0));
3511 case 6:
3512 *len = 4;
3513 return (AS2 (bst,%0,6) CR_TAB
3514 AS1 (lsl,%0) CR_TAB
3515 AS2 (sbc,%0,%0) CR_TAB
3516 AS2 (bld,%0,0));
3518 default:
3519 if (INTVAL (operands[2]) < 8)
3520 break;
3522 /* fall through */
3524 case 7:
3525 *len = 2;
3526 return (AS1 (lsl,%0) CR_TAB
3527 AS2 (sbc,%0,%0));
3530 else if (CONSTANT_P (operands[2]))
3531 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3533 out_shift_with_cnt (AS1 (asr,%0),
3534 insn, operands, len, 1);
3535 return "";
3539 /* 16bit arithmetic shift right ((signed short)x >> i) */
3541 const char *
3542 ashrhi3_out (rtx insn, rtx operands[], int *len)
3544 if (GET_CODE (operands[2]) == CONST_INT)
3546 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3547 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3548 int k;
3549 int *t = len;
3551 if (!len)
3552 len = &k;
3554 switch (INTVAL (operands[2]))
3556 case 4:
3557 case 5:
3558 /* XXX try to optimize this too? */
3559 break;
3561 case 6:
3562 if (optimize_size)
3563 break; /* scratch ? 5 : 6 */
3564 *len = 8;
3565 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3566 AS2 (mov,%A0,%B0) CR_TAB
3567 AS1 (lsl,__tmp_reg__) CR_TAB
3568 AS1 (rol,%A0) CR_TAB
3569 AS2 (sbc,%B0,%B0) CR_TAB
3570 AS1 (lsl,__tmp_reg__) CR_TAB
3571 AS1 (rol,%A0) CR_TAB
3572 AS1 (rol,%B0));
3574 case 7:
3575 *len = 4;
3576 return (AS1 (lsl,%A0) CR_TAB
3577 AS2 (mov,%A0,%B0) CR_TAB
3578 AS1 (rol,%A0) CR_TAB
3579 AS2 (sbc,%B0,%B0));
3581 case 8:
3583 int reg0 = true_regnum (operands[0]);
3584 int reg1 = true_regnum (operands[1]);
3586 if (reg0 == reg1)
3587 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3588 AS1 (lsl,%B0) CR_TAB
3589 AS2 (sbc,%B0,%B0));
3590 else
3591 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3592 AS1 (clr,%B0) CR_TAB
3593 AS2 (sbrc,%A0,7) CR_TAB
3594 AS1 (dec,%B0));
3597 case 9:
3598 *len = 4;
3599 return (AS2 (mov,%A0,%B0) CR_TAB
3600 AS1 (lsl,%B0) CR_TAB
3601 AS2 (sbc,%B0,%B0) CR_TAB
3602 AS1 (asr,%A0));
3604 case 10:
3605 *len = 5;
3606 return (AS2 (mov,%A0,%B0) CR_TAB
3607 AS1 (lsl,%B0) CR_TAB
3608 AS2 (sbc,%B0,%B0) CR_TAB
3609 AS1 (asr,%A0) CR_TAB
3610 AS1 (asr,%A0));
3612 case 11:
3613 if (AVR_HAVE_MUL && ldi_ok)
3615 *len = 5;
3616 return (AS2 (ldi,%A0,0x20) CR_TAB
3617 AS2 (muls,%B0,%A0) CR_TAB
3618 AS2 (mov,%A0,r1) CR_TAB
3619 AS2 (sbc,%B0,%B0) CR_TAB
3620 AS1 (clr,__zero_reg__));
3622 if (optimize_size && scratch)
3623 break; /* 5 */
3624 *len = 6;
3625 return (AS2 (mov,%A0,%B0) CR_TAB
3626 AS1 (lsl,%B0) CR_TAB
3627 AS2 (sbc,%B0,%B0) CR_TAB
3628 AS1 (asr,%A0) CR_TAB
3629 AS1 (asr,%A0) CR_TAB
3630 AS1 (asr,%A0));
3632 case 12:
3633 if (AVR_HAVE_MUL && ldi_ok)
3635 *len = 5;
3636 return (AS2 (ldi,%A0,0x10) CR_TAB
3637 AS2 (muls,%B0,%A0) CR_TAB
3638 AS2 (mov,%A0,r1) CR_TAB
3639 AS2 (sbc,%B0,%B0) CR_TAB
3640 AS1 (clr,__zero_reg__));
3642 if (optimize_size && scratch)
3643 break; /* 5 */
3644 *len = 7;
3645 return (AS2 (mov,%A0,%B0) CR_TAB
3646 AS1 (lsl,%B0) CR_TAB
3647 AS2 (sbc,%B0,%B0) CR_TAB
3648 AS1 (asr,%A0) CR_TAB
3649 AS1 (asr,%A0) CR_TAB
3650 AS1 (asr,%A0) CR_TAB
3651 AS1 (asr,%A0));
3653 case 13:
3654 if (AVR_HAVE_MUL && ldi_ok)
3656 *len = 5;
3657 return (AS2 (ldi,%A0,0x08) CR_TAB
3658 AS2 (muls,%B0,%A0) CR_TAB
3659 AS2 (mov,%A0,r1) CR_TAB
3660 AS2 (sbc,%B0,%B0) CR_TAB
3661 AS1 (clr,__zero_reg__));
3663 if (optimize_size)
3664 break; /* scratch ? 5 : 7 */
3665 *len = 8;
3666 return (AS2 (mov,%A0,%B0) CR_TAB
3667 AS1 (lsl,%B0) CR_TAB
3668 AS2 (sbc,%B0,%B0) CR_TAB
3669 AS1 (asr,%A0) CR_TAB
3670 AS1 (asr,%A0) CR_TAB
3671 AS1 (asr,%A0) CR_TAB
3672 AS1 (asr,%A0) CR_TAB
3673 AS1 (asr,%A0));
3675 case 14:
3676 *len = 5;
3677 return (AS1 (lsl,%B0) CR_TAB
3678 AS2 (sbc,%A0,%A0) CR_TAB
3679 AS1 (lsl,%B0) CR_TAB
3680 AS2 (mov,%B0,%A0) CR_TAB
3681 AS1 (rol,%A0));
3683 default:
3684 if (INTVAL (operands[2]) < 16)
3685 break;
3687 /* fall through */
3689 case 15:
3690 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3691 AS2 (sbc,%A0,%A0) CR_TAB
3692 AS2 (mov,%B0,%A0));
3694 len = t;
3696 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3697 AS1 (ror,%A0)),
3698 insn, operands, len, 2);
3699 return "";
3703 /* 32bit arithmetic shift right ((signed long)x >> i) */
3705 const char *
3706 ashrsi3_out (rtx insn, rtx operands[], int *len)
3708 if (GET_CODE (operands[2]) == CONST_INT)
3710 int k;
3711 int *t = len;
3713 if (!len)
3714 len = &k;
3716 switch (INTVAL (operands[2]))
3718 case 8:
3720 int reg0 = true_regnum (operands[0]);
3721 int reg1 = true_regnum (operands[1]);
3722 *len=6;
3723 if (reg0 <= reg1)
3724 return (AS2 (mov,%A0,%B1) CR_TAB
3725 AS2 (mov,%B0,%C1) CR_TAB
3726 AS2 (mov,%C0,%D1) CR_TAB
3727 AS1 (clr,%D0) CR_TAB
3728 AS2 (sbrc,%C0,7) CR_TAB
3729 AS1 (dec,%D0));
3730 else
3731 return (AS1 (clr,%D0) CR_TAB
3732 AS2 (sbrc,%D1,7) CR_TAB
3733 AS1 (dec,%D0) CR_TAB
3734 AS2 (mov,%C0,%D1) CR_TAB
3735 AS2 (mov,%B0,%C1) CR_TAB
3736 AS2 (mov,%A0,%B1));
3739 case 16:
3741 int reg0 = true_regnum (operands[0]);
3742 int reg1 = true_regnum (operands[1]);
3744 if (reg0 == reg1 + 2)
3745 return *len = 4, (AS1 (clr,%D0) CR_TAB
3746 AS2 (sbrc,%B0,7) CR_TAB
3747 AS1 (com,%D0) CR_TAB
3748 AS2 (mov,%C0,%D0));
3749 if (AVR_HAVE_MOVW)
3750 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3751 AS1 (clr,%D0) CR_TAB
3752 AS2 (sbrc,%B0,7) CR_TAB
3753 AS1 (com,%D0) CR_TAB
3754 AS2 (mov,%C0,%D0));
3755 else
3756 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3757 AS2 (mov,%A0,%C1) CR_TAB
3758 AS1 (clr,%D0) CR_TAB
3759 AS2 (sbrc,%B0,7) CR_TAB
3760 AS1 (com,%D0) CR_TAB
3761 AS2 (mov,%C0,%D0));
3764 case 24:
3765 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3766 AS1 (clr,%D0) CR_TAB
3767 AS2 (sbrc,%A0,7) CR_TAB
3768 AS1 (com,%D0) CR_TAB
3769 AS2 (mov,%B0,%D0) CR_TAB
3770 AS2 (mov,%C0,%D0));
3772 default:
3773 if (INTVAL (operands[2]) < 32)
3774 break;
3776 /* fall through */
3778 case 31:
3779 if (AVR_HAVE_MOVW)
3780 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3781 AS2 (sbc,%A0,%A0) CR_TAB
3782 AS2 (mov,%B0,%A0) CR_TAB
3783 AS2 (movw,%C0,%A0));
3784 else
3785 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3786 AS2 (sbc,%A0,%A0) CR_TAB
3787 AS2 (mov,%B0,%A0) CR_TAB
3788 AS2 (mov,%C0,%A0) CR_TAB
3789 AS2 (mov,%D0,%A0));
3791 len = t;
3793 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3794 AS1 (ror,%C0) CR_TAB
3795 AS1 (ror,%B0) CR_TAB
3796 AS1 (ror,%A0)),
3797 insn, operands, len, 4);
3798 return "";
3801 /* 8bit logical shift right ((unsigned char)x >> i) */
3803 const char *
3804 lshrqi3_out (rtx insn, rtx operands[], int *len)
3806 if (GET_CODE (operands[2]) == CONST_INT)
3808 int k;
3810 if (!len)
3811 len = &k;
3813 switch (INTVAL (operands[2]))
3815 default:
3816 if (INTVAL (operands[2]) < 8)
3817 break;
3819 *len = 1;
3820 return AS1 (clr,%0);
3822 case 1:
3823 *len = 1;
3824 return AS1 (lsr,%0);
3826 case 2:
3827 *len = 2;
3828 return (AS1 (lsr,%0) CR_TAB
3829 AS1 (lsr,%0));
3830 case 3:
3831 *len = 3;
3832 return (AS1 (lsr,%0) CR_TAB
3833 AS1 (lsr,%0) CR_TAB
3834 AS1 (lsr,%0));
3836 case 4:
3837 if (test_hard_reg_class (LD_REGS, operands[0]))
3839 *len=2;
3840 return (AS1 (swap,%0) CR_TAB
3841 AS2 (andi,%0,0x0f));
3843 *len = 4;
3844 return (AS1 (lsr,%0) CR_TAB
3845 AS1 (lsr,%0) CR_TAB
3846 AS1 (lsr,%0) CR_TAB
3847 AS1 (lsr,%0));
3849 case 5:
3850 if (test_hard_reg_class (LD_REGS, operands[0]))
3852 *len = 3;
3853 return (AS1 (swap,%0) CR_TAB
3854 AS1 (lsr,%0) CR_TAB
3855 AS2 (andi,%0,0x7));
3857 *len = 5;
3858 return (AS1 (lsr,%0) CR_TAB
3859 AS1 (lsr,%0) CR_TAB
3860 AS1 (lsr,%0) CR_TAB
3861 AS1 (lsr,%0) CR_TAB
3862 AS1 (lsr,%0));
3864 case 6:
3865 if (test_hard_reg_class (LD_REGS, operands[0]))
3867 *len = 4;
3868 return (AS1 (swap,%0) CR_TAB
3869 AS1 (lsr,%0) CR_TAB
3870 AS1 (lsr,%0) CR_TAB
3871 AS2 (andi,%0,0x3));
3873 *len = 6;
3874 return (AS1 (lsr,%0) CR_TAB
3875 AS1 (lsr,%0) CR_TAB
3876 AS1 (lsr,%0) CR_TAB
3877 AS1 (lsr,%0) CR_TAB
3878 AS1 (lsr,%0) CR_TAB
3879 AS1 (lsr,%0));
3881 case 7:
3882 *len = 3;
3883 return (AS1 (rol,%0) CR_TAB
3884 AS1 (clr,%0) CR_TAB
3885 AS1 (rol,%0));
3888 else if (CONSTANT_P (operands[2]))
3889 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3891 out_shift_with_cnt (AS1 (lsr,%0),
3892 insn, operands, len, 1);
3893 return "";
3896 /* 16bit logical shift right ((unsigned short)x >> i) */
3898 const char *
3899 lshrhi3_out (rtx insn, rtx operands[], int *len)
3901 if (GET_CODE (operands[2]) == CONST_INT)
3903 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3904 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3905 int k;
3906 int *t = len;
3908 if (!len)
3909 len = &k;
3911 switch (INTVAL (operands[2]))
3913 default:
3914 if (INTVAL (operands[2]) < 16)
3915 break;
3917 *len = 2;
3918 return (AS1 (clr,%B0) CR_TAB
3919 AS1 (clr,%A0));
3921 case 4:
3922 if (optimize_size && scratch)
3923 break; /* 5 */
3924 if (ldi_ok)
3926 *len = 6;
3927 return (AS1 (swap,%B0) CR_TAB
3928 AS1 (swap,%A0) CR_TAB
3929 AS2 (andi,%A0,0x0f) CR_TAB
3930 AS2 (eor,%A0,%B0) CR_TAB
3931 AS2 (andi,%B0,0x0f) CR_TAB
3932 AS2 (eor,%A0,%B0));
3934 if (scratch)
3936 *len = 7;
3937 return (AS1 (swap,%B0) CR_TAB
3938 AS1 (swap,%A0) CR_TAB
3939 AS2 (ldi,%3,0x0f) CR_TAB
3940 AS2 (and,%A0,%3) CR_TAB
3941 AS2 (eor,%A0,%B0) CR_TAB
3942 AS2 (and,%B0,%3) CR_TAB
3943 AS2 (eor,%A0,%B0));
3945 break; /* optimize_size ? 6 : 8 */
3947 case 5:
3948 if (optimize_size)
3949 break; /* scratch ? 5 : 6 */
3950 if (ldi_ok)
3952 *len = 8;
3953 return (AS1 (lsr,%B0) CR_TAB
3954 AS1 (ror,%A0) CR_TAB
3955 AS1 (swap,%B0) CR_TAB
3956 AS1 (swap,%A0) CR_TAB
3957 AS2 (andi,%A0,0x0f) CR_TAB
3958 AS2 (eor,%A0,%B0) CR_TAB
3959 AS2 (andi,%B0,0x0f) CR_TAB
3960 AS2 (eor,%A0,%B0));
3962 if (scratch)
3964 *len = 9;
3965 return (AS1 (lsr,%B0) CR_TAB
3966 AS1 (ror,%A0) CR_TAB
3967 AS1 (swap,%B0) CR_TAB
3968 AS1 (swap,%A0) CR_TAB
3969 AS2 (ldi,%3,0x0f) CR_TAB
3970 AS2 (and,%A0,%3) CR_TAB
3971 AS2 (eor,%A0,%B0) CR_TAB
3972 AS2 (and,%B0,%3) CR_TAB
3973 AS2 (eor,%A0,%B0));
3975 break; /* 10 */
3977 case 6:
3978 if (optimize_size)
3979 break; /* scratch ? 5 : 6 */
3980 *len = 9;
3981 return (AS1 (clr,__tmp_reg__) CR_TAB
3982 AS1 (lsl,%A0) CR_TAB
3983 AS1 (rol,%B0) CR_TAB
3984 AS1 (rol,__tmp_reg__) CR_TAB
3985 AS1 (lsl,%A0) CR_TAB
3986 AS1 (rol,%B0) CR_TAB
3987 AS1 (rol,__tmp_reg__) CR_TAB
3988 AS2 (mov,%A0,%B0) CR_TAB
3989 AS2 (mov,%B0,__tmp_reg__));
3991 case 7:
3992 *len = 5;
3993 return (AS1 (lsl,%A0) CR_TAB
3994 AS2 (mov,%A0,%B0) CR_TAB
3995 AS1 (rol,%A0) CR_TAB
3996 AS2 (sbc,%B0,%B0) CR_TAB
3997 AS1 (neg,%B0));
3999 case 8:
4000 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4001 AS1 (clr,%B0));
4003 case 9:
4004 *len = 3;
4005 return (AS2 (mov,%A0,%B0) CR_TAB
4006 AS1 (clr,%B0) CR_TAB
4007 AS1 (lsr,%A0));
4009 case 10:
4010 *len = 4;
4011 return (AS2 (mov,%A0,%B0) CR_TAB
4012 AS1 (clr,%B0) CR_TAB
4013 AS1 (lsr,%A0) CR_TAB
4014 AS1 (lsr,%A0));
4016 case 11:
4017 *len = 5;
4018 return (AS2 (mov,%A0,%B0) CR_TAB
4019 AS1 (clr,%B0) CR_TAB
4020 AS1 (lsr,%A0) CR_TAB
4021 AS1 (lsr,%A0) CR_TAB
4022 AS1 (lsr,%A0));
4024 case 12:
4025 if (ldi_ok)
4027 *len = 4;
4028 return (AS2 (mov,%A0,%B0) CR_TAB
4029 AS1 (clr,%B0) CR_TAB
4030 AS1 (swap,%A0) CR_TAB
4031 AS2 (andi,%A0,0x0f));
4033 if (scratch)
4035 *len = 5;
4036 return (AS2 (mov,%A0,%B0) CR_TAB
4037 AS1 (clr,%B0) CR_TAB
4038 AS1 (swap,%A0) CR_TAB
4039 AS2 (ldi,%3,0x0f) CR_TAB
4040 AS2 (and,%A0,%3));
4042 *len = 6;
4043 return (AS2 (mov,%A0,%B0) CR_TAB
4044 AS1 (clr,%B0) CR_TAB
4045 AS1 (lsr,%A0) CR_TAB
4046 AS1 (lsr,%A0) CR_TAB
4047 AS1 (lsr,%A0) CR_TAB
4048 AS1 (lsr,%A0));
4050 case 13:
4051 if (ldi_ok)
4053 *len = 5;
4054 return (AS2 (mov,%A0,%B0) CR_TAB
4055 AS1 (clr,%B0) CR_TAB
4056 AS1 (swap,%A0) CR_TAB
4057 AS1 (lsr,%A0) CR_TAB
4058 AS2 (andi,%A0,0x07));
4060 if (AVR_HAVE_MUL && scratch)
4062 *len = 5;
4063 return (AS2 (ldi,%3,0x08) CR_TAB
4064 AS2 (mul,%B0,%3) CR_TAB
4065 AS2 (mov,%A0,r1) CR_TAB
4066 AS1 (clr,%B0) CR_TAB
4067 AS1 (clr,__zero_reg__));
4069 if (optimize_size && scratch)
4070 break; /* 5 */
4071 if (scratch)
4073 *len = 6;
4074 return (AS2 (mov,%A0,%B0) CR_TAB
4075 AS1 (clr,%B0) CR_TAB
4076 AS1 (swap,%A0) CR_TAB
4077 AS1 (lsr,%A0) CR_TAB
4078 AS2 (ldi,%3,0x07) CR_TAB
4079 AS2 (and,%A0,%3));
4081 if (AVR_HAVE_MUL)
4083 *len = 6;
4084 return ("set" CR_TAB
4085 AS2 (bld,r1,3) CR_TAB
4086 AS2 (mul,%B0,r1) CR_TAB
4087 AS2 (mov,%A0,r1) CR_TAB
4088 AS1 (clr,%B0) CR_TAB
4089 AS1 (clr,__zero_reg__));
4091 *len = 7;
4092 return (AS2 (mov,%A0,%B0) CR_TAB
4093 AS1 (clr,%B0) CR_TAB
4094 AS1 (lsr,%A0) CR_TAB
4095 AS1 (lsr,%A0) CR_TAB
4096 AS1 (lsr,%A0) CR_TAB
4097 AS1 (lsr,%A0) CR_TAB
4098 AS1 (lsr,%A0));
4100 case 14:
4101 if (AVR_HAVE_MUL && ldi_ok)
4103 *len = 5;
4104 return (AS2 (ldi,%A0,0x04) CR_TAB
4105 AS2 (mul,%B0,%A0) CR_TAB
4106 AS2 (mov,%A0,r1) CR_TAB
4107 AS1 (clr,%B0) CR_TAB
4108 AS1 (clr,__zero_reg__));
4110 if (AVR_HAVE_MUL && scratch)
4112 *len = 5;
4113 return (AS2 (ldi,%3,0x04) CR_TAB
4114 AS2 (mul,%B0,%3) CR_TAB
4115 AS2 (mov,%A0,r1) CR_TAB
4116 AS1 (clr,%B0) CR_TAB
4117 AS1 (clr,__zero_reg__));
4119 if (optimize_size && ldi_ok)
4121 *len = 5;
4122 return (AS2 (mov,%A0,%B0) CR_TAB
4123 AS2 (ldi,%B0,6) "\n1:\t"
4124 AS1 (lsr,%A0) CR_TAB
4125 AS1 (dec,%B0) CR_TAB
4126 AS1 (brne,1b));
4128 if (optimize_size && scratch)
4129 break; /* 5 */
4130 *len = 6;
4131 return (AS1 (clr,%A0) CR_TAB
4132 AS1 (lsl,%B0) CR_TAB
4133 AS1 (rol,%A0) CR_TAB
4134 AS1 (lsl,%B0) CR_TAB
4135 AS1 (rol,%A0) CR_TAB
4136 AS1 (clr,%B0));
4138 case 15:
4139 *len = 4;
4140 return (AS1 (clr,%A0) CR_TAB
4141 AS1 (lsl,%B0) CR_TAB
4142 AS1 (rol,%A0) CR_TAB
4143 AS1 (clr,%B0));
4145 len = t;
4147 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4148 AS1 (ror,%A0)),
4149 insn, operands, len, 2);
4150 return "";
4153 /* 32bit logical shift right ((unsigned long)x >> i) */
4155 const char *
4156 lshrsi3_out (rtx insn, rtx operands[], int *len)
4158 if (GET_CODE (operands[2]) == CONST_INT)
4160 int k;
4161 int *t = len;
4163 if (!len)
4164 len = &k;
4166 switch (INTVAL (operands[2]))
4168 default:
4169 if (INTVAL (operands[2]) < 32)
4170 break;
4172 if (AVR_HAVE_MOVW)
4173 return *len = 3, (AS1 (clr,%D0) CR_TAB
4174 AS1 (clr,%C0) CR_TAB
4175 AS2 (movw,%A0,%C0));
4176 *len = 4;
4177 return (AS1 (clr,%D0) CR_TAB
4178 AS1 (clr,%C0) CR_TAB
4179 AS1 (clr,%B0) CR_TAB
4180 AS1 (clr,%A0));
4182 case 8:
4184 int reg0 = true_regnum (operands[0]);
4185 int reg1 = true_regnum (operands[1]);
4186 *len = 4;
4187 if (reg0 <= reg1)
4188 return (AS2 (mov,%A0,%B1) CR_TAB
4189 AS2 (mov,%B0,%C1) CR_TAB
4190 AS2 (mov,%C0,%D1) CR_TAB
4191 AS1 (clr,%D0));
4192 else
4193 return (AS1 (clr,%D0) CR_TAB
4194 AS2 (mov,%C0,%D1) CR_TAB
4195 AS2 (mov,%B0,%C1) CR_TAB
4196 AS2 (mov,%A0,%B1));
4199 case 16:
4201 int reg0 = true_regnum (operands[0]);
4202 int reg1 = true_regnum (operands[1]);
4204 if (reg0 == reg1 + 2)
4205 return *len = 2, (AS1 (clr,%C0) CR_TAB
4206 AS1 (clr,%D0));
4207 if (AVR_HAVE_MOVW)
4208 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4209 AS1 (clr,%C0) CR_TAB
4210 AS1 (clr,%D0));
4211 else
4212 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4213 AS2 (mov,%A0,%C1) CR_TAB
4214 AS1 (clr,%C0) CR_TAB
4215 AS1 (clr,%D0));
4218 case 24:
4219 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4220 AS1 (clr,%B0) CR_TAB
4221 AS1 (clr,%C0) CR_TAB
4222 AS1 (clr,%D0));
4224 case 31:
4225 *len = 6;
4226 return (AS1 (clr,%A0) CR_TAB
4227 AS2 (sbrc,%D0,7) CR_TAB
4228 AS1 (inc,%A0) CR_TAB
4229 AS1 (clr,%B0) CR_TAB
4230 AS1 (clr,%C0) CR_TAB
4231 AS1 (clr,%D0));
4233 len = t;
4235 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4236 AS1 (ror,%C0) CR_TAB
4237 AS1 (ror,%B0) CR_TAB
4238 AS1 (ror,%A0)),
4239 insn, operands, len, 4);
4240 return "";
4243 /* Modifies the length assigned to instruction INSN.
4244 LEN is the initially computed length of the insn. */
4247 adjust_insn_length (rtx insn, int len)
4249 rtx patt = PATTERN (insn);
4250 rtx set;
4252 if (GET_CODE (patt) == SET)
4254 rtx op[10];
4255 op[1] = SET_SRC (patt);
4256 op[0] = SET_DEST (patt);
4257 if (general_operand (op[1], VOIDmode)
4258 && general_operand (op[0], VOIDmode))
4260 switch (GET_MODE (op[0]))
4262 case QImode:
4263 output_movqi (insn, op, &len);
4264 break;
4265 case HImode:
4266 output_movhi (insn, op, &len);
4267 break;
4268 case SImode:
4269 case SFmode:
4270 output_movsisf (insn, op, &len);
4271 break;
4272 default:
4273 break;
4276 else if (op[0] == cc0_rtx && REG_P (op[1]))
4278 switch (GET_MODE (op[1]))
4280 case HImode: out_tsthi (insn,&len); break;
4281 case SImode: out_tstsi (insn,&len); break;
4282 default: break;
4285 else if (GET_CODE (op[1]) == AND)
4287 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4289 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4290 if (GET_MODE (op[1]) == SImode)
4291 len = (((mask & 0xff) != 0xff)
4292 + ((mask & 0xff00) != 0xff00)
4293 + ((mask & 0xff0000L) != 0xff0000L)
4294 + ((mask & 0xff000000L) != 0xff000000L));
4295 else if (GET_MODE (op[1]) == HImode)
4296 len = (((mask & 0xff) != 0xff)
4297 + ((mask & 0xff00) != 0xff00));
4300 else if (GET_CODE (op[1]) == IOR)
4302 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4304 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4305 if (GET_MODE (op[1]) == SImode)
4306 len = (((mask & 0xff) != 0)
4307 + ((mask & 0xff00) != 0)
4308 + ((mask & 0xff0000L) != 0)
4309 + ((mask & 0xff000000L) != 0));
4310 else if (GET_MODE (op[1]) == HImode)
4311 len = (((mask & 0xff) != 0)
4312 + ((mask & 0xff00) != 0));
4316 set = single_set (insn);
4317 if (set)
4319 rtx op[10];
4321 op[1] = SET_SRC (set);
4322 op[0] = SET_DEST (set);
4324 if (GET_CODE (patt) == PARALLEL
4325 && general_operand (op[1], VOIDmode)
4326 && general_operand (op[0], VOIDmode))
4328 if (XVECLEN (patt, 0) == 2)
4329 op[2] = XVECEXP (patt, 0, 1);
4331 switch (GET_MODE (op[0]))
4333 case QImode:
4334 len = 2;
4335 break;
4336 case HImode:
4337 output_reload_inhi (insn, op, &len);
4338 break;
4339 case SImode:
4340 case SFmode:
4341 output_reload_insisf (insn, op, &len);
4342 break;
4343 default:
4344 break;
4347 else if (GET_CODE (op[1]) == ASHIFT
4348 || GET_CODE (op[1]) == ASHIFTRT
4349 || GET_CODE (op[1]) == LSHIFTRT)
4351 rtx ops[10];
4352 ops[0] = op[0];
4353 ops[1] = XEXP (op[1],0);
4354 ops[2] = XEXP (op[1],1);
4355 switch (GET_CODE (op[1]))
4357 case ASHIFT:
4358 switch (GET_MODE (op[0]))
4360 case QImode: ashlqi3_out (insn,ops,&len); break;
4361 case HImode: ashlhi3_out (insn,ops,&len); break;
4362 case SImode: ashlsi3_out (insn,ops,&len); break;
4363 default: break;
4365 break;
4366 case ASHIFTRT:
4367 switch (GET_MODE (op[0]))
4369 case QImode: ashrqi3_out (insn,ops,&len); break;
4370 case HImode: ashrhi3_out (insn,ops,&len); break;
4371 case SImode: ashrsi3_out (insn,ops,&len); break;
4372 default: break;
4374 break;
4375 case LSHIFTRT:
4376 switch (GET_MODE (op[0]))
4378 case QImode: lshrqi3_out (insn,ops,&len); break;
4379 case HImode: lshrhi3_out (insn,ops,&len); break;
4380 case SImode: lshrsi3_out (insn,ops,&len); break;
4381 default: break;
4383 break;
4384 default:
4385 break;
4389 return len;
4392 /* Return nonzero if register REG is dead after INSN. */
4395 reg_unused_after (rtx insn, rtx reg)
4397 return (dead_or_set_p (insn, reg)
4398 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4401 /* Return nonzero if REG is not used after INSN.
4402 We assume REG is a reload reg, and therefore does
4403 not live past labels. It may live past calls or jumps though. */
4406 _reg_unused_after (rtx insn, rtx reg)
4408 enum rtx_code code;
4409 rtx set;
4411 /* If the reg is set by this instruction, then it is safe for our
4412 case. Disregard the case where this is a store to memory, since
4413 we are checking a register used in the store address. */
4414 set = single_set (insn);
4415 if (set && GET_CODE (SET_DEST (set)) != MEM
4416 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4417 return 1;
4419 while ((insn = NEXT_INSN (insn)))
4421 rtx set;
4422 code = GET_CODE (insn);
4424 #if 0
4425 /* If this is a label that existed before reload, then the register
4426 is dead here. However, if this is a label added by reorg, then
4427 the register may still be live here. We can't tell the difference,
4428 so we just ignore labels completely. */
4429 if (code == CODE_LABEL)
4430 return 1;
4431 /* else */
4432 #endif
4434 if (!INSN_P (insn))
4435 continue;
4437 if (code == JUMP_INSN)
4438 return 0;
4440 /* If this is a sequence, we must handle them all at once.
4441 We could have for instance a call that sets the target register,
4442 and an insn in a delay slot that uses the register. In this case,
4443 we must return 0. */
4444 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4446 int i;
4447 int retval = 0;
4449 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4451 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4452 rtx set = single_set (this_insn);
4454 if (GET_CODE (this_insn) == CALL_INSN)
4455 code = CALL_INSN;
4456 else if (GET_CODE (this_insn) == JUMP_INSN)
4458 if (INSN_ANNULLED_BRANCH_P (this_insn))
4459 return 0;
4460 code = JUMP_INSN;
4463 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4464 return 0;
4465 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4467 if (GET_CODE (SET_DEST (set)) != MEM)
4468 retval = 1;
4469 else
4470 return 0;
4472 if (set == 0
4473 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4474 return 0;
4476 if (retval == 1)
4477 return 1;
4478 else if (code == JUMP_INSN)
4479 return 0;
4482 if (code == CALL_INSN)
4484 rtx tem;
4485 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4486 if (GET_CODE (XEXP (tem, 0)) == USE
4487 && REG_P (XEXP (XEXP (tem, 0), 0))
4488 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4489 return 0;
4490 if (call_used_regs[REGNO (reg)])
4491 return 1;
4494 set = single_set (insn);
4496 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4497 return 0;
4498 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4499 return GET_CODE (SET_DEST (set)) != MEM;
4500 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4501 return 0;
4503 return 1;
4506 /* Target hook for assembling integer objects. The AVR version needs
4507 special handling for references to certain labels. */
4509 static bool
4510 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4512 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4513 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4514 || GET_CODE (x) == LABEL_REF))
4516 fputs ("\t.word\tgs(", asm_out_file);
4517 output_addr_const (asm_out_file, x);
4518 fputs (")\n", asm_out_file);
4519 return true;
4521 return default_assemble_integer (x, size, aligned_p);
4524 /* The routine used to output NUL terminated strings. We use a special
4525 version of this for most svr4 targets because doing so makes the
4526 generated assembly code more compact (and thus faster to assemble)
4527 as well as more readable, especially for targets like the i386
4528 (where the only alternative is to output character sequences as
4529 comma separated lists of numbers). */
4531 void
4532 gas_output_limited_string(FILE *file, const char *str)
4534 const unsigned char *_limited_str = (const unsigned char *) str;
4535 unsigned ch;
4536 fprintf (file, "%s\"", STRING_ASM_OP);
4537 for (; (ch = *_limited_str); _limited_str++)
4539 int escape;
4540 switch (escape = ESCAPES[ch])
4542 case 0:
4543 putc (ch, file);
4544 break;
4545 case 1:
4546 fprintf (file, "\\%03o", ch);
4547 break;
4548 default:
4549 putc ('\\', file);
4550 putc (escape, file);
4551 break;
4554 fprintf (file, "\"\n");
4557 /* The routine used to output sequences of byte values. We use a special
4558 version of this for most svr4 targets because doing so makes the
4559 generated assembly code more compact (and thus faster to assemble)
4560 as well as more readable. Note that if we find subparts of the
4561 character sequence which end with NUL (and which are shorter than
4562 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4564 void
4565 gas_output_ascii(FILE *file, const char *str, size_t length)
4567 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4568 const unsigned char *limit = _ascii_bytes + length;
4569 unsigned bytes_in_chunk = 0;
4570 for (; _ascii_bytes < limit; _ascii_bytes++)
4572 const unsigned char *p;
4573 if (bytes_in_chunk >= 60)
4575 fprintf (file, "\"\n");
4576 bytes_in_chunk = 0;
4578 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4579 continue;
4580 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4582 if (bytes_in_chunk > 0)
4584 fprintf (file, "\"\n");
4585 bytes_in_chunk = 0;
4587 gas_output_limited_string (file, (const char*)_ascii_bytes);
4588 _ascii_bytes = p;
4590 else
4592 int escape;
4593 unsigned ch;
4594 if (bytes_in_chunk == 0)
4595 fprintf (file, "\t.ascii\t\"");
4596 switch (escape = ESCAPES[ch = *_ascii_bytes])
4598 case 0:
4599 putc (ch, file);
4600 bytes_in_chunk++;
4601 break;
4602 case 1:
4603 fprintf (file, "\\%03o", ch);
4604 bytes_in_chunk += 4;
4605 break;
4606 default:
4607 putc ('\\', file);
4608 putc (escape, file);
4609 bytes_in_chunk += 2;
4610 break;
4614 if (bytes_in_chunk > 0)
4615 fprintf (file, "\"\n");
4618 /* Return value is nonzero if pseudos that have been
4619 assigned to registers of class CLASS would likely be spilled
4620 because registers of CLASS are needed for spill registers. */
4622 enum reg_class
4623 class_likely_spilled_p (int c)
4625 return (c != ALL_REGS && c != ADDW_REGS);
4628 /* Valid attributes:
4629 progmem - place data in program memory;
4630 signal - mark the function as a hardware interrupt handler; interrupts
4631 stay disabled after the function prologue;
4632 interrupt - mark the function as a hardware interrupt handler; interrupts
4633 are re-enabled after the function prologue;
4634 naked - don't generate a function prologue/epilogue or the `ret' instruction.
4636 Only the `progmem' attribute is valid for a type. */
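/* Illustrative (hypothetical) user-level code exercising these attributes;
   the identifiers below are examples, not part of this file:

     const char msg[] __attribute__ ((progmem)) = "stored in flash";
     void __vector_1 (void) __attribute__ ((signal));
     void background_loop (void) __attribute__ ((OS_task));  */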
4638 const struct attribute_spec avr_attribute_table[] =
4640 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4641 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4642 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4643 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4644 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4645 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4646 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
4647 { NULL, 0, 0, false, false, false, NULL }
4650 /* Handle a "progmem" attribute; arguments as in
4651 struct attribute_spec.handler. */
4652 static tree
4653 avr_handle_progmem_attribute (tree *node, tree name,
4654 tree args ATTRIBUTE_UNUSED,
4655 int flags ATTRIBUTE_UNUSED,
4656 bool *no_add_attrs)
4658 if (DECL_P (*node))
4660 if (TREE_CODE (*node) == TYPE_DECL)
4662 /* This is really a decl attribute, not a type attribute,
4663 but try to handle it for GCC 3.0 backwards compatibility. */
4665 tree type = TREE_TYPE (*node);
4666 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4667 tree newtype = build_type_attribute_variant (type, attr);
4669 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4670 TREE_TYPE (*node) = newtype;
4671 *no_add_attrs = true;
4673 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4675 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4677 warning (0, "only initialized variables can be placed into "
4678 "program memory area");
4679 *no_add_attrs = true;
4682 else
4684 warning (OPT_Wattributes, "%qs attribute ignored",
4685 IDENTIFIER_POINTER (name));
4686 *no_add_attrs = true;
4690 return NULL_TREE;
4693 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4694 struct attribute_spec.handler. */
4696 static tree
4697 avr_handle_fndecl_attribute (tree *node, tree name,
4698 tree args ATTRIBUTE_UNUSED,
4699 int flags ATTRIBUTE_UNUSED,
4700 bool *no_add_attrs)
4702 if (TREE_CODE (*node) != FUNCTION_DECL)
4704 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4705 IDENTIFIER_POINTER (name));
4706 *no_add_attrs = true;
4708 else
4710 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4711 const char *attr = IDENTIFIER_POINTER (name);
4713 /* If the function has the 'signal' or 'interrupt' attribute, test to
4714 make sure that the name of the function is "__vector_NN" so as to
4715 catch when the user misspells the interrupt vector name. */
4717 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4719 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4721 warning (0, "%qs appears to be a misspelled interrupt handler",
4722 func_name);
4725 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4727 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4729 warning (0, "%qs appears to be a misspelled signal handler",
4730 func_name);
4735 return NULL_TREE;
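/* Example of the naming check above (names are illustrative):

     void __vector_5 (void) __attribute__((signal));  // accepted silently
     void __vektor_5 (void) __attribute__((signal));  // warned: likely misspelled

   Only the "__vector" prefix is compared, so any name beginning with it
   passes the check.  */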
4738 static tree
4739 avr_handle_fntype_attribute (tree *node, tree name,
4740 tree args ATTRIBUTE_UNUSED,
4741 int flags ATTRIBUTE_UNUSED,
4742 bool *no_add_attrs)
4744 if (TREE_CODE (*node) != FUNCTION_TYPE)
4746 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4747 IDENTIFIER_POINTER (name));
4748 *no_add_attrs = true;
4751 return NULL_TREE;
4754 /* Look for the attribute `progmem' in DECL;
4755 if found return 1, otherwise 0. */
4758 avr_progmem_p (tree decl, tree attributes)
4760 tree a;
4762 if (TREE_CODE (decl) != VAR_DECL)
4763 return 0;
4765 if (NULL_TREE
4766 != lookup_attribute ("progmem", attributes))
4767 return 1;
4769 a=decl;
4770 do
4771 a = TREE_TYPE(a);
4772 while (TREE_CODE (a) == ARRAY_TYPE);
4774 if (a == error_mark_node)
4775 return 0;
4777 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4778 return 1;
4780 return 0;
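/* Illustrative case (the declaration is an example only): for

     static const char tbl[2][4] __attribute__((progmem)) = { "ab", "cd" };

   the attribute may sit on the declaration itself or on the underlying
   (array) element type, which is why the loop above peels off ARRAY_TYPE
   layers before consulting TYPE_ATTRIBUTES.  */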
4783 /* Add the section attribute if the variable is in progmem. */
4785 static void
4786 avr_insert_attributes (tree node, tree *attributes)
4788 if (TREE_CODE (node) == VAR_DECL
4789 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4790 && avr_progmem_p (node, *attributes))
4792 static const char dsec[] = ".progmem.data";
4793 *attributes = tree_cons (get_identifier ("section"),
4794 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4795 *attributes);
4797 /* ??? This seems sketchy. Why can't the user declare the
4798 thing const in the first place? */
4799 TREE_READONLY (node) = 1;
4803 /* A get_unnamed_section callback for switching to progmem_section. */
4805 static void
4806 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4808 fprintf (asm_out_file,
4809 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4810 AVR_HAVE_JMP_CALL ? "a" : "ax");
4811 /* Should already be aligned; this is just to be safe if it isn't. */
4812 fprintf (asm_out_file, "\t.p2align 1\n");
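/* The callback above therefore prints, roughly:

       .section .progmem.gcc_sw_table, "a", @progbits
       .p2align 1

   on devices with jmp/call, and with flags "ax" on devices without them,
   where jump tables consist of executable rjmp instructions (see
   avr_output_addr_vec_elt below).  */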
4815 /* Implement TARGET_ASM_INIT_SECTIONS. */
4817 static void
4818 avr_asm_init_sections (void)
4820 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4821 avr_output_progmem_section_asm_op,
4822 NULL);
4823 readonly_data_section = data_section;
4826 static unsigned int
4827 avr_section_type_flags (tree decl, const char *name, int reloc)
4829 unsigned int flags = default_section_type_flags (decl, name, reloc);
4831 if (strncmp (name, ".noinit", 7) == 0)
4833 if (decl && TREE_CODE (decl) == VAR_DECL
4834 && DECL_INITIAL (decl) == NULL_TREE)
4835 flags |= SECTION_BSS; /* @nobits */
4836 else
4837 warning (0, "only uninitialized variables can be placed in the "
4838 ".noinit section");
4841 return flags;
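/* Illustrative effect of the .noinit handling above (declarations are
   examples only):

     int boot_count __attribute__((section(".noinit")));      // accepted, @nobits
     int bad_count  __attribute__((section(".noinit"))) = 1;  // warned: initialized

   Only the first form is placed in .noinit as intended.  */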
4844 /* Outputs some appropriate text to go at the start of an assembler
4845 file. */
4847 static void
4848 avr_file_start (void)
4850 if (avr_current_arch->asm_only)
4851 error ("MCU %qs supported for assembler only", avr_mcu_name);
4853 default_file_start ();
4855 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4856 fputs ("__SREG__ = 0x3f\n"
4857 "__SP_H__ = 0x3e\n"
4858 "__SP_L__ = 0x3d\n", asm_out_file);
4860 fputs ("__tmp_reg__ = 0\n"
4861 "__zero_reg__ = 1\n", asm_out_file);
4863 /* FIXME: output these only if there is anything in the .data / .bss
4864 sections - some code size could be saved by not linking in the
4865 initialization code from libgcc if one or both sections are empty. */
4866 fputs ("\t.global __do_copy_data\n", asm_out_file);
4867 fputs ("\t.global __do_clear_bss\n", asm_out_file);
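/* With the hook above, every emitted assembler file therefore starts
   roughly like this (sketch):

       __SREG__ = 0x3f
       __SP_H__ = 0x3e
       __SP_L__ = 0x3d
       __tmp_reg__ = 0
       __zero_reg__ = 1
       .global __do_copy_data
       .global __do_clear_bss

   plus whatever default_file_start emits.  */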
4870 /* Outputs to the stdio stream FILE some
4871 appropriate text to go at the end of an assembler file. */
4873 static void
4874 avr_file_end (void)
4878 /* Choose the order in which to allocate hard registers for
4879 pseudo-registers local to a basic block.
4881 Store the desired register order in the array `reg_alloc_order'.
4882 Element 0 should be the register to allocate first; element 1, the
4883 next register; and so on. */
4885 void
4886 order_regs_for_local_alloc (void)
4888 unsigned int i;
4889 static const int order_0[] = {
4890 24,25,
4891 18,19,
4892 20,21,
4893 22,23,
4894 30,31,
4895 26,27,
4896 28,29,
4897 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4898 0,1,
4899 32,33,34,35
4901 static const int order_1[] = {
4902 18,19,
4903 20,21,
4904 22,23,
4905 24,25,
4906 30,31,
4907 26,27,
4908 28,29,
4909 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4910 0,1,
4911 32,33,34,35
4913 static const int order_2[] = {
4914 25,24,
4915 23,22,
4916 21,20,
4917 19,18,
4918 30,31,
4919 26,27,
4920 28,29,
4921 17,16,
4922 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4923 1,0,
4924 32,33,34,35
4927 const int *order = (TARGET_ORDER_1 ? order_1 :
4928 TARGET_ORDER_2 ? order_2 :
4929 order_0);
4930 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4931 reg_alloc_order[i] = order[i];
4935 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
4936 cost of an RTX operand given its context. X is the rtx of the
4937 operand, MODE is its mode, and OUTER is the rtx_code of this
4938 operand's parent operator. */
4940 static int
4941 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4943 enum rtx_code code = GET_CODE (x);
4944 int total;
4946 switch (code)
4948 case REG:
4949 case SUBREG:
4950 return 0;
4952 case CONST_INT:
4953 case CONST_DOUBLE:
4954 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4956 default:
4957 break;
4960 total = 0;
4961 avr_rtx_costs (x, code, outer, &total);
4962 return total;
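/* Example of the scale used here: a register or subreg operand is free,
   while a CONST_INT operand in HImode costs COSTS_N_INSNS (2), roughly
   the two instruction words needed to load it.  */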
4965 /* The AVR backend's rtx_costs function. X is the rtx expression whose cost
4966 is to be calculated. Return true if the complete cost has been
4967 computed, and false if subexpressions should be scanned. In either
4968 case, *TOTAL contains the cost result. */
4970 static bool
4971 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4973 enum machine_mode mode = GET_MODE (x);
4974 HOST_WIDE_INT val;
4976 switch (code)
4978 case CONST_INT:
4979 case CONST_DOUBLE:
4980 /* Immediate constants are as cheap as registers. */
4981 *total = 0;
4982 return true;
4984 case MEM:
4985 case CONST:
4986 case LABEL_REF:
4987 case SYMBOL_REF:
4988 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4989 return true;
4991 case NEG:
4992 switch (mode)
4994 case QImode:
4995 case SFmode:
4996 *total = COSTS_N_INSNS (1);
4997 break;
4999 case HImode:
5000 *total = COSTS_N_INSNS (3);
5001 break;
5003 case SImode:
5004 *total = COSTS_N_INSNS (7);
5005 break;
5007 default:
5008 return false;
5010 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5011 return true;
5013 case ABS:
5014 switch (mode)
5016 case QImode:
5017 case SFmode:
5018 *total = COSTS_N_INSNS (1);
5019 break;
5021 default:
5022 return false;
5024 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5025 return true;
5027 case NOT:
5028 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5029 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5030 return true;
5032 case ZERO_EXTEND:
5033 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5034 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5035 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5036 return true;
5038 case SIGN_EXTEND:
5039 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5040 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5041 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5042 return true;
5044 case PLUS:
5045 switch (mode)
5047 case QImode:
5048 *total = COSTS_N_INSNS (1);
5049 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5050 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5051 break;
5053 case HImode:
5054 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5056 *total = COSTS_N_INSNS (2);
5057 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5059 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5060 *total = COSTS_N_INSNS (1);
5061 else
5062 *total = COSTS_N_INSNS (2);
5063 break;
5065 case SImode:
5066 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5068 *total = COSTS_N_INSNS (4);
5069 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5071 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5072 *total = COSTS_N_INSNS (1);
5073 else
5074 *total = COSTS_N_INSNS (4);
5075 break;
5077 default:
5078 return false;
5080 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5081 return true;
5083 case MINUS:
5084 case AND:
5085 case IOR:
5086 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5087 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5088 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5089 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5090 return true;
5092 case XOR:
5093 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5094 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5095 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5096 return true;
5098 case MULT:
5099 switch (mode)
5101 case QImode:
5102 if (AVR_HAVE_MUL)
5103 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5104 else if (optimize_size)
5105 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5106 else
5107 return false;
5108 break;
5110 case HImode:
5111 if (AVR_HAVE_MUL)
5112 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5113 else if (optimize_size)
5114 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5115 else
5116 return false;
5117 break;
5119 default:
5120 return false;
5122 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5123 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5124 return true;
5126 case DIV:
5127 case MOD:
5128 case UDIV:
5129 case UMOD:
5130 if (optimize_size)
5131 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5132 else
5133 return false;
5134 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5135 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5136 return true;
5138 case ASHIFT:
5139 switch (mode)
5141 case QImode:
5142 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5144 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5145 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5147 else
5149 val = INTVAL (XEXP (x, 1));
5150 if (val == 7)
5151 *total = COSTS_N_INSNS (3);
5152 else if (val >= 0 && val <= 7)
5153 *total = COSTS_N_INSNS (val);
5154 else
5155 *total = COSTS_N_INSNS (1);
5157 break;
5159 case HImode:
5160 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5162 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5163 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5165 else
5166 switch (INTVAL (XEXP (x, 1)))
5168 case 0:
5169 *total = 0;
5170 break;
5171 case 1:
5172 case 8:
5173 *total = COSTS_N_INSNS (2);
5174 break;
5175 case 9:
5176 *total = COSTS_N_INSNS (3);
5177 break;
5178 case 2:
5179 case 3:
5180 case 10:
5181 case 15:
5182 *total = COSTS_N_INSNS (4);
5183 break;
5184 case 7:
5185 case 11:
5186 case 12:
5187 *total = COSTS_N_INSNS (5);
5188 break;
5189 case 4:
5190 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5191 break;
5192 case 6:
5193 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5194 break;
5195 case 5:
5196 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5197 break;
5198 default:
5199 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5200 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5202 break;
5204 case SImode:
5205 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5207 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5208 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5210 else
5211 switch (INTVAL (XEXP (x, 1)))
5213 case 0:
5214 *total = 0;
5215 break;
5216 case 24:
5217 *total = COSTS_N_INSNS (3);
5218 break;
5219 case 1:
5220 case 8:
5221 case 16:
5222 *total = COSTS_N_INSNS (4);
5223 break;
5224 case 31:
5225 *total = COSTS_N_INSNS (6);
5226 break;
5227 case 2:
5228 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5229 break;
5230 default:
5231 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5232 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5234 break;
5236 default:
5237 return false;
5239 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5240 return true;
5242 case ASHIFTRT:
5243 switch (mode)
5245 case QImode:
5246 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5248 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5249 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5251 else
5253 val = INTVAL (XEXP (x, 1));
5254 if (val == 6)
5255 *total = COSTS_N_INSNS (4);
5256 else if (val == 7)
5257 *total = COSTS_N_INSNS (2);
5258 else if (val >= 0 && val <= 7)
5259 *total = COSTS_N_INSNS (val);
5260 else
5261 *total = COSTS_N_INSNS (1);
5263 break;
5265 case HImode:
5266 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5268 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5269 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5271 else
5272 switch (INTVAL (XEXP (x, 1)))
5274 case 0:
5275 *total = 0;
5276 break;
5277 case 1:
5278 *total = COSTS_N_INSNS (2);
5279 break;
5280 case 15:
5281 *total = COSTS_N_INSNS (3);
5282 break;
5283 case 2:
5284 case 7:
5285 case 8:
5286 case 9:
5287 *total = COSTS_N_INSNS (4);
5288 break;
5289 case 10:
5290 case 14:
5291 *total = COSTS_N_INSNS (5);
5292 break;
5293 case 11:
5294 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5295 break;
5296 case 12:
5297 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5298 break;
5299 case 6:
5300 case 13:
5301 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5302 break;
5303 default:
5304 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5305 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5307 break;
5309 case SImode:
5310 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5312 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5313 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5315 else
5316 switch (INTVAL (XEXP (x, 1)))
5318 case 0:
5319 *total = 0;
5320 break;
5321 case 1:
5322 *total = COSTS_N_INSNS (4);
5323 break;
5324 case 8:
5325 case 16:
5326 case 24:
5327 *total = COSTS_N_INSNS (6);
5328 break;
5329 case 2:
5330 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5331 break;
5332 case 31:
5333 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5334 break;
5335 default:
5336 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5337 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5339 break;
5341 default:
5342 return false;
5344 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5345 return true;
5347 case LSHIFTRT:
5348 switch (mode)
5350 case QImode:
5351 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5353 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5354 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5356 else
5358 val = INTVAL (XEXP (x, 1));
5359 if (val == 7)
5360 *total = COSTS_N_INSNS (3);
5361 else if (val >= 0 && val <= 7)
5362 *total = COSTS_N_INSNS (val);
5363 else
5364 *total = COSTS_N_INSNS (1);
5366 break;
5368 case HImode:
5369 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5371 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5372 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5374 else
5375 switch (INTVAL (XEXP (x, 1)))
5377 case 0:
5378 *total = 0;
5379 break;
5380 case 1:
5381 case 8:
5382 *total = COSTS_N_INSNS (2);
5383 break;
5384 case 9:
5385 *total = COSTS_N_INSNS (3);
5386 break;
5387 case 2:
5388 case 10:
5389 case 15:
5390 *total = COSTS_N_INSNS (4);
5391 break;
5392 case 7:
5393 case 11:
5394 *total = COSTS_N_INSNS (5);
5395 break;
5396 case 3:
5397 case 12:
5398 case 13:
5399 case 14:
5400 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5401 break;
5402 case 4:
5403 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5404 break;
5405 case 5:
5406 case 6:
5407 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5408 break;
5409 default:
5410 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5411 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5413 break;
5415 case SImode:
5416 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5418 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5419 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5421 else
5422 switch (INTVAL (XEXP (x, 1)))
5424 case 0:
5425 *total = 0;
5426 break;
5427 case 1:
5428 *total = COSTS_N_INSNS (4);
5429 break;
5430 case 2:
5431 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5432 break;
5433 case 8:
5434 case 16:
5435 case 24:
5436 *total = COSTS_N_INSNS (4);
5437 break;
5438 case 31:
5439 *total = COSTS_N_INSNS (6);
5440 break;
5441 default:
5442 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5443 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5445 break;
5447 default:
5448 return false;
5450 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5451 return true;
5453 case COMPARE:
5454 switch (GET_MODE (XEXP (x, 0)))
5456 case QImode:
5457 *total = COSTS_N_INSNS (1);
5458 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5459 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5460 break;
5462 case HImode:
5463 *total = COSTS_N_INSNS (2);
5464 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5465 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5466 else if (INTVAL (XEXP (x, 1)) != 0)
5467 *total += COSTS_N_INSNS (1);
5468 break;
5470 case SImode:
5471 *total = COSTS_N_INSNS (4);
5472 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5473 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5474 else if (INTVAL (XEXP (x, 1)) != 0)
5475 *total += COSTS_N_INSNS (3);
5476 break;
5478 default:
5479 return false;
5481 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5482 return true;
5484 default:
5485 break;
5487 return false;
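/* Worked examples of the costs above: (plus:HI (reg) (const_int 5)) is
   COSTS_N_INSNS (1), roughly a single adiw, whereas
   (plus:HI (reg) (const_int 1000)) is COSTS_N_INSNS (2), matching the
   subi/sbci pair usually emitted for a large immediate.  */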
5490 /* Calculate the cost of a memory address. */
5492 static int
5493 avr_address_cost (rtx x)
5495 if (GET_CODE (x) == PLUS
5496 && GET_CODE (XEXP (x,1)) == CONST_INT
5497 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5498 && INTVAL (XEXP (x,1)) >= 61)
5499 return 18;
5500 if (CONSTANT_ADDRESS_P (x))
5502 if (optimize > 0 && io_address_operand (x, QImode))
5503 return 2;
5504 return 4;
5506 return 4;
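/* Examples of the address costs above: a plain (reg) base or a small Y/Z
   displacement costs 4, a displacement of 61 or more is charged 18 since
   it no longer fits the ldd/std displacement range for multi-byte
   accesses, and when optimizing, a constant address in the I/O range
   costs only 2.  */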
5509 /* Test for the extra memory constraint 'Q'.
5510 It matches a memory address based on the Y or Z pointer with a valid displacement. */
5513 extra_constraint_Q (rtx x)
5515 if (GET_CODE (XEXP (x,0)) == PLUS
5516 && REG_P (XEXP (XEXP (x,0), 0))
5517 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5518 && (INTVAL (XEXP (XEXP (x,0), 1))
5519 <= MAX_LD_OFFSET (GET_MODE (x))))
5521 rtx xx = XEXP (XEXP (x,0), 0);
5522 int regno = REGNO (xx);
5523 if (TARGET_ALL_DEBUG)
5525 fprintf (stderr, ("extra_constraint:\n"
5526 "reload_completed: %d\n"
5527 "reload_in_progress: %d\n"),
5528 reload_completed, reload_in_progress);
5529 debug_rtx (x);
5531 if (regno >= FIRST_PSEUDO_REGISTER)
5532 return 1; /* allocate pseudos */
5533 else if (regno == REG_Z || regno == REG_Y)
5534 return 1; /* strictly check */
5535 else if (xx == frame_pointer_rtx
5536 || xx == arg_pointer_rtx)
5537 return 1; /* XXX frame & arg pointer checks */
5539 return 0;
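/* Illustrative addresses accepted by 'Q': (mem:HI (plus (reg Y)
   (const_int 10))) and the corresponding Z-based form, i.e.
   base-plus-displacement addresses that fit MAX_LD_OFFSET and map onto
   the ldd/std instructions.  */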
5542 /* Convert condition code CONDITION to the valid AVR condition code. */
5544 RTX_CODE
5545 avr_normalize_condition (RTX_CODE condition)
5547 switch (condition)
5549 case GT:
5550 return GE;
5551 case GTU:
5552 return GEU;
5553 case LE:
5554 return LT;
5555 case LEU:
5556 return LTU;
5557 default:
5558 gcc_unreachable ();
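/* Example: a comparison "x > 5" (GT) is rewritten by avr_reorg below,
   where avr_simplify_comparison_p allows it, into "x >= 6" (GE), since
   the AVR branch instructions implement >= and < directly but not >
   and <=.  */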
5562 /* This function optimizes conditional jumps. */
5564 static void
5565 avr_reorg (void)
5567 rtx insn, pattern;
5569 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5571 if (! (GET_CODE (insn) == INSN
5572 || GET_CODE (insn) == CALL_INSN
5573 || GET_CODE (insn) == JUMP_INSN)
5574 || !single_set (insn))
5575 continue;
5577 pattern = PATTERN (insn);
5579 if (GET_CODE (pattern) == PARALLEL)
5580 pattern = XVECEXP (pattern, 0, 0);
5581 if (GET_CODE (pattern) == SET
5582 && SET_DEST (pattern) == cc0_rtx
5583 && compare_diff_p (insn))
5585 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5589 /* We are now working with a compare insn. */
5589 pattern = SET_SRC (pattern);
5590 if (true_regnum (XEXP (pattern,0)) >= 0
5591 && true_regnum (XEXP (pattern,1)) >= 0 )
5593 rtx x = XEXP (pattern,0);
5594 rtx next = next_real_insn (insn);
5595 rtx pat = PATTERN (next);
5596 rtx src = SET_SRC (pat);
5597 rtx t = XEXP (src,0);
5598 PUT_CODE (t, swap_condition (GET_CODE (t)));
5599 XEXP (pattern,0) = XEXP (pattern,1);
5600 XEXP (pattern,1) = x;
5601 INSN_CODE (next) = -1;
5603 else if (true_regnum (XEXP (pattern,0)) >= 0
5604 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5606 rtx x = XEXP (pattern,1);
5607 rtx next = next_real_insn (insn);
5608 rtx pat = PATTERN (next);
5609 rtx src = SET_SRC (pat);
5610 rtx t = XEXP (src,0);
5611 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5613 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5615 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5616 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5617 INSN_CODE (next) = -1;
5618 INSN_CODE (insn) = -1;
5622 else if (true_regnum (SET_SRC (pattern)) >= 0)
5624 /* This is a tst insn */
5625 rtx next = next_real_insn (insn);
5626 rtx pat = PATTERN (next);
5627 rtx src = SET_SRC (pat);
5628 rtx t = XEXP (src,0);
5630 PUT_CODE (t, swap_condition (GET_CODE (t)));
5631 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5632 SET_SRC (pattern));
5633 INSN_CODE (next) = -1;
5634 INSN_CODE (insn) = -1;
5640 /* Returns the register number for the function return value. */
5643 avr_ret_register (void)
5645 return 24;
5648 /* Create an RTX representing the place where a
5649 library function returns a value of mode MODE. */
5652 avr_libcall_value (enum machine_mode mode)
5654 int offs = GET_MODE_SIZE (mode);
5655 if (offs < 2)
5656 offs = 2;
5657 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5660 /* Create an RTX representing the place where a
5661 function returns a value of data type VALTYPE. */
5664 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
5666 unsigned int offs;
5668 if (TYPE_MODE (type) != BLKmode)
5669 return avr_libcall_value (TYPE_MODE (type));
5671 offs = int_size_in_bytes (type);
5672 if (offs < 2)
5673 offs = 2;
5674 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5675 offs = GET_MODE_SIZE (SImode);
5676 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5677 offs = GET_MODE_SIZE (DImode);
5679 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
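/* Resulting value placement (sketch; assuming RET_REGISTER is 24, cf.
   avr_ret_register above): 8- and 16-bit values come back in r24/r25,
   32-bit values in r22..r25, 64-bit values in r18..r25, and anything
   larger goes through memory (see avr_return_in_memory below).  */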
5682 /* Places additional restrictions on the register class to
5683 use when it is necessary to copy value X into a register
5684 in class CLASS. */
5686 enum reg_class
5687 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
5689 return class;
5693 test_hard_reg_class (enum reg_class class, rtx x)
5695 int regno = true_regnum (x);
5696 if (regno < 0)
5697 return 0;
5699 if (TEST_HARD_REG_CLASS (class, regno))
5700 return 1;
5702 return 0;
5707 jump_over_one_insn_p (rtx insn, rtx dest)
5709 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5710 ? XEXP (dest, 0)
5711 : dest);
5712 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5713 int dest_addr = INSN_ADDRESSES (uid);
5714 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5717 /* Returns 1 if a value of mode MODE can be stored starting with hard
5718 register number REGNO. On the enhanced core, anything larger than
5719 1 byte must start in an even-numbered register for "movw" to work
5720 (this way we don't have to check for odd registers everywhere). */
5723 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5725 /* Disallow QImode in stack pointer regs. */
5726 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5727 return 0;
5729 /* The only thing that can go into registers r28:r29 is a Pmode value. */
5730 if (regno == REG_Y && mode == Pmode)
5731 return 1;
5733 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5734 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5735 return 0;
5737 if (mode == QImode)
5738 return 1;
5740 /* Modes larger than QImode occupy consecutive registers. */
5741 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5742 return 0;
5744 /* All modes larger than QImode should start in an even register. */
5745 return !(regno & 1);
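/* Examples: HImode starting at r24 is allowed (even register, so movw
   works), HImode starting at r25 is rejected, and apart from Pmode in
   the frame pointer nothing may overlap r28:r29.  */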
5748 const char *
5749 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5751 int tmp;
5752 if (!len)
5753 len = &tmp;
5755 if (GET_CODE (operands[1]) == CONST_INT)
5757 int val = INTVAL (operands[1]);
5758 if ((val & 0xff) == 0)
5760 *len = 3;
5761 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5762 AS2 (ldi,%2,hi8(%1)) CR_TAB
5763 AS2 (mov,%B0,%2));
5765 else if ((val & 0xff00) == 0)
5767 *len = 3;
5768 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5769 AS2 (mov,%A0,%2) CR_TAB
5770 AS2 (mov,%B0,__zero_reg__));
5772 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5774 *len = 3;
5775 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5776 AS2 (mov,%A0,%2) CR_TAB
5777 AS2 (mov,%B0,%2));
5780 *len = 4;
5781 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5782 AS2 (mov,%A0,%2) CR_TAB
5783 AS2 (ldi,%2,hi8(%1)) CR_TAB
5784 AS2 (mov,%B0,%2));
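/* Example output (a sketch; registers and value are illustrative):
   loading the HImode constant 0x1200 into r24:r25 with r16 as the
   scratch %2 takes the "(val & 0xff) == 0" branch above and emits

       mov r24,__zero_reg__
       ldi r16,hi8(4608)
       mov r25,r16

   for a length of three instructions.  */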
5788 const char *
5789 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5791 rtx src = operands[1];
5792 int cnst = (GET_CODE (src) == CONST_INT);
5794 if (len)
5796 if (cnst)
5797 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5798 + ((INTVAL (src) & 0xff00) != 0)
5799 + ((INTVAL (src) & 0xff0000) != 0)
5800 + ((INTVAL (src) & 0xff000000) != 0);
5801 else
5802 *len = 8;
5804 return "";
5807 if (cnst && ((INTVAL (src) & 0xff) == 0))
5808 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5809 else
5811 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5812 output_asm_insn (AS2 (mov, %A0, %2), operands);
5814 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5815 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5816 else
5818 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5819 output_asm_insn (AS2 (mov, %B0, %2), operands);
5821 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5822 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5823 else
5825 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5826 output_asm_insn (AS2 (mov, %C0, %2), operands);
5828 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5829 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5830 else
5832 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5833 output_asm_insn (AS2 (mov, %D0, %2), operands);
5835 return "";
5838 void
5839 avr_output_bld (rtx operands[], int bit_nr)
5841 static char s[] = "bld %A0,0";
5843 s[5] = 'A' + (bit_nr >> 3);
5844 s[8] = '0' + (bit_nr & 7);
5845 output_asm_insn (s, operands);
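/* Example: avr_output_bld (operands, 11) emits "bld %B0,3", i.e. bit 11
   of the destination operand is bit 3 of its second byte.  */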
5848 void
5849 avr_output_addr_vec_elt (FILE *stream, int value)
5851 switch_to_section (progmem_section);
5852 if (AVR_HAVE_JMP_CALL)
5853 fprintf (stream, "\t.word gs(.L%d)\n", value);
5854 else
5855 fprintf (stream, "\trjmp .L%d\n", value);
5858 /* Returns 1 if SCRATCH is safe to be allocated as a scratch
5859 register (for a define_peephole2) in the current function. */
5862 avr_peep2_scratch_safe (rtx scratch)
5864 if ((interrupt_function_p (current_function_decl)
5865 || signal_function_p (current_function_decl))
5866 && leaf_function_p ())
5868 int first_reg = true_regnum (scratch);
5869 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5870 int reg;
5872 for (reg = first_reg; reg <= last_reg; reg++)
5874 if (!df_regs_ever_live_p (reg))
5875 return 0;
5878 return 1;
5881 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5882 or memory location in the I/O space (QImode only).
5884 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5885 Operand 1: register operand to test, or CONST_INT memory address.
5886 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5887 Operand 3: label to jump to if the test is true. */
5889 const char *
5890 avr_out_sbxx_branch (rtx insn, rtx operands[])
5892 enum rtx_code comp = GET_CODE (operands[0]);
5893 int long_jump = (get_attr_length (insn) >= 4);
5894 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5896 if (comp == GE)
5897 comp = EQ;
5898 else if (comp == LT)
5899 comp = NE;
5901 if (reverse)
5902 comp = reverse_condition (comp);
5904 if (GET_CODE (operands[1]) == CONST_INT)
5906 if (INTVAL (operands[1]) < 0x40)
5908 if (comp == EQ)
5909 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5910 else
5911 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5913 else
5915 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5916 if (comp == EQ)
5917 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5918 else
5919 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5922 else /* GET_CODE (operands[1]) == REG */
5924 if (GET_MODE (operands[1]) == QImode)
5926 if (comp == EQ)
5927 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5928 else
5929 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5931 else /* HImode or SImode */
5933 static char buf[] = "sbrc %A1,0";
5934 int bit_nr = exact_log2 (INTVAL (operands[2])
5935 & GET_MODE_MASK (GET_MODE (operands[1])));
5937 buf[3] = (comp == EQ) ? 's' : 'c';
5938 buf[6] = 'A' + (bit_nr >> 3);
5939 buf[9] = '0' + (bit_nr & 7);
5940 output_asm_insn (buf, operands);
5944 if (long_jump)
5945 return (AS1 (rjmp,.+4) CR_TAB
5946 AS1 (jmp,%3));
5947 if (!reverse)
5948 return AS1 (rjmp,%3);
5949 return "";
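/* Example output (a sketch; I/O address, bit number and label are
   illustrative): branching when bit 2 of a low I/O register is set
   typically becomes

       sbic 0x16,2
       rjmp .L4

   while higher I/O addresses are first read into __tmp_reg__ and tested
   with sbrc/sbrs, and a long branch replaces the single rjmp with an
   "rjmp .+4" / "jmp %3" pair.  */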
5952 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5954 static void
5955 avr_asm_out_ctor (rtx symbol, int priority)
5957 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5958 default_ctor_section_asm_out_constructor (symbol, priority);
5961 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5963 static void
5964 avr_asm_out_dtor (rtx symbol, int priority)
5966 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5967 default_dtor_section_asm_out_destructor (symbol, priority);
5970 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5972 static bool
5973 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5975 if (TYPE_MODE (type) == BLKmode)
5977 HOST_WIDE_INT size = int_size_in_bytes (type);
5978 return (size == -1 || size > 8);
5980 else
5981 return false;
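/* Example: a 4-byte struct is still returned in registers (r22..r25),
   while a 12-byte struct exceeds the 8-byte limit above and is returned
   in memory.  */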
5984 #include "gt-avr.h"