1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "params.h"
47 #include "df.h"
49 /* Maximal allowed offset for an address in the LD command */
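/* (The LDD/STD instructions take a 6-bit displacement, 0..63; the mode size
   is subtracted so that the highest byte of a multi-byte access still fits
   within that range.)  */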
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
64 static RTX_CODE compare_condition (rtx insn);
65 static int compare_sign_p (rtx insn);
66 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
69 const struct attribute_spec avr_attribute_table[];
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static void avr_asm_function_end_prologue (FILE *);
74 static void avr_asm_function_begin_epilogue (FILE *);
75 static rtx avr_function_value (const_tree, const_tree, bool);
76 static void avr_insert_attributes (tree, tree *);
77 static void avr_asm_init_sections (void);
78 static unsigned int avr_section_type_flags (tree, const char *, int);
80 static void avr_reorg (void);
81 static void avr_asm_out_ctor (rtx, int);
82 static void avr_asm_out_dtor (rtx, int);
83 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
84 static bool avr_rtx_costs (rtx, int, int, int *, bool);
85 static int avr_address_cost (rtx, bool);
86 static bool avr_return_in_memory (const_tree, const_tree);
87 static struct machine_function * avr_init_machine_status (void);
88 static rtx avr_builtin_setjmp_frame_value (void);
89 static bool avr_hard_regno_scratch_ok (unsigned int);
 91 /* Allocate registers from r25 down to r8 for function call parameters. */
92 #define FIRST_CUM_REG 26
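/* (Arguments are allocated downward from r25, so FIRST_CUM_REG is one past
   the highest argument register; see function_arg below.)  */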
94 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
95 static GTY(()) rtx tmp_reg_rtx;
97 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
98 static GTY(()) rtx zero_reg_rtx;
100 /* AVR register names {"r0", "r1", ..., "r31"} */
101 static const char *const avr_regnames[] = REGISTER_NAMES;
103 /* This holds the last insn address. */
104 static int last_insn_address = 0;
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro;
109 /* Current architecture. */
110 const struct base_arch_s *avr_current_arch;
112 section *progmem_section;
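/* Each entry below sets the feature flags of struct base_arch_s (see avr.h)
   followed by the __AVR_ARCH__ macro to predefine for that architecture.  */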
114 static const struct base_arch_s avr_arch_types[] = {
115 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
116 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
117 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
118 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
119 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
120 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
121 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
122 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
123 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
124 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
125 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
 128 /* These names are used as indices into the avr_arch_types[] table
 129 above. */
131 enum avr_arch
133 ARCH_UNKNOWN,
134 ARCH_AVR1,
135 ARCH_AVR2,
136 ARCH_AVR25,
137 ARCH_AVR3,
138 ARCH_AVR31,
139 ARCH_AVR35,
140 ARCH_AVR4,
141 ARCH_AVR5,
142 ARCH_AVR51,
143 ARCH_AVR6
146 struct mcu_type_s {
147 const char *const name;
148 int arch; /* index in avr_arch_types[] */
149 /* Must lie outside user's namespace. NULL == no macro. */
150 const char *const macro;
153 /* List of all known AVR MCU types - if updated, it has to be kept
154 in sync in several places (FIXME: is there a better way?):
155 - here
156 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
157 - t-avr (MULTILIB_MATCHES)
158 - gas/config/tc-avr.c
159 - avr-libc */
161 static const struct mcu_type_s avr_mcu_types[] = {
162 /* Classic, <= 8K. */
163 { "avr2", ARCH_AVR2, NULL },
164 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
165 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
166 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
167 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
168 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
169 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
170 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
171 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
172 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
173 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
174 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
175 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
176 /* Classic + MOVW, <= 8K. */
177 { "avr25", ARCH_AVR25, NULL },
178 { "ata6289", ARCH_AVR25, "__AVR_ATA6289__" },
179 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
180 { "attiny13a", ARCH_AVR25, "__AVR_ATtiny13A__" },
181 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
182 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
183 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
184 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
185 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
186 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
187 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
188 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
189 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
190 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
191 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
192 { "attiny87", ARCH_AVR25, "__AVR_ATtiny87__" },
193 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
194 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
195 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
196 /* Classic, > 8K, <= 64K. */
197 { "avr3", ARCH_AVR3, NULL },
198 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
199 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
200 /* Classic, == 128K. */
201 { "avr31", ARCH_AVR31, NULL },
202 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
203 { "at43usb320", ARCH_AVR31, "__AVR_AT43USB320__" },
204 /* Classic + MOVW + JMP/CALL. */
205 { "avr35", ARCH_AVR35, NULL },
206 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
207 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
208 { "attiny167", ARCH_AVR35, "__AVR_ATtiny167__" },
209 { "attiny327", ARCH_AVR35, "__AVR_ATtiny327__" },
210 /* Enhanced, <= 8K. */
211 { "avr4", ARCH_AVR4, NULL },
212 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
213 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
214 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
215 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
216 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
217 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
218 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
219 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
220 { "atmega4hvd", ARCH_AVR4, "__AVR_ATmega4HVD__" },
221 { "atmega8hvd", ARCH_AVR4, "__AVR_ATmega8HVD__" },
222 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
223 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
224 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
225 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
226 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
227 { "at90pwm81", ARCH_AVR4, "__AVR_AT90PWM81__" },
228 /* Enhanced, > 8K, <= 64K. */
229 { "avr5", ARCH_AVR5, NULL },
230 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
231 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
232 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
233 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
234 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
235 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
236 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
237 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
238 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
239 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
240 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
241 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
242 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
243 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
244 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
245 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
246 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
247 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
248 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
249 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
250 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
251 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
252 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
253 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
254 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
255 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
256 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
257 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
258 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
259 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
260 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
261 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
262 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
263 { "atmega16hvb", ARCH_AVR5, "__AVR_ATmega16HVB__" },
264 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
265 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
266 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
267 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
268 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
269 { "atmega32c1", ARCH_AVR5, "__AVR_ATmega32C1__" },
270 { "atmega64c1", ARCH_AVR5, "__AVR_ATmega64C1__" },
271 { "atmega16m1", ARCH_AVR5, "__AVR_ATmega16M1__" },
272 { "atmega32m1", ARCH_AVR5, "__AVR_ATmega32M1__" },
273 { "atmega64m1", ARCH_AVR5, "__AVR_ATmega64M1__" },
274 { "atmega16u4", ARCH_AVR5, "__AVR_ATmega16U4__" },
275 { "atmega32u4", ARCH_AVR5, "__AVR_ATmega32U4__" },
276 { "atmega32u6", ARCH_AVR5, "__AVR_ATmega32U6__" },
277 { "at90scr100", ARCH_AVR5, "__AVR_AT90SCR100__" },
278 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
279 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
280 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
281 /* Enhanced, == 128K. */
282 { "avr51", ARCH_AVR51, NULL },
283 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
284 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
285 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
286 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
287 { "atmega128rfa1", ARCH_AVR51, "__AVR_ATmega128RFA1__" },
288 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
289 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
290 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
291 { "m3000f", ARCH_AVR51, "__AVR_M3000F__" },
292 { "m3000s", ARCH_AVR51, "__AVR_M3000S__" },
293 { "m3001b", ARCH_AVR51, "__AVR_M3001B__" },
294 /* 3-Byte PC. */
295 { "avr6", ARCH_AVR6, NULL },
296 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
297 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
298 /* Assembler only. */
299 { "avr1", ARCH_AVR1, NULL },
300 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
301 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
302 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
303 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
304 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
305 { NULL, ARCH_UNKNOWN, NULL }
308 int avr_case_values_threshold = 30000;
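/* Used as CASE_VALUES_THRESHOLD (see avr.h): the smallest number of case
   values for which a jump table is preferred over a chain of comparisons.
   The default of 30000 effectively disables jump tables until
   avr_override_options lowers it for suitable targets.  */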
310 /* Initialize the GCC target structure. */
311 #undef TARGET_ASM_ALIGNED_HI_OP
312 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
313 #undef TARGET_ASM_ALIGNED_SI_OP
314 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
315 #undef TARGET_ASM_UNALIGNED_HI_OP
316 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
317 #undef TARGET_ASM_UNALIGNED_SI_OP
318 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
319 #undef TARGET_ASM_INTEGER
320 #define TARGET_ASM_INTEGER avr_assemble_integer
321 #undef TARGET_ASM_FILE_START
322 #define TARGET_ASM_FILE_START avr_file_start
323 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
324 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
325 #undef TARGET_ASM_FILE_END
326 #define TARGET_ASM_FILE_END avr_file_end
328 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
329 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
330 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
331 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
332 #undef TARGET_FUNCTION_VALUE
333 #define TARGET_FUNCTION_VALUE avr_function_value
334 #undef TARGET_ATTRIBUTE_TABLE
335 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
336 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
337 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
338 #undef TARGET_INSERT_ATTRIBUTES
339 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
340 #undef TARGET_SECTION_TYPE_FLAGS
341 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
342 #undef TARGET_RTX_COSTS
343 #define TARGET_RTX_COSTS avr_rtx_costs
344 #undef TARGET_ADDRESS_COST
345 #define TARGET_ADDRESS_COST avr_address_cost
346 #undef TARGET_MACHINE_DEPENDENT_REORG
347 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
349 #undef TARGET_RETURN_IN_MEMORY
350 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
352 #undef TARGET_STRICT_ARGUMENT_NAMING
353 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
355 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
356 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
358 #undef TARGET_HARD_REGNO_SCRATCH_OK
359 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
361 struct gcc_target targetm = TARGET_INITIALIZER;
363 void
364 avr_override_options (void)
366 const struct mcu_type_s *t;
368 flag_delete_null_pointer_checks = 0;
370 if (!PARAM_SET_P (PARAM_INLINE_CALL_COST))
371 set_param_value ("inline-call-cost", 5);
373 for (t = avr_mcu_types; t->name; t++)
374 if (strcmp (t->name, avr_mcu_name) == 0)
375 break;
377 if (!t->name)
379 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
380 avr_mcu_name);
381 for (t = avr_mcu_types; t->name; t++)
382 fprintf (stderr," %s\n", t->name);
385 avr_current_arch = &avr_arch_types[t->arch];
386 avr_extra_arch_macro = t->macro;
388 if (optimize && !TARGET_NO_TABLEJUMP)
389 avr_case_values_threshold =
390 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
392 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
393 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
395 init_machine_status = avr_init_machine_status;
 398 /* Map a register number to its register class. */
400 static const int reg_class_tab[]={
401 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
402 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
403 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
404 GENERAL_REGS, /* r0 - r15 */
405 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
406 LD_REGS, /* r16 - 23 */
407 ADDW_REGS,ADDW_REGS, /* r24,r25 */
408 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
409 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
410 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
411 STACK_REG,STACK_REG /* SPL,SPH */
414 /* Function to set up the backend function structure. */
416 static struct machine_function *
417 avr_init_machine_status (void)
419 return ((struct machine_function *)
420 ggc_alloc_cleared (sizeof (struct machine_function)));
423 /* Return register class for register R. */
425 enum reg_class
426 avr_regno_reg_class (int r)
428 if (r <= 33)
429 return reg_class_tab[r];
430 return ALL_REGS;
433 /* Return nonzero if FUNC is a naked function. */
435 static int
436 avr_naked_function_p (tree func)
438 tree a;
440 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
442 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
443 return a != NULL_TREE;
446 /* Return nonzero if FUNC is an interrupt function as specified
447 by the "interrupt" attribute. */
449 static int
450 interrupt_function_p (tree func)
452 tree a;
454 if (TREE_CODE (func) != FUNCTION_DECL)
455 return 0;
457 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
458 return a != NULL_TREE;
461 /* Return nonzero if FUNC is a signal function as specified
462 by the "signal" attribute. */
464 static int
465 signal_function_p (tree func)
467 tree a;
469 if (TREE_CODE (func) != FUNCTION_DECL)
470 return 0;
472 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
473 return a != NULL_TREE;
 476 /* Return nonzero if FUNC is an OS_task function. */
478 static int
479 avr_OS_task_function_p (tree func)
481 tree a;
483 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
485 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
486 return a != NULL_TREE;
 489 /* Return nonzero if FUNC is an OS_main function. */
491 static int
492 avr_OS_main_function_p (tree func)
494 tree a;
496 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
498 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
499 return a != NULL_TREE;
502 /* Return the number of hard registers to push/pop in the prologue/epilogue
503 of the current function, and optionally store these registers in SET. */
505 static int
506 avr_regs_to_save (HARD_REG_SET *set)
508 int reg, count;
509 int int_or_sig_p = (interrupt_function_p (current_function_decl)
510 || signal_function_p (current_function_decl));
512 if (!reload_completed)
513 cfun->machine->is_leaf = leaf_function_p ();
515 if (set)
516 CLEAR_HARD_REG_SET (*set);
517 count = 0;
 519 /* No need to save any registers if the function never returns or
 520 has the "OS_task" or "OS_main" attribute. */
521 if (TREE_THIS_VOLATILE (current_function_decl)
522 || cfun->machine->is_OS_task
523 || cfun->machine->is_OS_main)
524 return 0;
526 for (reg = 0; reg < 32; reg++)
528 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
529 any global register variables. */
530 if (fixed_regs[reg])
531 continue;
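/* Save this register if (a) this is a non-leaf interrupt/signal handler and
   the register is call-used, or (b) the register is live in this function and
   is either call-saved or used in an interrupt/signal handler -- except for
   the frame pointer pair when a frame pointer is needed, which is handled
   separately.  */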
533 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
534 || (df_regs_ever_live_p (reg)
535 && (int_or_sig_p || !call_used_regs[reg])
536 && !(frame_pointer_needed
537 && (reg == REG_Y || reg == (REG_Y+1)))))
539 if (set)
540 SET_HARD_REG_BIT (*set, reg);
541 count++;
544 return count;
547 /* Compute offset between arg_pointer and frame_pointer. */
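/* The offset consists of the frame size, the return address (2 or 3 bytes
   depending on the PC width), the registers saved in the prologue, the saved
   frame pointer if one is needed, plus 1 byte (the AVR stack pointer points
   one below the last value pushed).  */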
550 initial_elimination_offset (int from, int to)
552 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
553 return 0;
554 else
556 int offset = frame_pointer_needed ? 2 : 0;
557 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
559 offset += avr_regs_to_save (NULL);
560 return get_frame_size () + (avr_pc_size) + 1 + offset;
 564 /* The actual start of the frame is virtual_stack_vars_rtx, which is offset
 565 from the frame pointer by +STARTING_FRAME_OFFSET.
 566 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
 567 avoids creating an add/sub of the offset in nonlocal goto and setjmp. */
569 rtx avr_builtin_setjmp_frame_value (void)
571 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
572 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
575 /* Return 1 if the function epilogue is just a single "ret". */
578 avr_simple_epilogue (void)
580 return (! frame_pointer_needed
581 && get_frame_size () == 0
582 && avr_regs_to_save (NULL) == 0
583 && ! interrupt_function_p (current_function_decl)
584 && ! signal_function_p (current_function_decl)
585 && ! avr_naked_function_p (current_function_decl)
586 && ! TREE_THIS_VOLATILE (current_function_decl));
 589 /* Return the number of live registers if they form one consecutive sequence suitable for the call-prologues save/restore routines, otherwise 0. */
591 static int
592 sequent_regs_live (void)
594 int reg;
595 int live_seq=0;
596 int cur_seq=0;
598 for (reg = 0; reg < 18; ++reg)
600 if (!call_used_regs[reg])
602 if (df_regs_ever_live_p (reg))
604 ++live_seq;
605 ++cur_seq;
607 else
608 cur_seq = 0;
612 if (!frame_pointer_needed)
614 if (df_regs_ever_live_p (REG_Y))
616 ++live_seq;
617 ++cur_seq;
619 else
620 cur_seq = 0;
622 if (df_regs_ever_live_p (REG_Y+1))
624 ++live_seq;
625 ++cur_seq;
627 else
628 cur_seq = 0;
630 else
632 cur_seq += 2;
633 live_seq += 2;
635 return (cur_seq == live_seq) ? live_seq : 0;
 638 /* Return the combined length of the insns in the sequence INSNS. */
641 get_sequence_length (rtx insns)
643 rtx insn;
644 int length;
646 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
647 length += get_attr_length (insn);
649 return length;
 652 /* Expand the function prologue as RTL. */
654 void
655 expand_prologue (void)
657 int live_seq;
658 HARD_REG_SET set;
659 int minimize;
660 HOST_WIDE_INT size = get_frame_size();
661 /* Define templates for push instructions. */
662 rtx pushbyte = gen_rtx_MEM (QImode,
663 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
664 rtx pushword = gen_rtx_MEM (HImode,
665 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
666 rtx insn;
668 last_insn_address = 0;
670 /* Init cfun->machine. */
671 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
672 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
673 cfun->machine->is_signal = signal_function_p (current_function_decl);
674 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
675 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
677 /* Prologue: naked. */
678 if (cfun->machine->is_naked)
680 return;
683 avr_regs_to_save (&set);
684 live_seq = sequent_regs_live ();
685 minimize = (TARGET_CALL_PROLOGUES
686 && !cfun->machine->is_interrupt
687 && !cfun->machine->is_signal
688 && !cfun->machine->is_OS_task
689 && !cfun->machine->is_OS_main
690 && live_seq);
692 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
694 if (cfun->machine->is_interrupt)
696 /* Enable interrupts. */
697 insn = emit_insn (gen_enable_interrupt ());
698 RTX_FRAME_RELATED_P (insn) = 1;
701 /* Push zero reg. */
702 insn = emit_move_insn (pushbyte, zero_reg_rtx);
703 RTX_FRAME_RELATED_P (insn) = 1;
705 /* Push tmp reg. */
706 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
707 RTX_FRAME_RELATED_P (insn) = 1;
709 /* Push SREG. */
710 insn = emit_move_insn (tmp_reg_rtx,
711 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
712 RTX_FRAME_RELATED_P (insn) = 1;
713 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
714 RTX_FRAME_RELATED_P (insn) = 1;
716 /* Push RAMPZ. */
717 if(AVR_HAVE_RAMPZ
718 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
720 insn = emit_move_insn (tmp_reg_rtx,
721 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
722 RTX_FRAME_RELATED_P (insn) = 1;
723 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
724 RTX_FRAME_RELATED_P (insn) = 1;
727 /* Clear zero reg. */
728 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
729 RTX_FRAME_RELATED_P (insn) = 1;
731 /* Prevent any attempt to delete the setting of ZERO_REG! */
732 emit_use (zero_reg_rtx);
734 if (minimize && (frame_pointer_needed
735 || (AVR_2_BYTE_PC && live_seq > 6)
736 || live_seq > 7))
738 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
739 gen_int_mode (size, HImode));
740 RTX_FRAME_RELATED_P (insn) = 1;
742 insn =
743 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
744 gen_int_mode (size + live_seq, HImode)));
745 RTX_FRAME_RELATED_P (insn) = 1;
747 else
749 int reg;
750 for (reg = 0; reg < 32; ++reg)
752 if (TEST_HARD_REG_BIT (set, reg))
754 /* Emit push of register to save. */
755 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
756 RTX_FRAME_RELATED_P (insn) = 1;
759 if (frame_pointer_needed)
761 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
763 /* Push frame pointer. */
764 insn = emit_move_insn (pushword, frame_pointer_rtx);
765 RTX_FRAME_RELATED_P (insn) = 1;
768 if (!size)
770 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
771 RTX_FRAME_RELATED_P (insn) = 1;
773 else
775 /* Creating a frame can be done by direct manipulation of the
776 stack or via the frame pointer. These two methods are:
777 fp=sp
778 fp-=size
779 sp=fp
781 sp-=size
782 fp=sp
 783 The optimum method depends on the function type and the stack and
 784 frame sizes.  To avoid complex logic, both sequences are generated
 785 and the shorter one is selected. */
786 rtx myfp;
787 rtx fp_plus_insns;
788 rtx sp_plus_insns = NULL_RTX;
790 if (TARGET_TINY_STACK)
792 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
793 over 'sbiw' (2 cycles, same size). */
794 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
796 else
798 /* Normal sized addition. */
799 myfp = frame_pointer_rtx;
802 /* Method 1-Adjust frame pointer. */
803 start_sequence ();
805 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
806 RTX_FRAME_RELATED_P (insn) = 1;
808 insn =
809 emit_move_insn (myfp,
810 gen_rtx_PLUS (GET_MODE(myfp), myfp,
811 gen_int_mode (-size,
812 GET_MODE(myfp))));
813 RTX_FRAME_RELATED_P (insn) = 1;
815 /* Copy to stack pointer. */
816 if (TARGET_TINY_STACK)
818 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
819 RTX_FRAME_RELATED_P (insn) = 1;
821 else if (TARGET_NO_INTERRUPTS
822 || cfun->machine->is_signal
823 || cfun->machine->is_OS_main)
825 insn =
826 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
827 frame_pointer_rtx));
828 RTX_FRAME_RELATED_P (insn) = 1;
830 else if (cfun->machine->is_interrupt)
832 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
833 frame_pointer_rtx));
834 RTX_FRAME_RELATED_P (insn) = 1;
836 else
838 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
839 RTX_FRAME_RELATED_P (insn) = 1;
842 fp_plus_insns = get_insns ();
843 end_sequence ();
845 /* Method 2-Adjust Stack pointer. */
846 if (size <= 6)
848 start_sequence ();
850 insn =
851 emit_move_insn (stack_pointer_rtx,
852 gen_rtx_PLUS (HImode,
853 stack_pointer_rtx,
854 gen_int_mode (-size,
855 HImode)));
856 RTX_FRAME_RELATED_P (insn) = 1;
858 insn =
859 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
860 RTX_FRAME_RELATED_P (insn) = 1;
862 sp_plus_insns = get_insns ();
863 end_sequence ();
866 /* Use shortest method. */
867 if (size <= 6 && (get_sequence_length (sp_plus_insns)
868 < get_sequence_length (fp_plus_insns)))
869 emit_insn (sp_plus_insns);
870 else
871 emit_insn (fp_plus_insns);
877 /* Output summary at end of function prologue. */
879 static void
880 avr_asm_function_end_prologue (FILE *file)
882 if (cfun->machine->is_naked)
884 fputs ("/* prologue: naked */\n", file);
886 else
888 if (cfun->machine->is_interrupt)
890 fputs ("/* prologue: Interrupt */\n", file);
892 else if (cfun->machine->is_signal)
894 fputs ("/* prologue: Signal */\n", file);
896 else
897 fputs ("/* prologue: function */\n", file);
899 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
900 get_frame_size());
904 /* Implement EPILOGUE_USES. */
907 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
909 if (reload_completed
910 && cfun->machine
911 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
912 return 1;
913 return 0;
916 /* Output RTL epilogue. */
918 void
919 expand_epilogue (void)
921 int reg;
922 int live_seq;
923 HARD_REG_SET set;
924 int minimize;
925 HOST_WIDE_INT size = get_frame_size();
927 /* epilogue: naked */
928 if (cfun->machine->is_naked)
930 emit_jump_insn (gen_return ());
931 return;
934 avr_regs_to_save (&set);
935 live_seq = sequent_regs_live ();
936 minimize = (TARGET_CALL_PROLOGUES
937 && !cfun->machine->is_interrupt
938 && !cfun->machine->is_signal
939 && !cfun->machine->is_OS_task
940 && !cfun->machine->is_OS_main
941 && live_seq);
943 if (minimize && (frame_pointer_needed || live_seq > 4))
945 if (frame_pointer_needed)
947 /* Get rid of frame. */
948 emit_move_insn(frame_pointer_rtx,
949 gen_rtx_PLUS (HImode, frame_pointer_rtx,
950 gen_int_mode (size, HImode)));
952 else
954 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
957 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
959 else
961 if (frame_pointer_needed)
963 if (size)
965 /* Try two methods to adjust stack and select shortest. */
966 rtx myfp;
967 rtx fp_plus_insns;
968 rtx sp_plus_insns = NULL_RTX;
970 if (TARGET_TINY_STACK)
972 /* The high byte (r29) doesn't change - prefer 'subi'
973 (1 cycle) over 'sbiw' (2 cycles, same size). */
974 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
976 else
978 /* Normal sized addition. */
979 myfp = frame_pointer_rtx;
982 /* Method 1-Adjust frame pointer. */
983 start_sequence ();
985 emit_move_insn (myfp,
986 gen_rtx_PLUS (HImode, myfp,
987 gen_int_mode (size,
988 GET_MODE(myfp))));
990 /* Copy to stack pointer. */
991 if (TARGET_TINY_STACK)
993 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
995 else if (TARGET_NO_INTERRUPTS
996 || cfun->machine->is_signal)
998 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
999 frame_pointer_rtx));
1001 else if (cfun->machine->is_interrupt)
1003 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1004 frame_pointer_rtx));
1006 else
1008 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1011 fp_plus_insns = get_insns ();
1012 end_sequence ();
1014 /* Method 2-Adjust Stack pointer. */
1015 if (size <= 5)
1017 start_sequence ();
1019 emit_move_insn (stack_pointer_rtx,
1020 gen_rtx_PLUS (HImode, stack_pointer_rtx,
1021 gen_int_mode (size,
1022 HImode)));
1024 sp_plus_insns = get_insns ();
1025 end_sequence ();
1028 /* Use shortest method. */
1029 if (size <= 5 && (get_sequence_length (sp_plus_insns)
1030 < get_sequence_length (fp_plus_insns)))
1031 emit_insn (sp_plus_insns);
1032 else
1033 emit_insn (fp_plus_insns);
1035 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1037 /* Restore previous frame_pointer. */
1038 emit_insn (gen_pophi (frame_pointer_rtx));
1041 /* Restore used registers. */
1042 for (reg = 31; reg >= 0; --reg)
1044 if (TEST_HARD_REG_BIT (set, reg))
1045 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
1047 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1049 /* Restore RAMPZ using tmp reg as scratch. */
1050 if(AVR_HAVE_RAMPZ
1051 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
1053 emit_insn (gen_popqi (tmp_reg_rtx));
1054 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
1055 tmp_reg_rtx);
1058 /* Restore SREG using tmp reg as scratch. */
1059 emit_insn (gen_popqi (tmp_reg_rtx));
1061 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
1062 tmp_reg_rtx);
1064 /* Restore tmp REG. */
1065 emit_insn (gen_popqi (tmp_reg_rtx));
1067 /* Restore zero REG. */
1068 emit_insn (gen_popqi (zero_reg_rtx));
1071 emit_jump_insn (gen_return ());
1075 /* Output summary messages at beginning of function epilogue. */
1077 static void
1078 avr_asm_function_begin_epilogue (FILE *file)
1080 fprintf (file, "/* epilogue start */\n");
1083 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1084 machine for a memory operand of mode MODE. */
1087 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1089 enum reg_class r = NO_REGS;
1091 if (TARGET_ALL_DEBUG)
1093 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1094 GET_MODE_NAME(mode),
1095 strict ? "(strict)": "",
1096 reload_completed ? "(reload_completed)": "",
1097 reload_in_progress ? "(reload_in_progress)": "",
1098 reg_renumber ? "(reg_renumber)" : "");
1099 if (GET_CODE (x) == PLUS
1100 && REG_P (XEXP (x, 0))
1101 && GET_CODE (XEXP (x, 1)) == CONST_INT
1102 && INTVAL (XEXP (x, 1)) >= 0
1103 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1104 && reg_renumber
1106 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1107 true_regnum (XEXP (x, 0)));
1108 debug_rtx (x);
1110 if (!strict && GET_CODE (x) == SUBREG)
1111 x = SUBREG_REG (x);
1112 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1113 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1114 r = POINTER_REGS;
1115 else if (CONSTANT_ADDRESS_P (x))
1116 r = ALL_REGS;
1117 else if (GET_CODE (x) == PLUS
1118 && REG_P (XEXP (x, 0))
1119 && GET_CODE (XEXP (x, 1)) == CONST_INT
1120 && INTVAL (XEXP (x, 1)) >= 0)
1122 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1123 if (fit)
1125 if (! strict
1126 || REGNO (XEXP (x,0)) == REG_X
1127 || REGNO (XEXP (x,0)) == REG_Y
1128 || REGNO (XEXP (x,0)) == REG_Z)
1129 r = BASE_POINTER_REGS;
1130 if (XEXP (x,0) == frame_pointer_rtx
1131 || XEXP (x,0) == arg_pointer_rtx)
1132 r = BASE_POINTER_REGS;
1134 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1135 r = POINTER_Y_REGS;
1137 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1138 && REG_P (XEXP (x, 0))
1139 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1140 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1142 r = POINTER_REGS;
1144 if (TARGET_ALL_DEBUG)
1146 fprintf (stderr, " ret = %c\n", r + '0');
1148 return r == NO_REGS ? 0 : (int)r;
 1151 /* Attempt to replace X with a valid
 1152 memory address for an operand of mode MODE. */
1155 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1157 x = oldx;
1158 if (TARGET_ALL_DEBUG)
1160 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1161 debug_rtx (oldx);
1164 if (GET_CODE (oldx) == PLUS
1165 && REG_P (XEXP (oldx,0)))
1167 if (REG_P (XEXP (oldx,1)))
1168 x = force_reg (GET_MODE (oldx), oldx);
1169 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1171 int offs = INTVAL (XEXP (oldx,1));
1172 if (frame_pointer_rtx != XEXP (oldx,0))
1173 if (offs > MAX_LD_OFFSET (mode))
1175 if (TARGET_ALL_DEBUG)
1176 fprintf (stderr, "force_reg (big offset)\n");
1177 x = force_reg (GET_MODE (oldx), oldx);
1181 return x;
1185 /* Return a pointer register name as a string. */
1187 static const char *
1188 ptrreg_to_str (int regno)
1190 switch (regno)
1192 case REG_X: return "X";
1193 case REG_Y: return "Y";
1194 case REG_Z: return "Z";
1195 default:
1196 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1198 return NULL;
 1201 /* Return the condition name as a string.
 1202 Used when constructing conditional jumps. */
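/* If the previous comparison left the overflow flag unusable, signed GE/LT
   degenerate to a test of the sign bit, so brpl/brmi are used instead of
   brge/brlt.  */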
1204 static const char *
1205 cond_string (enum rtx_code code)
1207 switch (code)
1209 case NE:
1210 return "ne";
1211 case EQ:
1212 return "eq";
1213 case GE:
1214 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1215 return "pl";
1216 else
1217 return "ge";
1218 case LT:
1219 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1220 return "mi";
1221 else
1222 return "lt";
1223 case GEU:
1224 return "sh";
1225 case LTU:
1226 return "lo";
1227 default:
1228 gcc_unreachable ();
1232 /* Output ADDR to FILE as address. */
1234 void
1235 print_operand_address (FILE *file, rtx addr)
1237 switch (GET_CODE (addr))
1239 case REG:
1240 fprintf (file, ptrreg_to_str (REGNO (addr)));
1241 break;
1243 case PRE_DEC:
1244 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1245 break;
1247 case POST_INC:
1248 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1249 break;
1251 default:
1252 if (CONSTANT_ADDRESS_P (addr)
1253 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1254 || GET_CODE (addr) == LABEL_REF))
1256 fprintf (file, "gs(");
1257 output_addr_const (file,addr);
1258 fprintf (file ,")");
1260 else
1261 output_addr_const (file, addr);
1266 /* Output X as assembler operand to file FILE. */
1268 void
1269 print_operand (FILE *file, rtx x, int code)
1271 int abcd = 0;
1273 if (code >= 'A' && code <= 'D')
1274 abcd = code - 'A';
1276 if (code == '~')
1278 if (!AVR_HAVE_JMP_CALL)
1279 fputc ('r', file);
1281 else if (code == '!')
1283 if (AVR_HAVE_EIJMP_EICALL)
1284 fputc ('e', file);
1286 else if (REG_P (x))
1288 if (x == zero_reg_rtx)
1289 fprintf (file, "__zero_reg__");
1290 else
1291 fprintf (file, reg_names[true_regnum (x) + abcd]);
1293 else if (GET_CODE (x) == CONST_INT)
1294 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1295 else if (GET_CODE (x) == MEM)
1297 rtx addr = XEXP (x,0);
1299 if (CONSTANT_P (addr) && abcd)
1301 fputc ('(', file);
1302 output_address (addr);
1303 fprintf (file, ")+%d", abcd);
1305 else if (code == 'o')
1307 if (GET_CODE (addr) != PLUS)
1308 fatal_insn ("bad address, not (reg+disp):", addr);
1310 print_operand (file, XEXP (addr, 1), 0);
1312 else if (code == 'p' || code == 'r')
1314 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1315 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1317 if (code == 'p')
1318 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1319 else
1320 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1322 else if (GET_CODE (addr) == PLUS)
1324 print_operand_address (file, XEXP (addr,0));
1325 if (REGNO (XEXP (addr, 0)) == REG_X)
1326 fatal_insn ("internal compiler error. Bad address:"
1327 ,addr);
1328 fputc ('+', file);
1329 print_operand (file, XEXP (addr,1), code);
1331 else
1332 print_operand_address (file, addr);
1334 else if (GET_CODE (x) == CONST_DOUBLE)
1336 long val;
1337 REAL_VALUE_TYPE rv;
1338 if (GET_MODE (x) != SFmode)
1339 fatal_insn ("internal compiler error. Unknown mode:", x);
1340 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1341 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1342 fprintf (file, "0x%lx", val);
1344 else if (code == 'j')
1345 fputs (cond_string (GET_CODE (x)), file);
1346 else if (code == 'k')
1347 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1348 else
1349 print_operand_address (file, x);
1352 /* Update the condition code in the INSN. */
1354 void
1355 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1357 rtx set;
1359 switch (get_attr_cc (insn))
1361 case CC_NONE:
1362 /* Insn does not affect CC at all. */
1363 break;
1365 case CC_SET_N:
1366 CC_STATUS_INIT;
1367 break;
1369 case CC_SET_ZN:
1370 set = single_set (insn);
1371 CC_STATUS_INIT;
1372 if (set)
1374 cc_status.flags |= CC_NO_OVERFLOW;
1375 cc_status.value1 = SET_DEST (set);
1377 break;
1379 case CC_SET_CZN:
1380 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1381 The V flag may or may not be known but that's ok because
1382 alter_cond will change tests to use EQ/NE. */
1383 set = single_set (insn);
1384 CC_STATUS_INIT;
1385 if (set)
1387 cc_status.value1 = SET_DEST (set);
1388 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1390 break;
1392 case CC_COMPARE:
1393 set = single_set (insn);
1394 CC_STATUS_INIT;
1395 if (set)
1396 cc_status.value1 = SET_SRC (set);
1397 break;
1399 case CC_CLOBBER:
1400 /* Insn doesn't leave CC in a usable state. */
1401 CC_STATUS_INIT;
 1403 /* Correct CC for an ashrqi3 whose shift count is a CONST_INT other than 6. */
1404 set = single_set (insn);
1405 if (set)
1407 rtx src = SET_SRC (set);
1409 if (GET_CODE (src) == ASHIFTRT
1410 && GET_MODE (src) == QImode)
1412 rtx x = XEXP (src, 1);
1414 if (GET_CODE (x) == CONST_INT
1415 && INTVAL (x) > 0
1416 && INTVAL (x) != 6)
1418 cc_status.value1 = SET_DEST (set);
1419 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1423 break;
1427 /* Return maximum number of consecutive registers of
1428 class CLASS needed to hold a value of mode MODE. */
1431 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1433 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1436 /* Choose mode for jump insn:
1437 1 - relative jump in range -63 <= x <= 62 ;
1438 2 - relative jump in range -2046 <= x <= 2045 ;
1439 3 - absolute jump (only for ATmega[16]03). */
1442 avr_jump_mode (rtx x, rtx insn)
1444 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1445 ? XEXP (x, 0) : x));
1446 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1447 int jump_distance = cur_addr - dest_addr;
1449 if (-63 <= jump_distance && jump_distance <= 62)
1450 return 1;
1451 else if (-2046 <= jump_distance && jump_distance <= 2045)
1452 return 2;
1453 else if (AVR_HAVE_JMP_CALL)
1454 return 3;
1456 return 2;
 1459 /* Return an AVR conditional jump command.
 1460 X is a comparison RTX.
 1461 LEN is a number returned by the avr_jump_mode function.
 1462 If REVERSE is nonzero, the condition code in X must be reversed. */
1464 const char *
1465 ret_cond_branch (rtx x, int len, int reverse)
1467 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1469 switch (cond)
1471 case GT:
1472 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1473 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1474 AS1 (brpl,%0)) :
1475 len == 2 ? (AS1 (breq,.+4) CR_TAB
1476 AS1 (brmi,.+2) CR_TAB
1477 AS1 (rjmp,%0)) :
1478 (AS1 (breq,.+6) CR_TAB
1479 AS1 (brmi,.+4) CR_TAB
1480 AS1 (jmp,%0)));
1482 else
1483 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1484 AS1 (brge,%0)) :
1485 len == 2 ? (AS1 (breq,.+4) CR_TAB
1486 AS1 (brlt,.+2) CR_TAB
1487 AS1 (rjmp,%0)) :
1488 (AS1 (breq,.+6) CR_TAB
1489 AS1 (brlt,.+4) CR_TAB
1490 AS1 (jmp,%0)));
1491 case GTU:
1492 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1493 AS1 (brsh,%0)) :
1494 len == 2 ? (AS1 (breq,.+4) CR_TAB
1495 AS1 (brlo,.+2) CR_TAB
1496 AS1 (rjmp,%0)) :
1497 (AS1 (breq,.+6) CR_TAB
1498 AS1 (brlo,.+4) CR_TAB
1499 AS1 (jmp,%0)));
1500 case LE:
1501 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1502 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1503 AS1 (brmi,%0)) :
1504 len == 2 ? (AS1 (breq,.+2) CR_TAB
1505 AS1 (brpl,.+2) CR_TAB
1506 AS1 (rjmp,%0)) :
1507 (AS1 (breq,.+2) CR_TAB
1508 AS1 (brpl,.+4) CR_TAB
1509 AS1 (jmp,%0)));
1510 else
1511 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1512 AS1 (brlt,%0)) :
1513 len == 2 ? (AS1 (breq,.+2) CR_TAB
1514 AS1 (brge,.+2) CR_TAB
1515 AS1 (rjmp,%0)) :
1516 (AS1 (breq,.+2) CR_TAB
1517 AS1 (brge,.+4) CR_TAB
1518 AS1 (jmp,%0)));
1519 case LEU:
1520 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1521 AS1 (brlo,%0)) :
1522 len == 2 ? (AS1 (breq,.+2) CR_TAB
1523 AS1 (brsh,.+2) CR_TAB
1524 AS1 (rjmp,%0)) :
1525 (AS1 (breq,.+2) CR_TAB
1526 AS1 (brsh,.+4) CR_TAB
1527 AS1 (jmp,%0)));
1528 default:
1529 if (reverse)
1531 switch (len)
1533 case 1:
1534 return AS1 (br%k1,%0);
1535 case 2:
1536 return (AS1 (br%j1,.+2) CR_TAB
1537 AS1 (rjmp,%0));
1538 default:
1539 return (AS1 (br%j1,.+4) CR_TAB
1540 AS1 (jmp,%0));
1543 else
1545 switch (len)
1547 case 1:
1548 return AS1 (br%j1,%0);
1549 case 2:
1550 return (AS1 (br%k1,.+2) CR_TAB
1551 AS1 (rjmp,%0));
1552 default:
1553 return (AS1 (br%k1,.+4) CR_TAB
1554 AS1 (jmp,%0));
1558 return "";
 1561 /* Predicate for an immediate operand that fits into a byte (8 bits). */
1564 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1566 return (GET_CODE (op) == CONST_INT
1567 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1570 /* Output all insn addresses and their sizes into the assembly language
1571 output file. This is helpful for debugging whether the length attributes
1572 in the md file are correct.
1573 Output insn cost for next insn. */
1575 void
1576 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1577 int num_operands ATTRIBUTE_UNUSED)
1579 int uid = INSN_UID (insn);
1581 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1583 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1584 INSN_ADDRESSES (uid),
1585 INSN_ADDRESSES (uid) - last_insn_address,
1586 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1588 last_insn_address = INSN_ADDRESSES (uid);
1591 /* Return 0 if undefined, 1 if always true or always false. */
1594 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1596 unsigned int max = (mode == QImode ? 0xff :
1597 mode == HImode ? 0xffff :
1598 mode == SImode ? 0xffffffff : 0);
1599 if (max && op && GET_CODE (x) == CONST_INT)
1601 if (unsigned_condition (op) != op)
1602 max >>= 1;
1604 if (max != (INTVAL (x) & max)
1605 && INTVAL (x) != 0xff)
1606 return 1;
1608 return 0;
1612 /* Returns nonzero if REGNO is the number of a hard
1613 register in which function arguments are sometimes passed. */
1616 function_arg_regno_p(int r)
1618 return (r >= 8 && r <= 25);
 1621 /* Initialize CUM for the state at the beginning
 1622 of the argument list. */
1624 void
1625 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1626 tree fndecl ATTRIBUTE_UNUSED)
1628 cum->nregs = 18;
1629 cum->regno = FIRST_CUM_REG;
1630 if (!libname && fntype)
1632 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1633 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1634 != void_type_node));
1635 if (stdarg)
1636 cum->nregs = 0;
1640 /* Returns the number of registers to allocate for a function argument. */
1642 static int
1643 avr_num_arg_regs (enum machine_mode mode, tree type)
1645 int size;
1647 if (mode == BLKmode)
1648 size = int_size_in_bytes (type);
1649 else
1650 size = GET_MODE_SIZE (mode);
1652 /* Align all function arguments to start in even-numbered registers.
1653 Odd-sized arguments leave holes above them. */
1655 return (size + 1) & ~1;
1658 /* Controls whether a function argument is passed
1659 in a register, and which register. */
1662 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1663 int named ATTRIBUTE_UNUSED)
1665 int bytes = avr_num_arg_regs (mode, type);
1667 if (cum->nregs && bytes <= cum->nregs)
1668 return gen_rtx_REG (mode, cum->regno - bytes);
1670 return NULL_RTX;
1673 /* Update the summarizer variable CUM to advance past an argument
1674 in the argument list. */
1676 void
1677 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1678 int named ATTRIBUTE_UNUSED)
1680 int bytes = avr_num_arg_regs (mode, type);
1682 cum->nregs -= bytes;
1683 cum->regno -= bytes;
1685 if (cum->nregs <= 0)
1687 cum->nregs = 0;
1688 cum->regno = FIRST_CUM_REG;
1692 /***********************************************************************
 1693 Functions for outputting the various mov instructions for various modes
1694 ************************************************************************/
1695 const char *
1696 output_movqi (rtx insn, rtx operands[], int *l)
1698 int dummy;
1699 rtx dest = operands[0];
1700 rtx src = operands[1];
1701 int *real_l = l;
1703 if (!l)
1704 l = &dummy;
1706 *l = 1;
1708 if (register_operand (dest, QImode))
1710 if (register_operand (src, QImode)) /* mov r,r */
1712 if (test_hard_reg_class (STACK_REG, dest))
1713 return AS2 (out,%0,%1);
1714 else if (test_hard_reg_class (STACK_REG, src))
1715 return AS2 (in,%0,%1);
1717 return AS2 (mov,%0,%1);
1719 else if (CONSTANT_P (src))
1721 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1722 return AS2 (ldi,%0,lo8(%1));
1724 if (GET_CODE (src) == CONST_INT)
1726 if (src == const0_rtx) /* mov r,L */
1727 return AS1 (clr,%0);
1728 else if (src == const1_rtx)
1730 *l = 2;
1731 return (AS1 (clr,%0) CR_TAB
1732 AS1 (inc,%0));
1734 else if (src == constm1_rtx)
1736 /* Immediate constants -1 to any register */
1737 *l = 2;
1738 return (AS1 (clr,%0) CR_TAB
1739 AS1 (dec,%0));
1741 else
1743 int bit_nr = exact_log2 (INTVAL (src));
1745 if (bit_nr >= 0)
1747 *l = 3;
1748 if (!real_l)
1749 output_asm_insn ((AS1 (clr,%0) CR_TAB
1750 "set"), operands);
1751 if (!real_l)
1752 avr_output_bld (operands, bit_nr);
1754 return "";
1759 /* Last resort, larger than loading from memory. */
1760 *l = 4;
1761 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1762 AS2 (ldi,r31,lo8(%1)) CR_TAB
1763 AS2 (mov,%0,r31) CR_TAB
1764 AS2 (mov,r31,__tmp_reg__));
1766 else if (GET_CODE (src) == MEM)
1767 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1769 else if (GET_CODE (dest) == MEM)
1771 const char *templ;
1773 if (src == const0_rtx)
1774 operands[1] = zero_reg_rtx;
1776 templ = out_movqi_mr_r (insn, operands, real_l);
1778 if (!real_l)
1779 output_asm_insn (templ, operands);
1781 operands[1] = src;
1783 return "";
1787 const char *
1788 output_movhi (rtx insn, rtx operands[], int *l)
1790 int dummy;
1791 rtx dest = operands[0];
1792 rtx src = operands[1];
1793 int *real_l = l;
1795 if (!l)
1796 l = &dummy;
1798 if (register_operand (dest, HImode))
1800 if (register_operand (src, HImode)) /* mov r,r */
1802 if (test_hard_reg_class (STACK_REG, dest))
1804 if (TARGET_TINY_STACK)
1805 return *l = 1, AS2 (out,__SP_L__,%A1);
 1806 /* Use a plain write to the stack pointer if no interrupts are
 1807 used. */
1808 else if (TARGET_NO_INTERRUPTS)
1809 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1810 AS2 (out,__SP_L__,%A1));
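/* Otherwise the two-byte write must not be interrupted: save SREG, disable
   interrupts, write the high byte, restore SREG (which may re-enable
   interrupts), then write the low byte.  */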
1811 *l = 5;
1812 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1813 "cli" CR_TAB
1814 AS2 (out,__SP_H__,%B1) CR_TAB
1815 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1816 AS2 (out,__SP_L__,%A1));
1818 else if (test_hard_reg_class (STACK_REG, src))
1820 *l = 2;
1821 return (AS2 (in,%A0,__SP_L__) CR_TAB
1822 AS2 (in,%B0,__SP_H__));
1825 if (AVR_HAVE_MOVW)
1827 *l = 1;
1828 return (AS2 (movw,%0,%1));
1830 else
1832 *l = 2;
1833 return (AS2 (mov,%A0,%A1) CR_TAB
1834 AS2 (mov,%B0,%B1));
1837 else if (CONSTANT_P (src))
1839 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1841 *l = 2;
1842 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1843 AS2 (ldi,%B0,hi8(%1)));
1846 if (GET_CODE (src) == CONST_INT)
1848 if (src == const0_rtx) /* mov r,L */
1850 *l = 2;
1851 return (AS1 (clr,%A0) CR_TAB
1852 AS1 (clr,%B0));
1854 else if (src == const1_rtx)
1856 *l = 3;
1857 return (AS1 (clr,%A0) CR_TAB
1858 AS1 (clr,%B0) CR_TAB
1859 AS1 (inc,%A0));
1861 else if (src == constm1_rtx)
1863 /* Immediate constants -1 to any register */
1864 *l = 3;
1865 return (AS1 (clr,%0) CR_TAB
1866 AS1 (dec,%A0) CR_TAB
1867 AS2 (mov,%B0,%A0));
1869 else
1871 int bit_nr = exact_log2 (INTVAL (src));
1873 if (bit_nr >= 0)
1875 *l = 4;
1876 if (!real_l)
1877 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1878 AS1 (clr,%B0) CR_TAB
1879 "set"), operands);
1880 if (!real_l)
1881 avr_output_bld (operands, bit_nr);
1883 return "";
1887 if ((INTVAL (src) & 0xff) == 0)
1889 *l = 5;
1890 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1891 AS1 (clr,%A0) CR_TAB
1892 AS2 (ldi,r31,hi8(%1)) CR_TAB
1893 AS2 (mov,%B0,r31) CR_TAB
1894 AS2 (mov,r31,__tmp_reg__));
1896 else if ((INTVAL (src) & 0xff00) == 0)
1898 *l = 5;
1899 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1900 AS2 (ldi,r31,lo8(%1)) CR_TAB
1901 AS2 (mov,%A0,r31) CR_TAB
1902 AS1 (clr,%B0) CR_TAB
1903 AS2 (mov,r31,__tmp_reg__));
1907 /* Last resort, equal to loading from memory. */
1908 *l = 6;
1909 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1910 AS2 (ldi,r31,lo8(%1)) CR_TAB
1911 AS2 (mov,%A0,r31) CR_TAB
1912 AS2 (ldi,r31,hi8(%1)) CR_TAB
1913 AS2 (mov,%B0,r31) CR_TAB
1914 AS2 (mov,r31,__tmp_reg__));
1916 else if (GET_CODE (src) == MEM)
1917 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1919 else if (GET_CODE (dest) == MEM)
1921 const char *templ;
1923 if (src == const0_rtx)
1924 operands[1] = zero_reg_rtx;
1926 templ = out_movhi_mr_r (insn, operands, real_l);
1928 if (!real_l)
1929 output_asm_insn (templ, operands);
1931 operands[1] = src;
1932 return "";
1934 fatal_insn ("invalid insn:", insn);
1935 return "";
1938 const char *
1939 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1941 rtx dest = op[0];
1942 rtx src = op[1];
1943 rtx x = XEXP (src, 0);
1944 int dummy;
1946 if (!l)
1947 l = &dummy;
1949 if (CONSTANT_ADDRESS_P (x))
1951 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1953 *l = 1;
1954 return AS2 (in,%0,__SREG__);
1956 if (optimize > 0 && io_address_operand (x, QImode))
1958 *l = 1;
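/* I/O registers are memory mapped 0x20 above the I/O address space, so
   subtract 0x20 to get the port number expected by IN.  */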
1959 return AS2 (in,%0,%1-0x20);
1961 *l = 2;
1962 return AS2 (lds,%0,%1);
1964 /* memory access by reg+disp */
1965 else if (GET_CODE (x) == PLUS
1966 && REG_P (XEXP (x,0))
1967 && GET_CODE (XEXP (x,1)) == CONST_INT)
1969 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1971 int disp = INTVAL (XEXP (x,1));
1972 if (REGNO (XEXP (x,0)) != REG_Y)
1973 fatal_insn ("incorrect insn:",insn);
1975 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1976 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1977 AS2 (ldd,%0,Y+63) CR_TAB
1978 AS2 (sbiw,r28,%o1-63));
1980 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1981 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1982 AS2 (ld,%0,Y) CR_TAB
1983 AS2 (subi,r28,lo8(%o1)) CR_TAB
1984 AS2 (sbci,r29,hi8(%o1)));
1986 else if (REGNO (XEXP (x,0)) == REG_X)
 1988 /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS is supposed to exclude
 1989 it, but it has been seen with extreme optimization options. */
1990 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1991 || reg_unused_after (insn, XEXP (x,0)))
1992 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1993 AS2 (ld,%0,X));
1995 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1996 AS2 (ld,%0,X) CR_TAB
1997 AS2 (sbiw,r26,%o1));
1999 *l = 1;
2000 return AS2 (ldd,%0,%1);
2002 *l = 1;
2003 return AS2 (ld,%0,%1);
2006 const char *
2007 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2009 rtx dest = op[0];
2010 rtx src = op[1];
2011 rtx base = XEXP (src, 0);
2012 int reg_dest = true_regnum (dest);
2013 int reg_base = true_regnum (base);
2014 /* "volatile" forces reading low byte first, even if less efficient,
2015 for correct operation with 16-bit I/O registers. */
2016 int mem_volatile_p = MEM_VOLATILE_P (src);
2017 int tmp;
2019 if (!l)
2020 l = &tmp;
2022 if (reg_base > 0)
2024 if (reg_dest == reg_base) /* R = (R) */
2026 *l = 3;
2027 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2028 AS2 (ld,%B0,%1) CR_TAB
2029 AS2 (mov,%A0,__tmp_reg__));
2031 else if (reg_base == REG_X) /* (R26) */
2033 if (reg_unused_after (insn, base))
2035 *l = 2;
2036 return (AS2 (ld,%A0,X+) CR_TAB
2037 AS2 (ld,%B0,X));
2039 *l = 3;
2040 return (AS2 (ld,%A0,X+) CR_TAB
2041 AS2 (ld,%B0,X) CR_TAB
2042 AS2 (sbiw,r26,1));
2044 else /* (R) */
2046 *l = 2;
2047 return (AS2 (ld,%A0,%1) CR_TAB
2048 AS2 (ldd,%B0,%1+1));
2051 else if (GET_CODE (base) == PLUS) /* (R + i) */
2053 int disp = INTVAL (XEXP (base, 1));
2054 int reg_base = true_regnum (XEXP (base, 0));
2056 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2058 if (REGNO (XEXP (base, 0)) != REG_Y)
2059 fatal_insn ("incorrect insn:",insn);
2061 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2062 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2063 AS2 (ldd,%A0,Y+62) CR_TAB
2064 AS2 (ldd,%B0,Y+63) CR_TAB
2065 AS2 (sbiw,r28,%o1-62));
2067 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2068 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2069 AS2 (ld,%A0,Y) CR_TAB
2070 AS2 (ldd,%B0,Y+1) CR_TAB
2071 AS2 (subi,r28,lo8(%o1)) CR_TAB
2072 AS2 (sbci,r29,hi8(%o1)));
2074 if (reg_base == REG_X)
 2076 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS is supposed to
 2077 exclude it, but it has been seen with extreme
 2078 optimization options. */
2080 *l = 4;
2081 if (reg_base == reg_dest)
2082 return (AS2 (adiw,r26,%o1) CR_TAB
2083 AS2 (ld,__tmp_reg__,X+) CR_TAB
2084 AS2 (ld,%B0,X) CR_TAB
2085 AS2 (mov,%A0,__tmp_reg__));
2087 return (AS2 (adiw,r26,%o1) CR_TAB
2088 AS2 (ld,%A0,X+) CR_TAB
2089 AS2 (ld,%B0,X) CR_TAB
2090 AS2 (sbiw,r26,%o1+1));
2093 if (reg_base == reg_dest)
2095 *l = 3;
2096 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2097 AS2 (ldd,%B0,%B1) CR_TAB
2098 AS2 (mov,%A0,__tmp_reg__));
2101 *l = 2;
2102 return (AS2 (ldd,%A0,%A1) CR_TAB
2103 AS2 (ldd,%B0,%B1));
2105 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2107 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2108 fatal_insn ("incorrect insn:", insn);
2110 if (mem_volatile_p)
2112 if (REGNO (XEXP (base, 0)) == REG_X)
2114 *l = 4;
2115 return (AS2 (sbiw,r26,2) CR_TAB
2116 AS2 (ld,%A0,X+) CR_TAB
2117 AS2 (ld,%B0,X) CR_TAB
2118 AS2 (sbiw,r26,1));
2120 else
2122 *l = 3;
2123 return (AS2 (sbiw,%r1,2) CR_TAB
2124 AS2 (ld,%A0,%p1) CR_TAB
2125 AS2 (ldd,%B0,%p1+1));
2129 *l = 2;
2130 return (AS2 (ld,%B0,%1) CR_TAB
2131 AS2 (ld,%A0,%1));
2133 else if (GET_CODE (base) == POST_INC) /* (R++) */
2135 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2136 fatal_insn ("incorrect insn:", insn);
2138 *l = 2;
2139 return (AS2 (ld,%A0,%1) CR_TAB
2140 AS2 (ld,%B0,%1));
2142 else if (CONSTANT_ADDRESS_P (base))
2144 if (optimize > 0 && io_address_operand (base, HImode))
2146 *l = 2;
2147 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2148 AS2 (in,%B0,%B1-0x20));
2150 *l = 4;
2151 return (AS2 (lds,%A0,%A1) CR_TAB
2152 AS2 (lds,%B0,%B1));
2155 fatal_insn ("unknown move insn:",insn);
2156 return "";
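/* Output assembler code to load an SImode (4-byte) value from memory:
   OP[0] is the destination register, OP[1] the memory source.  If L is
   non-NULL the instruction count is stored through it.  */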
2159 const char *
2160 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2162 rtx dest = op[0];
2163 rtx src = op[1];
2164 rtx base = XEXP (src, 0);
2165 int reg_dest = true_regnum (dest);
2166 int reg_base = true_regnum (base);
2167 int tmp;
2169 if (!l)
2170 l = &tmp;
2172 if (reg_base > 0)
2174 if (reg_base == REG_X) /* (R26) */
2176 if (reg_dest == REG_X)
2177 /* "ld r26,-X" is undefined */
2178 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2179 AS2 (ld,r29,X) CR_TAB
2180 AS2 (ld,r28,-X) CR_TAB
2181 AS2 (ld,__tmp_reg__,-X) CR_TAB
2182 AS2 (sbiw,r26,1) CR_TAB
2183 AS2 (ld,r26,X) CR_TAB
2184 AS2 (mov,r27,__tmp_reg__));
2185 else if (reg_dest == REG_X - 2)
2186 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2187 AS2 (ld,%B0,X+) CR_TAB
2188 AS2 (ld,__tmp_reg__,X+) CR_TAB
2189 AS2 (ld,%D0,X) CR_TAB
2190 AS2 (mov,%C0,__tmp_reg__));
2191 else if (reg_unused_after (insn, base))
2192 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2193 AS2 (ld,%B0,X+) CR_TAB
2194 AS2 (ld,%C0,X+) CR_TAB
2195 AS2 (ld,%D0,X));
2196 else
2197 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2198 AS2 (ld,%B0,X+) CR_TAB
2199 AS2 (ld,%C0,X+) CR_TAB
2200 AS2 (ld,%D0,X) CR_TAB
2201 AS2 (sbiw,r26,3));
2203 else
2205 if (reg_dest == reg_base)
2206 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2207 AS2 (ldd,%C0,%1+2) CR_TAB
2208 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2209 AS2 (ld,%A0,%1) CR_TAB
2210 AS2 (mov,%B0,__tmp_reg__));
2211 else if (reg_base == reg_dest + 2)
2212 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2213 AS2 (ldd,%B0,%1+1) CR_TAB
2214 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2215 AS2 (ldd,%D0,%1+3) CR_TAB
2216 AS2 (mov,%C0,__tmp_reg__));
2217 else
2218 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2219 AS2 (ldd,%B0,%1+1) CR_TAB
2220 AS2 (ldd,%C0,%1+2) CR_TAB
2221 AS2 (ldd,%D0,%1+3));
2224 else if (GET_CODE (base) == PLUS) /* (R + i) */
2226 int disp = INTVAL (XEXP (base, 1));
2228 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2230 if (REGNO (XEXP (base, 0)) != REG_Y)
2231 fatal_insn ("incorrect insn:",insn);
2233 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2234 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2235 AS2 (ldd,%A0,Y+60) CR_TAB
2236 AS2 (ldd,%B0,Y+61) CR_TAB
2237 AS2 (ldd,%C0,Y+62) CR_TAB
2238 AS2 (ldd,%D0,Y+63) CR_TAB
2239 AS2 (sbiw,r28,%o1-60));
2241 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2242 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2243 AS2 (ld,%A0,Y) CR_TAB
2244 AS2 (ldd,%B0,Y+1) CR_TAB
2245 AS2 (ldd,%C0,Y+2) CR_TAB
2246 AS2 (ldd,%D0,Y+3) CR_TAB
2247 AS2 (subi,r28,lo8(%o1)) CR_TAB
2248 AS2 (sbci,r29,hi8(%o1)));
2251 reg_base = true_regnum (XEXP (base, 0));
2252 if (reg_base == REG_X)
2254 /* R = (X + d) */
2255 if (reg_dest == REG_X)
2257 *l = 7;
2258 /* "ld r26,-X" is undefined */
2259 return (AS2 (adiw,r26,%o1+3) CR_TAB
2260 AS2 (ld,r29,X) CR_TAB
2261 AS2 (ld,r28,-X) CR_TAB
2262 AS2 (ld,__tmp_reg__,-X) CR_TAB
2263 AS2 (sbiw,r26,1) CR_TAB
2264 AS2 (ld,r26,X) CR_TAB
2265 AS2 (mov,r27,__tmp_reg__));
2267 *l = 6;
2268 if (reg_dest == REG_X - 2)
2269 return (AS2 (adiw,r26,%o1) CR_TAB
2270 AS2 (ld,r24,X+) CR_TAB
2271 AS2 (ld,r25,X+) CR_TAB
2272 AS2 (ld,__tmp_reg__,X+) CR_TAB
2273 AS2 (ld,r27,X) CR_TAB
2274 AS2 (mov,r26,__tmp_reg__));
2276 return (AS2 (adiw,r26,%o1) CR_TAB
2277 AS2 (ld,%A0,X+) CR_TAB
2278 AS2 (ld,%B0,X+) CR_TAB
2279 AS2 (ld,%C0,X+) CR_TAB
2280 AS2 (ld,%D0,X) CR_TAB
2281 AS2 (sbiw,r26,%o1+3));
2283 if (reg_dest == reg_base)
2284 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2285 AS2 (ldd,%C0,%C1) CR_TAB
2286 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2287 AS2 (ldd,%A0,%A1) CR_TAB
2288 AS2 (mov,%B0,__tmp_reg__));
2289 else if (reg_dest == reg_base - 2)
2290 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2291 AS2 (ldd,%B0,%B1) CR_TAB
2292 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2293 AS2 (ldd,%D0,%D1) CR_TAB
2294 AS2 (mov,%C0,__tmp_reg__));
2295 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2296 AS2 (ldd,%B0,%B1) CR_TAB
2297 AS2 (ldd,%C0,%C1) CR_TAB
2298 AS2 (ldd,%D0,%D1));
2300 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2301 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2302 AS2 (ld,%C0,%1) CR_TAB
2303 AS2 (ld,%B0,%1) CR_TAB
2304 AS2 (ld,%A0,%1));
2305 else if (GET_CODE (base) == POST_INC) /* (R++) */
2306 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2307 AS2 (ld,%B0,%1) CR_TAB
2308 AS2 (ld,%C0,%1) CR_TAB
2309 AS2 (ld,%D0,%1));
2310 else if (CONSTANT_ADDRESS_P (base))
2311 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2312 AS2 (lds,%B0,%B1) CR_TAB
2313 AS2 (lds,%C0,%C1) CR_TAB
2314 AS2 (lds,%D0,%D1));
2316 fatal_insn ("unknown move insn:",insn);
2317 return "";
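/* Output assembler code to store an SImode (4-byte) value to memory:
   OP[0] is the memory destination, OP[1] the source register.  If L is
   non-NULL the instruction count is stored through it.  */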
2320 const char *
2321 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2323 rtx dest = op[0];
2324 rtx src = op[1];
2325 rtx base = XEXP (dest, 0);
2326 int reg_base = true_regnum (base);
2327 int reg_src = true_regnum (src);
2328 int tmp;
2330 if (!l)
2331 l = &tmp;
2333 if (CONSTANT_ADDRESS_P (base))
2334 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2335 AS2 (sts,%B0,%B1) CR_TAB
2336 AS2 (sts,%C0,%C1) CR_TAB
2337 AS2 (sts,%D0,%D1));
2338 if (reg_base > 0) /* (r) */
2340 if (reg_base == REG_X) /* (R26) */
2342 if (reg_src == REG_X)
2344 /* "st X+,r26" is undefined */
2345 if (reg_unused_after (insn, base))
2346 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2347 AS2 (st,X,r26) CR_TAB
2348 AS2 (adiw,r26,1) CR_TAB
2349 AS2 (st,X+,__tmp_reg__) CR_TAB
2350 AS2 (st,X+,r28) CR_TAB
2351 AS2 (st,X,r29));
2352 else
2353 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2354 AS2 (st,X,r26) CR_TAB
2355 AS2 (adiw,r26,1) CR_TAB
2356 AS2 (st,X+,__tmp_reg__) CR_TAB
2357 AS2 (st,X+,r28) CR_TAB
2358 AS2 (st,X,r29) CR_TAB
2359 AS2 (sbiw,r26,3));
2361 else if (reg_base == reg_src + 2)
2363 if (reg_unused_after (insn, base))
2364 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2365 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2366 AS2 (st,%0+,%A1) CR_TAB
2367 AS2 (st,%0+,%B1) CR_TAB
2368 AS2 (st,%0+,__zero_reg__) CR_TAB
2369 AS2 (st,%0,__tmp_reg__) CR_TAB
2370 AS1 (clr,__zero_reg__));
2371 else
2372 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2373 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2374 AS2 (st,%0+,%A1) CR_TAB
2375 AS2 (st,%0+,%B1) CR_TAB
2376 AS2 (st,%0+,__zero_reg__) CR_TAB
2377 AS2 (st,%0,__tmp_reg__) CR_TAB
2378 AS1 (clr,__zero_reg__) CR_TAB
2379 AS2 (sbiw,r26,3));
2381 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2382 AS2 (st,%0+,%B1) CR_TAB
2383 AS2 (st,%0+,%C1) CR_TAB
2384 AS2 (st,%0,%D1) CR_TAB
2385 AS2 (sbiw,r26,3));
2387 else
2388 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2389 AS2 (std,%0+1,%B1) CR_TAB
2390 AS2 (std,%0+2,%C1) CR_TAB
2391 AS2 (std,%0+3,%D1));
2393 else if (GET_CODE (base) == PLUS) /* (R + i) */
2395 int disp = INTVAL (XEXP (base, 1));
2396 reg_base = REGNO (XEXP (base, 0));
2397 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2399 if (reg_base != REG_Y)
2400 fatal_insn ("incorrect insn:",insn);
2402 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2403 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2404 AS2 (std,Y+60,%A1) CR_TAB
2405 AS2 (std,Y+61,%B1) CR_TAB
2406 AS2 (std,Y+62,%C1) CR_TAB
2407 AS2 (std,Y+63,%D1) CR_TAB
2408 AS2 (sbiw,r28,%o0-60));
2410 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2411 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2412 AS2 (st,Y,%A1) CR_TAB
2413 AS2 (std,Y+1,%B1) CR_TAB
2414 AS2 (std,Y+2,%C1) CR_TAB
2415 AS2 (std,Y+3,%D1) CR_TAB
2416 AS2 (subi,r28,lo8(%o0)) CR_TAB
2417 AS2 (sbci,r29,hi8(%o0)));
2419 if (reg_base == REG_X)
2421 /* (X + d) = R */
2422 if (reg_src == REG_X)
2424 *l = 9;
2425 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2426 AS2 (mov,__zero_reg__,r27) CR_TAB
2427 AS2 (adiw,r26,%o0) CR_TAB
2428 AS2 (st,X+,__tmp_reg__) CR_TAB
2429 AS2 (st,X+,__zero_reg__) CR_TAB
2430 AS2 (st,X+,r28) CR_TAB
2431 AS2 (st,X,r29) CR_TAB
2432 AS1 (clr,__zero_reg__) CR_TAB
2433 AS2 (sbiw,r26,%o0+3));
2435 else if (reg_src == REG_X - 2)
2437 *l = 9;
2438 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2439 AS2 (mov,__zero_reg__,r27) CR_TAB
2440 AS2 (adiw,r26,%o0) CR_TAB
2441 AS2 (st,X+,r24) CR_TAB
2442 AS2 (st,X+,r25) CR_TAB
2443 AS2 (st,X+,__tmp_reg__) CR_TAB
2444 AS2 (st,X,__zero_reg__) CR_TAB
2445 AS1 (clr,__zero_reg__) CR_TAB
2446 AS2 (sbiw,r26,%o0+3));
2448 *l = 6;
2449 return (AS2 (adiw,r26,%o0) CR_TAB
2450 AS2 (st,X+,%A1) CR_TAB
2451 AS2 (st,X+,%B1) CR_TAB
2452 AS2 (st,X+,%C1) CR_TAB
2453 AS2 (st,X,%D1) CR_TAB
2454 AS2 (sbiw,r26,%o0+3));
2456 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2457 AS2 (std,%B0,%B1) CR_TAB
2458 AS2 (std,%C0,%C1) CR_TAB
2459 AS2 (std,%D0,%D1));
2461 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2462 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2463 AS2 (st,%0,%C1) CR_TAB
2464 AS2 (st,%0,%B1) CR_TAB
2465 AS2 (st,%0,%A1));
2466 else if (GET_CODE (base) == POST_INC) /* (R++) */
2467 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2468 AS2 (st,%0,%B1) CR_TAB
2469 AS2 (st,%0,%C1) CR_TAB
2470 AS2 (st,%0,%D1));
2471 fatal_insn ("unknown move insn:",insn);
2472 return "";
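/* Output a 4-byte (SImode or SFmode) move: register-register moves and
   constant loads are handled directly, memory loads and stores are
   dispatched to the helpers above.  OPERANDS[0] is the destination,
   OPERANDS[1] the source; if L is non-NULL the instruction count is
   stored through it.  */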
2475 const char *
2476 output_movsisf(rtx insn, rtx operands[], int *l)
2478 int dummy;
2479 rtx dest = operands[0];
2480 rtx src = operands[1];
2481 int *real_l = l;
2483 if (!l)
2484 l = &dummy;
2486 if (register_operand (dest, VOIDmode))
2488 if (register_operand (src, VOIDmode)) /* mov r,r */
2490 if (true_regnum (dest) > true_regnum (src))
2492 if (AVR_HAVE_MOVW)
2494 *l = 2;
2495 return (AS2 (movw,%C0,%C1) CR_TAB
2496 AS2 (movw,%A0,%A1));
2498 *l = 4;
2499 return (AS2 (mov,%D0,%D1) CR_TAB
2500 AS2 (mov,%C0,%C1) CR_TAB
2501 AS2 (mov,%B0,%B1) CR_TAB
2502 AS2 (mov,%A0,%A1));
2504 else
2506 if (AVR_HAVE_MOVW)
2508 *l = 2;
2509 return (AS2 (movw,%A0,%A1) CR_TAB
2510 AS2 (movw,%C0,%C1));
2512 *l = 4;
2513 return (AS2 (mov,%A0,%A1) CR_TAB
2514 AS2 (mov,%B0,%B1) CR_TAB
2515 AS2 (mov,%C0,%C1) CR_TAB
2516 AS2 (mov,%D0,%D1));
2519 else if (CONSTANT_P (src))
2521 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2523 *l = 4;
2524 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2525 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2526 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2527 AS2 (ldi,%D0,hhi8(%1)));
2530 if (GET_CODE (src) == CONST_INT)
2532 const char *const clr_op0 =
2533 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2534 AS1 (clr,%B0) CR_TAB
2535 AS2 (movw,%C0,%A0))
2536 : (AS1 (clr,%A0) CR_TAB
2537 AS1 (clr,%B0) CR_TAB
2538 AS1 (clr,%C0) CR_TAB
2539 AS1 (clr,%D0));
2541 if (src == const0_rtx) /* mov r,L */
2543 *l = AVR_HAVE_MOVW ? 3 : 4;
2544 return clr_op0;
2546 else if (src == const1_rtx)
2548 if (!real_l)
2549 output_asm_insn (clr_op0, operands);
2550 *l = AVR_HAVE_MOVW ? 4 : 5;
2551 return AS1 (inc,%A0);
2553 else if (src == constm1_rtx)
2555 /* Load the immediate constant -1 into any register. */
2556 if (AVR_HAVE_MOVW)
2558 *l = 4;
2559 return (AS1 (clr,%A0) CR_TAB
2560 AS1 (dec,%A0) CR_TAB
2561 AS2 (mov,%B0,%A0) CR_TAB
2562 AS2 (movw,%C0,%A0));
2564 *l = 5;
2565 return (AS1 (clr,%A0) CR_TAB
2566 AS1 (dec,%A0) CR_TAB
2567 AS2 (mov,%B0,%A0) CR_TAB
2568 AS2 (mov,%C0,%A0) CR_TAB
2569 AS2 (mov,%D0,%A0));
2571 else
2573 int bit_nr = exact_log2 (INTVAL (src));
2575 if (bit_nr >= 0)
2577 *l = AVR_HAVE_MOVW ? 5 : 6;
2578 if (!real_l)
2580 output_asm_insn (clr_op0, operands);
2581 output_asm_insn ("set", operands);
2583 if (!real_l)
2584 avr_output_bld (operands, bit_nr);
2586 return "";
2591 /* Last resort, better than loading from memory. */
2592 *l = 10;
2593 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2594 AS2 (ldi,r31,lo8(%1)) CR_TAB
2595 AS2 (mov,%A0,r31) CR_TAB
2596 AS2 (ldi,r31,hi8(%1)) CR_TAB
2597 AS2 (mov,%B0,r31) CR_TAB
2598 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2599 AS2 (mov,%C0,r31) CR_TAB
2600 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2601 AS2 (mov,%D0,r31) CR_TAB
2602 AS2 (mov,r31,__tmp_reg__));
2604 else if (GET_CODE (src) == MEM)
2605 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2607 else if (GET_CODE (dest) == MEM)
2609 const char *templ;
2611 if (src == const0_rtx)
2612 operands[1] = zero_reg_rtx;
2614 templ = out_movsi_mr_r (insn, operands, real_l);
2616 if (!real_l)
2617 output_asm_insn (templ, operands);
2619 operands[1] = src;
2620 return "";
2622 fatal_insn ("invalid insn:", insn);
2623 return "";
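/* Output assembler code to store a QImode value to memory: OP[0] is the
   memory destination, OP[1] the source register.  If L is non-NULL the
   instruction count is stored through it.  E.g., when optimizing, a store
   to an I/O address becomes a single
       out %0-0x20,%1  */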
2626 const char *
2627 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2629 rtx dest = op[0];
2630 rtx src = op[1];
2631 rtx x = XEXP (dest, 0);
2632 int dummy;
2634 if (!l)
2635 l = &dummy;
2637 if (CONSTANT_ADDRESS_P (x))
2639 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2641 *l = 1;
2642 return AS2 (out,__SREG__,%1);
2644 if (optimize > 0 && io_address_operand (x, QImode))
2646 *l = 1;
2647 return AS2 (out,%0-0x20,%1);
2649 *l = 2;
2650 return AS2 (sts,%0,%1);
2652 /* memory access by reg+disp */
2653 else if (GET_CODE (x) == PLUS
2654 && REG_P (XEXP (x,0))
2655 && GET_CODE (XEXP (x,1)) == CONST_INT)
2657 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2659 int disp = INTVAL (XEXP (x,1));
2660 if (REGNO (XEXP (x,0)) != REG_Y)
2661 fatal_insn ("incorrect insn:",insn);
2663 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2664 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2665 AS2 (std,Y+63,%1) CR_TAB
2666 AS2 (sbiw,r28,%o0-63));
2668 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2669 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2670 AS2 (st,Y,%1) CR_TAB
2671 AS2 (subi,r28,lo8(%o0)) CR_TAB
2672 AS2 (sbci,r29,hi8(%o0)));
2674 else if (REGNO (XEXP (x,0)) == REG_X)
2676 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2678 if (reg_unused_after (insn, XEXP (x,0)))
2679 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2680 AS2 (adiw,r26,%o0) CR_TAB
2681 AS2 (st,X,__tmp_reg__));
2683 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2684 AS2 (adiw,r26,%o0) CR_TAB
2685 AS2 (st,X,__tmp_reg__) CR_TAB
2686 AS2 (sbiw,r26,%o0));
2688 else
2690 if (reg_unused_after (insn, XEXP (x,0)))
2691 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2692 AS2 (st,X,%1));
2694 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2695 AS2 (st,X,%1) CR_TAB
2696 AS2 (sbiw,r26,%o0));
2699 *l = 1;
2700 return AS2 (std,%0,%1);
2702 *l = 1;
2703 return AS2 (st,%0,%1);
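/* Output assembler code to store an HImode value to memory: OP[0] is the
   memory destination, OP[1] the source register pair.  If L is non-NULL the
   instruction count is stored through it.  A volatile MEM forces the high
   byte to be written first (see below).  */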
2706 const char *
2707 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2709 rtx dest = op[0];
2710 rtx src = op[1];
2711 rtx base = XEXP (dest, 0);
2712 int reg_base = true_regnum (base);
2713 int reg_src = true_regnum (src);
2714 /* "volatile" forces writing high byte first, even if less efficient,
2715 for correct operation with 16-bit I/O registers. */
2716 int mem_volatile_p = MEM_VOLATILE_P (dest);
2717 int tmp;
2719 if (!l)
2720 l = &tmp;
2721 if (CONSTANT_ADDRESS_P (base))
2723 if (optimize > 0 && io_address_operand (base, HImode))
2725 *l = 2;
2726 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2727 AS2 (out,%A0-0x20,%A1));
2729 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2730 AS2 (sts,%A0,%A1));
2732 if (reg_base > 0)
2734 if (reg_base == REG_X)
2736 if (reg_src == REG_X)
2738 /* "st X+,r26" and "st -X,r26" are undefined. */
2739 if (!mem_volatile_p && reg_unused_after (insn, src))
2740 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2741 AS2 (st,X,r26) CR_TAB
2742 AS2 (adiw,r26,1) CR_TAB
2743 AS2 (st,X,__tmp_reg__));
2744 else
2745 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2746 AS2 (adiw,r26,1) CR_TAB
2747 AS2 (st,X,__tmp_reg__) CR_TAB
2748 AS2 (sbiw,r26,1) CR_TAB
2749 AS2 (st,X,r26));
2751 else
2753 if (!mem_volatile_p && reg_unused_after (insn, base))
2754 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2755 AS2 (st,X,%B1));
2756 else
2757 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2758 AS2 (st,X,%B1) CR_TAB
2759 AS2 (st,-X,%A1));
2762 else
2763 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2764 AS2 (st,%0,%A1));
2766 else if (GET_CODE (base) == PLUS)
2768 int disp = INTVAL (XEXP (base, 1));
2769 reg_base = REGNO (XEXP (base, 0));
2770 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2772 if (reg_base != REG_Y)
2773 fatal_insn ("incorrect insn:",insn);
2775 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2776 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2777 AS2 (std,Y+63,%B1) CR_TAB
2778 AS2 (std,Y+62,%A1) CR_TAB
2779 AS2 (sbiw,r28,%o0-62));
2781 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2782 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2783 AS2 (std,Y+1,%B1) CR_TAB
2784 AS2 (st,Y,%A1) CR_TAB
2785 AS2 (subi,r28,lo8(%o0)) CR_TAB
2786 AS2 (sbci,r29,hi8(%o0)));
2788 if (reg_base == REG_X)
2790 /* (X + d) = R */
2791 if (reg_src == REG_X)
2793 *l = 7;
2794 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2795 AS2 (mov,__zero_reg__,r27) CR_TAB
2796 AS2 (adiw,r26,%o0+1) CR_TAB
2797 AS2 (st,X,__zero_reg__) CR_TAB
2798 AS2 (st,-X,__tmp_reg__) CR_TAB
2799 AS1 (clr,__zero_reg__) CR_TAB
2800 AS2 (sbiw,r26,%o0));
2802 *l = 4;
2803 return (AS2 (adiw,r26,%o0+1) CR_TAB
2804 AS2 (st,X,%B1) CR_TAB
2805 AS2 (st,-X,%A1) CR_TAB
2806 AS2 (sbiw,r26,%o0));
2808 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2809 AS2 (std,%A0,%A1));
2811 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2812 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2813 AS2 (st,%0,%A1));
2814 else if (GET_CODE (base) == POST_INC) /* (R++) */
2816 if (mem_volatile_p)
2818 if (REGNO (XEXP (base, 0)) == REG_X)
2820 *l = 4;
2821 return (AS2 (adiw,r26,1) CR_TAB
2822 AS2 (st,X,%B1) CR_TAB
2823 AS2 (st,-X,%A1) CR_TAB
2824 AS2 (adiw,r26,2));
2826 else
2828 *l = 3;
2829 return (AS2 (std,%p0+1,%B1) CR_TAB
2830 AS2 (st,%p0,%A1) CR_TAB
2831 AS2 (adiw,%r0,2));
2835 *l = 2;
2836 return (AS2 (st,%0,%A1) CR_TAB
2837 AS2 (st,%0,%B1));
2839 fatal_insn ("unknown move insn:",insn);
2840 return "";
2843 /* Return 1 if a frame pointer is required for the current function. */
2846 frame_pointer_required_p (void)
2848 return (cfun->calls_alloca
2849 || crtl->args.info.nregs == 0
2850 || get_frame_size () > 0);
2853 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2855 static RTX_CODE
2856 compare_condition (rtx insn)
2858 rtx next = next_real_insn (insn);
2859 RTX_CODE cond = UNKNOWN;
2860 if (next && GET_CODE (next) == JUMP_INSN)
2862 rtx pat = PATTERN (next);
2863 rtx src = SET_SRC (pat);
2864 rtx t = XEXP (src, 0);
2865 cond = GET_CODE (t);
2867 return cond;
2870 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2872 static int
2873 compare_sign_p (rtx insn)
2875 RTX_CODE cond = compare_condition (insn);
2876 return (cond == GE || cond == LT);
2879 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2880 that needs to be swapped (GT, GTU, LE, LEU). */
2883 compare_diff_p (rtx insn)
2885 RTX_CODE cond = compare_condition (insn);
2886 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2889 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2892 compare_eq_p (rtx insn)
2894 RTX_CODE cond = compare_condition (insn);
2895 return (cond == EQ || cond == NE);
2899 /* Output test instruction for HImode. */
2901 const char *
2902 out_tsthi (rtx insn, int *l)
2904 if (compare_sign_p (insn))
2906 if (l) *l = 1;
2907 return AS1 (tst,%B0);
2909 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2910 && compare_eq_p (insn))
2912 /* Faster than sbiw if we can clobber the operand. */
2913 if (l) *l = 1;
2914 return AS2 (or,%A0,%B0);
2916 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2918 if (l) *l = 1;
2919 return AS2 (sbiw,%0,0);
2921 if (l) *l = 2;
2922 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2923 AS2 (cpc,%B0,__zero_reg__));
2927 /* Output test instruction for SImode. */
2929 const char *
2930 out_tstsi (rtx insn, int *l)
2932 if (compare_sign_p (insn))
2934 if (l) *l = 1;
2935 return AS1 (tst,%D0);
2937 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2939 if (l) *l = 3;
2940 return (AS2 (sbiw,%A0,0) CR_TAB
2941 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2942 AS2 (cpc,%D0,__zero_reg__));
2944 if (l) *l = 4;
2945 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2946 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2947 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2948 AS2 (cpc,%D0,__zero_reg__));
2952 /* Generate asm equivalent for various shifts.
2953 Shift count is a CONST_INT, MEM or REG.
2954 This only handles cases that are not already
2955 carefully hand-optimized in ?sh??i3_out. */
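/* As a rough sketch, for a constant count that is too large to expand
   inline and with a scratch register available, the emitted loop has the
   shape (one of several paths handled below):

       ldi  %3,%2
   1:  <shift template>
       dec  %3
       brne 1b
*/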
2957 void
2958 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2959 int *len, int t_len)
2961 rtx op[10];
2962 char str[500];
2963 int second_label = 1;
2964 int saved_in_tmp = 0;
2965 int use_zero_reg = 0;
2967 op[0] = operands[0];
2968 op[1] = operands[1];
2969 op[2] = operands[2];
2970 op[3] = operands[3];
2971 str[0] = 0;
2973 if (len)
2974 *len = 1;
2976 if (GET_CODE (operands[2]) == CONST_INT)
2978 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2979 int count = INTVAL (operands[2]);
2980 int max_len = 10; /* If larger than this, always use a loop. */
2982 if (count <= 0)
2984 if (len)
2985 *len = 0;
2986 return;
2989 if (count < 8 && !scratch)
2990 use_zero_reg = 1;
2992 if (optimize_size)
2993 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2995 if (t_len * count <= max_len)
2997 /* Output shifts inline with no loop - faster. */
2998 if (len)
2999 *len = t_len * count;
3000 else
3002 while (count-- > 0)
3003 output_asm_insn (templ, op);
3006 return;
3009 if (scratch)
3011 if (!len)
3012 strcat (str, AS2 (ldi,%3,%2));
3014 else if (use_zero_reg)
3016 /* Hack to save one word: use __zero_reg__ as loop counter.
3017 Set one bit, then shift in a loop until it is 0 again. */
3019 op[3] = zero_reg_rtx;
3020 if (len)
3021 *len = 2;
3022 else
3023 strcat (str, ("set" CR_TAB
3024 AS2 (bld,%3,%2-1)));
3026 else
3028 /* No scratch register available, use one from LD_REGS (saved in
3029 __tmp_reg__) that doesn't overlap with registers to shift. */
3031 op[3] = gen_rtx_REG (QImode,
3032 ((true_regnum (operands[0]) - 1) & 15) + 16);
3033 op[4] = tmp_reg_rtx;
3034 saved_in_tmp = 1;
3036 if (len)
3037 *len = 3; /* Includes "mov %3,%4" after the loop. */
3038 else
3039 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3040 AS2 (ldi,%3,%2)));
3043 second_label = 0;
3045 else if (GET_CODE (operands[2]) == MEM)
3047 rtx op_mov[10];
3049 op[3] = op_mov[0] = tmp_reg_rtx;
3050 op_mov[1] = op[2];
3052 if (len)
3053 out_movqi_r_mr (insn, op_mov, len);
3054 else
3055 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3057 else if (register_operand (operands[2], QImode))
3059 if (reg_unused_after (insn, operands[2]))
3060 op[3] = op[2];
3061 else
3063 op[3] = tmp_reg_rtx;
3064 if (!len)
3065 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3068 else
3069 fatal_insn ("bad shift insn:", insn);
3071 if (second_label)
3073 if (len)
3074 ++*len;
3075 else
3076 strcat (str, AS1 (rjmp,2f));
3079 if (len)
3080 *len += t_len + 2; /* template + dec + brXX */
3081 else
3083 strcat (str, "\n1:\t");
3084 strcat (str, templ);
3085 strcat (str, second_label ? "\n2:\t" : "\n\t");
3086 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3087 strcat (str, CR_TAB);
3088 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3089 if (saved_in_tmp)
3090 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3091 output_asm_insn (str, op);
3096 /* 8bit shift left ((char)x << i) */
3098 const char *
3099 ashlqi3_out (rtx insn, rtx operands[], int *len)
3101 if (GET_CODE (operands[2]) == CONST_INT)
3103 int k;
3105 if (!len)
3106 len = &k;
3108 switch (INTVAL (operands[2]))
3110 default:
3111 if (INTVAL (operands[2]) < 8)
3112 break;
3114 *len = 1;
3115 return AS1 (clr,%0);
3117 case 1:
3118 *len = 1;
3119 return AS1 (lsl,%0);
3121 case 2:
3122 *len = 2;
3123 return (AS1 (lsl,%0) CR_TAB
3124 AS1 (lsl,%0));
3126 case 3:
3127 *len = 3;
3128 return (AS1 (lsl,%0) CR_TAB
3129 AS1 (lsl,%0) CR_TAB
3130 AS1 (lsl,%0));
3132 case 4:
3133 if (test_hard_reg_class (LD_REGS, operands[0]))
3135 *len = 2;
3136 return (AS1 (swap,%0) CR_TAB
3137 AS2 (andi,%0,0xf0));
3139 *len = 4;
3140 return (AS1 (lsl,%0) CR_TAB
3141 AS1 (lsl,%0) CR_TAB
3142 AS1 (lsl,%0) CR_TAB
3143 AS1 (lsl,%0));
3145 case 5:
3146 if (test_hard_reg_class (LD_REGS, operands[0]))
3148 *len = 3;
3149 return (AS1 (swap,%0) CR_TAB
3150 AS1 (lsl,%0) CR_TAB
3151 AS2 (andi,%0,0xe0));
3153 *len = 5;
3154 return (AS1 (lsl,%0) CR_TAB
3155 AS1 (lsl,%0) CR_TAB
3156 AS1 (lsl,%0) CR_TAB
3157 AS1 (lsl,%0) CR_TAB
3158 AS1 (lsl,%0));
3160 case 6:
3161 if (test_hard_reg_class (LD_REGS, operands[0]))
3163 *len = 4;
3164 return (AS1 (swap,%0) CR_TAB
3165 AS1 (lsl,%0) CR_TAB
3166 AS1 (lsl,%0) CR_TAB
3167 AS2 (andi,%0,0xc0));
3169 *len = 6;
3170 return (AS1 (lsl,%0) CR_TAB
3171 AS1 (lsl,%0) CR_TAB
3172 AS1 (lsl,%0) CR_TAB
3173 AS1 (lsl,%0) CR_TAB
3174 AS1 (lsl,%0) CR_TAB
3175 AS1 (lsl,%0));
3177 case 7:
3178 *len = 3;
3179 return (AS1 (ror,%0) CR_TAB
3180 AS1 (clr,%0) CR_TAB
3181 AS1 (ror,%0));
3184 else if (CONSTANT_P (operands[2]))
3185 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3187 out_shift_with_cnt (AS1 (lsl,%0),
3188 insn, operands, len, 1);
3189 return "";
3193 /* 16bit shift left ((short)x << i) */
3195 const char *
3196 ashlhi3_out (rtx insn, rtx operands[], int *len)
3198 if (GET_CODE (operands[2]) == CONST_INT)
3200 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3201 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3202 int k;
3203 int *t = len;
3205 if (!len)
3206 len = &k;
3208 switch (INTVAL (operands[2]))
3210 default:
3211 if (INTVAL (operands[2]) < 16)
3212 break;
3214 *len = 2;
3215 return (AS1 (clr,%B0) CR_TAB
3216 AS1 (clr,%A0));
3218 case 4:
3219 if (optimize_size && scratch)
3220 break; /* 5 */
3221 if (ldi_ok)
3223 *len = 6;
3224 return (AS1 (swap,%A0) CR_TAB
3225 AS1 (swap,%B0) CR_TAB
3226 AS2 (andi,%B0,0xf0) CR_TAB
3227 AS2 (eor,%B0,%A0) CR_TAB
3228 AS2 (andi,%A0,0xf0) CR_TAB
3229 AS2 (eor,%B0,%A0));
3231 if (scratch)
3233 *len = 7;
3234 return (AS1 (swap,%A0) CR_TAB
3235 AS1 (swap,%B0) CR_TAB
3236 AS2 (ldi,%3,0xf0) CR_TAB
3237 AS2 (and,%B0,%3) CR_TAB
3238 AS2 (eor,%B0,%A0) CR_TAB
3239 AS2 (and,%A0,%3) CR_TAB
3240 AS2 (eor,%B0,%A0));
3242 break; /* optimize_size ? 6 : 8 */
3244 case 5:
3245 if (optimize_size)
3246 break; /* scratch ? 5 : 6 */
3247 if (ldi_ok)
3249 *len = 8;
3250 return (AS1 (lsl,%A0) CR_TAB
3251 AS1 (rol,%B0) CR_TAB
3252 AS1 (swap,%A0) CR_TAB
3253 AS1 (swap,%B0) CR_TAB
3254 AS2 (andi,%B0,0xf0) CR_TAB
3255 AS2 (eor,%B0,%A0) CR_TAB
3256 AS2 (andi,%A0,0xf0) CR_TAB
3257 AS2 (eor,%B0,%A0));
3259 if (scratch)
3261 *len = 9;
3262 return (AS1 (lsl,%A0) CR_TAB
3263 AS1 (rol,%B0) CR_TAB
3264 AS1 (swap,%A0) CR_TAB
3265 AS1 (swap,%B0) CR_TAB
3266 AS2 (ldi,%3,0xf0) CR_TAB
3267 AS2 (and,%B0,%3) CR_TAB
3268 AS2 (eor,%B0,%A0) CR_TAB
3269 AS2 (and,%A0,%3) CR_TAB
3270 AS2 (eor,%B0,%A0));
3272 break; /* 10 */
3274 case 6:
3275 if (optimize_size)
3276 break; /* scratch ? 5 : 6 */
3277 *len = 9;
3278 return (AS1 (clr,__tmp_reg__) CR_TAB
3279 AS1 (lsr,%B0) CR_TAB
3280 AS1 (ror,%A0) CR_TAB
3281 AS1 (ror,__tmp_reg__) CR_TAB
3282 AS1 (lsr,%B0) CR_TAB
3283 AS1 (ror,%A0) CR_TAB
3284 AS1 (ror,__tmp_reg__) CR_TAB
3285 AS2 (mov,%B0,%A0) CR_TAB
3286 AS2 (mov,%A0,__tmp_reg__));
3288 case 7:
3289 *len = 5;
3290 return (AS1 (lsr,%B0) CR_TAB
3291 AS2 (mov,%B0,%A0) CR_TAB
3292 AS1 (clr,%A0) CR_TAB
3293 AS1 (ror,%B0) CR_TAB
3294 AS1 (ror,%A0));
3296 case 8:
3297 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3298 AS1 (clr,%A0));
3300 case 9:
3301 *len = 3;
3302 return (AS2 (mov,%B0,%A0) CR_TAB
3303 AS1 (clr,%A0) CR_TAB
3304 AS1 (lsl,%B0));
3306 case 10:
3307 *len = 4;
3308 return (AS2 (mov,%B0,%A0) CR_TAB
3309 AS1 (clr,%A0) CR_TAB
3310 AS1 (lsl,%B0) CR_TAB
3311 AS1 (lsl,%B0));
3313 case 11:
3314 *len = 5;
3315 return (AS2 (mov,%B0,%A0) CR_TAB
3316 AS1 (clr,%A0) CR_TAB
3317 AS1 (lsl,%B0) CR_TAB
3318 AS1 (lsl,%B0) CR_TAB
3319 AS1 (lsl,%B0));
3321 case 12:
3322 if (ldi_ok)
3324 *len = 4;
3325 return (AS2 (mov,%B0,%A0) CR_TAB
3326 AS1 (clr,%A0) CR_TAB
3327 AS1 (swap,%B0) CR_TAB
3328 AS2 (andi,%B0,0xf0));
3330 if (scratch)
3332 *len = 5;
3333 return (AS2 (mov,%B0,%A0) CR_TAB
3334 AS1 (clr,%A0) CR_TAB
3335 AS1 (swap,%B0) CR_TAB
3336 AS2 (ldi,%3,0xf0) CR_TAB
3337 AS2 (and,%B0,%3));
3339 *len = 6;
3340 return (AS2 (mov,%B0,%A0) CR_TAB
3341 AS1 (clr,%A0) CR_TAB
3342 AS1 (lsl,%B0) CR_TAB
3343 AS1 (lsl,%B0) CR_TAB
3344 AS1 (lsl,%B0) CR_TAB
3345 AS1 (lsl,%B0));
3347 case 13:
3348 if (ldi_ok)
3350 *len = 5;
3351 return (AS2 (mov,%B0,%A0) CR_TAB
3352 AS1 (clr,%A0) CR_TAB
3353 AS1 (swap,%B0) CR_TAB
3354 AS1 (lsl,%B0) CR_TAB
3355 AS2 (andi,%B0,0xe0));
3357 if (AVR_HAVE_MUL && scratch)
3359 *len = 5;
3360 return (AS2 (ldi,%3,0x20) CR_TAB
3361 AS2 (mul,%A0,%3) CR_TAB
3362 AS2 (mov,%B0,r0) CR_TAB
3363 AS1 (clr,%A0) CR_TAB
3364 AS1 (clr,__zero_reg__));
3366 if (optimize_size && scratch)
3367 break; /* 5 */
3368 if (scratch)
3370 *len = 6;
3371 return (AS2 (mov,%B0,%A0) CR_TAB
3372 AS1 (clr,%A0) CR_TAB
3373 AS1 (swap,%B0) CR_TAB
3374 AS1 (lsl,%B0) CR_TAB
3375 AS2 (ldi,%3,0xe0) CR_TAB
3376 AS2 (and,%B0,%3));
3378 if (AVR_HAVE_MUL)
3380 *len = 6;
3381 return ("set" CR_TAB
3382 AS2 (bld,r1,5) CR_TAB
3383 AS2 (mul,%A0,r1) CR_TAB
3384 AS2 (mov,%B0,r0) CR_TAB
3385 AS1 (clr,%A0) CR_TAB
3386 AS1 (clr,__zero_reg__));
3388 *len = 7;
3389 return (AS2 (mov,%B0,%A0) CR_TAB
3390 AS1 (clr,%A0) CR_TAB
3391 AS1 (lsl,%B0) CR_TAB
3392 AS1 (lsl,%B0) CR_TAB
3393 AS1 (lsl,%B0) CR_TAB
3394 AS1 (lsl,%B0) CR_TAB
3395 AS1 (lsl,%B0));
3397 case 14:
3398 if (AVR_HAVE_MUL && ldi_ok)
3400 *len = 5;
3401 return (AS2 (ldi,%B0,0x40) CR_TAB
3402 AS2 (mul,%A0,%B0) CR_TAB
3403 AS2 (mov,%B0,r0) CR_TAB
3404 AS1 (clr,%A0) CR_TAB
3405 AS1 (clr,__zero_reg__));
3407 if (AVR_HAVE_MUL && scratch)
3409 *len = 5;
3410 return (AS2 (ldi,%3,0x40) CR_TAB
3411 AS2 (mul,%A0,%3) CR_TAB
3412 AS2 (mov,%B0,r0) CR_TAB
3413 AS1 (clr,%A0) CR_TAB
3414 AS1 (clr,__zero_reg__));
3416 if (optimize_size && ldi_ok)
3418 *len = 5;
3419 return (AS2 (mov,%B0,%A0) CR_TAB
3420 AS2 (ldi,%A0,6) "\n1:\t"
3421 AS1 (lsl,%B0) CR_TAB
3422 AS1 (dec,%A0) CR_TAB
3423 AS1 (brne,1b));
3425 if (optimize_size && scratch)
3426 break; /* 5 */
3427 *len = 6;
3428 return (AS1 (clr,%B0) CR_TAB
3429 AS1 (lsr,%A0) CR_TAB
3430 AS1 (ror,%B0) CR_TAB
3431 AS1 (lsr,%A0) CR_TAB
3432 AS1 (ror,%B0) CR_TAB
3433 AS1 (clr,%A0));
3435 case 15:
3436 *len = 4;
3437 return (AS1 (clr,%B0) CR_TAB
3438 AS1 (lsr,%A0) CR_TAB
3439 AS1 (ror,%B0) CR_TAB
3440 AS1 (clr,%A0));
3442 len = t;
3444 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3445 AS1 (rol,%B0)),
3446 insn, operands, len, 2);
3447 return "";
3451 /* 32bit shift left ((long)x << i) */
3453 const char *
3454 ashlsi3_out (rtx insn, rtx operands[], int *len)
3456 if (GET_CODE (operands[2]) == CONST_INT)
3458 int k;
3459 int *t = len;
3461 if (!len)
3462 len = &k;
3464 switch (INTVAL (operands[2]))
3466 default:
3467 if (INTVAL (operands[2]) < 32)
3468 break;
3470 if (AVR_HAVE_MOVW)
3471 return *len = 3, (AS1 (clr,%D0) CR_TAB
3472 AS1 (clr,%C0) CR_TAB
3473 AS2 (movw,%A0,%C0));
3474 *len = 4;
3475 return (AS1 (clr,%D0) CR_TAB
3476 AS1 (clr,%C0) CR_TAB
3477 AS1 (clr,%B0) CR_TAB
3478 AS1 (clr,%A0));
3480 case 8:
3482 int reg0 = true_regnum (operands[0]);
3483 int reg1 = true_regnum (operands[1]);
3484 *len = 4;
3485 if (reg0 >= reg1)
3486 return (AS2 (mov,%D0,%C1) CR_TAB
3487 AS2 (mov,%C0,%B1) CR_TAB
3488 AS2 (mov,%B0,%A1) CR_TAB
3489 AS1 (clr,%A0));
3490 else
3491 return (AS1 (clr,%A0) CR_TAB
3492 AS2 (mov,%B0,%A1) CR_TAB
3493 AS2 (mov,%C0,%B1) CR_TAB
3494 AS2 (mov,%D0,%C1));
3497 case 16:
3499 int reg0 = true_regnum (operands[0]);
3500 int reg1 = true_regnum (operands[1]);
3501 if (reg0 + 2 == reg1)
3502 return *len = 2, (AS1 (clr,%B0) CR_TAB
3503 AS1 (clr,%A0));
3504 if (AVR_HAVE_MOVW)
3505 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3506 AS1 (clr,%B0) CR_TAB
3507 AS1 (clr,%A0));
3508 else
3509 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3510 AS2 (mov,%D0,%B1) CR_TAB
3511 AS1 (clr,%B0) CR_TAB
3512 AS1 (clr,%A0));
3515 case 24:
3516 *len = 4;
3517 return (AS2 (mov,%D0,%A1) CR_TAB
3518 AS1 (clr,%C0) CR_TAB
3519 AS1 (clr,%B0) CR_TAB
3520 AS1 (clr,%A0));
3522 case 31:
3523 *len = 6;
3524 return (AS1 (clr,%D0) CR_TAB
3525 AS1 (lsr,%A0) CR_TAB
3526 AS1 (ror,%D0) CR_TAB
3527 AS1 (clr,%C0) CR_TAB
3528 AS1 (clr,%B0) CR_TAB
3529 AS1 (clr,%A0));
3531 len = t;
3533 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3534 AS1 (rol,%B0) CR_TAB
3535 AS1 (rol,%C0) CR_TAB
3536 AS1 (rol,%D0)),
3537 insn, operands, len, 4);
3538 return "";
3541 /* 8bit arithmetic shift right ((signed char)x >> i) */
3543 const char *
3544 ashrqi3_out (rtx insn, rtx operands[], int *len)
3546 if (GET_CODE (operands[2]) == CONST_INT)
3548 int k;
3550 if (!len)
3551 len = &k;
3553 switch (INTVAL (operands[2]))
3555 case 1:
3556 *len = 1;
3557 return AS1 (asr,%0);
3559 case 2:
3560 *len = 2;
3561 return (AS1 (asr,%0) CR_TAB
3562 AS1 (asr,%0));
3564 case 3:
3565 *len = 3;
3566 return (AS1 (asr,%0) CR_TAB
3567 AS1 (asr,%0) CR_TAB
3568 AS1 (asr,%0));
3570 case 4:
3571 *len = 4;
3572 return (AS1 (asr,%0) CR_TAB
3573 AS1 (asr,%0) CR_TAB
3574 AS1 (asr,%0) CR_TAB
3575 AS1 (asr,%0));
3577 case 5:
3578 *len = 5;
3579 return (AS1 (asr,%0) CR_TAB
3580 AS1 (asr,%0) CR_TAB
3581 AS1 (asr,%0) CR_TAB
3582 AS1 (asr,%0) CR_TAB
3583 AS1 (asr,%0));
3585 case 6:
3586 *len = 4;
3587 return (AS2 (bst,%0,6) CR_TAB
3588 AS1 (lsl,%0) CR_TAB
3589 AS2 (sbc,%0,%0) CR_TAB
3590 AS2 (bld,%0,0));
3592 default:
3593 if (INTVAL (operands[2]) < 8)
3594 break;
3596 /* fall through */
3598 case 7:
3599 *len = 2;
3600 return (AS1 (lsl,%0) CR_TAB
3601 AS2 (sbc,%0,%0));
3604 else if (CONSTANT_P (operands[2]))
3605 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3607 out_shift_with_cnt (AS1 (asr,%0),
3608 insn, operands, len, 1);
3609 return "";
3613 /* 16bit arithmetic shift right ((signed short)x >> i) */
3615 const char *
3616 ashrhi3_out (rtx insn, rtx operands[], int *len)
3618 if (GET_CODE (operands[2]) == CONST_INT)
3620 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3621 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3622 int k;
3623 int *t = len;
3625 if (!len)
3626 len = &k;
3628 switch (INTVAL (operands[2]))
3630 case 4:
3631 case 5:
3632 /* XXX try to optimize this too? */
3633 break;
3635 case 6:
3636 if (optimize_size)
3637 break; /* scratch ? 5 : 6 */
3638 *len = 8;
3639 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3640 AS2 (mov,%A0,%B0) CR_TAB
3641 AS1 (lsl,__tmp_reg__) CR_TAB
3642 AS1 (rol,%A0) CR_TAB
3643 AS2 (sbc,%B0,%B0) CR_TAB
3644 AS1 (lsl,__tmp_reg__) CR_TAB
3645 AS1 (rol,%A0) CR_TAB
3646 AS1 (rol,%B0));
3648 case 7:
3649 *len = 4;
3650 return (AS1 (lsl,%A0) CR_TAB
3651 AS2 (mov,%A0,%B0) CR_TAB
3652 AS1 (rol,%A0) CR_TAB
3653 AS2 (sbc,%B0,%B0));
3655 case 8:
3657 int reg0 = true_regnum (operands[0]);
3658 int reg1 = true_regnum (operands[1]);
3660 if (reg0 == reg1)
3661 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3662 AS1 (lsl,%B0) CR_TAB
3663 AS2 (sbc,%B0,%B0));
3664 else
3665 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3666 AS1 (clr,%B0) CR_TAB
3667 AS2 (sbrc,%A0,7) CR_TAB
3668 AS1 (dec,%B0));
3671 case 9:
3672 *len = 4;
3673 return (AS2 (mov,%A0,%B0) CR_TAB
3674 AS1 (lsl,%B0) CR_TAB
3675 AS2 (sbc,%B0,%B0) CR_TAB
3676 AS1 (asr,%A0));
3678 case 10:
3679 *len = 5;
3680 return (AS2 (mov,%A0,%B0) CR_TAB
3681 AS1 (lsl,%B0) CR_TAB
3682 AS2 (sbc,%B0,%B0) CR_TAB
3683 AS1 (asr,%A0) CR_TAB
3684 AS1 (asr,%A0));
3686 case 11:
3687 if (AVR_HAVE_MUL && ldi_ok)
3689 *len = 5;
3690 return (AS2 (ldi,%A0,0x20) CR_TAB
3691 AS2 (muls,%B0,%A0) CR_TAB
3692 AS2 (mov,%A0,r1) CR_TAB
3693 AS2 (sbc,%B0,%B0) CR_TAB
3694 AS1 (clr,__zero_reg__));
3696 if (optimize_size && scratch)
3697 break; /* 5 */
3698 *len = 6;
3699 return (AS2 (mov,%A0,%B0) CR_TAB
3700 AS1 (lsl,%B0) CR_TAB
3701 AS2 (sbc,%B0,%B0) CR_TAB
3702 AS1 (asr,%A0) CR_TAB
3703 AS1 (asr,%A0) CR_TAB
3704 AS1 (asr,%A0));
3706 case 12:
3707 if (AVR_HAVE_MUL && ldi_ok)
3709 *len = 5;
3710 return (AS2 (ldi,%A0,0x10) CR_TAB
3711 AS2 (muls,%B0,%A0) CR_TAB
3712 AS2 (mov,%A0,r1) CR_TAB
3713 AS2 (sbc,%B0,%B0) CR_TAB
3714 AS1 (clr,__zero_reg__));
3716 if (optimize_size && scratch)
3717 break; /* 5 */
3718 *len = 7;
3719 return (AS2 (mov,%A0,%B0) CR_TAB
3720 AS1 (lsl,%B0) CR_TAB
3721 AS2 (sbc,%B0,%B0) CR_TAB
3722 AS1 (asr,%A0) CR_TAB
3723 AS1 (asr,%A0) CR_TAB
3724 AS1 (asr,%A0) CR_TAB
3725 AS1 (asr,%A0));
3727 case 13:
3728 if (AVR_HAVE_MUL && ldi_ok)
3730 *len = 5;
3731 return (AS2 (ldi,%A0,0x08) CR_TAB
3732 AS2 (muls,%B0,%A0) CR_TAB
3733 AS2 (mov,%A0,r1) CR_TAB
3734 AS2 (sbc,%B0,%B0) CR_TAB
3735 AS1 (clr,__zero_reg__));
3737 if (optimize_size)
3738 break; /* scratch ? 5 : 7 */
3739 *len = 8;
3740 return (AS2 (mov,%A0,%B0) CR_TAB
3741 AS1 (lsl,%B0) CR_TAB
3742 AS2 (sbc,%B0,%B0) CR_TAB
3743 AS1 (asr,%A0) CR_TAB
3744 AS1 (asr,%A0) CR_TAB
3745 AS1 (asr,%A0) CR_TAB
3746 AS1 (asr,%A0) CR_TAB
3747 AS1 (asr,%A0));
3749 case 14:
3750 *len = 5;
3751 return (AS1 (lsl,%B0) CR_TAB
3752 AS2 (sbc,%A0,%A0) CR_TAB
3753 AS1 (lsl,%B0) CR_TAB
3754 AS2 (mov,%B0,%A0) CR_TAB
3755 AS1 (rol,%A0));
3757 default:
3758 if (INTVAL (operands[2]) < 16)
3759 break;
3761 /* fall through */
3763 case 15:
3764 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3765 AS2 (sbc,%A0,%A0) CR_TAB
3766 AS2 (mov,%B0,%A0));
3768 len = t;
3770 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3771 AS1 (ror,%A0)),
3772 insn, operands, len, 2);
3773 return "";
3777 /* 32bit arithmetic shift right ((signed long)x >> i) */
3779 const char *
3780 ashrsi3_out (rtx insn, rtx operands[], int *len)
3782 if (GET_CODE (operands[2]) == CONST_INT)
3784 int k;
3785 int *t = len;
3787 if (!len)
3788 len = &k;
3790 switch (INTVAL (operands[2]))
3792 case 8:
3794 int reg0 = true_regnum (operands[0]);
3795 int reg1 = true_regnum (operands[1]);
3796 *len=6;
3797 if (reg0 <= reg1)
3798 return (AS2 (mov,%A0,%B1) CR_TAB
3799 AS2 (mov,%B0,%C1) CR_TAB
3800 AS2 (mov,%C0,%D1) CR_TAB
3801 AS1 (clr,%D0) CR_TAB
3802 AS2 (sbrc,%C0,7) CR_TAB
3803 AS1 (dec,%D0));
3804 else
3805 return (AS1 (clr,%D0) CR_TAB
3806 AS2 (sbrc,%D1,7) CR_TAB
3807 AS1 (dec,%D0) CR_TAB
3808 AS2 (mov,%C0,%D1) CR_TAB
3809 AS2 (mov,%B0,%C1) CR_TAB
3810 AS2 (mov,%A0,%B1));
3813 case 16:
3815 int reg0 = true_regnum (operands[0]);
3816 int reg1 = true_regnum (operands[1]);
3818 if (reg0 == reg1 + 2)
3819 return *len = 4, (AS1 (clr,%D0) CR_TAB
3820 AS2 (sbrc,%B0,7) CR_TAB
3821 AS1 (com,%D0) CR_TAB
3822 AS2 (mov,%C0,%D0));
3823 if (AVR_HAVE_MOVW)
3824 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3825 AS1 (clr,%D0) CR_TAB
3826 AS2 (sbrc,%B0,7) CR_TAB
3827 AS1 (com,%D0) CR_TAB
3828 AS2 (mov,%C0,%D0));
3829 else
3830 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3831 AS2 (mov,%A0,%C1) CR_TAB
3832 AS1 (clr,%D0) CR_TAB
3833 AS2 (sbrc,%B0,7) CR_TAB
3834 AS1 (com,%D0) CR_TAB
3835 AS2 (mov,%C0,%D0));
3838 case 24:
3839 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3840 AS1 (clr,%D0) CR_TAB
3841 AS2 (sbrc,%A0,7) CR_TAB
3842 AS1 (com,%D0) CR_TAB
3843 AS2 (mov,%B0,%D0) CR_TAB
3844 AS2 (mov,%C0,%D0));
3846 default:
3847 if (INTVAL (operands[2]) < 32)
3848 break;
3850 /* fall through */
3852 case 31:
3853 if (AVR_HAVE_MOVW)
3854 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3855 AS2 (sbc,%A0,%A0) CR_TAB
3856 AS2 (mov,%B0,%A0) CR_TAB
3857 AS2 (movw,%C0,%A0));
3858 else
3859 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3860 AS2 (sbc,%A0,%A0) CR_TAB
3861 AS2 (mov,%B0,%A0) CR_TAB
3862 AS2 (mov,%C0,%A0) CR_TAB
3863 AS2 (mov,%D0,%A0));
3865 len = t;
3867 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3868 AS1 (ror,%C0) CR_TAB
3869 AS1 (ror,%B0) CR_TAB
3870 AS1 (ror,%A0)),
3871 insn, operands, len, 4);
3872 return "";
3875 /* 8bit logical shift right ((unsigned char)x >> i) */
3877 const char *
3878 lshrqi3_out (rtx insn, rtx operands[], int *len)
3880 if (GET_CODE (operands[2]) == CONST_INT)
3882 int k;
3884 if (!len)
3885 len = &k;
3887 switch (INTVAL (operands[2]))
3889 default:
3890 if (INTVAL (operands[2]) < 8)
3891 break;
3893 *len = 1;
3894 return AS1 (clr,%0);
3896 case 1:
3897 *len = 1;
3898 return AS1 (lsr,%0);
3900 case 2:
3901 *len = 2;
3902 return (AS1 (lsr,%0) CR_TAB
3903 AS1 (lsr,%0));
3904 case 3:
3905 *len = 3;
3906 return (AS1 (lsr,%0) CR_TAB
3907 AS1 (lsr,%0) CR_TAB
3908 AS1 (lsr,%0));
3910 case 4:
3911 if (test_hard_reg_class (LD_REGS, operands[0]))
3913 *len=2;
3914 return (AS1 (swap,%0) CR_TAB
3915 AS2 (andi,%0,0x0f));
3917 *len = 4;
3918 return (AS1 (lsr,%0) CR_TAB
3919 AS1 (lsr,%0) CR_TAB
3920 AS1 (lsr,%0) CR_TAB
3921 AS1 (lsr,%0));
3923 case 5:
3924 if (test_hard_reg_class (LD_REGS, operands[0]))
3926 *len = 3;
3927 return (AS1 (swap,%0) CR_TAB
3928 AS1 (lsr,%0) CR_TAB
3929 AS2 (andi,%0,0x7));
3931 *len = 5;
3932 return (AS1 (lsr,%0) CR_TAB
3933 AS1 (lsr,%0) CR_TAB
3934 AS1 (lsr,%0) CR_TAB
3935 AS1 (lsr,%0) CR_TAB
3936 AS1 (lsr,%0));
3938 case 6:
3939 if (test_hard_reg_class (LD_REGS, operands[0]))
3941 *len = 4;
3942 return (AS1 (swap,%0) CR_TAB
3943 AS1 (lsr,%0) CR_TAB
3944 AS1 (lsr,%0) CR_TAB
3945 AS2 (andi,%0,0x3));
3947 *len = 6;
3948 return (AS1 (lsr,%0) CR_TAB
3949 AS1 (lsr,%0) CR_TAB
3950 AS1 (lsr,%0) CR_TAB
3951 AS1 (lsr,%0) CR_TAB
3952 AS1 (lsr,%0) CR_TAB
3953 AS1 (lsr,%0));
3955 case 7:
3956 *len = 3;
3957 return (AS1 (rol,%0) CR_TAB
3958 AS1 (clr,%0) CR_TAB
3959 AS1 (rol,%0));
3962 else if (CONSTANT_P (operands[2]))
3963 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3965 out_shift_with_cnt (AS1 (lsr,%0),
3966 insn, operands, len, 1);
3967 return "";
3970 /* 16bit logical shift right ((unsigned short)x >> i) */
3972 const char *
3973 lshrhi3_out (rtx insn, rtx operands[], int *len)
3975 if (GET_CODE (operands[2]) == CONST_INT)
3977 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3978 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3979 int k;
3980 int *t = len;
3982 if (!len)
3983 len = &k;
3985 switch (INTVAL (operands[2]))
3987 default:
3988 if (INTVAL (operands[2]) < 16)
3989 break;
3991 *len = 2;
3992 return (AS1 (clr,%B0) CR_TAB
3993 AS1 (clr,%A0));
3995 case 4:
3996 if (optimize_size && scratch)
3997 break; /* 5 */
3998 if (ldi_ok)
4000 *len = 6;
4001 return (AS1 (swap,%B0) CR_TAB
4002 AS1 (swap,%A0) CR_TAB
4003 AS2 (andi,%A0,0x0f) CR_TAB
4004 AS2 (eor,%A0,%B0) CR_TAB
4005 AS2 (andi,%B0,0x0f) CR_TAB
4006 AS2 (eor,%A0,%B0));
4008 if (scratch)
4010 *len = 7;
4011 return (AS1 (swap,%B0) CR_TAB
4012 AS1 (swap,%A0) CR_TAB
4013 AS2 (ldi,%3,0x0f) CR_TAB
4014 AS2 (and,%A0,%3) CR_TAB
4015 AS2 (eor,%A0,%B0) CR_TAB
4016 AS2 (and,%B0,%3) CR_TAB
4017 AS2 (eor,%A0,%B0));
4019 break; /* optimize_size ? 6 : 8 */
4021 case 5:
4022 if (optimize_size)
4023 break; /* scratch ? 5 : 6 */
4024 if (ldi_ok)
4026 *len = 8;
4027 return (AS1 (lsr,%B0) CR_TAB
4028 AS1 (ror,%A0) CR_TAB
4029 AS1 (swap,%B0) CR_TAB
4030 AS1 (swap,%A0) CR_TAB
4031 AS2 (andi,%A0,0x0f) CR_TAB
4032 AS2 (eor,%A0,%B0) CR_TAB
4033 AS2 (andi,%B0,0x0f) CR_TAB
4034 AS2 (eor,%A0,%B0));
4036 if (scratch)
4038 *len = 9;
4039 return (AS1 (lsr,%B0) CR_TAB
4040 AS1 (ror,%A0) CR_TAB
4041 AS1 (swap,%B0) CR_TAB
4042 AS1 (swap,%A0) CR_TAB
4043 AS2 (ldi,%3,0x0f) CR_TAB
4044 AS2 (and,%A0,%3) CR_TAB
4045 AS2 (eor,%A0,%B0) CR_TAB
4046 AS2 (and,%B0,%3) CR_TAB
4047 AS2 (eor,%A0,%B0));
4049 break; /* 10 */
4051 case 6:
4052 if (optimize_size)
4053 break; /* scratch ? 5 : 6 */
4054 *len = 9;
4055 return (AS1 (clr,__tmp_reg__) CR_TAB
4056 AS1 (lsl,%A0) CR_TAB
4057 AS1 (rol,%B0) CR_TAB
4058 AS1 (rol,__tmp_reg__) CR_TAB
4059 AS1 (lsl,%A0) CR_TAB
4060 AS1 (rol,%B0) CR_TAB
4061 AS1 (rol,__tmp_reg__) CR_TAB
4062 AS2 (mov,%A0,%B0) CR_TAB
4063 AS2 (mov,%B0,__tmp_reg__));
4065 case 7:
4066 *len = 5;
4067 return (AS1 (lsl,%A0) CR_TAB
4068 AS2 (mov,%A0,%B0) CR_TAB
4069 AS1 (rol,%A0) CR_TAB
4070 AS2 (sbc,%B0,%B0) CR_TAB
4071 AS1 (neg,%B0));
4073 case 8:
4074 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4075 AS1 (clr,%B0));
4077 case 9:
4078 *len = 3;
4079 return (AS2 (mov,%A0,%B0) CR_TAB
4080 AS1 (clr,%B0) CR_TAB
4081 AS1 (lsr,%A0));
4083 case 10:
4084 *len = 4;
4085 return (AS2 (mov,%A0,%B0) CR_TAB
4086 AS1 (clr,%B0) CR_TAB
4087 AS1 (lsr,%A0) CR_TAB
4088 AS1 (lsr,%A0));
4090 case 11:
4091 *len = 5;
4092 return (AS2 (mov,%A0,%B0) CR_TAB
4093 AS1 (clr,%B0) CR_TAB
4094 AS1 (lsr,%A0) CR_TAB
4095 AS1 (lsr,%A0) CR_TAB
4096 AS1 (lsr,%A0));
4098 case 12:
4099 if (ldi_ok)
4101 *len = 4;
4102 return (AS2 (mov,%A0,%B0) CR_TAB
4103 AS1 (clr,%B0) CR_TAB
4104 AS1 (swap,%A0) CR_TAB
4105 AS2 (andi,%A0,0x0f));
4107 if (scratch)
4109 *len = 5;
4110 return (AS2 (mov,%A0,%B0) CR_TAB
4111 AS1 (clr,%B0) CR_TAB
4112 AS1 (swap,%A0) CR_TAB
4113 AS2 (ldi,%3,0x0f) CR_TAB
4114 AS2 (and,%A0,%3));
4116 *len = 6;
4117 return (AS2 (mov,%A0,%B0) CR_TAB
4118 AS1 (clr,%B0) CR_TAB
4119 AS1 (lsr,%A0) CR_TAB
4120 AS1 (lsr,%A0) CR_TAB
4121 AS1 (lsr,%A0) CR_TAB
4122 AS1 (lsr,%A0));
4124 case 13:
4125 if (ldi_ok)
4127 *len = 5;
4128 return (AS2 (mov,%A0,%B0) CR_TAB
4129 AS1 (clr,%B0) CR_TAB
4130 AS1 (swap,%A0) CR_TAB
4131 AS1 (lsr,%A0) CR_TAB
4132 AS2 (andi,%A0,0x07));
4134 if (AVR_HAVE_MUL && scratch)
4136 *len = 5;
4137 return (AS2 (ldi,%3,0x08) CR_TAB
4138 AS2 (mul,%B0,%3) CR_TAB
4139 AS2 (mov,%A0,r1) CR_TAB
4140 AS1 (clr,%B0) CR_TAB
4141 AS1 (clr,__zero_reg__));
4143 if (optimize_size && scratch)
4144 break; /* 5 */
4145 if (scratch)
4147 *len = 6;
4148 return (AS2 (mov,%A0,%B0) CR_TAB
4149 AS1 (clr,%B0) CR_TAB
4150 AS1 (swap,%A0) CR_TAB
4151 AS1 (lsr,%A0) CR_TAB
4152 AS2 (ldi,%3,0x07) CR_TAB
4153 AS2 (and,%A0,%3));
4155 if (AVR_HAVE_MUL)
4157 *len = 6;
4158 return ("set" CR_TAB
4159 AS2 (bld,r1,3) CR_TAB
4160 AS2 (mul,%B0,r1) CR_TAB
4161 AS2 (mov,%A0,r1) CR_TAB
4162 AS1 (clr,%B0) CR_TAB
4163 AS1 (clr,__zero_reg__));
4165 *len = 7;
4166 return (AS2 (mov,%A0,%B0) CR_TAB
4167 AS1 (clr,%B0) CR_TAB
4168 AS1 (lsr,%A0) CR_TAB
4169 AS1 (lsr,%A0) CR_TAB
4170 AS1 (lsr,%A0) CR_TAB
4171 AS1 (lsr,%A0) CR_TAB
4172 AS1 (lsr,%A0));
4174 case 14:
4175 if (AVR_HAVE_MUL && ldi_ok)
4177 *len = 5;
4178 return (AS2 (ldi,%A0,0x04) CR_TAB
4179 AS2 (mul,%B0,%A0) CR_TAB
4180 AS2 (mov,%A0,r1) CR_TAB
4181 AS1 (clr,%B0) CR_TAB
4182 AS1 (clr,__zero_reg__));
4184 if (AVR_HAVE_MUL && scratch)
4186 *len = 5;
4187 return (AS2 (ldi,%3,0x04) CR_TAB
4188 AS2 (mul,%B0,%3) CR_TAB
4189 AS2 (mov,%A0,r1) CR_TAB
4190 AS1 (clr,%B0) CR_TAB
4191 AS1 (clr,__zero_reg__));
4193 if (optimize_size && ldi_ok)
4195 *len = 5;
4196 return (AS2 (mov,%A0,%B0) CR_TAB
4197 AS2 (ldi,%B0,6) "\n1:\t"
4198 AS1 (lsr,%A0) CR_TAB
4199 AS1 (dec,%B0) CR_TAB
4200 AS1 (brne,1b));
4202 if (optimize_size && scratch)
4203 break; /* 5 */
4204 *len = 6;
4205 return (AS1 (clr,%A0) CR_TAB
4206 AS1 (lsl,%B0) CR_TAB
4207 AS1 (rol,%A0) CR_TAB
4208 AS1 (lsl,%B0) CR_TAB
4209 AS1 (rol,%A0) CR_TAB
4210 AS1 (clr,%B0));
4212 case 15:
4213 *len = 4;
4214 return (AS1 (clr,%A0) CR_TAB
4215 AS1 (lsl,%B0) CR_TAB
4216 AS1 (rol,%A0) CR_TAB
4217 AS1 (clr,%B0));
4219 len = t;
4221 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4222 AS1 (ror,%A0)),
4223 insn, operands, len, 2);
4224 return "";
4227 /* 32bit logical shift right ((unsigned long)x >> i) */
4229 const char *
4230 lshrsi3_out (rtx insn, rtx operands[], int *len)
4232 if (GET_CODE (operands[2]) == CONST_INT)
4234 int k;
4235 int *t = len;
4237 if (!len)
4238 len = &k;
4240 switch (INTVAL (operands[2]))
4242 default:
4243 if (INTVAL (operands[2]) < 32)
4244 break;
4246 if (AVR_HAVE_MOVW)
4247 return *len = 3, (AS1 (clr,%D0) CR_TAB
4248 AS1 (clr,%C0) CR_TAB
4249 AS2 (movw,%A0,%C0));
4250 *len = 4;
4251 return (AS1 (clr,%D0) CR_TAB
4252 AS1 (clr,%C0) CR_TAB
4253 AS1 (clr,%B0) CR_TAB
4254 AS1 (clr,%A0));
4256 case 8:
4258 int reg0 = true_regnum (operands[0]);
4259 int reg1 = true_regnum (operands[1]);
4260 *len = 4;
4261 if (reg0 <= reg1)
4262 return (AS2 (mov,%A0,%B1) CR_TAB
4263 AS2 (mov,%B0,%C1) CR_TAB
4264 AS2 (mov,%C0,%D1) CR_TAB
4265 AS1 (clr,%D0));
4266 else
4267 return (AS1 (clr,%D0) CR_TAB
4268 AS2 (mov,%C0,%D1) CR_TAB
4269 AS2 (mov,%B0,%C1) CR_TAB
4270 AS2 (mov,%A0,%B1));
4273 case 16:
4275 int reg0 = true_regnum (operands[0]);
4276 int reg1 = true_regnum (operands[1]);
4278 if (reg0 == reg1 + 2)
4279 return *len = 2, (AS1 (clr,%C0) CR_TAB
4280 AS1 (clr,%D0));
4281 if (AVR_HAVE_MOVW)
4282 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4283 AS1 (clr,%C0) CR_TAB
4284 AS1 (clr,%D0));
4285 else
4286 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4287 AS2 (mov,%A0,%C1) CR_TAB
4288 AS1 (clr,%C0) CR_TAB
4289 AS1 (clr,%D0));
4292 case 24:
4293 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4294 AS1 (clr,%B0) CR_TAB
4295 AS1 (clr,%C0) CR_TAB
4296 AS1 (clr,%D0));
4298 case 31:
4299 *len = 6;
4300 return (AS1 (clr,%A0) CR_TAB
4301 AS2 (sbrc,%D0,7) CR_TAB
4302 AS1 (inc,%A0) CR_TAB
4303 AS1 (clr,%B0) CR_TAB
4304 AS1 (clr,%C0) CR_TAB
4305 AS1 (clr,%D0));
4307 len = t;
4309 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4310 AS1 (ror,%C0) CR_TAB
4311 AS1 (ror,%B0) CR_TAB
4312 AS1 (ror,%A0)),
4313 insn, operands, len, 4);
4314 return "";
4317 /* Modifies the length assigned to instruction INSN.
4318 LEN is the initially computed length of the insn. */
4321 adjust_insn_length (rtx insn, int len)
4323 rtx patt = PATTERN (insn);
4324 rtx set;
4326 if (GET_CODE (patt) == SET)
4328 rtx op[10];
4329 op[1] = SET_SRC (patt);
4330 op[0] = SET_DEST (patt);
4331 if (general_operand (op[1], VOIDmode)
4332 && general_operand (op[0], VOIDmode))
4334 switch (GET_MODE (op[0]))
4336 case QImode:
4337 output_movqi (insn, op, &len);
4338 break;
4339 case HImode:
4340 output_movhi (insn, op, &len);
4341 break;
4342 case SImode:
4343 case SFmode:
4344 output_movsisf (insn, op, &len);
4345 break;
4346 default:
4347 break;
4350 else if (op[0] == cc0_rtx && REG_P (op[1]))
4352 switch (GET_MODE (op[1]))
4354 case HImode: out_tsthi (insn,&len); break;
4355 case SImode: out_tstsi (insn,&len); break;
4356 default: break;
4359 else if (GET_CODE (op[1]) == AND)
4361 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4363 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4364 if (GET_MODE (op[1]) == SImode)
4365 len = (((mask & 0xff) != 0xff)
4366 + ((mask & 0xff00) != 0xff00)
4367 + ((mask & 0xff0000L) != 0xff0000L)
4368 + ((mask & 0xff000000L) != 0xff000000L));
4369 else if (GET_MODE (op[1]) == HImode)
4370 len = (((mask & 0xff) != 0xff)
4371 + ((mask & 0xff00) != 0xff00));
4374 else if (GET_CODE (op[1]) == IOR)
4376 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4378 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4379 if (GET_MODE (op[1]) == SImode)
4380 len = (((mask & 0xff) != 0)
4381 + ((mask & 0xff00) != 0)
4382 + ((mask & 0xff0000L) != 0)
4383 + ((mask & 0xff000000L) != 0));
4384 else if (GET_MODE (op[1]) == HImode)
4385 len = (((mask & 0xff) != 0)
4386 + ((mask & 0xff00) != 0));
4390 set = single_set (insn);
4391 if (set)
4393 rtx op[10];
4395 op[1] = SET_SRC (set);
4396 op[0] = SET_DEST (set);
4398 if (GET_CODE (patt) == PARALLEL
4399 && general_operand (op[1], VOIDmode)
4400 && general_operand (op[0], VOIDmode))
4402 if (XVECLEN (patt, 0) == 2)
4403 op[2] = XVECEXP (patt, 0, 1);
4405 switch (GET_MODE (op[0]))
4407 case QImode:
4408 len = 2;
4409 break;
4410 case HImode:
4411 output_reload_inhi (insn, op, &len);
4412 break;
4413 case SImode:
4414 case SFmode:
4415 output_reload_insisf (insn, op, &len);
4416 break;
4417 default:
4418 break;
4421 else if (GET_CODE (op[1]) == ASHIFT
4422 || GET_CODE (op[1]) == ASHIFTRT
4423 || GET_CODE (op[1]) == LSHIFTRT)
4425 rtx ops[10];
4426 ops[0] = op[0];
4427 ops[1] = XEXP (op[1],0);
4428 ops[2] = XEXP (op[1],1);
4429 switch (GET_CODE (op[1]))
4431 case ASHIFT:
4432 switch (GET_MODE (op[0]))
4434 case QImode: ashlqi3_out (insn,ops,&len); break;
4435 case HImode: ashlhi3_out (insn,ops,&len); break;
4436 case SImode: ashlsi3_out (insn,ops,&len); break;
4437 default: break;
4439 break;
4440 case ASHIFTRT:
4441 switch (GET_MODE (op[0]))
4443 case QImode: ashrqi3_out (insn,ops,&len); break;
4444 case HImode: ashrhi3_out (insn,ops,&len); break;
4445 case SImode: ashrsi3_out (insn,ops,&len); break;
4446 default: break;
4448 break;
4449 case LSHIFTRT:
4450 switch (GET_MODE (op[0]))
4452 case QImode: lshrqi3_out (insn,ops,&len); break;
4453 case HImode: lshrhi3_out (insn,ops,&len); break;
4454 case SImode: lshrsi3_out (insn,ops,&len); break;
4455 default: break;
4457 break;
4458 default:
4459 break;
4463 return len;
4466 /* Return nonzero if register REG is dead after INSN. */
4469 reg_unused_after (rtx insn, rtx reg)
4471 return (dead_or_set_p (insn, reg)
4472 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4475 /* Return nonzero if REG is not used after INSN.
4476 We assume REG is a reload reg, and therefore does
4477 not live past labels. It may live past calls or jumps though. */
4480 _reg_unused_after (rtx insn, rtx reg)
4482 enum rtx_code code;
4483 rtx set;
4485 /* If the reg is set by this instruction, then it is safe for our
4486 case. Disregard the case where this is a store to memory, since
4487 we are checking a register used in the store address. */
4488 set = single_set (insn);
4489 if (set && GET_CODE (SET_DEST (set)) != MEM
4490 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4491 return 1;
4493 while ((insn = NEXT_INSN (insn)))
4495 rtx set;
4496 code = GET_CODE (insn);
4498 #if 0
4499 /* If this is a label that existed before reload, then the register
4500 is dead here. However, if this is a label added by reorg, then
4501 the register may still be live here. We can't tell the difference,
4502 so we just ignore labels completely. */
4503 if (code == CODE_LABEL)
4504 return 1;
4505 /* else */
4506 #endif
4508 if (!INSN_P (insn))
4509 continue;
4511 if (code == JUMP_INSN)
4512 return 0;
4514 /* If this is a SEQUENCE, we must handle all of its insns at once.
4515 We could have for instance a call that sets the target register,
4516 and an insn in a delay slot that uses the register. In this case,
4517 we must return 0. */
4518 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4520 int i;
4521 int retval = 0;
4523 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4525 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4526 rtx set = single_set (this_insn);
4528 if (GET_CODE (this_insn) == CALL_INSN)
4529 code = CALL_INSN;
4530 else if (GET_CODE (this_insn) == JUMP_INSN)
4532 if (INSN_ANNULLED_BRANCH_P (this_insn))
4533 return 0;
4534 code = JUMP_INSN;
4537 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4538 return 0;
4539 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4541 if (GET_CODE (SET_DEST (set)) != MEM)
4542 retval = 1;
4543 else
4544 return 0;
4546 if (set == 0
4547 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4548 return 0;
4550 if (retval == 1)
4551 return 1;
4552 else if (code == JUMP_INSN)
4553 return 0;
4556 if (code == CALL_INSN)
4558 rtx tem;
4559 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4560 if (GET_CODE (XEXP (tem, 0)) == USE
4561 && REG_P (XEXP (XEXP (tem, 0), 0))
4562 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4563 return 0;
4564 if (call_used_regs[REGNO (reg)])
4565 return 1;
4568 set = single_set (insn);
4570 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4571 return 0;
4572 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4573 return GET_CODE (SET_DEST (set)) != MEM;
4574 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4575 return 0;
4577 return 1;
4580 /* Target hook for assembling integer objects. The AVR version needs
4581 special handling for references to certain labels. */
4583 static bool
4584 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4586 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4587 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4588 || GET_CODE (x) == LABEL_REF))
4590 fputs ("\t.word\tgs(", asm_out_file);
4591 output_addr_const (asm_out_file, x);
4592 fputs (")\n", asm_out_file);
4593 return true;
4595 return default_assemble_integer (x, size, aligned_p);
4598 /* The routine used to output NUL terminated strings. We use a special
4599 version of this for most svr4 targets because doing so makes the
4600 generated assembly code more compact (and thus faster to assemble)
4601 as well as more readable, especially for targets like the i386
4602 (where the only alternative is to output character sequences as
4603 comma separated lists of numbers). */
4605 void
4606 gas_output_limited_string(FILE *file, const char *str)
4608 const unsigned char *_limited_str = (const unsigned char *) str;
4609 unsigned ch;
4610 fprintf (file, "%s\"", STRING_ASM_OP);
4611 for (; (ch = *_limited_str); _limited_str++)
4613 int escape;
4614 switch (escape = ESCAPES[ch])
4616 case 0:
4617 putc (ch, file);
4618 break;
4619 case 1:
4620 fprintf (file, "\\%03o", ch);
4621 break;
4622 default:
4623 putc ('\\', file);
4624 putc (escape, file);
4625 break;
4628 fprintf (file, "\"\n");
4631 /* The routine used to output sequences of byte values. We use a special
4632 version of this for most svr4 targets because doing so makes the
4633 generated assembly code more compact (and thus faster to assemble)
4634 as well as more readable. Note that if we find subparts of the
4635 character sequence which end with NUL (and which are shorter than
4636 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4638 void
4639 gas_output_ascii(FILE *file, const char *str, size_t length)
4641 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4642 const unsigned char *limit = _ascii_bytes + length;
4643 unsigned bytes_in_chunk = 0;
4644 for (; _ascii_bytes < limit; _ascii_bytes++)
4646 const unsigned char *p;
4647 if (bytes_in_chunk >= 60)
4649 fprintf (file, "\"\n");
4650 bytes_in_chunk = 0;
4652 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4653 continue;
4654 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4656 if (bytes_in_chunk > 0)
4658 fprintf (file, "\"\n");
4659 bytes_in_chunk = 0;
4661 gas_output_limited_string (file, (const char*)_ascii_bytes);
4662 _ascii_bytes = p;
4664 else
4666 int escape;
4667 unsigned ch;
4668 if (bytes_in_chunk == 0)
4669 fprintf (file, "\t.ascii\t\"");
4670 switch (escape = ESCAPES[ch = *_ascii_bytes])
4672 case 0:
4673 putc (ch, file);
4674 bytes_in_chunk++;
4675 break;
4676 case 1:
4677 fprintf (file, "\\%03o", ch);
4678 bytes_in_chunk += 4;
4679 break;
4680 default:
4681 putc ('\\', file);
4682 putc (escape, file);
4683 bytes_in_chunk += 2;
4684 break;
4688 if (bytes_in_chunk > 0)
4689 fprintf (file, "\"\n");
4692 /* Return value is nonzero if pseudos that have been
4693 assigned to registers of class CLASS would likely be spilled
4694 because registers of CLASS are needed for spill registers. */
4696 enum reg_class
4697 class_likely_spilled_p (int c)
4699 return (c != ALL_REGS && c != ADDW_REGS);
4702 /* Valid attributes:
4703 progmem - place data in program memory (flash);
4704 signal - make the function a hardware interrupt handler; interrupts
4705 remain disabled after the function prologue;
4706 interrupt - make the function a hardware interrupt handler; interrupts
4707 are re-enabled after the function prologue;
4708 naked - don't generate a function prologue/epilogue or `ret' instruction.
4710 Only the `progmem' attribute is valid for a type.  (A usage
illustration follows the attribute table below.)  */
4712 const struct attribute_spec avr_attribute_table[] =
4714 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4715 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4716 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4717 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4718 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4719 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4720 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
4721 { NULL, 0, 0, false, false, false, NULL }
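/* Editor's illustration (not part of the original source): typical
   user-level declarations exercising these attributes, assuming the
   avr-libc convention of naming interrupt handlers __vector_<N>; all
   identifiers below are made up for the example:

       const char table[] __attribute__ ((progmem)) = "kept in flash";

       void __vector_3 (void) __attribute__ ((signal));     interrupts stay off
       void __vector_4 (void) __attribute__ ((interrupt));  interrupts re-enabled

       void start (void) __attribute__ ((naked, OS_main));

   Misnaming a signal/interrupt handler (anything not starting with
   "__vector") triggers the "misspelled" warnings issued by
   avr_handle_fndecl_attribute below.  */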
4724 /* Handle a "progmem" attribute; arguments as in
4725 struct attribute_spec.handler. */
4726 static tree
4727 avr_handle_progmem_attribute (tree *node, tree name,
4728 tree args ATTRIBUTE_UNUSED,
4729 int flags ATTRIBUTE_UNUSED,
4730 bool *no_add_attrs)
4732 if (DECL_P (*node))
4734 if (TREE_CODE (*node) == TYPE_DECL)
4736 /* This is really a decl attribute, not a type attribute,
4737 but try to handle it for GCC 3.0 backwards compatibility. */
4739 tree type = TREE_TYPE (*node);
4740 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4741 tree newtype = build_type_attribute_variant (type, attr);
4743 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4744 TREE_TYPE (*node) = newtype;
4745 *no_add_attrs = true;
4747 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4749 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4751 warning (0, "only initialized variables can be placed into "
4752 "program memory area");
4753 *no_add_attrs = true;
4756 else
4758 warning (OPT_Wattributes, "%qs attribute ignored",
4759 IDENTIFIER_POINTER (name));
4760 *no_add_attrs = true;
4764 return NULL_TREE;
4767 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4768 struct attribute_spec.handler. */
4770 static tree
4771 avr_handle_fndecl_attribute (tree *node, tree name,
4772 tree args ATTRIBUTE_UNUSED,
4773 int flags ATTRIBUTE_UNUSED,
4774 bool *no_add_attrs)
4776 if (TREE_CODE (*node) != FUNCTION_DECL)
4778 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4779 IDENTIFIER_POINTER (name));
4780 *no_add_attrs = true;
4782 else
4784 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4785 const char *attr = IDENTIFIER_POINTER (name);
4787 /* If the function has the 'signal' or 'interrupt' attribute, test to
4788 make sure that the name of the function is "__vector_NN" so as to
4789 catch when the user misspells the interrupt vector name. */
4791 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4793 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4795 warning (0, "%qs appears to be a misspelled interrupt handler",
4796 func_name);
4799 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4801 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4803 warning (0, "%qs appears to be a misspelled signal handler",
4804 func_name);
4809 return NULL_TREE;
4812 static tree
4813 avr_handle_fntype_attribute (tree *node, tree name,
4814 tree args ATTRIBUTE_UNUSED,
4815 int flags ATTRIBUTE_UNUSED,
4816 bool *no_add_attrs)
4818 if (TREE_CODE (*node) != FUNCTION_TYPE)
4820 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4821 IDENTIFIER_POINTER (name));
4822 *no_add_attrs = true;
4825 return NULL_TREE;
4828 /* Look for the `progmem' attribute in DECL;
4829 return 1 if found, otherwise 0. */
4831 int
4832 avr_progmem_p (tree decl, tree attributes)
4834 tree a;
4836 if (TREE_CODE (decl) != VAR_DECL)
4837 return 0;
4839 if (NULL_TREE
4840 != lookup_attribute ("progmem", attributes))
4841 return 1;
4843 a=decl;
4844 do
4845 a = TREE_TYPE(a);
4846 while (TREE_CODE (a) == ARRAY_TYPE);
4848 if (a == error_mark_node)
4849 return 0;
4851 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4852 return 1;
4854 return 0;
4857 /* Add the section attribute if the variable is in progmem. */
4859 static void
4860 avr_insert_attributes (tree node, tree *attributes)
4862 if (TREE_CODE (node) == VAR_DECL
4863 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4864 && avr_progmem_p (node, *attributes))
4866 static const char dsec[] = ".progmem.data";
4867 *attributes = tree_cons (get_identifier ("section"),
4868 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4869 *attributes);
4871 /* ??? This seems sketchy. Why can't the user declare the
4872 thing const in the first place? */
4873 TREE_READONLY (node) = 1;
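/* Editor's illustration (not part of the original source): for a
   declaration such as

       static const int lut[4] __attribute__ ((progmem)) = { 1, 2, 3, 4 };

   the hook above behaves as if the user had also written
   __attribute__ ((section (".progmem.data"))) and forces the declaration
   to be read-only, so the data stays in flash instead of being copied to
   RAM (on AVR even read-only data goes to the data section, see
   avr_asm_init_sections below).  lut is a made-up name.  */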
4877 /* A get_unnamed_section callback for switching to progmem_section. */
4879 static void
4880 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4882 fprintf (asm_out_file,
4883 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4884 AVR_HAVE_JMP_CALL ? "a" : "ax");
4885 /* It should already be aligned; this is just to be safe in case it isn't. */
4886 fprintf (asm_out_file, "\t.p2align 1\n");
4889 /* Implement TARGET_ASM_INIT_SECTIONS. */
4891 static void
4892 avr_asm_init_sections (void)
4894 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4895 avr_output_progmem_section_asm_op,
4896 NULL);
4897 readonly_data_section = data_section;
4900 static unsigned int
4901 avr_section_type_flags (tree decl, const char *name, int reloc)
4903 unsigned int flags = default_section_type_flags (decl, name, reloc);
4905 if (strncmp (name, ".noinit", 7) == 0)
4907 if (decl && TREE_CODE (decl) == VAR_DECL
4908 && DECL_INITIAL (decl) == NULL_TREE)
4909 flags |= SECTION_BSS; /* @nobits */
4910 else
4911 warning (0, "only uninitialized variables can be placed in the "
4912 ".noinit section");
4915 return flags;
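/* Editor's illustration (not part of the original source): the .noinit
   handling above supports the usual idiom for data that must survive a
   reset without being cleared by the startup code:

       unsigned char boot_count __attribute__ ((section (".noinit")));

   Because the variable has no initializer, the section is marked
   SECTION_BSS (@nobits); giving it an initializer would instead trigger
   the warning above.  boot_count is a made-up name.  */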
4918 /* Outputs some appropriate text to go at the start of an assembler
4919 file. */
4921 static void
4922 avr_file_start (void)
4924 if (avr_current_arch->asm_only)
4925 error ("MCU %qs supported for assembler only", avr_mcu_name);
4927 default_file_start ();
4929 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4930 fputs ("__SREG__ = 0x3f\n"
4931 "__SP_H__ = 0x3e\n"
4932 "__SP_L__ = 0x3d\n", asm_out_file);
4934 fputs ("__tmp_reg__ = 0\n"
4935 "__zero_reg__ = 1\n", asm_out_file);
4937 /* FIXME: output these only if there is anything in the .data / .bss
4938 sections - some code size could be saved by not linking in the
4939 initialization code from libgcc if one or both sections are empty. */
4940 fputs ("\t.global __do_copy_data\n", asm_out_file);
4941 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4944 /* Outputs to the stdio stream FILE some
4945 appropriate text to go at the end of an assembler file. */
4947 static void
4948 avr_file_end (void)
4952 /* Choose the order in which to allocate hard registers for
4953 pseudo-registers local to a basic block.
4955 Store the desired register order in the array `reg_alloc_order'.
4956 Element 0 should be the register to allocate first; element 1, the
4957 next register; and so on. */
4959 void
4960 order_regs_for_local_alloc (void)
4962 unsigned int i;
4963 static const int order_0[] = {
4964 24,25,
4965 18,19,
4966 20,21,
4967 22,23,
4968 30,31,
4969 26,27,
4970 28,29,
4971 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4972 0,1,
4973 32,33,34,35
4975 static const int order_1[] = {
4976 18,19,
4977 20,21,
4978 22,23,
4979 24,25,
4980 30,31,
4981 26,27,
4982 28,29,
4983 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4984 0,1,
4985 32,33,34,35
4987 static const int order_2[] = {
4988 25,24,
4989 23,22,
4990 21,20,
4991 19,18,
4992 30,31,
4993 26,27,
4994 28,29,
4995 17,16,
4996 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4997 1,0,
4998 32,33,34,35
5001 const int *order = (TARGET_ORDER_1 ? order_1 :
5002 TARGET_ORDER_2 ? order_2 :
5003 order_0);
5004 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5005 reg_alloc_order[i] = order[i];
5009 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5010 cost of an RTX operand given its context. X is the rtx of the
5011 operand, MODE is its mode, and OUTER is the rtx_code of this
5012 operand's parent operator. */
5014 static int
5015 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5016 bool speed)
5018 enum rtx_code code = GET_CODE (x);
5019 int total;
5021 switch (code)
5023 case REG:
5024 case SUBREG:
5025 return 0;
5027 case CONST_INT:
5028 case CONST_DOUBLE:
5029 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5031 default:
5032 break;
5035 total = 0;
5036 avr_rtx_costs (x, code, outer, &total, speed);
5037 return total;
5040 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5041 is to be calculated. Return true if the complete cost has been
5042 computed, and false if subexpressions should be scanned. In either
5043 case, *TOTAL contains the cost result. */
5045 static bool
5046 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
5047 bool speed)
5049 enum machine_mode mode = GET_MODE (x);
5050 HOST_WIDE_INT val;
5052 switch (code)
5054 case CONST_INT:
5055 case CONST_DOUBLE:
5056 /* Immediate constants are as cheap as registers. */
5057 *total = 0;
5058 return true;
5060 case MEM:
5061 case CONST:
5062 case LABEL_REF:
5063 case SYMBOL_REF:
5064 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5065 return true;
5067 case NEG:
5068 switch (mode)
5070 case QImode:
5071 case SFmode:
5072 *total = COSTS_N_INSNS (1);
5073 break;
5075 case HImode:
5076 *total = COSTS_N_INSNS (3);
5077 break;
5079 case SImode:
5080 *total = COSTS_N_INSNS (7);
5081 break;
5083 default:
5084 return false;
5086 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5087 return true;
5089 case ABS:
5090 switch (mode)
5092 case QImode:
5093 case SFmode:
5094 *total = COSTS_N_INSNS (1);
5095 break;
5097 default:
5098 return false;
5100 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5101 return true;
5103 case NOT:
5104 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5105 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5106 return true;
5108 case ZERO_EXTEND:
5109 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5110 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5111 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5112 return true;
5114 case SIGN_EXTEND:
5115 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5116 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5117 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5118 return true;
5120 case PLUS:
5121 switch (mode)
5123 case QImode:
5124 *total = COSTS_N_INSNS (1);
5125 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5126 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5127 break;
5129 case HImode:
5130 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5132 *total = COSTS_N_INSNS (2);
5133 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5135 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5136 *total = COSTS_N_INSNS (1);
5137 else
5138 *total = COSTS_N_INSNS (2);
5139 break;
5141 case SImode:
5142 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5144 *total = COSTS_N_INSNS (4);
5145 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5147 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5148 *total = COSTS_N_INSNS (1);
5149 else
5150 *total = COSTS_N_INSNS (4);
5151 break;
5153 default:
5154 return false;
5156 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5157 return true;
5159 case MINUS:
5160 case AND:
5161 case IOR:
5162 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5163 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5164 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5165 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5166 return true;
5168 case XOR:
5169 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5170 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5171 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5172 return true;
5174 case MULT:
5175 switch (mode)
5177 case QImode:
5178 if (AVR_HAVE_MUL)
5179 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5180 else if (!speed)
5181 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5182 else
5183 return false;
5184 break;
5186 case HImode:
5187 if (AVR_HAVE_MUL)
5188 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5189 else if (!speed)
5190 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5191 else
5192 return false;
5193 break;
5195 default:
5196 return false;
5198 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5199 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5200 return true;
5202 case DIV:
5203 case MOD:
5204 case UDIV:
5205 case UMOD:
5206 if (!speed)
5207 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5208 else
5209 return false;
5210 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5211 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5212 return true;
5214 case ROTATE:
5215 switch (mode)
5217 case QImode:
5218 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5219 *total = COSTS_N_INSNS (1);
5221 break;
5223 case HImode:
5224 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5225 *total = COSTS_N_INSNS (3);
5227 break;
5229 case SImode:
5230 if (CONST_INT_P (XEXP (x, 1)))
5231 switch (INTVAL (XEXP (x, 1)))
5233 case 8:
5234 case 24:
5235 *total = COSTS_N_INSNS (5);
5236 break;
5237 case 16:
5238 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5239 break;
5241 break;
5243 default:
5244 return false;
5246 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5247 return true;
5249 case ASHIFT:
5250 switch (mode)
5252 case QImode:
5253 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5255 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5256 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5258 else
5260 val = INTVAL (XEXP (x, 1));
5261 if (val == 7)
5262 *total = COSTS_N_INSNS (3);
5263 else if (val >= 0 && val <= 7)
5264 *total = COSTS_N_INSNS (val);
5265 else
5266 *total = COSTS_N_INSNS (1);
5268 break;
5270 case HImode:
5271 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5273 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5274 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5276 else
5277 switch (INTVAL (XEXP (x, 1)))
5279 case 0:
5280 *total = 0;
5281 break;
5282 case 1:
5283 case 8:
5284 *total = COSTS_N_INSNS (2);
5285 break;
5286 case 9:
5287 *total = COSTS_N_INSNS (3);
5288 break;
5289 case 2:
5290 case 3:
5291 case 10:
5292 case 15:
5293 *total = COSTS_N_INSNS (4);
5294 break;
5295 case 7:
5296 case 11:
5297 case 12:
5298 *total = COSTS_N_INSNS (5);
5299 break;
5300 case 4:
5301 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5302 break;
5303 case 6:
5304 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5305 break;
5306 case 5:
5307 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5308 break;
5309 default:
5310 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5311 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5313 break;
5315 case SImode:
5316 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5318 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5319 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5321 else
5322 switch (INTVAL (XEXP (x, 1)))
5324 case 0:
5325 *total = 0;
5326 break;
5327 case 24:
5328 *total = COSTS_N_INSNS (3);
5329 break;
5330 case 1:
5331 case 8:
5332 case 16:
5333 *total = COSTS_N_INSNS (4);
5334 break;
5335 case 31:
5336 *total = COSTS_N_INSNS (6);
5337 break;
5338 case 2:
5339 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5340 break;
5341 default:
5342 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5343 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5345 break;
5347 default:
5348 return false;
5350 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5351 return true;
5353 case ASHIFTRT:
5354 switch (mode)
5356 case QImode:
5357 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5359 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5360 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5362 else
5364 val = INTVAL (XEXP (x, 1));
5365 if (val == 6)
5366 *total = COSTS_N_INSNS (4);
5367 else if (val == 7)
5368 *total = COSTS_N_INSNS (2);
5369 else if (val >= 0 && val <= 7)
5370 *total = COSTS_N_INSNS (val);
5371 else
5372 *total = COSTS_N_INSNS (1);
5374 break;
5376 case HImode:
5377 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5379 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5380 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5382 else
5383 switch (INTVAL (XEXP (x, 1)))
5385 case 0:
5386 *total = 0;
5387 break;
5388 case 1:
5389 *total = COSTS_N_INSNS (2);
5390 break;
5391 case 15:
5392 *total = COSTS_N_INSNS (3);
5393 break;
5394 case 2:
5395 case 7:
5396 case 8:
5397 case 9:
5398 *total = COSTS_N_INSNS (4);
5399 break;
5400 case 10:
5401 case 14:
5402 *total = COSTS_N_INSNS (5);
5403 break;
5404 case 11:
5405 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5406 break;
5407 case 12:
5408 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5409 break;
5410 case 6:
5411 case 13:
5412 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5413 break;
5414 default:
5415 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5416 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5418 break;
5420 case SImode:
5421 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5423 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5424 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5426 else
5427 switch (INTVAL (XEXP (x, 1)))
5429 case 0:
5430 *total = 0;
5431 break;
5432 case 1:
5433 *total = COSTS_N_INSNS (4);
5434 break;
5435 case 8:
5436 case 16:
5437 case 24:
5438 *total = COSTS_N_INSNS (6);
5439 break;
5440 case 2:
5441 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5442 break;
5443 case 31:
5444 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5445 break;
5446 default:
5447 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5448 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5450 break;
5452 default:
5453 return false;
5455 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5456 return true;
5458 case LSHIFTRT:
5459 switch (mode)
5461 case QImode:
5462 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5464 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5465 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5467 else
5469 val = INTVAL (XEXP (x, 1));
5470 if (val == 7)
5471 *total = COSTS_N_INSNS (3);
5472 else if (val >= 0 && val <= 7)
5473 *total = COSTS_N_INSNS (val);
5474 else
5475 *total = COSTS_N_INSNS (1);
5477 break;
5479 case HImode:
5480 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5482 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5483 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5485 else
5486 switch (INTVAL (XEXP (x, 1)))
5488 case 0:
5489 *total = 0;
5490 break;
5491 case 1:
5492 case 8:
5493 *total = COSTS_N_INSNS (2);
5494 break;
5495 case 9:
5496 *total = COSTS_N_INSNS (3);
5497 break;
5498 case 2:
5499 case 10:
5500 case 15:
5501 *total = COSTS_N_INSNS (4);
5502 break;
5503 case 7:
5504 case 11:
5505 *total = COSTS_N_INSNS (5);
5506 break;
5507 case 3:
5508 case 12:
5509 case 13:
5510 case 14:
5511 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5512 break;
5513 case 4:
5514 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5515 break;
5516 case 5:
5517 case 6:
5518 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5519 break;
5520 default:
5521 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5522 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5524 break;
5526 case SImode:
5527 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5529 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5530 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5532 else
5533 switch (INTVAL (XEXP (x, 1)))
5535 case 0:
5536 *total = 0;
5537 break;
5538 case 1:
5539 *total = COSTS_N_INSNS (4);
5540 break;
5541 case 2:
5542 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5543 break;
5544 case 8:
5545 case 16:
5546 case 24:
5547 *total = COSTS_N_INSNS (4);
5548 break;
5549 case 31:
5550 *total = COSTS_N_INSNS (6);
5551 break;
5552 default:
5553 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5554 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5556 break;
5558 default:
5559 return false;
5561 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5562 return true;
5564 case COMPARE:
5565 switch (GET_MODE (XEXP (x, 0)))
5567 case QImode:
5568 *total = COSTS_N_INSNS (1);
5569 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5570 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5571 break;
5573 case HImode:
5574 *total = COSTS_N_INSNS (2);
5575 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5576 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5577 else if (INTVAL (XEXP (x, 1)) != 0)
5578 *total += COSTS_N_INSNS (1);
5579 break;
5581 case SImode:
5582 *total = COSTS_N_INSNS (4);
5583 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5584 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5585 else if (INTVAL (XEXP (x, 1)) != 0)
5586 *total += COSTS_N_INSNS (3);
5587 break;
5589 default:
5590 return false;
5592 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5593 return true;
5595 default:
5596 break;
5598 return false;
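/* Editor's worked example (not part of the original source): for the
   HImode additions

       (plus:HI (reg:HI 24) (const_int 10))
       (plus:HI (reg:HI 24) (const_int 1000))

   the PLUS/HImode case above returns COSTS_N_INSNS (1) and
   COSTS_N_INSNS (2) respectively: constants in the -63..63 adiw/sbiw
   range are modelled as one instruction, larger ones as a subi/sbci
   pair, and the register operand contributes 0 through
   avr_operand_rtx_cost.  */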
5601 /* Calculate the cost of a memory address. */
5603 static int
5604 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5606 if (GET_CODE (x) == PLUS
5607 && GET_CODE (XEXP (x,1)) == CONST_INT
5608 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5609 && INTVAL (XEXP (x,1)) >= 61)
5610 return 18;
5611 if (CONSTANT_ADDRESS_P (x))
5613 if (optimize > 0 && io_address_operand (x, QImode))
5614 return 2;
5615 return 4;
5617 return 4;
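/* Editor's worked example (not part of the original source): an address
   of the form (plus Y 62) costs 18 because the displacement is at the
   edge of what ldd/std can reach, a plain register base or small
   displacement costs 4, and a constant address recognized as an I/O
   address costs only 2 when optimizing, since in/out instructions can
   be used.  */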
5620 /* Test for the extra memory constraint 'Q': a memory address based
5621 on the Y or Z pointer with a valid displacement. */
5623 int
5624 extra_constraint_Q (rtx x)
5626 if (GET_CODE (XEXP (x,0)) == PLUS
5627 && REG_P (XEXP (XEXP (x,0), 0))
5628 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5629 && (INTVAL (XEXP (XEXP (x,0), 1))
5630 <= MAX_LD_OFFSET (GET_MODE (x))))
5632 rtx xx = XEXP (XEXP (x,0), 0);
5633 int regno = REGNO (xx);
5634 if (TARGET_ALL_DEBUG)
5636 fprintf (stderr, ("extra_constraint:\n"
5637 "reload_completed: %d\n"
5638 "reload_in_progress: %d\n"),
5639 reload_completed, reload_in_progress);
5640 debug_rtx (x);
5642 if (regno >= FIRST_PSEUDO_REGISTER)
5643 return 1; /* allocate pseudos */
5644 else if (regno == REG_Z || regno == REG_Y)
5645 return 1; /* strictly check */
5646 else if (xx == frame_pointer_rtx
5647 || xx == arg_pointer_rtx)
5648 return 1; /* XXX frame & arg pointer checks */
5650 return 0;
5653 /* Convert condition code CONDITION to the valid AVR condition code. */
5655 RTX_CODE
5656 avr_normalize_condition (RTX_CODE condition)
5658 switch (condition)
5660 case GT:
5661 return GE;
5662 case GTU:
5663 return GEU;
5664 case LE:
5665 return LT;
5666 case LEU:
5667 return LTU;
5668 default:
5669 gcc_unreachable ();
5673 /* This function optimizes conditional jumps. */
5675 static void
5676 avr_reorg (void)
5678 rtx insn, pattern;
5680 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5682 if (! (GET_CODE (insn) == INSN
5683 || GET_CODE (insn) == CALL_INSN
5684 || GET_CODE (insn) == JUMP_INSN)
5685 || !single_set (insn))
5686 continue;
5688 pattern = PATTERN (insn);
5690 if (GET_CODE (pattern) == PARALLEL)
5691 pattern = XVECEXP (pattern, 0, 0);
5692 if (GET_CODE (pattern) == SET
5693 && SET_DEST (pattern) == cc0_rtx
5694 && compare_diff_p (insn))
5696 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5698 /* Now we are working on the compare insn itself. */
5700 pattern = SET_SRC (pattern);
5701 if (true_regnum (XEXP (pattern,0)) >= 0
5702 && true_regnum (XEXP (pattern,1)) >= 0 )
5704 rtx x = XEXP (pattern,0);
5705 rtx next = next_real_insn (insn);
5706 rtx pat = PATTERN (next);
5707 rtx src = SET_SRC (pat);
5708 rtx t = XEXP (src,0);
5709 PUT_CODE (t, swap_condition (GET_CODE (t)));
5710 XEXP (pattern,0) = XEXP (pattern,1);
5711 XEXP (pattern,1) = x;
5712 INSN_CODE (next) = -1;
5714 else if (true_regnum (XEXP (pattern,0)) >= 0
5715 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5717 rtx x = XEXP (pattern,1);
5718 rtx next = next_real_insn (insn);
5719 rtx pat = PATTERN (next);
5720 rtx src = SET_SRC (pat);
5721 rtx t = XEXP (src,0);
5722 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5724 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5726 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5727 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5728 INSN_CODE (next) = -1;
5729 INSN_CODE (insn) = -1;
5733 else if (true_regnum (SET_SRC (pattern)) >= 0)
5735 /* This is a tst insn */
5736 rtx next = next_real_insn (insn);
5737 rtx pat = PATTERN (next);
5738 rtx src = SET_SRC (pat);
5739 rtx t = XEXP (src,0);
5741 PUT_CODE (t, swap_condition (GET_CODE (t)));
5742 SET_SRC (pattern) = gen_rtx_COMPARE (GET_MODE (SET_SRC (pattern)), const0_rtx,
5743 SET_SRC (pattern));
5744 INSN_CODE (next) = -1;
5745 INSN_CODE (insn) = -1;
5751 /* Returns the register number used for the function return value. */
5753 int
5754 avr_ret_register (void)
5756 return 24;
5759 /* Create an RTX representing the place where a
5760 library function returns a value of mode MODE. */
5762 rtx
5763 avr_libcall_value (enum machine_mode mode)
5765 int offs = GET_MODE_SIZE (mode);
5766 if (offs < 2)
5767 offs = 2;
5768 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5771 /* Create an RTX representing the place where a
5772 function returns a value of data type VALTYPE. */
5775 avr_function_value (const_tree type,
5776 const_tree func ATTRIBUTE_UNUSED,
5777 bool outgoing ATTRIBUTE_UNUSED)
5779 unsigned int offs;
5781 if (TYPE_MODE (type) != BLKmode)
5782 return avr_libcall_value (TYPE_MODE (type));
5784 offs = int_size_in_bytes (type);
5785 if (offs < 2)
5786 offs = 2;
5787 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5788 offs = GET_MODE_SIZE (SImode);
5789 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5790 offs = GET_MODE_SIZE (DImode);
5792 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
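/* Editor's worked example (not part of the original source), assuming
   RET_REGISTER is 24 as returned by avr_ret_register above:

       QImode / HImode   ->  r24          (24 + 2 - 2)
       SImode / SFmode   ->  r22..r25     (24 + 2 - 4)
       DImode            ->  r18..r25     (24 + 2 - 8)

   A 3-byte BLKmode aggregate is rounded up to the SImode slot, so it is
   also returned starting at r22.  */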
5795 /* Places additional restrictions on the register class to
5796 use when it is necessary to copy value X into a register
5797 in class CLASS. */
5799 enum reg_class
5800 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
5802 return rclass;
5806 test_hard_reg_class (enum reg_class rclass, rtx x)
5808 int regno = true_regnum (x);
5809 if (regno < 0)
5810 return 0;
5812 if (TEST_HARD_REG_CLASS (rclass, regno))
5813 return 1;
5815 return 0;
5820 jump_over_one_insn_p (rtx insn, rtx dest)
5822 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5823 ? XEXP (dest, 0)
5824 : dest);
5825 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5826 int dest_addr = INSN_ADDRESSES (uid);
5827 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5830 /* Returns 1 if a value of mode MODE can be stored starting with hard
5831 register number REGNO. On the enhanced core, anything larger than
5832 1 byte must start in an even-numbered register for "movw" to work
5833 (this way we don't have to check for odd registers everywhere). */
5835 int
5836 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5838 /* Disallow QImode in stack pointer regs. */
5839 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5840 return 0;
5842 /* The only thing that can go into registers r28:r29 is a Pmode. */
5843 if (regno == REG_Y && mode == Pmode)
5844 return 1;
5846 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5847 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5848 return 0;
5850 if (mode == QImode)
5851 return 1;
5853 /* Modes larger than QImode occupy consecutive registers. */
5854 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5855 return 0;
5857 /* All modes larger than QImode should start in an even register. */
5858 return !(regno & 1);
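/* Editor's worked examples (not part of the original source), with
   REG_Y == 28 and Pmode == HImode:

       HImode in r24  ->  ok   (even start, clear of r28:r29)
       HImode in r25  ->  no   (odd start register)
       HImode in r28  ->  ok   (the Pmode/frame-pointer special case)
       SImode in r26  ->  no   (r26..r29 would overlap r28:r29)
       QImode in r28  ->  no   (only Pmode may occupy r28:r29)  */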
5861 const char *
5862 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5864 int tmp;
5865 if (!len)
5866 len = &tmp;
5868 if (GET_CODE (operands[1]) == CONST_INT)
5870 int val = INTVAL (operands[1]);
5871 if ((val & 0xff) == 0)
5873 *len = 3;
5874 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5875 AS2 (ldi,%2,hi8(%1)) CR_TAB
5876 AS2 (mov,%B0,%2));
5878 else if ((val & 0xff00) == 0)
5880 *len = 3;
5881 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5882 AS2 (mov,%A0,%2) CR_TAB
5883 AS2 (mov,%B0,__zero_reg__));
5885 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5887 *len = 3;
5888 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5889 AS2 (mov,%A0,%2) CR_TAB
5890 AS2 (mov,%B0,%2));
5893 *len = 4;
5894 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5895 AS2 (mov,%A0,%2) CR_TAB
5896 AS2 (ldi,%2,hi8(%1)) CR_TAB
5897 AS2 (mov,%B0,%2));
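/* Editor's worked example (not part of the original source): reloading
   the HImode constant 0x1200 through the scratch register (operand 2)
   takes the (val & 0xff) == 0 path above and emits

       mov %A0,__zero_reg__
       ldi %2,hi8(0x1200)
       mov %B0,%2

   three instructions instead of the generic four-instruction
   ldi/mov/ldi/mov sequence at the end of the function.  */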
5901 const char *
5902 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5904 rtx src = operands[1];
5905 int cnst = (GET_CODE (src) == CONST_INT);
5907 if (len)
5909 if (cnst)
5910 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5911 + ((INTVAL (src) & 0xff00) != 0)
5912 + ((INTVAL (src) & 0xff0000) != 0)
5913 + ((INTVAL (src) & 0xff000000) != 0);
5914 else
5915 *len = 8;
5917 return "";
5920 if (cnst && ((INTVAL (src) & 0xff) == 0))
5921 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5922 else
5924 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5925 output_asm_insn (AS2 (mov, %A0, %2), operands);
5927 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5928 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5929 else
5931 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5932 output_asm_insn (AS2 (mov, %B0, %2), operands);
5934 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5935 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5936 else
5938 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5939 output_asm_insn (AS2 (mov, %C0, %2), operands);
5941 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5942 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5943 else
5945 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5946 output_asm_insn (AS2 (mov, %D0, %2), operands);
5948 return "";
5951 void
5952 avr_output_bld (rtx operands[], int bit_nr)
5954 static char s[] = "bld %A0,0";
5956 s[5] = 'A' + (bit_nr >> 3);
5957 s[8] = '0' + (bit_nr & 7);
5958 output_asm_insn (s, operands);
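/* Editor's worked example (not part of the original source): calling
   avr_output_bld with bit_nr == 10 patches the template to

       bld %B0,2

   since bit_nr >> 3 selects the byte letter ('A' + 1 == 'B') and
   bit_nr & 7 the bit within that byte.  */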
5961 void
5962 avr_output_addr_vec_elt (FILE *stream, int value)
5964 switch_to_section (progmem_section);
5965 if (AVR_HAVE_JMP_CALL)
5966 fprintf (stream, "\t.word gs(.L%d)\n", value);
5967 else
5968 fprintf (stream, "\trjmp .L%d\n", value);
5971 /* Returns true if register REGNO is safe to allocate as a scratch
5972 register (for a define_peephole2) in the current function. */
5974 bool
5975 avr_hard_regno_scratch_ok (unsigned int regno)
5977 /* Interrupt functions can only use registers that have already been saved
5978 by the prologue, even if they would normally be call-clobbered. */
5980 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5981 && !df_regs_ever_live_p (regno))
5982 return false;
5984 return true;
5987 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5989 int
5990 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5991 unsigned int new_reg)
5993 /* Interrupt functions can only use registers that have already been
5994 saved by the prologue, even if they would normally be
5995 call-clobbered. */
5997 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5998 && !df_regs_ever_live_p (new_reg))
5999 return 0;
6001 return 1;
6004 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
6005 or memory location in the I/O space (QImode only).
6007 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6008 Operand 1: register operand to test, or CONST_INT memory address.
6009 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
6010 Operand 3: label to jump to if the test is true. */
6012 const char *
6013 avr_out_sbxx_branch (rtx insn, rtx operands[])
6015 enum rtx_code comp = GET_CODE (operands[0]);
6016 int long_jump = (get_attr_length (insn) >= 4);
6017 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6019 if (comp == GE)
6020 comp = EQ;
6021 else if (comp == LT)
6022 comp = NE;
6024 if (reverse)
6025 comp = reverse_condition (comp);
6027 if (GET_CODE (operands[1]) == CONST_INT)
6029 if (INTVAL (operands[1]) < 0x40)
6031 if (comp == EQ)
6032 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
6033 else
6034 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
6036 else
6038 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
6039 if (comp == EQ)
6040 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6041 else
6042 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6045 else /* GET_CODE (operands[1]) == REG */
6047 if (GET_MODE (operands[1]) == QImode)
6049 if (comp == EQ)
6050 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6051 else
6052 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6054 else /* HImode or SImode */
6056 static char buf[] = "sbrc %A1,0";
6057 int bit_nr = exact_log2 (INTVAL (operands[2])
6058 & GET_MODE_MASK (GET_MODE (operands[1])));
6060 buf[3] = (comp == EQ) ? 's' : 'c';
6061 buf[6] = 'A' + (bit_nr >> 3);
6062 buf[9] = '0' + (bit_nr & 7);
6063 output_asm_insn (buf, operands);
6067 if (long_jump)
6068 return (AS1 (rjmp,.+4) CR_TAB
6069 AS1 (jmp,%3));
6070 if (!reverse)
6071 return AS1 (rjmp,%3);
6072 return "";
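/* Editor's worked example (not part of the original source): for a short
   forward branch testing bit 3 of a QImode register with an EQ
   comparison (branch if the bit is zero), the code above emits

       sbrs %1,3
       rjmp %3

   i.e. the jump is skipped when the bit is set; for an I/O location
   below address 0x40 the sbis/sbic forms are used instead.  */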
6075 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6077 static void
6078 avr_asm_out_ctor (rtx symbol, int priority)
6080 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6081 default_ctor_section_asm_out_constructor (symbol, priority);
6084 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6086 static void
6087 avr_asm_out_dtor (rtx symbol, int priority)
6089 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6090 default_dtor_section_asm_out_destructor (symbol, priority);
6093 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6095 static bool
6096 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6098 if (TYPE_MODE (type) == BLKmode)
6100 HOST_WIDE_INT size = int_size_in_bytes (type);
6101 return (size == -1 || size > 8);
6103 else
6104 return false;
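/* Editor's worked example (not part of the original source): an 8-byte
   struct is still returned in registers (starting at r18, see
   avr_function_value above), while a 9-byte or variable-sized aggregate
   is returned in memory through a hidden pointer argument.  */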
6107 #include "gt-avr.h"