1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
49 /* Maximal allowed offset for an address in the LD command.
   The displacement budget is 64 bytes; subtracting the mode size keeps
   the whole object of MODE addressable from the base offset.  The
   (signed) cast prevents an unsigned-wrap surprise when GET_MODE_SIZE
   exceeds 64.  */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static int avr_naked_function_p (tree
);
53 static int interrupt_function_p (tree
);
54 static int signal_function_p (tree
);
55 static int avr_OS_task_function_p (tree
);
56 static int avr_OS_main_function_p (tree
);
57 static int avr_regs_to_save (HARD_REG_SET
*);
58 static int get_sequence_length (rtx insns
);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code
);
62 static int avr_num_arg_regs (enum machine_mode
, tree
);
64 static RTX_CODE
compare_condition (rtx insn
);
65 static rtx
avr_legitimize_address (rtx
, rtx
, enum machine_mode
);
66 static int compare_sign_p (rtx insn
);
67 static tree
avr_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
68 static tree
avr_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
69 static tree
avr_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
70 EXPORTED_CONST
struct attribute_spec avr_attribute_table
[];
71 static bool avr_assemble_integer (rtx
, unsigned int, int);
72 static void avr_file_start (void);
73 static void avr_file_end (void);
74 static bool avr_legitimate_address_p (enum machine_mode
, rtx
, bool);
75 static void avr_asm_function_end_prologue (FILE *);
76 static void avr_asm_function_begin_epilogue (FILE *);
77 static rtx
avr_function_value (const_tree
, const_tree
, bool);
78 static void avr_insert_attributes (tree
, tree
*);
79 static void avr_asm_init_sections (void);
80 static unsigned int avr_section_type_flags (tree
, const char *, int);
82 static void avr_reorg (void);
83 static void avr_asm_out_ctor (rtx
, int);
84 static void avr_asm_out_dtor (rtx
, int);
85 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
, bool);
86 static bool avr_rtx_costs (rtx
, int, int, int *, bool);
87 static int avr_address_cost (rtx
, bool);
88 static bool avr_return_in_memory (const_tree
, const_tree
);
89 static struct machine_function
* avr_init_machine_status (void);
90 static rtx
avr_builtin_setjmp_frame_value (void);
91 static bool avr_hard_regno_scratch_ok (unsigned int);
92 static unsigned int avr_case_values_threshold (void);
94 /* Allocate registers from r25 to r8 for parameters for function calls.
   FIRST_CUM_REG is one past the highest argument register (r25); the
   cumulative-args counter presumably starts here and counts downward as
   argument registers are assigned -- confirm against avr_num_arg_regs
   and the CUMULATIVE_ARGS users (not visible in this chunk).  */
95 #define FIRST_CUM_REG 26
97 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)).
   Initialized once in avr_override_options; GTY(()) so the GC roots it.  */
98 static GTY(()) rtx tmp_reg_rtx
;
100 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)).
    Also initialized in avr_override_options; cleared to 0 in the
    prologue and kept live via emit_use there.  */
101 static GTY(()) rtx zero_reg_rtx
;
103 /* AVR register names {"r0", "r1", ..., "r31"} */
104 static const char *const avr_regnames
[] = REGISTER_NAMES
;
106 /* This holds the last insn address.  Reset to 0 at the start of each
    function's prologue expansion (see expand_prologue).  */
107 static int last_insn_address
= 0;
109 /* Preprocessor macros to define depending on MCU type.
    Set from the matched mcu_type_s entry in avr_override_options.  */
110 const char *avr_extra_arch_macro
;
112 /* Current architecture.  Points into avr_arch_types[], selected by the
    -mmcu= lookup in avr_override_options.  */
113 const struct base_arch_s
*avr_current_arch
;
115 /* Output section for program-memory (flash) data; presumably created
    in avr_asm_init_sections -- that code is not visible here.  */
115 section
*progmem_section
;
117 static const struct base_arch_s avr_arch_types
[] = {
118 { 1, 0, 0, 0, 0, 0, 0, 0, NULL
}, /* unknown device specified */
119 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
120 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
121 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
122 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
123 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
124 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
125 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
126 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
127 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
128 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
131 /* These names are used as the index into the avr_arch_types[] table
150 const char *const name
;
151 int arch
; /* index in avr_arch_types[] */
152 /* Must lie outside user's namespace. NULL == no macro. */
153 const char *const macro
;
156 /* List of all known AVR MCU types - if updated, it has to be kept
157 in sync in several places (FIXME: is there a better way?):
159 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
160 - t-avr (MULTILIB_MATCHES)
161 - gas/config/tc-avr.c
164 static const struct mcu_type_s avr_mcu_types
[] = {
165 /* Classic, <= 8K. */
166 { "avr2", ARCH_AVR2
, NULL
},
167 { "at90s2313", ARCH_AVR2
, "__AVR_AT90S2313__" },
168 { "at90s2323", ARCH_AVR2
, "__AVR_AT90S2323__" },
169 { "at90s2333", ARCH_AVR2
, "__AVR_AT90S2333__" },
170 { "at90s2343", ARCH_AVR2
, "__AVR_AT90S2343__" },
171 { "attiny22", ARCH_AVR2
, "__AVR_ATtiny22__" },
172 { "attiny26", ARCH_AVR2
, "__AVR_ATtiny26__" },
173 { "at90s4414", ARCH_AVR2
, "__AVR_AT90S4414__" },
174 { "at90s4433", ARCH_AVR2
, "__AVR_AT90S4433__" },
175 { "at90s4434", ARCH_AVR2
, "__AVR_AT90S4434__" },
176 { "at90s8515", ARCH_AVR2
, "__AVR_AT90S8515__" },
177 { "at90c8534", ARCH_AVR2
, "__AVR_AT90C8534__" },
178 { "at90s8535", ARCH_AVR2
, "__AVR_AT90S8535__" },
179 /* Classic + MOVW, <= 8K. */
180 { "avr25", ARCH_AVR25
, NULL
},
181 { "ata6289", ARCH_AVR25
, "__AVR_ATA6289__" },
182 { "attiny13", ARCH_AVR25
, "__AVR_ATtiny13__" },
183 { "attiny13a", ARCH_AVR25
, "__AVR_ATtiny13A__" },
184 { "attiny2313", ARCH_AVR25
, "__AVR_ATtiny2313__" },
185 { "attiny24", ARCH_AVR25
, "__AVR_ATtiny24__" },
186 { "attiny44", ARCH_AVR25
, "__AVR_ATtiny44__" },
187 { "attiny84", ARCH_AVR25
, "__AVR_ATtiny84__" },
188 { "attiny25", ARCH_AVR25
, "__AVR_ATtiny25__" },
189 { "attiny45", ARCH_AVR25
, "__AVR_ATtiny45__" },
190 { "attiny85", ARCH_AVR25
, "__AVR_ATtiny85__" },
191 { "attiny261", ARCH_AVR25
, "__AVR_ATtiny261__" },
192 { "attiny461", ARCH_AVR25
, "__AVR_ATtiny461__" },
193 { "attiny861", ARCH_AVR25
, "__AVR_ATtiny861__" },
194 { "attiny43u", ARCH_AVR25
, "__AVR_ATtiny43U__" },
195 { "attiny87", ARCH_AVR25
, "__AVR_ATtiny87__" },
196 { "attiny48", ARCH_AVR25
, "__AVR_ATtiny48__" },
197 { "attiny88", ARCH_AVR25
, "__AVR_ATtiny88__" },
198 { "at86rf401", ARCH_AVR25
, "__AVR_AT86RF401__" },
199 /* Classic, > 8K, <= 64K. */
200 { "avr3", ARCH_AVR3
, NULL
},
201 { "at43usb355", ARCH_AVR3
, "__AVR_AT43USB355__" },
202 { "at76c711", ARCH_AVR3
, "__AVR_AT76C711__" },
203 /* Classic, == 128K. */
204 { "avr31", ARCH_AVR31
, NULL
},
205 { "atmega103", ARCH_AVR31
, "__AVR_ATmega103__" },
206 { "at43usb320", ARCH_AVR31
, "__AVR_AT43USB320__" },
207 /* Classic + MOVW + JMP/CALL. */
208 { "avr35", ARCH_AVR35
, NULL
},
209 { "at90usb82", ARCH_AVR35
, "__AVR_AT90USB82__" },
210 { "at90usb162", ARCH_AVR35
, "__AVR_AT90USB162__" },
211 { "attiny167", ARCH_AVR35
, "__AVR_ATtiny167__" },
212 { "attiny327", ARCH_AVR35
, "__AVR_ATtiny327__" },
213 /* Enhanced, <= 8K. */
214 { "avr4", ARCH_AVR4
, NULL
},
215 { "atmega8", ARCH_AVR4
, "__AVR_ATmega8__" },
216 { "atmega48", ARCH_AVR4
, "__AVR_ATmega48__" },
217 { "atmega48p", ARCH_AVR4
, "__AVR_ATmega48P__" },
218 { "atmega88", ARCH_AVR4
, "__AVR_ATmega88__" },
219 { "atmega88p", ARCH_AVR4
, "__AVR_ATmega88P__" },
220 { "atmega8515", ARCH_AVR4
, "__AVR_ATmega8515__" },
221 { "atmega8535", ARCH_AVR4
, "__AVR_ATmega8535__" },
222 { "atmega8hva", ARCH_AVR4
, "__AVR_ATmega8HVA__" },
223 { "atmega4hvd", ARCH_AVR4
, "__AVR_ATmega4HVD__" },
224 { "atmega8hvd", ARCH_AVR4
, "__AVR_ATmega8HVD__" },
225 { "atmega8c1", ARCH_AVR4
, "__AVR_ATmega8C1__" },
226 { "atmega8m1", ARCH_AVR4
, "__AVR_ATmega8M1__" },
227 { "at90pwm1", ARCH_AVR4
, "__AVR_AT90PWM1__" },
228 { "at90pwm2", ARCH_AVR4
, "__AVR_AT90PWM2__" },
229 { "at90pwm2b", ARCH_AVR4
, "__AVR_AT90PWM2B__" },
230 { "at90pwm3", ARCH_AVR4
, "__AVR_AT90PWM3__" },
231 { "at90pwm3b", ARCH_AVR4
, "__AVR_AT90PWM3B__" },
232 { "at90pwm81", ARCH_AVR4
, "__AVR_AT90PWM81__" },
233 /* Enhanced, > 8K, <= 64K. */
234 { "avr5", ARCH_AVR5
, NULL
},
235 { "atmega16", ARCH_AVR5
, "__AVR_ATmega16__" },
236 { "atmega161", ARCH_AVR5
, "__AVR_ATmega161__" },
237 { "atmega162", ARCH_AVR5
, "__AVR_ATmega162__" },
238 { "atmega163", ARCH_AVR5
, "__AVR_ATmega163__" },
239 { "atmega164p", ARCH_AVR5
, "__AVR_ATmega164P__" },
240 { "atmega165", ARCH_AVR5
, "__AVR_ATmega165__" },
241 { "atmega165p", ARCH_AVR5
, "__AVR_ATmega165P__" },
242 { "atmega168", ARCH_AVR5
, "__AVR_ATmega168__" },
243 { "atmega168p", ARCH_AVR5
, "__AVR_ATmega168P__" },
244 { "atmega169", ARCH_AVR5
, "__AVR_ATmega169__" },
245 { "atmega169p", ARCH_AVR5
, "__AVR_ATmega169P__" },
246 { "atmega32", ARCH_AVR5
, "__AVR_ATmega32__" },
247 { "atmega323", ARCH_AVR5
, "__AVR_ATmega323__" },
248 { "atmega324p", ARCH_AVR5
, "__AVR_ATmega324P__" },
249 { "atmega325", ARCH_AVR5
, "__AVR_ATmega325__" },
250 { "atmega325p", ARCH_AVR5
, "__AVR_ATmega325P__" },
251 { "atmega3250", ARCH_AVR5
, "__AVR_ATmega3250__" },
252 { "atmega3250p", ARCH_AVR5
, "__AVR_ATmega3250P__" },
253 { "atmega328p", ARCH_AVR5
, "__AVR_ATmega328P__" },
254 { "atmega329", ARCH_AVR5
, "__AVR_ATmega329__" },
255 { "atmega329p", ARCH_AVR5
, "__AVR_ATmega329P__" },
256 { "atmega3290", ARCH_AVR5
, "__AVR_ATmega3290__" },
257 { "atmega3290p", ARCH_AVR5
, "__AVR_ATmega3290P__" },
258 { "atmega406", ARCH_AVR5
, "__AVR_ATmega406__" },
259 { "atmega64", ARCH_AVR5
, "__AVR_ATmega64__" },
260 { "atmega640", ARCH_AVR5
, "__AVR_ATmega640__" },
261 { "atmega644", ARCH_AVR5
, "__AVR_ATmega644__" },
262 { "atmega644p", ARCH_AVR5
, "__AVR_ATmega644P__" },
263 { "atmega645", ARCH_AVR5
, "__AVR_ATmega645__" },
264 { "atmega6450", ARCH_AVR5
, "__AVR_ATmega6450__" },
265 { "atmega649", ARCH_AVR5
, "__AVR_ATmega649__" },
266 { "atmega6490", ARCH_AVR5
, "__AVR_ATmega6490__" },
267 { "atmega16hva", ARCH_AVR5
, "__AVR_ATmega16HVA__" },
268 { "atmega16hvb", ARCH_AVR5
, "__AVR_ATmega16HVB__" },
269 { "atmega32hvb", ARCH_AVR5
, "__AVR_ATmega32HVB__" },
270 { "at90can32", ARCH_AVR5
, "__AVR_AT90CAN32__" },
271 { "at90can64", ARCH_AVR5
, "__AVR_AT90CAN64__" },
272 { "at90pwm216", ARCH_AVR5
, "__AVR_AT90PWM216__" },
273 { "at90pwm316", ARCH_AVR5
, "__AVR_AT90PWM316__" },
274 { "atmega16c1", ARCH_AVR5
, "__AVR_ATmega16C1__" },
275 { "atmega32c1", ARCH_AVR5
, "__AVR_ATmega32C1__" },
276 { "atmega64c1", ARCH_AVR5
, "__AVR_ATmega64C1__" },
277 { "atmega16m1", ARCH_AVR5
, "__AVR_ATmega16M1__" },
278 { "atmega32m1", ARCH_AVR5
, "__AVR_ATmega32M1__" },
279 { "atmega64m1", ARCH_AVR5
, "__AVR_ATmega64M1__" },
280 { "atmega16u4", ARCH_AVR5
, "__AVR_ATmega16U4__" },
281 { "atmega32u4", ARCH_AVR5
, "__AVR_ATmega32U4__" },
282 { "atmega32u6", ARCH_AVR5
, "__AVR_ATmega32U6__" },
283 { "at90scr100", ARCH_AVR5
, "__AVR_AT90SCR100__" },
284 { "at90usb646", ARCH_AVR5
, "__AVR_AT90USB646__" },
285 { "at90usb647", ARCH_AVR5
, "__AVR_AT90USB647__" },
286 { "at94k", ARCH_AVR5
, "__AVR_AT94K__" },
287 /* Enhanced, == 128K. */
288 { "avr51", ARCH_AVR51
, NULL
},
289 { "atmega128", ARCH_AVR51
, "__AVR_ATmega128__" },
290 { "atmega1280", ARCH_AVR51
, "__AVR_ATmega1280__" },
291 { "atmega1281", ARCH_AVR51
, "__AVR_ATmega1281__" },
292 { "atmega1284p", ARCH_AVR51
, "__AVR_ATmega1284P__" },
293 { "atmega128rfa1", ARCH_AVR51
, "__AVR_ATmega128RFA1__" },
294 { "at90can128", ARCH_AVR51
, "__AVR_AT90CAN128__" },
295 { "at90usb1286", ARCH_AVR51
, "__AVR_AT90USB1286__" },
296 { "at90usb1287", ARCH_AVR51
, "__AVR_AT90USB1287__" },
297 { "m3000f", ARCH_AVR51
, "__AVR_M3000F__" },
298 { "m3000s", ARCH_AVR51
, "__AVR_M3000S__" },
299 { "m3001b", ARCH_AVR51
, "__AVR_M3001B__" },
301 { "avr6", ARCH_AVR6
, NULL
},
302 { "atmega2560", ARCH_AVR6
, "__AVR_ATmega2560__" },
303 { "atmega2561", ARCH_AVR6
, "__AVR_ATmega2561__" },
304 /* Assembler only. */
305 { "avr1", ARCH_AVR1
, NULL
},
306 { "at90s1200", ARCH_AVR1
, "__AVR_AT90S1200__" },
307 { "attiny11", ARCH_AVR1
, "__AVR_ATtiny11__" },
308 { "attiny12", ARCH_AVR1
, "__AVR_ATtiny12__" },
309 { "attiny15", ARCH_AVR1
, "__AVR_ATtiny15__" },
310 { "attiny28", ARCH_AVR1
, "__AVR_ATtiny28__" },
311 { NULL
, ARCH_UNKNOWN
, NULL
}
315 /* Initialize the GCC target structure. */
316 #undef TARGET_ASM_ALIGNED_HI_OP
317 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
318 #undef TARGET_ASM_ALIGNED_SI_OP
319 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
320 #undef TARGET_ASM_UNALIGNED_HI_OP
321 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
322 #undef TARGET_ASM_UNALIGNED_SI_OP
323 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
324 #undef TARGET_ASM_INTEGER
325 #define TARGET_ASM_INTEGER avr_assemble_integer
326 #undef TARGET_ASM_FILE_START
327 #define TARGET_ASM_FILE_START avr_file_start
328 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
329 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
330 #undef TARGET_ASM_FILE_END
331 #define TARGET_ASM_FILE_END avr_file_end
333 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
334 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
335 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
336 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
337 #undef TARGET_FUNCTION_VALUE
338 #define TARGET_FUNCTION_VALUE avr_function_value
339 #undef TARGET_ATTRIBUTE_TABLE
340 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
341 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
342 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
343 #undef TARGET_INSERT_ATTRIBUTES
344 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
345 #undef TARGET_SECTION_TYPE_FLAGS
346 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
347 #undef TARGET_RTX_COSTS
348 #define TARGET_RTX_COSTS avr_rtx_costs
349 #undef TARGET_ADDRESS_COST
350 #define TARGET_ADDRESS_COST avr_address_cost
351 #undef TARGET_MACHINE_DEPENDENT_REORG
352 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
354 #undef TARGET_LEGITIMIZE_ADDRESS
355 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
357 #undef TARGET_RETURN_IN_MEMORY
358 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
360 #undef TARGET_STRICT_ARGUMENT_NAMING
361 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
363 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
364 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
366 #undef TARGET_HARD_REGNO_SCRATCH_OK
367 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
368 #undef TARGET_CASE_VALUES_THRESHOLD
369 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
371 #undef TARGET_LEGITIMATE_ADDRESS_P
372 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
/* The target hook vector: every TARGET_* macro redefined above is
   folded in by TARGET_INITIALIZER.  */
374 struct gcc_target targetm
= TARGET_INITIALIZER
;
377 avr_override_options (void)
379 const struct mcu_type_s
*t
;
381 flag_delete_null_pointer_checks
= 0;
383 if (!PARAM_SET_P (PARAM_INLINE_CALL_COST
))
384 set_param_value ("inline-call-cost", 5);
386 for (t
= avr_mcu_types
; t
->name
; t
++)
387 if (strcmp (t
->name
, avr_mcu_name
) == 0)
392 fprintf (stderr
, "unknown MCU '%s' specified\nKnown MCU names:\n",
394 for (t
= avr_mcu_types
; t
->name
; t
++)
395 fprintf (stderr
," %s\n", t
->name
);
398 avr_current_arch
= &avr_arch_types
[t
->arch
];
399 avr_extra_arch_macro
= t
->macro
;
401 tmp_reg_rtx
= gen_rtx_REG (QImode
, TMP_REGNO
);
402 zero_reg_rtx
= gen_rtx_REG (QImode
, ZERO_REGNO
);
404 init_machine_status
= avr_init_machine_status
;
407 /* return register class from register number. */
409 static const int reg_class_tab
[]={
410 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
411 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
412 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
413 GENERAL_REGS
, /* r0 - r15 */
414 LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,
415 LD_REGS
, /* r16 - 23 */
416 ADDW_REGS
,ADDW_REGS
, /* r24,r25 */
417 POINTER_X_REGS
,POINTER_X_REGS
, /* r26,27 */
418 POINTER_Y_REGS
,POINTER_Y_REGS
, /* r28,r29 */
419 POINTER_Z_REGS
,POINTER_Z_REGS
, /* r30,r31 */
420 STACK_REG
,STACK_REG
/* SPL,SPH */
423 /* Function to set up the backend function structure.
    Installed as init_machine_status in avr_override_options; allocates a
    zero-initialized, GC-managed machine_function for the current cfun.  */
425 static struct machine_function
*
426 avr_init_machine_status (void)
428 return ((struct machine_function
*)
429 ggc_alloc_cleared (sizeof (struct machine_function
)));
432 /* Return register class for register R.
    Plain table lookup into reg_class_tab (entries for r0..r31 plus
    SPL/SPH).  NOTE(review): no bounds check on R -- callers must pass a
    valid hard register number or this reads past the table.  */
435 avr_regno_reg_class (int r
)
438 return reg_class_tab
[r
];
442 /* Return nonzero if FUNC is a naked function.
    FUNC must be a FUNCTION_DECL (asserted).  The "naked" attribute is
    looked up on the function's *type*, unlike "interrupt"/"signal"
    which live on the decl.  */
445 avr_naked_function_p (tree func
)
449 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
451 a
= lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
452 return a
!= NULL_TREE
;
455 /* Return nonzero if FUNC is an interrupt function as specified
456 by the "interrupt" attribute.  Unlike avr_naked_function_p this
    tolerates a non-FUNCTION_DECL argument (the early-out body of the
    check below is elided from this view -- presumably "return 0;")
    and looks the attribute up on DECL_ATTRIBUTES, not the type.  */
459 interrupt_function_p (tree func
)
463 if (TREE_CODE (func
) != FUNCTION_DECL
)
466 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
467 return a
!= NULL_TREE
;
470 /* Return nonzero if FUNC is a signal function as specified
471 by the "signal" attribute.  Mirrors interrupt_function_p: non-decl
    arguments take the (elided) early-out path, and the attribute is
    searched on DECL_ATTRIBUTES.  */
474 signal_function_p (tree func
)
478 if (TREE_CODE (func
) != FUNCTION_DECL
)
481 a
= lookup_attribute ("signal", DECL_ATTRIBUTES (func
));
482 return a
!= NULL_TREE
;
485 /* Return nonzero if FUNC is a OS_task function.
    FUNC must be a FUNCTION_DECL (asserted); like "naked", the
    "OS_task" attribute is attached to the function's type.  */
488 avr_OS_task_function_p (tree func
)
492 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
494 a
= lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
495 return a
!= NULL_TREE
;
498 /* Return nonzero if FUNC is a OS_main function.
    Same contract as avr_OS_task_function_p, for the "OS_main"
    type attribute.  */
501 avr_OS_main_function_p (tree func
)
505 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
507 a
= lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
508 return a
!= NULL_TREE
;
511 /* Return the number of hard registers to push/pop in the prologue/epilogue
512 of the current function, and optionally store these registers in SET.
    NOTE(review): callers pass SET == NULL (see avr_simple_epilogue and
    avr_initial_elimination_offset); the NULL guards around the SET
    dereferences appear to be in lines elided from this view -- confirm.  */
515 avr_regs_to_save (HARD_REG_SET
*set
)
518 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
519 || signal_function_p (current_function_decl
));
/* Leafness is only recomputed before reload; afterwards the cached
   cfun->machine->is_leaf value is trusted.  */
521 if (!reload_completed
)
522 cfun
->machine
->is_leaf
= leaf_function_p ();
525 CLEAR_HARD_REG_SET (*set
);
528 /* No need to save any registers if the function never returns or
529 has the "OS_task" or "OS_main" attribute. */
530 if (TREE_THIS_VOLATILE (current_function_decl
)
531 || cfun
->machine
->is_OS_task
532 || cfun
->machine
->is_OS_main
)
535 for (reg
= 0; reg
< 32; reg
++)
537 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
538 any global register variables. */
/* Save REG if: (a) a non-leaf interrupt/signal handler must preserve
   every call-used register, or (b) REG is live and either call-saved
   or we are in a handler -- except Y (and Y+1) when it serves as the
   frame pointer, which is saved separately.  */
542 if ((int_or_sig_p
&& !cfun
->machine
->is_leaf
&& call_used_regs
[reg
])
543 || (df_regs_ever_live_p (reg
)
544 && (int_or_sig_p
|| !call_used_regs
[reg
])
545 && !(frame_pointer_needed
546 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
549 SET_HARD_REG_BIT (*set
, reg
);
556 /* Return true if register FROM can be eliminated via register TO.
    arg-pointer -> frame-pointer is always allowed; eliminating the
    frame pointer (either half of the HImode pair) is allowed only when
    no frame pointer is needed.  Note TO is not consulted in the second
    arm -- the frame pointer can then only eliminate to the stack
    pointer anyway.  */
559 avr_can_eliminate (int from
, int to
)
561 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
562 || ((from
== FRAME_POINTER_REGNUM
563 || from
== FRAME_POINTER_REGNUM
+ 1)
564 && !frame_pointer_needed
));
567 /* Compute offset between arg_pointer and frame_pointer.
    For the frame-pointer -> stack-pointer elimination (handled in lines
    elided from this view) and the case below: the distance is the frame
    size, plus the return address (2 or 3 bytes depending on
    EIJMP/EICALL support), plus the saved registers, plus the saved
    frame pointer (2 bytes when one is needed).  The trailing "+ 1" is
    presumably the bias between the post-decrement SP and the first
    stack slot -- confirm against STARTING_FRAME_OFFSET.  */
570 avr_initial_elimination_offset (int from
, int to
)
572 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
576 int offset
= frame_pointer_needed
? 2 : 0;
577 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
/* avr_regs_to_save (NULL) returns only the count, no set is filled.  */
579 offset
+= avr_regs_to_save (NULL
);
580 return get_frame_size () + (avr_pc_size
) + 1 + offset
;
584 /* Actual start of frame is virtual_stack_vars_rtx, which is offset from
585 the frame pointer by +STARTING_FRAME_OFFSET.
586 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
587 avoids creating add/sub of offset in nonlocal goto and setjmp. */
589 rtx
avr_builtin_setjmp_frame_value (void)
591 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
592 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
595 /* Return 1 if the function epilogue is just a single "ret".
    True only when there is no frame, no frame pointer, nothing to
    restore, and the function is neither an interrupt/signal handler
    (which need reti and SREG restore) nor naked nor noreturn.  */
598 avr_simple_epilogue (void)
600 return (! frame_pointer_needed
601 && get_frame_size () == 0
602 && avr_regs_to_save (NULL
) == 0
603 && ! interrupt_function_p (current_function_decl
)
604 && ! signal_function_p (current_function_decl
)
605 && ! avr_naked_function_p (current_function_decl
)
606 && ! TREE_THIS_VOLATILE (current_function_decl
));
609 /* This function checks sequence of live registers.
    Used by TARGET_CALL_PROLOGUES code to decide whether the saved
    registers form a contiguous run suitable for the out-of-line
    prologue/epilogue helpers (gen_call_prologue_saves /
    gen_epilogue_restores).  NOTE(review): the declarations and
    accumulation of cur_seq/live_seq are in lines elided from this
    view; the visible logic scans call-saved r0..r17, then counts the
    frame-pointer pair (Y, Y+1) only when no frame pointer is needed.
    Returns live_seq when the live registers are exactly one contiguous
    sequence, 0 otherwise.  */
612 sequent_regs_live (void)
618 for (reg
= 0; reg
< 18; ++reg
)
620 if (!call_used_regs
[reg
])
622 if (df_regs_ever_live_p (reg
))
632 if (!frame_pointer_needed
)
634 if (df_regs_ever_live_p (REG_Y
))
642 if (df_regs_ever_live_p (REG_Y
+1))
655 return (cur_seq
== live_seq
) ? live_seq
: 0;
658 /* Return the total length (sum of per-insn "length" attributes) of the
    insn list INSNS.  Used to pick the shorter of two candidate
    prologue/epilogue sequences.  */
661 get_sequence_length (rtx insns
)
666 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
667 length
+= get_attr_length (insn
);
672 /* Output function prologue. */
675 expand_prologue (void)
680 HOST_WIDE_INT size
= get_frame_size();
681 /* Define templates for push instructions. */
682 rtx pushbyte
= gen_rtx_MEM (QImode
,
683 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
684 rtx pushword
= gen_rtx_MEM (HImode
,
685 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
688 last_insn_address
= 0;
690 /* Init cfun->machine. */
691 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
692 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
693 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
694 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
695 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
697 /* Prologue: naked. */
698 if (cfun
->machine
->is_naked
)
703 avr_regs_to_save (&set
);
704 live_seq
= sequent_regs_live ();
705 minimize
= (TARGET_CALL_PROLOGUES
706 && !cfun
->machine
->is_interrupt
707 && !cfun
->machine
->is_signal
708 && !cfun
->machine
->is_OS_task
709 && !cfun
->machine
->is_OS_main
712 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
714 if (cfun
->machine
->is_interrupt
)
716 /* Enable interrupts. */
717 insn
= emit_insn (gen_enable_interrupt ());
718 RTX_FRAME_RELATED_P (insn
) = 1;
722 insn
= emit_move_insn (pushbyte
, zero_reg_rtx
);
723 RTX_FRAME_RELATED_P (insn
) = 1;
726 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
727 RTX_FRAME_RELATED_P (insn
) = 1;
730 insn
= emit_move_insn (tmp_reg_rtx
,
731 gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
732 RTX_FRAME_RELATED_P (insn
) = 1;
733 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
734 RTX_FRAME_RELATED_P (insn
) = 1;
738 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
740 insn
= emit_move_insn (tmp_reg_rtx
,
741 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
742 RTX_FRAME_RELATED_P (insn
) = 1;
743 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
744 RTX_FRAME_RELATED_P (insn
) = 1;
747 /* Clear zero reg. */
748 insn
= emit_move_insn (zero_reg_rtx
, const0_rtx
);
749 RTX_FRAME_RELATED_P (insn
) = 1;
751 /* Prevent any attempt to delete the setting of ZERO_REG! */
752 emit_use (zero_reg_rtx
);
754 if (minimize
&& (frame_pointer_needed
755 || (AVR_2_BYTE_PC
&& live_seq
> 6)
758 insn
= emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
759 gen_int_mode (size
, HImode
));
760 RTX_FRAME_RELATED_P (insn
) = 1;
763 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
764 gen_int_mode (size
+ live_seq
, HImode
)));
765 RTX_FRAME_RELATED_P (insn
) = 1;
770 for (reg
= 0; reg
< 32; ++reg
)
772 if (TEST_HARD_REG_BIT (set
, reg
))
774 /* Emit push of register to save. */
775 insn
=emit_move_insn (pushbyte
, gen_rtx_REG (QImode
, reg
));
776 RTX_FRAME_RELATED_P (insn
) = 1;
779 if (frame_pointer_needed
)
781 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
783 /* Push frame pointer. */
784 insn
= emit_move_insn (pushword
, frame_pointer_rtx
);
785 RTX_FRAME_RELATED_P (insn
) = 1;
790 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
791 RTX_FRAME_RELATED_P (insn
) = 1;
795 /* Creating a frame can be done by direct manipulation of the
796 stack or via the frame pointer. These two methods are:
803 the optimum method depends on function type, stack and frame size.
804 To avoid a complex logic, both methods are tested and shortest
808 rtx sp_plus_insns
= NULL_RTX
;
810 if (TARGET_TINY_STACK
)
812 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
813 over 'sbiw' (2 cycles, same size). */
814 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
818 /* Normal sized addition. */
819 myfp
= frame_pointer_rtx
;
822 /* Method 1-Adjust frame pointer. */
825 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
826 RTX_FRAME_RELATED_P (insn
) = 1;
829 emit_move_insn (myfp
,
830 gen_rtx_PLUS (GET_MODE(myfp
), myfp
,
833 RTX_FRAME_RELATED_P (insn
) = 1;
835 /* Copy to stack pointer. */
836 if (TARGET_TINY_STACK
)
838 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
839 RTX_FRAME_RELATED_P (insn
) = 1;
841 else if (TARGET_NO_INTERRUPTS
842 || cfun
->machine
->is_signal
843 || cfun
->machine
->is_OS_main
)
846 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
848 RTX_FRAME_RELATED_P (insn
) = 1;
850 else if (cfun
->machine
->is_interrupt
)
852 insn
= emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
854 RTX_FRAME_RELATED_P (insn
) = 1;
858 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
859 RTX_FRAME_RELATED_P (insn
) = 1;
862 fp_plus_insns
= get_insns ();
865 /* Method 2-Adjust Stack pointer. */
871 emit_move_insn (stack_pointer_rtx
,
872 gen_rtx_PLUS (HImode
,
876 RTX_FRAME_RELATED_P (insn
) = 1;
879 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
880 RTX_FRAME_RELATED_P (insn
) = 1;
882 sp_plus_insns
= get_insns ();
886 /* Use shortest method. */
887 if (size
<= 6 && (get_sequence_length (sp_plus_insns
)
888 < get_sequence_length (fp_plus_insns
)))
889 emit_insn (sp_plus_insns
);
891 emit_insn (fp_plus_insns
);
897 /* Output summary at end of function prologue.
    Implements TARGET_ASM_FUNCTION_END_PROLOGUE: emits a human-readable
    comment into the asm stream describing the prologue kind and the
    frame size.  Naked functions get only the "naked" line (early
    return elided from this view).  */
900 avr_asm_function_end_prologue (FILE *file
)
902 if (cfun
->machine
->is_naked
)
904 fputs ("/* prologue: naked */\n", file
);
908 if (cfun
->machine
->is_interrupt
)
910 fputs ("/* prologue: Interrupt */\n", file
);
912 else if (cfun
->machine
->is_signal
)
914 fputs ("/* prologue: Signal */\n", file
);
917 fputs ("/* prologue: function */\n", file
);
919 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
927 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
931 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
936 /* Output RTL epilogue. */
939 expand_epilogue (void)
945 HOST_WIDE_INT size
= get_frame_size();
947 /* epilogue: naked */
948 if (cfun
->machine
->is_naked
)
950 emit_jump_insn (gen_return ());
954 avr_regs_to_save (&set
);
955 live_seq
= sequent_regs_live ();
956 minimize
= (TARGET_CALL_PROLOGUES
957 && !cfun
->machine
->is_interrupt
958 && !cfun
->machine
->is_signal
959 && !cfun
->machine
->is_OS_task
960 && !cfun
->machine
->is_OS_main
963 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
965 if (frame_pointer_needed
)
967 /* Get rid of frame. */
968 emit_move_insn(frame_pointer_rtx
,
969 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
970 gen_int_mode (size
, HImode
)));
974 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
977 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
981 if (frame_pointer_needed
)
985 /* Try two methods to adjust stack and select shortest. */
988 rtx sp_plus_insns
= NULL_RTX
;
990 if (TARGET_TINY_STACK
)
992 /* The high byte (r29) doesn't change - prefer 'subi'
993 (1 cycle) over 'sbiw' (2 cycles, same size). */
994 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
998 /* Normal sized addition. */
999 myfp
= frame_pointer_rtx
;
1002 /* Method 1-Adjust frame pointer. */
1005 emit_move_insn (myfp
,
1006 gen_rtx_PLUS (HImode
, myfp
,
1010 /* Copy to stack pointer. */
1011 if (TARGET_TINY_STACK
)
1013 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1015 else if (TARGET_NO_INTERRUPTS
1016 || cfun
->machine
->is_signal
)
1018 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
1019 frame_pointer_rtx
));
1021 else if (cfun
->machine
->is_interrupt
)
1023 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
1024 frame_pointer_rtx
));
1028 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1031 fp_plus_insns
= get_insns ();
1034 /* Method 2-Adjust Stack pointer. */
1039 emit_move_insn (stack_pointer_rtx
,
1040 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
1044 sp_plus_insns
= get_insns ();
1048 /* Use shortest method. */
1049 if (size
<= 5 && (get_sequence_length (sp_plus_insns
)
1050 < get_sequence_length (fp_plus_insns
)))
1051 emit_insn (sp_plus_insns
);
1053 emit_insn (fp_plus_insns
);
1055 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1057 /* Restore previous frame_pointer. */
1058 emit_insn (gen_pophi (frame_pointer_rtx
));
1061 /* Restore used registers. */
1062 for (reg
= 31; reg
>= 0; --reg
)
1064 if (TEST_HARD_REG_BIT (set
, reg
))
1065 emit_insn (gen_popqi (gen_rtx_REG (QImode
, reg
)));
1067 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1069 /* Restore RAMPZ using tmp reg as scratch. */
1071 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
1073 emit_insn (gen_popqi (tmp_reg_rtx
));
1074 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(RAMPZ_ADDR
)),
1078 /* Restore SREG using tmp reg as scratch. */
1079 emit_insn (gen_popqi (tmp_reg_rtx
));
1081 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(SREG_ADDR
)),
1084 /* Restore tmp REG. */
1085 emit_insn (gen_popqi (tmp_reg_rtx
));
1087 /* Restore zero REG. */
1088 emit_insn (gen_popqi (zero_reg_rtx
));
1091 emit_jump_insn (gen_return ());
1095 /* Output summary messages at beginning of function epilogue.
     Implements TARGET_ASM_FUNCTION_BEGIN_EPILOGUE: emits a marker
     comment into the asm stream.  */
1098 avr_asm_function_begin_epilogue (FILE *file
)
1100 fprintf (file
, "/* epilogue start */\n");
1103 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1104 machine for a memory operand of mode MODE. */
/* Implements TARGET_LEGITIMATE_ADDRESS_P for AVR.  Accumulates the
   answer in reg class R (NO_REGS means "not legitimate") and returns
   0 for NO_REGS, nonzero otherwise (see the final return below).
   STRICT selects strict vs. non-strict base-register checking.
   NOTE(review): many interior lines (braces, some assignments to R)
   are missing from this extraction. */
1107 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1109 enum reg_class r
= NO_REGS
;
/* Optional debug dump of the mode and reload state, enabled by the
   -mall-debug style flag. */
1111 if (TARGET_ALL_DEBUG
)
1113 fprintf (stderr
, "mode: (%s) %s %s %s %s:",
1114 GET_MODE_NAME(mode
),
1115 strict
? "(strict)": "",
1116 reload_completed
? "(reload_completed)": "",
1117 reload_in_progress
? "(reload_in_progress)": "",
1118 reg_renumber
? "(reg_renumber)" : "");
/* Debug-only: for a (PLUS reg const_int) address whose offset is in
   the LD/LDD displacement range, show hard-reg mapping of the base. */
1119 if (GET_CODE (x
) == PLUS
1120 && REG_P (XEXP (x
, 0))
1121 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1122 && INTVAL (XEXP (x
, 1)) >= 0
1123 && INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
1126 fprintf (stderr
, "(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1127 true_regnum (XEXP (x
, 0)));
/* Non-strict: look through a SUBREG before classifying.
   NOTE(review): the statement controlled by this `if` is missing here. */
1130 if (!strict
&& GET_CODE (x
) == SUBREG
)
/* A bare register is OK if it passes the (strict or non-strict)
   base-register predicate. */
1132 if (REG_P (x
) && (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
1133 : REG_OK_FOR_BASE_NOSTRICT_P (x
)))
/* A constant address (symbol/label/const int) is always legitimate. */
1135 else if (CONSTANT_ADDRESS_P (x
))
/* Base + non-negative constant displacement. */
1137 else if (GET_CODE (x
) == PLUS
1138 && REG_P (XEXP (x
, 0))
1139 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1140 && INTVAL (XEXP (x
, 1)) >= 0)
/* FIT: displacement is within the LDD/STD range for this mode. */
1142 int fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
/* Base is one of the pointer register pairs X, Y, or Z.
   NOTE(review): the first condition of this || chain is missing. */
1146 || REGNO (XEXP (x
,0)) == REG_X
1147 || REGNO (XEXP (x
,0)) == REG_Y
1148 || REGNO (XEXP (x
,0)) == REG_Z
)
1149 r
= BASE_POINTER_REGS
;
1150 if (XEXP (x
,0) == frame_pointer_rtx
1151 || XEXP (x
,0) == arg_pointer_rtx
)
1152 r
= BASE_POINTER_REGS
;
1154 else if (frame_pointer_needed
&& XEXP (x
,0) == frame_pointer_rtx
)
/* Pre-decrement / post-increment on a valid base register. */
1157 else if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1158 && REG_P (XEXP (x
, 0))
1159 && (strict
? REG_OK_FOR_BASE_STRICT_P (XEXP (x
, 0))
1160 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x
, 0))))
1164 if (TARGET_ALL_DEBUG
)
1166 fprintf (stderr
, " ret = %c\n", r
+ '0');
/* NO_REGS means the address was not recognized as legitimate. */
1168 return r
== NO_REGS
? 0 : (int)r
;
1171 /* Attempts to replace X with a valid
1172 memory address for an operand of mode MODE */
/* Implements TARGET_LEGITIMIZE_ADDRESS.  OLDX is the address as it
   stood before generic legitimization; when it is (PLUS reg reg) or
   (PLUS reg big-const) the whole expression is forced into a register.
   NOTE(review): the final return statement is missing from this
   extraction; the visible code only reassigns X. */
1175 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1178 if (TARGET_ALL_DEBUG
)
1180 fprintf (stderr
, "legitimize_address mode: %s", GET_MODE_NAME(mode
));
1184 if (GET_CODE (oldx
) == PLUS
1185 && REG_P (XEXP (oldx
,0)))
/* reg + reg: no addressing mode for this — force into a register. */
1187 if (REG_P (XEXP (oldx
,1)))
1188 x
= force_reg (GET_MODE (oldx
), oldx
);
1189 else if (GET_CODE (XEXP (oldx
, 1)) == CONST_INT
)
1191 int offs
= INTVAL (XEXP (oldx
,1));
/* The frame pointer base is handled elsewhere; for other bases an
   offset beyond the LDD displacement range must be reloaded. */
1192 if (frame_pointer_rtx
!= XEXP (oldx
,0))
1193 if (offs
> MAX_LD_OFFSET (mode
))
1195 if (TARGET_ALL_DEBUG
)
1196 fprintf (stderr
, "force_reg (big offset)\n");
1197 x
= force_reg (GET_MODE (oldx
), oldx
);
1205 /* Return a pointer register name as a string. */
/* Maps a pointer-register pair number (REG_X/REG_Y/REG_Z) to its
   assembler name.  Any other register number is an operand error.
   NOTE(review): the switch header, braces, and default label are
   missing from this extraction. */
1208 ptrreg_to_str (int regno
)
1212 case REG_X
: return "X";
1213 case REG_Y
: return "Y";
1214 case REG_Z
: return "Z";
/* Reached for any regno that is not X, Y, or Z. */
1216 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1221 /* Return the condition name as a string.
1222 Used in conditional jump constructing */
/* Translates an RTX comparison code into the AVR branch-condition
   suffix.  The two visible tests consult the previous insn's
   condition-code status: when the overflow flag is unusable, a
   different (sign/zero based) branch condition must be chosen.
   NOTE(review): nearly all of this function's switch body is missing
   from this extraction — only the two CC_OVERFLOW_UNUSABLE checks
   survive; do not infer the per-code returns from here. */
1225 cond_string (enum rtx_code code
)
1234 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1239 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1252 /* Output ADDR to FILE as address. */
/* Prints an address RTX in AVR assembler syntax: a bare pointer
   register, pre-decrement (-X/-Y/-Z), post-increment (X+/Y+/Z+), or a
   constant.  Function symbols and labels are wrapped in gs(...) so the
   linker emits a word (program-memory) address.
   NOTE(review): the case labels of this switch are missing from this
   extraction; the branch/label association below is inferred from the
   printed formats and should be confirmed against the original. */
1255 print_operand_address (FILE *file
, rtx addr
)
1257 switch (GET_CODE (addr
))
/* Plain pointer register: print its name (X, Y, or Z). */
1260 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
/* Pre-decrement addressing: "-X" etc. */
1264 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
/* Post-increment addressing: "X+" etc. */
1268 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
/* Code addresses (function symbols and labels) need the gs() wrapper. */
1272 if (CONSTANT_ADDRESS_P (addr
)
1273 && ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (addr
))
1274 || GET_CODE (addr
) == LABEL_REF
))
1276 fprintf (file
, "gs(");
1277 output_addr_const (file
,addr
);
1278 fprintf (file
,")");
/* Ordinary constant address. */
1281 output_addr_const (file
, addr
);
1286 /* Output X as assembler operand to file FILE. */
1289 print_operand (FILE *file
, rtx x
, int code
)
1293 if (code
>= 'A' && code
<= 'D')
1298 if (!AVR_HAVE_JMP_CALL
)
1301 else if (code
== '!')
1303 if (AVR_HAVE_EIJMP_EICALL
)
1308 if (x
== zero_reg_rtx
)
1309 fprintf (file
, "__zero_reg__");
1311 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1313 else if (GET_CODE (x
) == CONST_INT
)
1314 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1315 else if (GET_CODE (x
) == MEM
)
1317 rtx addr
= XEXP (x
,0);
1319 if (CONSTANT_P (addr
) && abcd
)
1322 output_address (addr
);
1323 fprintf (file
, ")+%d", abcd
);
1325 else if (code
== 'o')
1327 if (GET_CODE (addr
) != PLUS
)
1328 fatal_insn ("bad address, not (reg+disp):", addr
);
1330 print_operand (file
, XEXP (addr
, 1), 0);
1332 else if (code
== 'p' || code
== 'r')
1334 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1335 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1338 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1340 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1342 else if (GET_CODE (addr
) == PLUS
)
1344 print_operand_address (file
, XEXP (addr
,0));
1345 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1346 fatal_insn ("internal compiler error. Bad address:"
1349 print_operand (file
, XEXP (addr
,1), code
);
1352 print_operand_address (file
, addr
);
1354 else if (GET_CODE (x
) == CONST_DOUBLE
)
1358 if (GET_MODE (x
) != SFmode
)
1359 fatal_insn ("internal compiler error. Unknown mode:", x
);
1360 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1361 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1362 fprintf (file
, "0x%lx", val
);
1364 else if (code
== 'j')
1365 fputs (cond_string (GET_CODE (x
)), file
);
1366 else if (code
== 'k')
1367 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1369 print_operand_address (file
, x
);
1372 /* Update the condition code in the INSN. */
/* NOTICE_UPDATE_CC implementation: records in the global cc_status how
   INSN affected the condition-code register, keyed off the insn's
   "cc" machine-description attribute.  BODY is unused.
   NOTE(review): the CC_* case labels of this switch are missing from
   this extraction; the attribute value each fragment belongs to is
   inferred from the original comments left in place. */
1375 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1379 switch (get_attr_cc (insn
))
1382 /* Insn does not affect CC at all. */
/* Single-set insn whose destination now mirrors CC with a usable
   (cleared) overflow flag. */
1390 set
= single_set (insn
);
1394 cc_status
.flags
|= CC_NO_OVERFLOW
;
1395 cc_status
.value1
= SET_DEST (set
);
1400 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1401 The V flag may or may not be known but that's ok because
1402 alter_cond will change tests to use EQ/NE. */
1403 set
= single_set (insn
);
1407 cc_status
.value1
= SET_DEST (set
);
1408 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
/* Compare insn: CC reflects the source operand of the set. */
1413 set
= single_set (insn
);
1416 cc_status
.value1
= SET_SRC (set
);
1420 /* Insn doesn't leave CC in a usable state. */
1423 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1424 set
= single_set (insn
);
1427 rtx src
= SET_SRC (set
);
/* Only QImode arithmetic right shifts get the special treatment. */
1429 if (GET_CODE (src
) == ASHIFTRT
1430 && GET_MODE (src
) == QImode
)
1432 rtx x
= XEXP (src
, 1);
/* NOTE(review): the "!= 6" shift-count test referenced by the comment
   above is among the missing lines. */
1434 if (GET_CODE (x
) == CONST_INT
1438 cc_status
.value1
= SET_DEST (set
);
1439 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1447 /* Return maximum number of consecutive registers of
1448 class CLASS needed to hold a value of mode MODE. */
/* On AVR this is simply the mode size rounded up to whole words
   (UNITS_PER_WORD); the register class is irrelevant and unused. */
1451 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED
,enum machine_mode mode
)
1453 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
1456 /* Choose mode for jump insn:
1457 1 - relative jump in range -63 <= x <= 62 ;
1458 2 - relative jump in range -2046 <= x <= 2045 ;
1459 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (a LABEL_REF or the label itself); INSN is the
   jump insn.  Distances are computed from the insn-address table
   (INSN_ADDRESSES), so this is only meaningful after shorten_branches.
   NOTE(review): the return statements selecting 1/2/3 are missing from
   this extraction; only the range tests are visible. */
1462 avr_jump_mode (rtx x
, rtx insn
)
1464 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_MODE (x
) == LABEL_REF
1465 ? XEXP (x
, 0) : x
));
1466 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
1467 int jump_distance
= cur_addr
- dest_addr
;
/* Short branch (BRxx) range. */
1469 if (-63 <= jump_distance
&& jump_distance
<= 62)
/* RJMP range. */
1471 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
/* Absolute JMP, only on devices that have it. */
1473 else if (AVR_HAVE_JMP_CALL
)
1479 /* return an AVR condition jump commands.
1480 X is a comparison RTX.
1481 LEN is a number returned by avr_jump_mode function.
1482 if REVERSE nonzero then condition code in X must be reversed. */
1485 ret_cond_branch (rtx x
, int len
, int reverse
)
1487 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
1492 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1493 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1495 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1496 AS1 (brmi
,.+2) CR_TAB
1498 (AS1 (breq
,.+6) CR_TAB
1499 AS1 (brmi
,.+4) CR_TAB
1503 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1505 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1506 AS1 (brlt
,.+2) CR_TAB
1508 (AS1 (breq
,.+6) CR_TAB
1509 AS1 (brlt
,.+4) CR_TAB
1512 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1514 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1515 AS1 (brlo
,.+2) CR_TAB
1517 (AS1 (breq
,.+6) CR_TAB
1518 AS1 (brlo
,.+4) CR_TAB
1521 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1522 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1524 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1525 AS1 (brpl
,.+2) CR_TAB
1527 (AS1 (breq
,.+2) CR_TAB
1528 AS1 (brpl
,.+4) CR_TAB
1531 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1533 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1534 AS1 (brge
,.+2) CR_TAB
1536 (AS1 (breq
,.+2) CR_TAB
1537 AS1 (brge
,.+4) CR_TAB
1540 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1542 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1543 AS1 (brsh
,.+2) CR_TAB
1545 (AS1 (breq
,.+2) CR_TAB
1546 AS1 (brsh
,.+4) CR_TAB
1554 return AS1 (br
%k1
,%0);
1556 return (AS1 (br
%j1
,.+2) CR_TAB
1559 return (AS1 (br
%j1
,.+4) CR_TAB
1568 return AS1 (br
%j1
,%0);
1570 return (AS1 (br
%k1
,.+2) CR_TAB
1573 return (AS1 (br
%k1
,.+4) CR_TAB
1581 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in the unsigned byte range [0, 0xff].
   MODE is accepted for the predicate interface but not consulted. */
1584 byte_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1586 return (GET_CODE (op
) == CONST_INT
1587 && INTVAL (op
) <= 0xff && INTVAL (op
) >= 0);
1590 /* Output all insn addresses and their sizes into the assembly language
1591 output file. This is helpful for debugging whether the length attributes
1592 in the md file are correct.
1593 Output insn cost for next insn. */
/* FINAL_PRESCAN_INSN hook: when -minsn-size-dump / all-debug is on,
   emits a comment with the insn's address, the delta from the previous
   dumped insn, and its rtx_cost.  Also maintains the file-static
   last_insn_address between calls. */
1596 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
1597 int num_operands ATTRIBUTE_UNUSED
)
1599 int uid
= INSN_UID (insn
);
1601 if (TARGET_INSN_SIZE_DUMP
|| TARGET_ALL_DEBUG
)
1603 fprintf (asm_out_file
, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1604 INSN_ADDRESSES (uid
),
/* Size of the previous insn = current address minus last address. */
1605 INSN_ADDRESSES (uid
) - last_insn_address
,
1606 rtx_cost (PATTERN (insn
), INSN
, !optimize_size
));
1608 last_insn_address
= INSN_ADDRESSES (uid
);
1611 /* Return 0 if undefined, 1 if always true or always false. */
/* Decides whether comparison OP of a MODE value against CONST_INT X is
   degenerate (always true/false) and thus removable.  MAX is the
   all-ones value for the mode, 0 for modes not handled.
   NOTE(review): the return statements inside the condition are missing
   from this extraction. */
1614 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
1616 unsigned int max
= (mode
== QImode
? 0xff :
1617 mode
== HImode
? 0xffff :
1618 mode
== SImode
? 0xffffffff : 0);
1619 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
/* Only signed comparisons are of interest here: unsigned_condition
   maps a signed code to its unsigned twin, so inequality means OP was
   signed. */
1621 if (unsigned_condition (op
) != op
)
/* X equal to the mode's maximum makes the comparison degenerate. */
1624 if (max
!= (INTVAL (x
) & max
)
1625 && INTVAL (x
) != 0xff)
1632 /* Returns nonzero if REGNO is the number of a hard
1633 register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8..r25 (see FIRST_CUM_REG and the downward
   allocation in function_arg below). */
1636 function_arg_regno_p(int r
)
1638 return (r
>= 8 && r
<= 25);
1641 /* Initializing the variable cum for the state at the beginning
1642 of the argument list. */
/* INIT_CUMULATIVE_ARGS: resets CUM->regno to the first (highest)
   argument register.  For a prototyped non-libcall, STDARG detects a
   variadic signature (arg list not terminated by void_type_node).
   NOTE(review): the code consuming STDARG, and the initialization of
   CUM->nregs, are among the missing lines. */
1645 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
1646 tree fndecl ATTRIBUTE_UNUSED
)
1649 cum
->regno
= FIRST_CUM_REG
;
/* Library calls have no fntype to inspect. */
1650 if (!libname
&& fntype
)
1652 int stdarg
= (TYPE_ARG_TYPES (fntype
) != 0
1653 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
1654 != void_type_node
));
1660 /* Returns the number of registers to allocate for a function argument. */
/* Size comes from TYPE for BLKmode aggregates, otherwise from the
   mode itself; the result is rounded up to an even byte count so each
   argument starts in an even-numbered register. */
1663 avr_num_arg_regs (enum machine_mode mode
, tree type
)
1667 if (mode
== BLKmode
)
1668 size
= int_size_in_bytes (type
);
1670 size
= GET_MODE_SIZE (mode
);
1672 /* Align all function arguments to start in even-numbered registers.
1673 Odd-sized arguments leave holes above them. */
1675 return (size
+ 1) & ~1;
1678 /* Controls whether a function argument is passed
1679 in a register, and which register. */
/* FUNCTION_ARG: if the argument's (even-rounded) byte count still fits
   in the registers remaining in CUM, return the REG rtx it occupies —
   registers are allocated downward from CUM->regno.  Otherwise the
   argument goes on the stack (the NULL return is among the missing
   lines of this extraction). */
1682 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1683 int named ATTRIBUTE_UNUSED
)
1685 int bytes
= avr_num_arg_regs (mode
, type
);
1687 if (cum
->nregs
&& bytes
<= cum
->nregs
)
1688 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
1693 /* Update the summarizer variable CUM to advance past an argument
1694 in the argument list. */
/* FUNCTION_ARG_ADVANCE: subtract the argument's register footprint
   from both the remaining-register count and the next-register number.
   Once the registers are exhausted, CUM->regno is reset to
   FIRST_CUM_REG (the nregs clamp between these lines is missing from
   this extraction). */
1697 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1698 int named ATTRIBUTE_UNUSED
)
1700 int bytes
= avr_num_arg_regs (mode
, type
);
1702 cum
->nregs
-= bytes
;
1703 cum
->regno
-= bytes
;
1705 if (cum
->nregs
<= 0)
1708 cum
->regno
= FIRST_CUM_REG
;
1712 /***********************************************************************
1713 Functions for outputting various mov's for a various modes
1714 ************************************************************************/
1716 output_movqi (rtx insn
, rtx operands
[], int *l
)
1719 rtx dest
= operands
[0];
1720 rtx src
= operands
[1];
1728 if (register_operand (dest
, QImode
))
1730 if (register_operand (src
, QImode
)) /* mov r,r */
1732 if (test_hard_reg_class (STACK_REG
, dest
))
1733 return AS2 (out
,%0,%1);
1734 else if (test_hard_reg_class (STACK_REG
, src
))
1735 return AS2 (in
,%0,%1);
1737 return AS2 (mov
,%0,%1);
1739 else if (CONSTANT_P (src
))
1741 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1742 return AS2 (ldi
,%0,lo8(%1));
1744 if (GET_CODE (src
) == CONST_INT
)
1746 if (src
== const0_rtx
) /* mov r,L */
1747 return AS1 (clr
,%0);
1748 else if (src
== const1_rtx
)
1751 return (AS1 (clr
,%0) CR_TAB
1754 else if (src
== constm1_rtx
)
1756 /* Immediate constants -1 to any register */
1758 return (AS1 (clr
,%0) CR_TAB
1763 int bit_nr
= exact_log2 (INTVAL (src
));
1769 output_asm_insn ((AS1 (clr
,%0) CR_TAB
1772 avr_output_bld (operands
, bit_nr
);
1779 /* Last resort, larger than loading from memory. */
1781 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1782 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1783 AS2 (mov
,%0,r31
) CR_TAB
1784 AS2 (mov
,r31
,__tmp_reg__
));
1786 else if (GET_CODE (src
) == MEM
)
1787 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1789 else if (GET_CODE (dest
) == MEM
)
1793 if (src
== const0_rtx
)
1794 operands
[1] = zero_reg_rtx
;
1796 templ
= out_movqi_mr_r (insn
, operands
, real_l
);
1799 output_asm_insn (templ
, operands
);
1808 output_movhi (rtx insn
, rtx operands
[], int *l
)
1811 rtx dest
= operands
[0];
1812 rtx src
= operands
[1];
1818 if (register_operand (dest
, HImode
))
1820 if (register_operand (src
, HImode
)) /* mov r,r */
1822 if (test_hard_reg_class (STACK_REG
, dest
))
1824 if (TARGET_TINY_STACK
)
1825 return *l
= 1, AS2 (out
,__SP_L__
,%A1
);
1826 /* Use simple load of stack pointer if no interrupts are
1828 else if (TARGET_NO_INTERRUPTS
)
1829 return *l
= 2, (AS2 (out
,__SP_H__
,%B1
) CR_TAB
1830 AS2 (out
,__SP_L__
,%A1
));
1832 return (AS2 (in
,__tmp_reg__
,__SREG__
) CR_TAB
1834 AS2 (out
,__SP_H__
,%B1
) CR_TAB
1835 AS2 (out
,__SREG__
,__tmp_reg__
) CR_TAB
1836 AS2 (out
,__SP_L__
,%A1
));
1838 else if (test_hard_reg_class (STACK_REG
, src
))
1841 return (AS2 (in
,%A0
,__SP_L__
) CR_TAB
1842 AS2 (in
,%B0
,__SP_H__
));
1848 return (AS2 (movw
,%0,%1));
1853 return (AS2 (mov
,%A0
,%A1
) CR_TAB
1857 else if (CONSTANT_P (src
))
1859 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1862 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
1863 AS2 (ldi
,%B0
,hi8(%1)));
1866 if (GET_CODE (src
) == CONST_INT
)
1868 if (src
== const0_rtx
) /* mov r,L */
1871 return (AS1 (clr
,%A0
) CR_TAB
1874 else if (src
== const1_rtx
)
1877 return (AS1 (clr
,%A0
) CR_TAB
1878 AS1 (clr
,%B0
) CR_TAB
1881 else if (src
== constm1_rtx
)
1883 /* Immediate constants -1 to any register */
1885 return (AS1 (clr
,%0) CR_TAB
1886 AS1 (dec
,%A0
) CR_TAB
1891 int bit_nr
= exact_log2 (INTVAL (src
));
1897 output_asm_insn ((AS1 (clr
,%A0
) CR_TAB
1898 AS1 (clr
,%B0
) CR_TAB
1901 avr_output_bld (operands
, bit_nr
);
1907 if ((INTVAL (src
) & 0xff) == 0)
1910 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1911 AS1 (clr
,%A0
) CR_TAB
1912 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1913 AS2 (mov
,%B0
,r31
) CR_TAB
1914 AS2 (mov
,r31
,__tmp_reg__
));
1916 else if ((INTVAL (src
) & 0xff00) == 0)
1919 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1920 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1921 AS2 (mov
,%A0
,r31
) CR_TAB
1922 AS1 (clr
,%B0
) CR_TAB
1923 AS2 (mov
,r31
,__tmp_reg__
));
1927 /* Last resort, equal to loading from memory. */
1929 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1930 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1931 AS2 (mov
,%A0
,r31
) CR_TAB
1932 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1933 AS2 (mov
,%B0
,r31
) CR_TAB
1934 AS2 (mov
,r31
,__tmp_reg__
));
1936 else if (GET_CODE (src
) == MEM
)
1937 return out_movhi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1939 else if (GET_CODE (dest
) == MEM
)
1943 if (src
== const0_rtx
)
1944 operands
[1] = zero_reg_rtx
;
1946 templ
= out_movhi_mr_r (insn
, operands
, real_l
);
1949 output_asm_insn (templ
, operands
);
1954 fatal_insn ("invalid insn:", insn
);
1959 out_movqi_r_mr (rtx insn
, rtx op
[], int *l
)
1963 rtx x
= XEXP (src
, 0);
1969 if (CONSTANT_ADDRESS_P (x
))
1971 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
1974 return AS2 (in
,%0,__SREG__
);
1976 if (optimize
> 0 && io_address_operand (x
, QImode
))
1979 return AS2 (in
,%0,%1-0x20);
1982 return AS2 (lds
,%0,%1);
1984 /* memory access by reg+disp */
1985 else if (GET_CODE (x
) == PLUS
1986 && REG_P (XEXP (x
,0))
1987 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
1989 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (src
))) >= 63)
1991 int disp
= INTVAL (XEXP (x
,1));
1992 if (REGNO (XEXP (x
,0)) != REG_Y
)
1993 fatal_insn ("incorrect insn:",insn
);
1995 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1996 return *l
= 3, (AS2 (adiw
,r28
,%o1
-63) CR_TAB
1997 AS2 (ldd
,%0,Y
+63) CR_TAB
1998 AS2 (sbiw
,r28
,%o1
-63));
2000 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2001 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2002 AS2 (ld
,%0,Y
) CR_TAB
2003 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2004 AS2 (sbci
,r29
,hi8(%o1
)));
2006 else if (REGNO (XEXP (x
,0)) == REG_X
)
2008 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2009 it but I have this situation with extremal optimizing options. */
2010 if (reg_overlap_mentioned_p (dest
, XEXP (x
,0))
2011 || reg_unused_after (insn
, XEXP (x
,0)))
2012 return *l
= 2, (AS2 (adiw
,r26
,%o1
) CR_TAB
2015 return *l
= 3, (AS2 (adiw
,r26
,%o1
) CR_TAB
2016 AS2 (ld
,%0,X
) CR_TAB
2017 AS2 (sbiw
,r26
,%o1
));
2020 return AS2 (ldd
,%0,%1);
2023 return AS2 (ld
,%0,%1);
2027 out_movhi_r_mr (rtx insn
, rtx op
[], int *l
)
2031 rtx base
= XEXP (src
, 0);
2032 int reg_dest
= true_regnum (dest
);
2033 int reg_base
= true_regnum (base
);
2034 /* "volatile" forces reading low byte first, even if less efficient,
2035 for correct operation with 16-bit I/O registers. */
2036 int mem_volatile_p
= MEM_VOLATILE_P (src
);
2044 if (reg_dest
== reg_base
) /* R = (R) */
2047 return (AS2 (ld
,__tmp_reg__
,%1+) CR_TAB
2048 AS2 (ld
,%B0
,%1) CR_TAB
2049 AS2 (mov
,%A0
,__tmp_reg__
));
2051 else if (reg_base
== REG_X
) /* (R26) */
2053 if (reg_unused_after (insn
, base
))
2056 return (AS2 (ld
,%A0
,X
+) CR_TAB
2060 return (AS2 (ld
,%A0
,X
+) CR_TAB
2061 AS2 (ld
,%B0
,X
) CR_TAB
2067 return (AS2 (ld
,%A0
,%1) CR_TAB
2068 AS2 (ldd
,%B0
,%1+1));
2071 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2073 int disp
= INTVAL (XEXP (base
, 1));
2074 int reg_base
= true_regnum (XEXP (base
, 0));
2076 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2078 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2079 fatal_insn ("incorrect insn:",insn
);
2081 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2082 return *l
= 4, (AS2 (adiw
,r28
,%o1
-62) CR_TAB
2083 AS2 (ldd
,%A0
,Y
+62) CR_TAB
2084 AS2 (ldd
,%B0
,Y
+63) CR_TAB
2085 AS2 (sbiw
,r28
,%o1
-62));
2087 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2088 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2089 AS2 (ld
,%A0
,Y
) CR_TAB
2090 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2091 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2092 AS2 (sbci
,r29
,hi8(%o1
)));
2094 if (reg_base
== REG_X
)
2096 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2097 it but I have this situation with extremal
2098 optimization options. */
2101 if (reg_base
== reg_dest
)
2102 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2103 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2104 AS2 (ld
,%B0
,X
) CR_TAB
2105 AS2 (mov
,%A0
,__tmp_reg__
));
2107 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2108 AS2 (ld
,%A0
,X
+) CR_TAB
2109 AS2 (ld
,%B0
,X
) CR_TAB
2110 AS2 (sbiw
,r26
,%o1
+1));
2113 if (reg_base
== reg_dest
)
2116 return (AS2 (ldd
,__tmp_reg__
,%A1
) CR_TAB
2117 AS2 (ldd
,%B0
,%B1
) CR_TAB
2118 AS2 (mov
,%A0
,__tmp_reg__
));
2122 return (AS2 (ldd
,%A0
,%A1
) CR_TAB
2125 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2127 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2128 fatal_insn ("incorrect insn:", insn
);
2132 if (REGNO (XEXP (base
, 0)) == REG_X
)
2135 return (AS2 (sbiw
,r26
,2) CR_TAB
2136 AS2 (ld
,%A0
,X
+) CR_TAB
2137 AS2 (ld
,%B0
,X
) CR_TAB
2143 return (AS2 (sbiw
,%r1
,2) CR_TAB
2144 AS2 (ld
,%A0
,%p1
) CR_TAB
2145 AS2 (ldd
,%B0
,%p1
+1));
2150 return (AS2 (ld
,%B0
,%1) CR_TAB
2153 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2155 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2156 fatal_insn ("incorrect insn:", insn
);
2159 return (AS2 (ld
,%A0
,%1) CR_TAB
2162 else if (CONSTANT_ADDRESS_P (base
))
2164 if (optimize
> 0 && io_address_operand (base
, HImode
))
2167 return (AS2 (in
,%A0
,%A1
-0x20) CR_TAB
2168 AS2 (in
,%B0
,%B1
-0x20));
2171 return (AS2 (lds
,%A0
,%A1
) CR_TAB
2175 fatal_insn ("unknown move insn:",insn
);
2180 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
2184 rtx base
= XEXP (src
, 0);
2185 int reg_dest
= true_regnum (dest
);
2186 int reg_base
= true_regnum (base
);
2194 if (reg_base
== REG_X
) /* (R26) */
2196 if (reg_dest
== REG_X
)
2197 /* "ld r26,-X" is undefined */
2198 return *l
=7, (AS2 (adiw
,r26
,3) CR_TAB
2199 AS2 (ld
,r29
,X
) CR_TAB
2200 AS2 (ld
,r28
,-X
) CR_TAB
2201 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2202 AS2 (sbiw
,r26
,1) CR_TAB
2203 AS2 (ld
,r26
,X
) CR_TAB
2204 AS2 (mov
,r27
,__tmp_reg__
));
2205 else if (reg_dest
== REG_X
- 2)
2206 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2207 AS2 (ld
,%B0
,X
+) CR_TAB
2208 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2209 AS2 (ld
,%D0
,X
) CR_TAB
2210 AS2 (mov
,%C0
,__tmp_reg__
));
2211 else if (reg_unused_after (insn
, base
))
2212 return *l
=4, (AS2 (ld
,%A0
,X
+) CR_TAB
2213 AS2 (ld
,%B0
,X
+) CR_TAB
2214 AS2 (ld
,%C0
,X
+) CR_TAB
2217 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2218 AS2 (ld
,%B0
,X
+) CR_TAB
2219 AS2 (ld
,%C0
,X
+) CR_TAB
2220 AS2 (ld
,%D0
,X
) CR_TAB
2225 if (reg_dest
== reg_base
)
2226 return *l
=5, (AS2 (ldd
,%D0
,%1+3) CR_TAB
2227 AS2 (ldd
,%C0
,%1+2) CR_TAB
2228 AS2 (ldd
,__tmp_reg__
,%1+1) CR_TAB
2229 AS2 (ld
,%A0
,%1) CR_TAB
2230 AS2 (mov
,%B0
,__tmp_reg__
));
2231 else if (reg_base
== reg_dest
+ 2)
2232 return *l
=5, (AS2 (ld
,%A0
,%1) CR_TAB
2233 AS2 (ldd
,%B0
,%1+1) CR_TAB
2234 AS2 (ldd
,__tmp_reg__
,%1+2) CR_TAB
2235 AS2 (ldd
,%D0
,%1+3) CR_TAB
2236 AS2 (mov
,%C0
,__tmp_reg__
));
2238 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2239 AS2 (ldd
,%B0
,%1+1) CR_TAB
2240 AS2 (ldd
,%C0
,%1+2) CR_TAB
2241 AS2 (ldd
,%D0
,%1+3));
2244 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2246 int disp
= INTVAL (XEXP (base
, 1));
2248 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2250 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2251 fatal_insn ("incorrect insn:",insn
);
2253 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2254 return *l
= 6, (AS2 (adiw
,r28
,%o1
-60) CR_TAB
2255 AS2 (ldd
,%A0
,Y
+60) CR_TAB
2256 AS2 (ldd
,%B0
,Y
+61) CR_TAB
2257 AS2 (ldd
,%C0
,Y
+62) CR_TAB
2258 AS2 (ldd
,%D0
,Y
+63) CR_TAB
2259 AS2 (sbiw
,r28
,%o1
-60));
2261 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2262 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2263 AS2 (ld
,%A0
,Y
) CR_TAB
2264 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2265 AS2 (ldd
,%C0
,Y
+2) CR_TAB
2266 AS2 (ldd
,%D0
,Y
+3) CR_TAB
2267 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2268 AS2 (sbci
,r29
,hi8(%o1
)));
2271 reg_base
= true_regnum (XEXP (base
, 0));
2272 if (reg_base
== REG_X
)
2275 if (reg_dest
== REG_X
)
2278 /* "ld r26,-X" is undefined */
2279 return (AS2 (adiw
,r26
,%o1
+3) CR_TAB
2280 AS2 (ld
,r29
,X
) CR_TAB
2281 AS2 (ld
,r28
,-X
) CR_TAB
2282 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2283 AS2 (sbiw
,r26
,1) CR_TAB
2284 AS2 (ld
,r26
,X
) CR_TAB
2285 AS2 (mov
,r27
,__tmp_reg__
));
2288 if (reg_dest
== REG_X
- 2)
2289 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2290 AS2 (ld
,r24
,X
+) CR_TAB
2291 AS2 (ld
,r25
,X
+) CR_TAB
2292 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2293 AS2 (ld
,r27
,X
) CR_TAB
2294 AS2 (mov
,r26
,__tmp_reg__
));
2296 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2297 AS2 (ld
,%A0
,X
+) CR_TAB
2298 AS2 (ld
,%B0
,X
+) CR_TAB
2299 AS2 (ld
,%C0
,X
+) CR_TAB
2300 AS2 (ld
,%D0
,X
) CR_TAB
2301 AS2 (sbiw
,r26
,%o1
+3));
2303 if (reg_dest
== reg_base
)
2304 return *l
=5, (AS2 (ldd
,%D0
,%D1
) CR_TAB
2305 AS2 (ldd
,%C0
,%C1
) CR_TAB
2306 AS2 (ldd
,__tmp_reg__
,%B1
) CR_TAB
2307 AS2 (ldd
,%A0
,%A1
) CR_TAB
2308 AS2 (mov
,%B0
,__tmp_reg__
));
2309 else if (reg_dest
== reg_base
- 2)
2310 return *l
=5, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2311 AS2 (ldd
,%B0
,%B1
) CR_TAB
2312 AS2 (ldd
,__tmp_reg__
,%C1
) CR_TAB
2313 AS2 (ldd
,%D0
,%D1
) CR_TAB
2314 AS2 (mov
,%C0
,__tmp_reg__
));
2315 return *l
=4, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2316 AS2 (ldd
,%B0
,%B1
) CR_TAB
2317 AS2 (ldd
,%C0
,%C1
) CR_TAB
2320 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2321 return *l
=4, (AS2 (ld
,%D0
,%1) CR_TAB
2322 AS2 (ld
,%C0
,%1) CR_TAB
2323 AS2 (ld
,%B0
,%1) CR_TAB
2325 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2326 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2327 AS2 (ld
,%B0
,%1) CR_TAB
2328 AS2 (ld
,%C0
,%1) CR_TAB
2330 else if (CONSTANT_ADDRESS_P (base
))
2331 return *l
=8, (AS2 (lds
,%A0
,%A1
) CR_TAB
2332 AS2 (lds
,%B0
,%B1
) CR_TAB
2333 AS2 (lds
,%C0
,%C1
) CR_TAB
2336 fatal_insn ("unknown move insn:",insn
);
2341 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
2345 rtx base
= XEXP (dest
, 0);
2346 int reg_base
= true_regnum (base
);
2347 int reg_src
= true_regnum (src
);
2353 if (CONSTANT_ADDRESS_P (base
))
2354 return *l
=8,(AS2 (sts
,%A0
,%A1
) CR_TAB
2355 AS2 (sts
,%B0
,%B1
) CR_TAB
2356 AS2 (sts
,%C0
,%C1
) CR_TAB
2358 if (reg_base
> 0) /* (r) */
2360 if (reg_base
== REG_X
) /* (R26) */
2362 if (reg_src
== REG_X
)
2364 /* "st X+,r26" is undefined */
2365 if (reg_unused_after (insn
, base
))
2366 return *l
=6, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2367 AS2 (st
,X
,r26
) CR_TAB
2368 AS2 (adiw
,r26
,1) CR_TAB
2369 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2370 AS2 (st
,X
+,r28
) CR_TAB
2373 return *l
=7, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2374 AS2 (st
,X
,r26
) CR_TAB
2375 AS2 (adiw
,r26
,1) CR_TAB
2376 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2377 AS2 (st
,X
+,r28
) CR_TAB
2378 AS2 (st
,X
,r29
) CR_TAB
2381 else if (reg_base
== reg_src
+ 2)
2383 if (reg_unused_after (insn
, base
))
2384 return *l
=7, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2385 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2386 AS2 (st
,%0+,%A1
) CR_TAB
2387 AS2 (st
,%0+,%B1
) CR_TAB
2388 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2389 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2390 AS1 (clr
,__zero_reg__
));
2392 return *l
=8, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2393 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2394 AS2 (st
,%0+,%A1
) CR_TAB
2395 AS2 (st
,%0+,%B1
) CR_TAB
2396 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2397 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2398 AS1 (clr
,__zero_reg__
) CR_TAB
2401 return *l
=5, (AS2 (st
,%0+,%A1
) CR_TAB
2402 AS2 (st
,%0+,%B1
) CR_TAB
2403 AS2 (st
,%0+,%C1
) CR_TAB
2404 AS2 (st
,%0,%D1
) CR_TAB
2408 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2409 AS2 (std
,%0+1,%B1
) CR_TAB
2410 AS2 (std
,%0+2,%C1
) CR_TAB
2411 AS2 (std
,%0+3,%D1
));
2413 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2415 int disp
= INTVAL (XEXP (base
, 1));
2416 reg_base
= REGNO (XEXP (base
, 0));
2417 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2419 if (reg_base
!= REG_Y
)
2420 fatal_insn ("incorrect insn:",insn
);
2422 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2423 return *l
= 6, (AS2 (adiw
,r28
,%o0
-60) CR_TAB
2424 AS2 (std
,Y
+60,%A1
) CR_TAB
2425 AS2 (std
,Y
+61,%B1
) CR_TAB
2426 AS2 (std
,Y
+62,%C1
) CR_TAB
2427 AS2 (std
,Y
+63,%D1
) CR_TAB
2428 AS2 (sbiw
,r28
,%o0
-60));
2430 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2431 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2432 AS2 (st
,Y
,%A1
) CR_TAB
2433 AS2 (std
,Y
+1,%B1
) CR_TAB
2434 AS2 (std
,Y
+2,%C1
) CR_TAB
2435 AS2 (std
,Y
+3,%D1
) CR_TAB
2436 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2437 AS2 (sbci
,r29
,hi8(%o0
)));
2439 if (reg_base
== REG_X
)
2442 if (reg_src
== REG_X
)
2445 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2446 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2447 AS2 (adiw
,r26
,%o0
) CR_TAB
2448 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2449 AS2 (st
,X
+,__zero_reg__
) CR_TAB
2450 AS2 (st
,X
+,r28
) CR_TAB
2451 AS2 (st
,X
,r29
) CR_TAB
2452 AS1 (clr
,__zero_reg__
) CR_TAB
2453 AS2 (sbiw
,r26
,%o0
+3));
2455 else if (reg_src
== REG_X
- 2)
2458 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2459 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2460 AS2 (adiw
,r26
,%o0
) CR_TAB
2461 AS2 (st
,X
+,r24
) CR_TAB
2462 AS2 (st
,X
+,r25
) CR_TAB
2463 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2464 AS2 (st
,X
,__zero_reg__
) CR_TAB
2465 AS1 (clr
,__zero_reg__
) CR_TAB
2466 AS2 (sbiw
,r26
,%o0
+3));
2469 return (AS2 (adiw
,r26
,%o0
) CR_TAB
2470 AS2 (st
,X
+,%A1
) CR_TAB
2471 AS2 (st
,X
+,%B1
) CR_TAB
2472 AS2 (st
,X
+,%C1
) CR_TAB
2473 AS2 (st
,X
,%D1
) CR_TAB
2474 AS2 (sbiw
,r26
,%o0
+3));
2476 return *l
=4, (AS2 (std
,%A0
,%A1
) CR_TAB
2477 AS2 (std
,%B0
,%B1
) CR_TAB
2478 AS2 (std
,%C0
,%C1
) CR_TAB
2481 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2482 return *l
=4, (AS2 (st
,%0,%D1
) CR_TAB
2483 AS2 (st
,%0,%C1
) CR_TAB
2484 AS2 (st
,%0,%B1
) CR_TAB
2486 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2487 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2488 AS2 (st
,%0,%B1
) CR_TAB
2489 AS2 (st
,%0,%C1
) CR_TAB
2491 fatal_insn ("unknown move insn:",insn
);
2496 output_movsisf(rtx insn
, rtx operands
[], int *l
)
2499 rtx dest
= operands
[0];
2500 rtx src
= operands
[1];
2506 if (register_operand (dest
, VOIDmode
))
2508 if (register_operand (src
, VOIDmode
)) /* mov r,r */
2510 if (true_regnum (dest
) > true_regnum (src
))
2515 return (AS2 (movw
,%C0
,%C1
) CR_TAB
2516 AS2 (movw
,%A0
,%A1
));
2519 return (AS2 (mov
,%D0
,%D1
) CR_TAB
2520 AS2 (mov
,%C0
,%C1
) CR_TAB
2521 AS2 (mov
,%B0
,%B1
) CR_TAB
2529 return (AS2 (movw
,%A0
,%A1
) CR_TAB
2530 AS2 (movw
,%C0
,%C1
));
2533 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2534 AS2 (mov
,%B0
,%B1
) CR_TAB
2535 AS2 (mov
,%C0
,%C1
) CR_TAB
2539 else if (CONSTANT_P (src
))
2541 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2544 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2545 AS2 (ldi
,%B0
,hi8(%1)) CR_TAB
2546 AS2 (ldi
,%C0
,hlo8(%1)) CR_TAB
2547 AS2 (ldi
,%D0
,hhi8(%1)));
2550 if (GET_CODE (src
) == CONST_INT
)
2552 const char *const clr_op0
=
2553 AVR_HAVE_MOVW
? (AS1 (clr
,%A0
) CR_TAB
2554 AS1 (clr
,%B0
) CR_TAB
2556 : (AS1 (clr
,%A0
) CR_TAB
2557 AS1 (clr
,%B0
) CR_TAB
2558 AS1 (clr
,%C0
) CR_TAB
2561 if (src
== const0_rtx
) /* mov r,L */
2563 *l
= AVR_HAVE_MOVW
? 3 : 4;
2566 else if (src
== const1_rtx
)
2569 output_asm_insn (clr_op0
, operands
);
2570 *l
= AVR_HAVE_MOVW
? 4 : 5;
2571 return AS1 (inc
,%A0
);
2573 else if (src
== constm1_rtx
)
2575 /* Immediate constants -1 to any register */
2579 return (AS1 (clr
,%A0
) CR_TAB
2580 AS1 (dec
,%A0
) CR_TAB
2581 AS2 (mov
,%B0
,%A0
) CR_TAB
2582 AS2 (movw
,%C0
,%A0
));
2585 return (AS1 (clr
,%A0
) CR_TAB
2586 AS1 (dec
,%A0
) CR_TAB
2587 AS2 (mov
,%B0
,%A0
) CR_TAB
2588 AS2 (mov
,%C0
,%A0
) CR_TAB
2593 int bit_nr
= exact_log2 (INTVAL (src
));
2597 *l
= AVR_HAVE_MOVW
? 5 : 6;
2600 output_asm_insn (clr_op0
, operands
);
2601 output_asm_insn ("set", operands
);
2604 avr_output_bld (operands
, bit_nr
);
2611 /* Last resort, better than loading from memory. */
2613 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2614 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2615 AS2 (mov
,%A0
,r31
) CR_TAB
2616 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2617 AS2 (mov
,%B0
,r31
) CR_TAB
2618 AS2 (ldi
,r31
,hlo8(%1)) CR_TAB
2619 AS2 (mov
,%C0
,r31
) CR_TAB
2620 AS2 (ldi
,r31
,hhi8(%1)) CR_TAB
2621 AS2 (mov
,%D0
,r31
) CR_TAB
2622 AS2 (mov
,r31
,__tmp_reg__
));
2624 else if (GET_CODE (src
) == MEM
)
2625 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2627 else if (GET_CODE (dest
) == MEM
)
2631 if (src
== const0_rtx
)
2632 operands
[1] = zero_reg_rtx
;
2634 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
2637 output_asm_insn (templ
, operands
);
2642 fatal_insn ("invalid insn:", insn
);
2647 out_movqi_mr_r (rtx insn
, rtx op
[], int *l
)
2651 rtx x
= XEXP (dest
, 0);
2657 if (CONSTANT_ADDRESS_P (x
))
2659 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2662 return AS2 (out
,__SREG__
,%1);
2664 if (optimize
> 0 && io_address_operand (x
, QImode
))
2667 return AS2 (out
,%0-0x20,%1);
2670 return AS2 (sts
,%0,%1);
2672 /* memory access by reg+disp */
2673 else if (GET_CODE (x
) == PLUS
2674 && REG_P (XEXP (x
,0))
2675 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2677 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (dest
))) >= 63)
2679 int disp
= INTVAL (XEXP (x
,1));
2680 if (REGNO (XEXP (x
,0)) != REG_Y
)
2681 fatal_insn ("incorrect insn:",insn
);
2683 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2684 return *l
= 3, (AS2 (adiw
,r28
,%o0
-63) CR_TAB
2685 AS2 (std
,Y
+63,%1) CR_TAB
2686 AS2 (sbiw
,r28
,%o0
-63));
2688 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2689 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2690 AS2 (st
,Y
,%1) CR_TAB
2691 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2692 AS2 (sbci
,r29
,hi8(%o0
)));
2694 else if (REGNO (XEXP (x
,0)) == REG_X
)
2696 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
2698 if (reg_unused_after (insn
, XEXP (x
,0)))
2699 return *l
= 3, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2700 AS2 (adiw
,r26
,%o0
) CR_TAB
2701 AS2 (st
,X
,__tmp_reg__
));
2703 return *l
= 4, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2704 AS2 (adiw
,r26
,%o0
) CR_TAB
2705 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2706 AS2 (sbiw
,r26
,%o0
));
2710 if (reg_unused_after (insn
, XEXP (x
,0)))
2711 return *l
= 2, (AS2 (adiw
,r26
,%o0
) CR_TAB
2714 return *l
= 3, (AS2 (adiw
,r26
,%o0
) CR_TAB
2715 AS2 (st
,X
,%1) CR_TAB
2716 AS2 (sbiw
,r26
,%o0
));
2720 return AS2 (std
,%0,%1);
2723 return AS2 (st
,%0,%1);
2727 out_movhi_mr_r (rtx insn
, rtx op
[], int *l
)
2731 rtx base
= XEXP (dest
, 0);
2732 int reg_base
= true_regnum (base
);
2733 int reg_src
= true_regnum (src
);
2734 /* "volatile" forces writing high byte first, even if less efficient,
2735 for correct operation with 16-bit I/O registers. */
2736 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
2741 if (CONSTANT_ADDRESS_P (base
))
2743 if (optimize
> 0 && io_address_operand (base
, HImode
))
2746 return (AS2 (out
,%B0
-0x20,%B1
) CR_TAB
2747 AS2 (out
,%A0
-0x20,%A1
));
2749 return *l
= 4, (AS2 (sts
,%B0
,%B1
) CR_TAB
2754 if (reg_base
== REG_X
)
2756 if (reg_src
== REG_X
)
2758 /* "st X+,r26" and "st -X,r26" are undefined. */
2759 if (!mem_volatile_p
&& reg_unused_after (insn
, src
))
2760 return *l
=4, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2761 AS2 (st
,X
,r26
) CR_TAB
2762 AS2 (adiw
,r26
,1) CR_TAB
2763 AS2 (st
,X
,__tmp_reg__
));
2765 return *l
=5, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2766 AS2 (adiw
,r26
,1) CR_TAB
2767 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2768 AS2 (sbiw
,r26
,1) CR_TAB
2773 if (!mem_volatile_p
&& reg_unused_after (insn
, base
))
2774 return *l
=2, (AS2 (st
,X
+,%A1
) CR_TAB
2777 return *l
=3, (AS2 (adiw
,r26
,1) CR_TAB
2778 AS2 (st
,X
,%B1
) CR_TAB
2783 return *l
=2, (AS2 (std
,%0+1,%B1
) CR_TAB
2786 else if (GET_CODE (base
) == PLUS
)
2788 int disp
= INTVAL (XEXP (base
, 1));
2789 reg_base
= REGNO (XEXP (base
, 0));
2790 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2792 if (reg_base
!= REG_Y
)
2793 fatal_insn ("incorrect insn:",insn
);
2795 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2796 return *l
= 4, (AS2 (adiw
,r28
,%o0
-62) CR_TAB
2797 AS2 (std
,Y
+63,%B1
) CR_TAB
2798 AS2 (std
,Y
+62,%A1
) CR_TAB
2799 AS2 (sbiw
,r28
,%o0
-62));
2801 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2802 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2803 AS2 (std
,Y
+1,%B1
) CR_TAB
2804 AS2 (st
,Y
,%A1
) CR_TAB
2805 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2806 AS2 (sbci
,r29
,hi8(%o0
)));
2808 if (reg_base
== REG_X
)
2811 if (reg_src
== REG_X
)
2814 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2815 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2816 AS2 (adiw
,r26
,%o0
+1) CR_TAB
2817 AS2 (st
,X
,__zero_reg__
) CR_TAB
2818 AS2 (st
,-X
,__tmp_reg__
) CR_TAB
2819 AS1 (clr
,__zero_reg__
) CR_TAB
2820 AS2 (sbiw
,r26
,%o0
));
2823 return (AS2 (adiw
,r26
,%o0
+1) CR_TAB
2824 AS2 (st
,X
,%B1
) CR_TAB
2825 AS2 (st
,-X
,%A1
) CR_TAB
2826 AS2 (sbiw
,r26
,%o0
));
2828 return *l
=2, (AS2 (std
,%B0
,%B1
) CR_TAB
2831 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2832 return *l
=2, (AS2 (st
,%0,%B1
) CR_TAB
2834 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2838 if (REGNO (XEXP (base
, 0)) == REG_X
)
2841 return (AS2 (adiw
,r26
,1) CR_TAB
2842 AS2 (st
,X
,%B1
) CR_TAB
2843 AS2 (st
,-X
,%A1
) CR_TAB
2849 return (AS2 (std
,%p0
+1,%B1
) CR_TAB
2850 AS2 (st
,%p0
,%A1
) CR_TAB
2856 return (AS2 (st
,%0,%A1
) CR_TAB
2859 fatal_insn ("unknown move insn:",insn
);
2863 /* Return 1 if frame pointer for current function required. */
2866 avr_frame_pointer_required_p (void)
2868 return (cfun
->calls_alloca
2869 || crtl
->args
.info
.nregs
== 0
2870 || get_frame_size () > 0);
2873 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2876 compare_condition (rtx insn
)
2878 rtx next
= next_real_insn (insn
);
2879 RTX_CODE cond
= UNKNOWN
;
2880 if (next
&& GET_CODE (next
) == JUMP_INSN
)
2882 rtx pat
= PATTERN (next
);
2883 rtx src
= SET_SRC (pat
);
2884 rtx t
= XEXP (src
, 0);
2885 cond
= GET_CODE (t
);
2890 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2893 compare_sign_p (rtx insn
)
2895 RTX_CODE cond
= compare_condition (insn
);
2896 return (cond
== GE
|| cond
== LT
);
2899 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2900 that needs to be swapped (GT, GTU, LE, LEU). */
2903 compare_diff_p (rtx insn
)
2905 RTX_CODE cond
= compare_condition (insn
);
2906 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
2909 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2912 compare_eq_p (rtx insn
)
2914 RTX_CODE cond
= compare_condition (insn
);
2915 return (cond
== EQ
|| cond
== NE
);
2919 /* Output test instruction for HImode. */
2922 out_tsthi (rtx insn
, rtx op
, int *l
)
2924 if (compare_sign_p (insn
))
2927 return AS1 (tst
,%B0
);
2929 if (reg_unused_after (insn
, op
)
2930 && compare_eq_p (insn
))
2932 /* Faster than sbiw if we can clobber the operand. */
2934 return AS2 (or,%A0
,%B0
);
2936 if (test_hard_reg_class (ADDW_REGS
, op
))
2939 return AS2 (sbiw
,%0,0);
2942 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2943 AS2 (cpc
,%B0
,__zero_reg__
));
2947 /* Output test instruction for SImode. */
2950 out_tstsi (rtx insn
, rtx op
, int *l
)
2952 if (compare_sign_p (insn
))
2955 return AS1 (tst
,%D0
);
2957 if (test_hard_reg_class (ADDW_REGS
, op
))
2960 return (AS2 (sbiw
,%A0
,0) CR_TAB
2961 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2962 AS2 (cpc
,%D0
,__zero_reg__
));
2965 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2966 AS2 (cpc
,%B0
,__zero_reg__
) CR_TAB
2967 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2968 AS2 (cpc
,%D0
,__zero_reg__
));
2972 /* Generate asm equivalent for various shifts.
2973 Shift count is a CONST_INT, MEM or REG.
2974 This only handles cases that are not already
2975 carefully hand-optimized in ?sh??i3_out. */
2978 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
2979 int *len
, int t_len
)
2983 int second_label
= 1;
2984 int saved_in_tmp
= 0;
2985 int use_zero_reg
= 0;
2987 op
[0] = operands
[0];
2988 op
[1] = operands
[1];
2989 op
[2] = operands
[2];
2990 op
[3] = operands
[3];
2996 if (GET_CODE (operands
[2]) == CONST_INT
)
2998 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
2999 int count
= INTVAL (operands
[2]);
3000 int max_len
= 10; /* If larger than this, always use a loop. */
3009 if (count
< 8 && !scratch
)
3013 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
3015 if (t_len
* count
<= max_len
)
3017 /* Output shifts inline with no loop - faster. */
3019 *len
= t_len
* count
;
3023 output_asm_insn (templ
, op
);
3032 strcat (str
, AS2 (ldi
,%3,%2));
3034 else if (use_zero_reg
)
3036 /* Hack to save one word: use __zero_reg__ as loop counter.
3037 Set one bit, then shift in a loop until it is 0 again. */
3039 op
[3] = zero_reg_rtx
;
3043 strcat (str
, ("set" CR_TAB
3044 AS2 (bld
,%3,%2-1)));
3048 /* No scratch register available, use one from LD_REGS (saved in
3049 __tmp_reg__) that doesn't overlap with registers to shift. */
3051 op
[3] = gen_rtx_REG (QImode
,
3052 ((true_regnum (operands
[0]) - 1) & 15) + 16);
3053 op
[4] = tmp_reg_rtx
;
3057 *len
= 3; /* Includes "mov %3,%4" after the loop. */
3059 strcat (str
, (AS2 (mov
,%4,%3) CR_TAB
3065 else if (GET_CODE (operands
[2]) == MEM
)
3069 op
[3] = op_mov
[0] = tmp_reg_rtx
;
3073 out_movqi_r_mr (insn
, op_mov
, len
);
3075 output_asm_insn (out_movqi_r_mr (insn
, op_mov
, NULL
), op_mov
);
3077 else if (register_operand (operands
[2], QImode
))
3079 if (reg_unused_after (insn
, operands
[2]))
3083 op
[3] = tmp_reg_rtx
;
3085 strcat (str
, (AS2 (mov
,%3,%2) CR_TAB
));
3089 fatal_insn ("bad shift insn:", insn
);
3096 strcat (str
, AS1 (rjmp
,2f
));
3100 *len
+= t_len
+ 2; /* template + dec + brXX */
3103 strcat (str
, "\n1:\t");
3104 strcat (str
, templ
);
3105 strcat (str
, second_label
? "\n2:\t" : "\n\t");
3106 strcat (str
, use_zero_reg
? AS1 (lsr
,%3) : AS1 (dec
,%3));
3107 strcat (str
, CR_TAB
);
3108 strcat (str
, second_label
? AS1 (brpl
,1b
) : AS1 (brne
,1b
));
3110 strcat (str
, (CR_TAB
AS2 (mov
,%3,%4)));
3111 output_asm_insn (str
, op
);
3116 /* 8bit shift left ((char)x << i) */
3119 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
3121 if (GET_CODE (operands
[2]) == CONST_INT
)
3128 switch (INTVAL (operands
[2]))
3131 if (INTVAL (operands
[2]) < 8)
3135 return AS1 (clr
,%0);
3139 return AS1 (lsl
,%0);
3143 return (AS1 (lsl
,%0) CR_TAB
3148 return (AS1 (lsl
,%0) CR_TAB
3153 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3156 return (AS1 (swap
,%0) CR_TAB
3157 AS2 (andi
,%0,0xf0));
3160 return (AS1 (lsl
,%0) CR_TAB
3166 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3169 return (AS1 (swap
,%0) CR_TAB
3171 AS2 (andi
,%0,0xe0));
3174 return (AS1 (lsl
,%0) CR_TAB
3181 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3184 return (AS1 (swap
,%0) CR_TAB
3187 AS2 (andi
,%0,0xc0));
3190 return (AS1 (lsl
,%0) CR_TAB
3199 return (AS1 (ror
,%0) CR_TAB
3204 else if (CONSTANT_P (operands
[2]))
3205 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3207 out_shift_with_cnt (AS1 (lsl
,%0),
3208 insn
, operands
, len
, 1);
3213 /* 16bit shift left ((short)x << i) */
3216 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
3218 if (GET_CODE (operands
[2]) == CONST_INT
)
3220 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3221 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3228 switch (INTVAL (operands
[2]))
3231 if (INTVAL (operands
[2]) < 16)
3235 return (AS1 (clr
,%B0
) CR_TAB
3239 if (optimize_size
&& scratch
)
3244 return (AS1 (swap
,%A0
) CR_TAB
3245 AS1 (swap
,%B0
) CR_TAB
3246 AS2 (andi
,%B0
,0xf0) CR_TAB
3247 AS2 (eor
,%B0
,%A0
) CR_TAB
3248 AS2 (andi
,%A0
,0xf0) CR_TAB
3254 return (AS1 (swap
,%A0
) CR_TAB
3255 AS1 (swap
,%B0
) CR_TAB
3256 AS2 (ldi
,%3,0xf0) CR_TAB
3257 AS2 (and,%B0
,%3) CR_TAB
3258 AS2 (eor
,%B0
,%A0
) CR_TAB
3259 AS2 (and,%A0
,%3) CR_TAB
3262 break; /* optimize_size ? 6 : 8 */
3266 break; /* scratch ? 5 : 6 */
3270 return (AS1 (lsl
,%A0
) CR_TAB
3271 AS1 (rol
,%B0
) CR_TAB
3272 AS1 (swap
,%A0
) CR_TAB
3273 AS1 (swap
,%B0
) CR_TAB
3274 AS2 (andi
,%B0
,0xf0) CR_TAB
3275 AS2 (eor
,%B0
,%A0
) CR_TAB
3276 AS2 (andi
,%A0
,0xf0) CR_TAB
3282 return (AS1 (lsl
,%A0
) CR_TAB
3283 AS1 (rol
,%B0
) CR_TAB
3284 AS1 (swap
,%A0
) CR_TAB
3285 AS1 (swap
,%B0
) CR_TAB
3286 AS2 (ldi
,%3,0xf0) CR_TAB
3287 AS2 (and,%B0
,%3) CR_TAB
3288 AS2 (eor
,%B0
,%A0
) CR_TAB
3289 AS2 (and,%A0
,%3) CR_TAB
3296 break; /* scratch ? 5 : 6 */
3298 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3299 AS1 (lsr
,%B0
) CR_TAB
3300 AS1 (ror
,%A0
) CR_TAB
3301 AS1 (ror
,__tmp_reg__
) CR_TAB
3302 AS1 (lsr
,%B0
) CR_TAB
3303 AS1 (ror
,%A0
) CR_TAB
3304 AS1 (ror
,__tmp_reg__
) CR_TAB
3305 AS2 (mov
,%B0
,%A0
) CR_TAB
3306 AS2 (mov
,%A0
,__tmp_reg__
));
3310 return (AS1 (lsr
,%B0
) CR_TAB
3311 AS2 (mov
,%B0
,%A0
) CR_TAB
3312 AS1 (clr
,%A0
) CR_TAB
3313 AS1 (ror
,%B0
) CR_TAB
3317 return *len
= 2, (AS2 (mov
,%B0
,%A1
) CR_TAB
3322 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3323 AS1 (clr
,%A0
) CR_TAB
3328 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3329 AS1 (clr
,%A0
) CR_TAB
3330 AS1 (lsl
,%B0
) CR_TAB
3335 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3336 AS1 (clr
,%A0
) CR_TAB
3337 AS1 (lsl
,%B0
) CR_TAB
3338 AS1 (lsl
,%B0
) CR_TAB
3345 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3346 AS1 (clr
,%A0
) CR_TAB
3347 AS1 (swap
,%B0
) CR_TAB
3348 AS2 (andi
,%B0
,0xf0));
3353 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3354 AS1 (clr
,%A0
) CR_TAB
3355 AS1 (swap
,%B0
) CR_TAB
3356 AS2 (ldi
,%3,0xf0) CR_TAB
3360 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3361 AS1 (clr
,%A0
) CR_TAB
3362 AS1 (lsl
,%B0
) CR_TAB
3363 AS1 (lsl
,%B0
) CR_TAB
3364 AS1 (lsl
,%B0
) CR_TAB
3371 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3372 AS1 (clr
,%A0
) CR_TAB
3373 AS1 (swap
,%B0
) CR_TAB
3374 AS1 (lsl
,%B0
) CR_TAB
3375 AS2 (andi
,%B0
,0xe0));
3377 if (AVR_HAVE_MUL
&& scratch
)
3380 return (AS2 (ldi
,%3,0x20) CR_TAB
3381 AS2 (mul
,%A0
,%3) CR_TAB
3382 AS2 (mov
,%B0
,r0
) CR_TAB
3383 AS1 (clr
,%A0
) CR_TAB
3384 AS1 (clr
,__zero_reg__
));
3386 if (optimize_size
&& scratch
)
3391 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3392 AS1 (clr
,%A0
) CR_TAB
3393 AS1 (swap
,%B0
) CR_TAB
3394 AS1 (lsl
,%B0
) CR_TAB
3395 AS2 (ldi
,%3,0xe0) CR_TAB
3401 return ("set" CR_TAB
3402 AS2 (bld
,r1
,5) CR_TAB
3403 AS2 (mul
,%A0
,r1
) CR_TAB
3404 AS2 (mov
,%B0
,r0
) CR_TAB
3405 AS1 (clr
,%A0
) CR_TAB
3406 AS1 (clr
,__zero_reg__
));
3409 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3410 AS1 (clr
,%A0
) CR_TAB
3411 AS1 (lsl
,%B0
) CR_TAB
3412 AS1 (lsl
,%B0
) CR_TAB
3413 AS1 (lsl
,%B0
) CR_TAB
3414 AS1 (lsl
,%B0
) CR_TAB
3418 if (AVR_HAVE_MUL
&& ldi_ok
)
3421 return (AS2 (ldi
,%B0
,0x40) CR_TAB
3422 AS2 (mul
,%A0
,%B0
) CR_TAB
3423 AS2 (mov
,%B0
,r0
) CR_TAB
3424 AS1 (clr
,%A0
) CR_TAB
3425 AS1 (clr
,__zero_reg__
));
3427 if (AVR_HAVE_MUL
&& scratch
)
3430 return (AS2 (ldi
,%3,0x40) CR_TAB
3431 AS2 (mul
,%A0
,%3) CR_TAB
3432 AS2 (mov
,%B0
,r0
) CR_TAB
3433 AS1 (clr
,%A0
) CR_TAB
3434 AS1 (clr
,__zero_reg__
));
3436 if (optimize_size
&& ldi_ok
)
3439 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3440 AS2 (ldi
,%A0
,6) "\n1:\t"
3441 AS1 (lsl
,%B0
) CR_TAB
3442 AS1 (dec
,%A0
) CR_TAB
3445 if (optimize_size
&& scratch
)
3448 return (AS1 (clr
,%B0
) CR_TAB
3449 AS1 (lsr
,%A0
) CR_TAB
3450 AS1 (ror
,%B0
) CR_TAB
3451 AS1 (lsr
,%A0
) CR_TAB
3452 AS1 (ror
,%B0
) CR_TAB
3457 return (AS1 (clr
,%B0
) CR_TAB
3458 AS1 (lsr
,%A0
) CR_TAB
3459 AS1 (ror
,%B0
) CR_TAB
3464 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3466 insn
, operands
, len
, 2);
3471 /* 32bit shift left ((long)x << i) */
3474 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
3476 if (GET_CODE (operands
[2]) == CONST_INT
)
3484 switch (INTVAL (operands
[2]))
3487 if (INTVAL (operands
[2]) < 32)
3491 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
3492 AS1 (clr
,%C0
) CR_TAB
3493 AS2 (movw
,%A0
,%C0
));
3495 return (AS1 (clr
,%D0
) CR_TAB
3496 AS1 (clr
,%C0
) CR_TAB
3497 AS1 (clr
,%B0
) CR_TAB
3502 int reg0
= true_regnum (operands
[0]);
3503 int reg1
= true_regnum (operands
[1]);
3506 return (AS2 (mov
,%D0
,%C1
) CR_TAB
3507 AS2 (mov
,%C0
,%B1
) CR_TAB
3508 AS2 (mov
,%B0
,%A1
) CR_TAB
3511 return (AS1 (clr
,%A0
) CR_TAB
3512 AS2 (mov
,%B0
,%A1
) CR_TAB
3513 AS2 (mov
,%C0
,%B1
) CR_TAB
3519 int reg0
= true_regnum (operands
[0]);
3520 int reg1
= true_regnum (operands
[1]);
3521 if (reg0
+ 2 == reg1
)
3522 return *len
= 2, (AS1 (clr
,%B0
) CR_TAB
3525 return *len
= 3, (AS2 (movw
,%C0
,%A1
) CR_TAB
3526 AS1 (clr
,%B0
) CR_TAB
3529 return *len
= 4, (AS2 (mov
,%C0
,%A1
) CR_TAB
3530 AS2 (mov
,%D0
,%B1
) CR_TAB
3531 AS1 (clr
,%B0
) CR_TAB
3537 return (AS2 (mov
,%D0
,%A1
) CR_TAB
3538 AS1 (clr
,%C0
) CR_TAB
3539 AS1 (clr
,%B0
) CR_TAB
3544 return (AS1 (clr
,%D0
) CR_TAB
3545 AS1 (lsr
,%A0
) CR_TAB
3546 AS1 (ror
,%D0
) CR_TAB
3547 AS1 (clr
,%C0
) CR_TAB
3548 AS1 (clr
,%B0
) CR_TAB
3553 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3554 AS1 (rol
,%B0
) CR_TAB
3555 AS1 (rol
,%C0
) CR_TAB
3557 insn
, operands
, len
, 4);
3561 /* 8bit arithmetic shift right ((signed char)x >> i) */
3564 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
3566 if (GET_CODE (operands
[2]) == CONST_INT
)
3573 switch (INTVAL (operands
[2]))
3577 return AS1 (asr
,%0);
3581 return (AS1 (asr
,%0) CR_TAB
3586 return (AS1 (asr
,%0) CR_TAB
3592 return (AS1 (asr
,%0) CR_TAB
3599 return (AS1 (asr
,%0) CR_TAB
3607 return (AS2 (bst
,%0,6) CR_TAB
3609 AS2 (sbc
,%0,%0) CR_TAB
3613 if (INTVAL (operands
[2]) < 8)
3620 return (AS1 (lsl
,%0) CR_TAB
3624 else if (CONSTANT_P (operands
[2]))
3625 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3627 out_shift_with_cnt (AS1 (asr
,%0),
3628 insn
, operands
, len
, 1);
3633 /* 16bit arithmetic shift right ((signed short)x >> i) */
3636 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
3638 if (GET_CODE (operands
[2]) == CONST_INT
)
3640 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3641 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3648 switch (INTVAL (operands
[2]))
3652 /* XXX try to optimize this too? */
3657 break; /* scratch ? 5 : 6 */
3659 return (AS2 (mov
,__tmp_reg__
,%A0
) CR_TAB
3660 AS2 (mov
,%A0
,%B0
) CR_TAB
3661 AS1 (lsl
,__tmp_reg__
) CR_TAB
3662 AS1 (rol
,%A0
) CR_TAB
3663 AS2 (sbc
,%B0
,%B0
) CR_TAB
3664 AS1 (lsl
,__tmp_reg__
) CR_TAB
3665 AS1 (rol
,%A0
) CR_TAB
3670 return (AS1 (lsl
,%A0
) CR_TAB
3671 AS2 (mov
,%A0
,%B0
) CR_TAB
3672 AS1 (rol
,%A0
) CR_TAB
3677 int reg0
= true_regnum (operands
[0]);
3678 int reg1
= true_regnum (operands
[1]);
3681 return *len
= 3, (AS2 (mov
,%A0
,%B0
) CR_TAB
3682 AS1 (lsl
,%B0
) CR_TAB
3685 return *len
= 4, (AS2 (mov
,%A0
,%B1
) CR_TAB
3686 AS1 (clr
,%B0
) CR_TAB
3687 AS2 (sbrc
,%A0
,7) CR_TAB
3693 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3694 AS1 (lsl
,%B0
) CR_TAB
3695 AS2 (sbc
,%B0
,%B0
) CR_TAB
3700 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3701 AS1 (lsl
,%B0
) CR_TAB
3702 AS2 (sbc
,%B0
,%B0
) CR_TAB
3703 AS1 (asr
,%A0
) CR_TAB
3707 if (AVR_HAVE_MUL
&& ldi_ok
)
3710 return (AS2 (ldi
,%A0
,0x20) CR_TAB
3711 AS2 (muls
,%B0
,%A0
) CR_TAB
3712 AS2 (mov
,%A0
,r1
) CR_TAB
3713 AS2 (sbc
,%B0
,%B0
) CR_TAB
3714 AS1 (clr
,__zero_reg__
));
3716 if (optimize_size
&& scratch
)
3719 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3720 AS1 (lsl
,%B0
) CR_TAB
3721 AS2 (sbc
,%B0
,%B0
) CR_TAB
3722 AS1 (asr
,%A0
) CR_TAB
3723 AS1 (asr
,%A0
) CR_TAB
3727 if (AVR_HAVE_MUL
&& ldi_ok
)
3730 return (AS2 (ldi
,%A0
,0x10) CR_TAB
3731 AS2 (muls
,%B0
,%A0
) CR_TAB
3732 AS2 (mov
,%A0
,r1
) CR_TAB
3733 AS2 (sbc
,%B0
,%B0
) CR_TAB
3734 AS1 (clr
,__zero_reg__
));
3736 if (optimize_size
&& scratch
)
3739 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3740 AS1 (lsl
,%B0
) CR_TAB
3741 AS2 (sbc
,%B0
,%B0
) CR_TAB
3742 AS1 (asr
,%A0
) CR_TAB
3743 AS1 (asr
,%A0
) CR_TAB
3744 AS1 (asr
,%A0
) CR_TAB
3748 if (AVR_HAVE_MUL
&& ldi_ok
)
3751 return (AS2 (ldi
,%A0
,0x08) CR_TAB
3752 AS2 (muls
,%B0
,%A0
) CR_TAB
3753 AS2 (mov
,%A0
,r1
) CR_TAB
3754 AS2 (sbc
,%B0
,%B0
) CR_TAB
3755 AS1 (clr
,__zero_reg__
));
3758 break; /* scratch ? 5 : 7 */
3760 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3761 AS1 (lsl
,%B0
) CR_TAB
3762 AS2 (sbc
,%B0
,%B0
) CR_TAB
3763 AS1 (asr
,%A0
) CR_TAB
3764 AS1 (asr
,%A0
) CR_TAB
3765 AS1 (asr
,%A0
) CR_TAB
3766 AS1 (asr
,%A0
) CR_TAB
3771 return (AS1 (lsl
,%B0
) CR_TAB
3772 AS2 (sbc
,%A0
,%A0
) CR_TAB
3773 AS1 (lsl
,%B0
) CR_TAB
3774 AS2 (mov
,%B0
,%A0
) CR_TAB
3778 if (INTVAL (operands
[2]) < 16)
3784 return *len
= 3, (AS1 (lsl
,%B0
) CR_TAB
3785 AS2 (sbc
,%A0
,%A0
) CR_TAB
3790 out_shift_with_cnt ((AS1 (asr
,%B0
) CR_TAB
3792 insn
, operands
, len
, 2);
3797 /* 32bit arithmetic shift right ((signed long)x >> i) */
3800 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
3802 if (GET_CODE (operands
[2]) == CONST_INT
)
3810 switch (INTVAL (operands
[2]))
3814 int reg0
= true_regnum (operands
[0]);
3815 int reg1
= true_regnum (operands
[1]);
3818 return (AS2 (mov
,%A0
,%B1
) CR_TAB
3819 AS2 (mov
,%B0
,%C1
) CR_TAB
3820 AS2 (mov
,%C0
,%D1
) CR_TAB
3821 AS1 (clr
,%D0
) CR_TAB
3822 AS2 (sbrc
,%C0
,7) CR_TAB
3825 return (AS1 (clr
,%D0
) CR_TAB
3826 AS2 (sbrc
,%D1
,7) CR_TAB
3827 AS1 (dec
,%D0
) CR_TAB
3828 AS2 (mov
,%C0
,%D1
) CR_TAB
3829 AS2 (mov
,%B0
,%C1
) CR_TAB
3835 int reg0
= true_regnum (operands
[0]);
3836 int reg1
= true_regnum (operands
[1]);
3838 if (reg0
== reg1
+ 2)
3839 return *len
= 4, (AS1 (clr
,%D0
) CR_TAB
3840 AS2 (sbrc
,%B0
,7) CR_TAB
3841 AS1 (com
,%D0
) CR_TAB
3844 return *len
= 5, (AS2 (movw
,%A0
,%C1
) CR_TAB
3845 AS1 (clr
,%D0
) CR_TAB
3846 AS2 (sbrc
,%B0
,7) CR_TAB
3847 AS1 (com
,%D0
) CR_TAB
3850 return *len
= 6, (AS2 (mov
,%B0
,%D1
) CR_TAB
3851 AS2 (mov
,%A0
,%C1
) CR_TAB
3852 AS1 (clr
,%D0
) CR_TAB
3853 AS2 (sbrc
,%B0
,7) CR_TAB
3854 AS1 (com
,%D0
) CR_TAB
3859 return *len
= 6, (AS2 (mov
,%A0
,%D1
) CR_TAB
3860 AS1 (clr
,%D0
) CR_TAB
3861 AS2 (sbrc
,%A0
,7) CR_TAB
3862 AS1 (com
,%D0
) CR_TAB
3863 AS2 (mov
,%B0
,%D0
) CR_TAB
3867 if (INTVAL (operands
[2]) < 32)
3874 return *len
= 4, (AS1 (lsl
,%D0
) CR_TAB
3875 AS2 (sbc
,%A0
,%A0
) CR_TAB
3876 AS2 (mov
,%B0
,%A0
) CR_TAB
3877 AS2 (movw
,%C0
,%A0
));
3879 return *len
= 5, (AS1 (lsl
,%D0
) CR_TAB
3880 AS2 (sbc
,%A0
,%A0
) CR_TAB
3881 AS2 (mov
,%B0
,%A0
) CR_TAB
3882 AS2 (mov
,%C0
,%A0
) CR_TAB
3887 out_shift_with_cnt ((AS1 (asr
,%D0
) CR_TAB
3888 AS1 (ror
,%C0
) CR_TAB
3889 AS1 (ror
,%B0
) CR_TAB
3891 insn
, operands
, len
, 4);
3895 /* 8bit logic shift right ((unsigned char)x >> i) */
3898 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
3900 if (GET_CODE (operands
[2]) == CONST_INT
)
3907 switch (INTVAL (operands
[2]))
3910 if (INTVAL (operands
[2]) < 8)
3914 return AS1 (clr
,%0);
3918 return AS1 (lsr
,%0);
3922 return (AS1 (lsr
,%0) CR_TAB
3926 return (AS1 (lsr
,%0) CR_TAB
3931 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3934 return (AS1 (swap
,%0) CR_TAB
3935 AS2 (andi
,%0,0x0f));
3938 return (AS1 (lsr
,%0) CR_TAB
3944 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3947 return (AS1 (swap
,%0) CR_TAB
3952 return (AS1 (lsr
,%0) CR_TAB
3959 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3962 return (AS1 (swap
,%0) CR_TAB
3968 return (AS1 (lsr
,%0) CR_TAB
3977 return (AS1 (rol
,%0) CR_TAB
3982 else if (CONSTANT_P (operands
[2]))
3983 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3985 out_shift_with_cnt (AS1 (lsr
,%0),
3986 insn
, operands
, len
, 1);
3990 /* 16bit logic shift right ((unsigned short)x >> i) */
3993 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
3995 if (GET_CODE (operands
[2]) == CONST_INT
)
3997 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3998 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4005 switch (INTVAL (operands
[2]))
4008 if (INTVAL (operands
[2]) < 16)
4012 return (AS1 (clr
,%B0
) CR_TAB
4016 if (optimize_size
&& scratch
)
4021 return (AS1 (swap
,%B0
) CR_TAB
4022 AS1 (swap
,%A0
) CR_TAB
4023 AS2 (andi
,%A0
,0x0f) CR_TAB
4024 AS2 (eor
,%A0
,%B0
) CR_TAB
4025 AS2 (andi
,%B0
,0x0f) CR_TAB
4031 return (AS1 (swap
,%B0
) CR_TAB
4032 AS1 (swap
,%A0
) CR_TAB
4033 AS2 (ldi
,%3,0x0f) CR_TAB
4034 AS2 (and,%A0
,%3) CR_TAB
4035 AS2 (eor
,%A0
,%B0
) CR_TAB
4036 AS2 (and,%B0
,%3) CR_TAB
4039 break; /* optimize_size ? 6 : 8 */
4043 break; /* scratch ? 5 : 6 */
4047 return (AS1 (lsr
,%B0
) CR_TAB
4048 AS1 (ror
,%A0
) CR_TAB
4049 AS1 (swap
,%B0
) CR_TAB
4050 AS1 (swap
,%A0
) CR_TAB
4051 AS2 (andi
,%A0
,0x0f) CR_TAB
4052 AS2 (eor
,%A0
,%B0
) CR_TAB
4053 AS2 (andi
,%B0
,0x0f) CR_TAB
4059 return (AS1 (lsr
,%B0
) CR_TAB
4060 AS1 (ror
,%A0
) CR_TAB
4061 AS1 (swap
,%B0
) CR_TAB
4062 AS1 (swap
,%A0
) CR_TAB
4063 AS2 (ldi
,%3,0x0f) CR_TAB
4064 AS2 (and,%A0
,%3) CR_TAB
4065 AS2 (eor
,%A0
,%B0
) CR_TAB
4066 AS2 (and,%B0
,%3) CR_TAB
4073 break; /* scratch ? 5 : 6 */
4075 return (AS1 (clr
,__tmp_reg__
) CR_TAB
4076 AS1 (lsl
,%A0
) CR_TAB
4077 AS1 (rol
,%B0
) CR_TAB
4078 AS1 (rol
,__tmp_reg__
) CR_TAB
4079 AS1 (lsl
,%A0
) CR_TAB
4080 AS1 (rol
,%B0
) CR_TAB
4081 AS1 (rol
,__tmp_reg__
) CR_TAB
4082 AS2 (mov
,%A0
,%B0
) CR_TAB
4083 AS2 (mov
,%B0
,__tmp_reg__
));
4087 return (AS1 (lsl
,%A0
) CR_TAB
4088 AS2 (mov
,%A0
,%B0
) CR_TAB
4089 AS1 (rol
,%A0
) CR_TAB
4090 AS2 (sbc
,%B0
,%B0
) CR_TAB
4094 return *len
= 2, (AS2 (mov
,%A0
,%B1
) CR_TAB
4099 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4100 AS1 (clr
,%B0
) CR_TAB
4105 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4106 AS1 (clr
,%B0
) CR_TAB
4107 AS1 (lsr
,%A0
) CR_TAB
4112 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4113 AS1 (clr
,%B0
) CR_TAB
4114 AS1 (lsr
,%A0
) CR_TAB
4115 AS1 (lsr
,%A0
) CR_TAB
4122 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4123 AS1 (clr
,%B0
) CR_TAB
4124 AS1 (swap
,%A0
) CR_TAB
4125 AS2 (andi
,%A0
,0x0f));
4130 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4131 AS1 (clr
,%B0
) CR_TAB
4132 AS1 (swap
,%A0
) CR_TAB
4133 AS2 (ldi
,%3,0x0f) CR_TAB
4137 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4138 AS1 (clr
,%B0
) CR_TAB
4139 AS1 (lsr
,%A0
) CR_TAB
4140 AS1 (lsr
,%A0
) CR_TAB
4141 AS1 (lsr
,%A0
) CR_TAB
4148 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4149 AS1 (clr
,%B0
) CR_TAB
4150 AS1 (swap
,%A0
) CR_TAB
4151 AS1 (lsr
,%A0
) CR_TAB
4152 AS2 (andi
,%A0
,0x07));
4154 if (AVR_HAVE_MUL
&& scratch
)
4157 return (AS2 (ldi
,%3,0x08) CR_TAB
4158 AS2 (mul
,%B0
,%3) CR_TAB
4159 AS2 (mov
,%A0
,r1
) CR_TAB
4160 AS1 (clr
,%B0
) CR_TAB
4161 AS1 (clr
,__zero_reg__
));
4163 if (optimize_size
&& scratch
)
4168 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4169 AS1 (clr
,%B0
) CR_TAB
4170 AS1 (swap
,%A0
) CR_TAB
4171 AS1 (lsr
,%A0
) CR_TAB
4172 AS2 (ldi
,%3,0x07) CR_TAB
4178 return ("set" CR_TAB
4179 AS2 (bld
,r1
,3) CR_TAB
4180 AS2 (mul
,%B0
,r1
) CR_TAB
4181 AS2 (mov
,%A0
,r1
) CR_TAB
4182 AS1 (clr
,%B0
) CR_TAB
4183 AS1 (clr
,__zero_reg__
));
4186 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4187 AS1 (clr
,%B0
) CR_TAB
4188 AS1 (lsr
,%A0
) CR_TAB
4189 AS1 (lsr
,%A0
) CR_TAB
4190 AS1 (lsr
,%A0
) CR_TAB
4191 AS1 (lsr
,%A0
) CR_TAB
4195 if (AVR_HAVE_MUL
&& ldi_ok
)
4198 return (AS2 (ldi
,%A0
,0x04) CR_TAB
4199 AS2 (mul
,%B0
,%A0
) CR_TAB
4200 AS2 (mov
,%A0
,r1
) CR_TAB
4201 AS1 (clr
,%B0
) CR_TAB
4202 AS1 (clr
,__zero_reg__
));
4204 if (AVR_HAVE_MUL
&& scratch
)
4207 return (AS2 (ldi
,%3,0x04) CR_TAB
4208 AS2 (mul
,%B0
,%3) CR_TAB
4209 AS2 (mov
,%A0
,r1
) CR_TAB
4210 AS1 (clr
,%B0
) CR_TAB
4211 AS1 (clr
,__zero_reg__
));
4213 if (optimize_size
&& ldi_ok
)
4216 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4217 AS2 (ldi
,%B0
,6) "\n1:\t"
4218 AS1 (lsr
,%A0
) CR_TAB
4219 AS1 (dec
,%B0
) CR_TAB
4222 if (optimize_size
&& scratch
)
4225 return (AS1 (clr
,%A0
) CR_TAB
4226 AS1 (lsl
,%B0
) CR_TAB
4227 AS1 (rol
,%A0
) CR_TAB
4228 AS1 (lsl
,%B0
) CR_TAB
4229 AS1 (rol
,%A0
) CR_TAB
4234 return (AS1 (clr
,%A0
) CR_TAB
4235 AS1 (lsl
,%B0
) CR_TAB
4236 AS1 (rol
,%A0
) CR_TAB
4241 out_shift_with_cnt ((AS1 (lsr
,%B0
) CR_TAB
4243 insn
, operands
, len
, 2);
4247 /* 32bit logic shift right ((unsigned int)x >> i) */
4250 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
4252 if (GET_CODE (operands
[2]) == CONST_INT
)
4260 switch (INTVAL (operands
[2]))
4263 if (INTVAL (operands
[2]) < 32)
4267 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
4268 AS1 (clr
,%C0
) CR_TAB
4269 AS2 (movw
,%A0
,%C0
));
4271 return (AS1 (clr
,%D0
) CR_TAB
4272 AS1 (clr
,%C0
) CR_TAB
4273 AS1 (clr
,%B0
) CR_TAB
4278 int reg0
= true_regnum (operands
[0]);
4279 int reg1
= true_regnum (operands
[1]);
4282 return (AS2 (mov
,%A0
,%B1
) CR_TAB
4283 AS2 (mov
,%B0
,%C1
) CR_TAB
4284 AS2 (mov
,%C0
,%D1
) CR_TAB
4287 return (AS1 (clr
,%D0
) CR_TAB
4288 AS2 (mov
,%C0
,%D1
) CR_TAB
4289 AS2 (mov
,%B0
,%C1
) CR_TAB
4295 int reg0
= true_regnum (operands
[0]);
4296 int reg1
= true_regnum (operands
[1]);
4298 if (reg0
== reg1
+ 2)
4299 return *len
= 2, (AS1 (clr
,%C0
) CR_TAB
4302 return *len
= 3, (AS2 (movw
,%A0
,%C1
) CR_TAB
4303 AS1 (clr
,%C0
) CR_TAB
4306 return *len
= 4, (AS2 (mov
,%B0
,%D1
) CR_TAB
4307 AS2 (mov
,%A0
,%C1
) CR_TAB
4308 AS1 (clr
,%C0
) CR_TAB
4313 return *len
= 4, (AS2 (mov
,%A0
,%D1
) CR_TAB
4314 AS1 (clr
,%B0
) CR_TAB
4315 AS1 (clr
,%C0
) CR_TAB
4320 return (AS1 (clr
,%A0
) CR_TAB
4321 AS2 (sbrc
,%D0
,7) CR_TAB
4322 AS1 (inc
,%A0
) CR_TAB
4323 AS1 (clr
,%B0
) CR_TAB
4324 AS1 (clr
,%C0
) CR_TAB
4329 out_shift_with_cnt ((AS1 (lsr
,%D0
) CR_TAB
4330 AS1 (ror
,%C0
) CR_TAB
4331 AS1 (ror
,%B0
) CR_TAB
4333 insn
, operands
, len
, 4);
4337 /* Modifies the length assigned to instruction INSN
4338 LEN is the initially computed length of the insn. */
4341 adjust_insn_length (rtx insn
, int len
)
4343 rtx patt
= PATTERN (insn
);
4346 if (GET_CODE (patt
) == SET
)
4349 op
[1] = SET_SRC (patt
);
4350 op
[0] = SET_DEST (patt
);
4351 if (general_operand (op
[1], VOIDmode
)
4352 && general_operand (op
[0], VOIDmode
))
4354 switch (GET_MODE (op
[0]))
4357 output_movqi (insn
, op
, &len
);
4360 output_movhi (insn
, op
, &len
);
4364 output_movsisf (insn
, op
, &len
);
4370 else if (op
[0] == cc0_rtx
&& REG_P (op
[1]))
4372 switch (GET_MODE (op
[1]))
4374 case HImode
: out_tsthi (insn
, op
[1], &len
); break;
4375 case SImode
: out_tstsi (insn
, op
[1], &len
); break;
4379 else if (GET_CODE (op
[1]) == AND
)
4381 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4383 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4384 if (GET_MODE (op
[1]) == SImode
)
4385 len
= (((mask
& 0xff) != 0xff)
4386 + ((mask
& 0xff00) != 0xff00)
4387 + ((mask
& 0xff0000L
) != 0xff0000L
)
4388 + ((mask
& 0xff000000L
) != 0xff000000L
));
4389 else if (GET_MODE (op
[1]) == HImode
)
4390 len
= (((mask
& 0xff) != 0xff)
4391 + ((mask
& 0xff00) != 0xff00));
4394 else if (GET_CODE (op
[1]) == IOR
)
4396 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4398 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4399 if (GET_MODE (op
[1]) == SImode
)
4400 len
= (((mask
& 0xff) != 0)
4401 + ((mask
& 0xff00) != 0)
4402 + ((mask
& 0xff0000L
) != 0)
4403 + ((mask
& 0xff000000L
) != 0));
4404 else if (GET_MODE (op
[1]) == HImode
)
4405 len
= (((mask
& 0xff) != 0)
4406 + ((mask
& 0xff00) != 0));
4410 set
= single_set (insn
);
4415 op
[1] = SET_SRC (set
);
4416 op
[0] = SET_DEST (set
);
4418 if (GET_CODE (patt
) == PARALLEL
4419 && general_operand (op
[1], VOIDmode
)
4420 && general_operand (op
[0], VOIDmode
))
4422 if (XVECLEN (patt
, 0) == 2)
4423 op
[2] = XVECEXP (patt
, 0, 1);
4425 switch (GET_MODE (op
[0]))
4431 output_reload_inhi (insn
, op
, &len
);
4435 output_reload_insisf (insn
, op
, &len
);
4441 else if (GET_CODE (op
[1]) == ASHIFT
4442 || GET_CODE (op
[1]) == ASHIFTRT
4443 || GET_CODE (op
[1]) == LSHIFTRT
)
4447 ops
[1] = XEXP (op
[1],0);
4448 ops
[2] = XEXP (op
[1],1);
4449 switch (GET_CODE (op
[1]))
4452 switch (GET_MODE (op
[0]))
4454 case QImode
: ashlqi3_out (insn
,ops
,&len
); break;
4455 case HImode
: ashlhi3_out (insn
,ops
,&len
); break;
4456 case SImode
: ashlsi3_out (insn
,ops
,&len
); break;
4461 switch (GET_MODE (op
[0]))
4463 case QImode
: ashrqi3_out (insn
,ops
,&len
); break;
4464 case HImode
: ashrhi3_out (insn
,ops
,&len
); break;
4465 case SImode
: ashrsi3_out (insn
,ops
,&len
); break;
4470 switch (GET_MODE (op
[0]))
4472 case QImode
: lshrqi3_out (insn
,ops
,&len
); break;
4473 case HImode
: lshrhi3_out (insn
,ops
,&len
); break;
4474 case SImode
: lshrsi3_out (insn
,ops
,&len
); break;
4486 /* Return nonzero if register REG dead after INSN. */
4489 reg_unused_after (rtx insn
, rtx reg
)
4491 return (dead_or_set_p (insn
, reg
)
4492 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
/* NOTE(review): this region is a lossy extraction of the original source --
   brace, return, declaration and preprocessor lines are missing and each
   statement is split across several lines.  The bytes below are kept exactly
   as found; only comments were added.  Restore from upstream avr.c before
   attempting to compile.  */
4495 /* Return nonzero if REG is not used after INSN.
4496 We assume REG is a reload reg, and therefore does
4497 not live past labels. It may live past calls or jumps though. */
4500 _reg_unused_after (rtx insn
, rtx reg
)
4505 /* If the reg is set by this instruction, then it is safe for our
4506 case. Disregard the case where this is a store to memory, since
4507 we are checking a register used in the store address. */
4508 set
= single_set (insn
);
4509 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
4510 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
/* Forward scan from INSN: each following insn is classified below.  */
4513 while ((insn
= NEXT_INSN (insn
)))
4516 code
= GET_CODE (insn
);
4519 /* If this is a label that existed before reload, then the register
4520 if dead here. However, if this is a label added by reorg, then
4521 the register may still be live here. We can't tell the difference,
4522 so we just ignore labels completely. */
4523 if (code
== CODE_LABEL
)
4531 if (code
== JUMP_INSN
)
4534 /* If this is a sequence, we must handle them all at once.
4535 We could have for instance a call that sets the target register,
4536 and an insn in a delay slot that uses the register. In this case,
4537 we must return 0. */
4538 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4543 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
4545 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
4546 rtx set
= single_set (this_insn
);
4548 if (GET_CODE (this_insn
) == CALL_INSN
)
4550 else if (GET_CODE (this_insn
) == JUMP_INSN
)
4552 if (INSN_ANNULLED_BRANCH_P (this_insn
))
4557 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4559 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4561 if (GET_CODE (SET_DEST (set
)) != MEM
)
4567 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
4572 else if (code
== JUMP_INSN
)
/* Calls: REG is unused afterwards if it is not referenced by the call's
   USE list and is call-clobbered.  */
4576 if (code
== CALL_INSN
)
4579 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4580 if (GET_CODE (XEXP (tem
, 0)) == USE
4581 && REG_P (XEXP (XEXP (tem
, 0), 0))
4582 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
4584 if (call_used_regs
[REGNO (reg
)])
4588 set
= single_set (insn
);
4590 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4592 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4593 return GET_CODE (SET_DEST (set
)) != MEM
;
4594 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
4600 /* Target hook for assembling integer objects. The AVR version needs
4601 special handling for references to certain labels. */
4604 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4606 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
4607 && ((GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (x
))
4608 || GET_CODE (x
) == LABEL_REF
))
4610 fputs ("\t.word\tgs(", asm_out_file
);
4611 output_addr_const (asm_out_file
, x
);
4612 fputs (")\n", asm_out_file
);
4615 return default_assemble_integer (x
, size
, aligned_p
);
4618 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4621 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
4624 /* If the function has the 'signal' or 'interrupt' attribute, test to
4625 make sure that the name of the function is "__vector_NN" so as to
4626 catch when the user misspells the interrupt vector name. */
4628 if (cfun
->machine
->is_interrupt
)
4630 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4632 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4633 "%qs appears to be a misspelled interrupt handler",
4637 else if (cfun
->machine
->is_signal
)
4639 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4641 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4642 "%qs appears to be a misspelled signal handler",
4647 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
4648 ASM_OUTPUT_LABEL (file
, name
);
4651 /* The routine used to output NUL terminated strings. We use a special
4652 version of this for most svr4 targets because doing so makes the
4653 generated assembly code more compact (and thus faster to assemble)
4654 as well as more readable, especially for targets like the i386
4655 (where the only alternative is to output character sequences as
4656 comma separated lists of numbers). */
4659 gas_output_limited_string(FILE *file
, const char *str
)
4661 const unsigned char *_limited_str
= (const unsigned char *) str
;
4663 fprintf (file
, "%s\"", STRING_ASM_OP
);
4664 for (; (ch
= *_limited_str
); _limited_str
++)
4667 switch (escape
= ESCAPES
[ch
])
4673 fprintf (file
, "\\%03o", ch
);
4677 putc (escape
, file
);
4681 fprintf (file
, "\"\n");
4684 /* The routine used to output sequences of byte values. We use a special
4685 version of this for most svr4 targets because doing so makes the
4686 generated assembly code more compact (and thus faster to assemble)
4687 as well as more readable. Note that if we find subparts of the
4688 character sequence which end with NUL (and which are shorter than
4689 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4692 gas_output_ascii(FILE *file
, const char *str
, size_t length
)
4694 const unsigned char *_ascii_bytes
= (const unsigned char *) str
;
4695 const unsigned char *limit
= _ascii_bytes
+ length
;
4696 unsigned bytes_in_chunk
= 0;
4697 for (; _ascii_bytes
< limit
; _ascii_bytes
++)
4699 const unsigned char *p
;
4700 if (bytes_in_chunk
>= 60)
4702 fprintf (file
, "\"\n");
4705 for (p
= _ascii_bytes
; p
< limit
&& *p
!= '\0'; p
++)
4707 if (p
< limit
&& (p
- _ascii_bytes
) <= (signed)STRING_LIMIT
)
4709 if (bytes_in_chunk
> 0)
4711 fprintf (file
, "\"\n");
4714 gas_output_limited_string (file
, (const char*)_ascii_bytes
);
4721 if (bytes_in_chunk
== 0)
4722 fprintf (file
, "\t.ascii\t\"");
4723 switch (escape
= ESCAPES
[ch
= *_ascii_bytes
])
4730 fprintf (file
, "\\%03o", ch
);
4731 bytes_in_chunk
+= 4;
4735 putc (escape
, file
);
4736 bytes_in_chunk
+= 2;
4741 if (bytes_in_chunk
> 0)
4742 fprintf (file
, "\"\n");
4745 /* Return value is nonzero if pseudos that have been
4746 assigned to registers of class CLASS would likely be spilled
4747 because registers of CLASS are needed for spill registers. */
4750 class_likely_spilled_p (int c
)
4752 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
4755 /* Valid attributes:
4756 progmem - put data to program memory;
4757 signal - make a function to be hardware interrupt. After function
4758 prologue interrupts are disabled;
4759 interrupt - make a function to be hardware interrupt. After function
4760 prologue interrupts are enabled;
4761 naked - don't generate function prologue/epilogue and `ret' command.
4763 Only `progmem' attribute valid for type. */
4765 const struct attribute_spec avr_attribute_table
[] =
4767 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4768 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
},
4769 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
4770 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
4771 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4772 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4773 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4774 { NULL
, 0, 0, false, false, false, NULL
}
4777 /* Handle a "progmem" attribute; arguments as in
4778 struct attribute_spec.handler. */
4780 avr_handle_progmem_attribute (tree
*node
, tree name
,
4781 tree args ATTRIBUTE_UNUSED
,
4782 int flags ATTRIBUTE_UNUSED
,
4787 if (TREE_CODE (*node
) == TYPE_DECL
)
4789 /* This is really a decl attribute, not a type attribute,
4790 but try to handle it for GCC 3.0 backwards compatibility. */
4792 tree type
= TREE_TYPE (*node
);
4793 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
4794 tree newtype
= build_type_attribute_variant (type
, attr
);
4796 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
4797 TREE_TYPE (*node
) = newtype
;
4798 *no_add_attrs
= true;
4800 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
4802 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
4804 warning (0, "only initialized variables can be placed into "
4805 "program memory area");
4806 *no_add_attrs
= true;
4811 warning (OPT_Wattributes
, "%qE attribute ignored",
4813 *no_add_attrs
= true;
4820 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4821 struct attribute_spec.handler. */
4824 avr_handle_fndecl_attribute (tree
*node
, tree name
,
4825 tree args ATTRIBUTE_UNUSED
,
4826 int flags ATTRIBUTE_UNUSED
,
4829 if (TREE_CODE (*node
) != FUNCTION_DECL
)
4831 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4833 *no_add_attrs
= true;
4840 avr_handle_fntype_attribute (tree
*node
, tree name
,
4841 tree args ATTRIBUTE_UNUSED
,
4842 int flags ATTRIBUTE_UNUSED
,
4845 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4847 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4849 *no_add_attrs
= true;
4855 /* Look for attribute `progmem' in DECL
4856 if found return 1, otherwise 0. */
4859 avr_progmem_p (tree decl
, tree attributes
)
4863 if (TREE_CODE (decl
) != VAR_DECL
)
4867 != lookup_attribute ("progmem", attributes
))
4873 while (TREE_CODE (a
) == ARRAY_TYPE
);
4875 if (a
== error_mark_node
)
4878 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
4884 /* Add the section attribute if the variable is in progmem. */
4887 avr_insert_attributes (tree node
, tree
*attributes
)
4889 if (TREE_CODE (node
) == VAR_DECL
4890 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
4891 && avr_progmem_p (node
, *attributes
))
4893 static const char dsec
[] = ".progmem.data";
4894 *attributes
= tree_cons (get_identifier ("section"),
4895 build_tree_list (NULL
, build_string (strlen (dsec
), dsec
)),
4898 /* ??? This seems sketchy. Why can't the user declare the
4899 thing const in the first place? */
4900 TREE_READONLY (node
) = 1;
4904 /* A get_unnamed_section callback for switching to progmem_section. */
4907 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED
)
4909 fprintf (asm_out_file
,
4910 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4911 AVR_HAVE_JMP_CALL
? "a" : "ax");
4912 /* Should already be aligned, this is just to be safe if it isn't. */
4913 fprintf (asm_out_file
, "\t.p2align 1\n");
4916 /* Implement TARGET_ASM_INIT_SECTIONS. */
4919 avr_asm_init_sections (void)
4921 progmem_section
= get_unnamed_section (AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
,
4922 avr_output_progmem_section_asm_op
,
4924 readonly_data_section
= data_section
;
4928 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
4930 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4932 if (strncmp (name
, ".noinit", 7) == 0)
4934 if (decl
&& TREE_CODE (decl
) == VAR_DECL
4935 && DECL_INITIAL (decl
) == NULL_TREE
)
4936 flags
|= SECTION_BSS
; /* @nobits */
4938 warning (0, "only uninitialized variables can be placed in the "
4945 /* Outputs some appropriate text to go at the start of an assembler
4949 avr_file_start (void)
4951 if (avr_current_arch
->asm_only
)
4952 error ("MCU %qs supported for assembler only", avr_mcu_name
);
4954 default_file_start ();
4956 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4957 fputs ("__SREG__ = 0x3f\n"
4959 "__SP_L__ = 0x3d\n", asm_out_file
);
4961 fputs ("__tmp_reg__ = 0\n"
4962 "__zero_reg__ = 1\n", asm_out_file
);
4964 /* FIXME: output these only if there is anything in the .data / .bss
4965 sections - some code size could be saved by not linking in the
4966 initialization code from libgcc if one or both sections are empty. */
4967 fputs ("\t.global __do_copy_data\n", asm_out_file
);
4968 fputs ("\t.global __do_clear_bss\n", asm_out_file
);
4971 /* Outputs to the stdio stream FILE some
4972 appropriate text to go at the end of an assembler file. */
/* NOTE(review): lossy extraction -- most rows of the three register-order
   arrays (order_0, order_1, order_2) were dropped, so the tables cannot be
   reconstructed from this view.  Bytes kept exactly as found; comments only
   added.  Restore from upstream avr.c.  */
4979 /* Choose the order in which to allocate hard registers for
4980 pseudo-registers local to a basic block.
4982 Store the desired register order in the array `reg_alloc_order'.
4983 Element 0 should be the register to allocate first; element 1, the
4984 next register; and so on. */
4987 order_regs_for_local_alloc (void)
4990 static const int order_0
[] = {
4998 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5002 static const int order_1
[] = {
5010 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5014 static const int order_2
[] = {
5023 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Table selected by -morder1 / -morder2; otherwise order_0.  */
5028 const int *order
= (TARGET_ORDER_1
? order_1
:
5029 TARGET_ORDER_2
? order_2
:
5031 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
5032 reg_alloc_order
[i
] = order
[i
];
/* NOTE(review): lossy extraction -- the switch skeleton and its case labels
   are missing, so the mapping of rtx codes to the two surviving return
   paths (a COSTS_N_INSNS return and a recursive avr_rtx_costs call) cannot
   be determined from this view.  Bytes kept exactly as found.  */
5036 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5037 cost of an RTX operand given its context. X is the rtx of the
5038 operand, MODE is its mode, and OUTER is the rtx_code of this
5039 operand's parent operator. */
5042 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
5045 enum rtx_code code
= GET_CODE (x
);
5056 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5063 avr_rtx_costs (x
, code
, outer
, &total
, speed
);
/* NOTE(review): lossy extraction of the target rtx-cost hook -- the outer
   switch on the rtx code, all case labels, braces, breaks and several
   variable declarations were dropped, leaving only the cost assignments
   and operand-cost recursions.  Reconstructing which costs belong to which
   rtx code is not safe from this view; bytes are kept exactly as found and
   only comments were added.  Restore from upstream avr.c.  */
5067 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5068 is to be calculated. Return true if the complete cost has been
5069 computed, and false if subexpressions should be scanned. In either
5070 case, *TOTAL contains the cost result. */
5073 avr_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
, int *total
,
5076 enum machine_mode mode
= GET_MODE (x
);
5083 /* Immediate constants are as cheap as registers. */
5091 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5099 *total
= COSTS_N_INSNS (1);
5103 *total
= COSTS_N_INSNS (3);
5107 *total
= COSTS_N_INSNS (7);
5113 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5121 *total
= COSTS_N_INSNS (1);
5127 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5131 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5132 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5136 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
5137 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5138 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5142 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
5143 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5144 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5151 *total
= COSTS_N_INSNS (1);
5152 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5153 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5157 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5159 *total
= COSTS_N_INSNS (2);
5160 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5162 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5163 *total
= COSTS_N_INSNS (1);
5165 *total
= COSTS_N_INSNS (2);
5169 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5171 *total
= COSTS_N_INSNS (4);
5172 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5174 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5175 *total
= COSTS_N_INSNS (1);
5177 *total
= COSTS_N_INSNS (4);
5183 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5189 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5190 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5191 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5192 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5196 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5197 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5198 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5206 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
5208 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5215 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
5217 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5225 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5226 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5234 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5237 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5238 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5245 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
5246 *total
= COSTS_N_INSNS (1);
5251 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
5252 *total
= COSTS_N_INSNS (3);
5257 if (CONST_INT_P (XEXP (x
, 1)))
5258 switch (INTVAL (XEXP (x
, 1)))
5262 *total
= COSTS_N_INSNS (5);
5265 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
5273 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
/* Shift-count cost ladders follow: non-constant counts fall back to a
   generic loop cost, small constants cost roughly one insn per bit.  */
5280 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5282 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5283 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5287 val
= INTVAL (XEXP (x
, 1));
5289 *total
= COSTS_N_INSNS (3);
5290 else if (val
>= 0 && val
<= 7)
5291 *total
= COSTS_N_INSNS (val
);
5293 *total
= COSTS_N_INSNS (1);
5298 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5300 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5301 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5304 switch (INTVAL (XEXP (x
, 1)))
5311 *total
= COSTS_N_INSNS (2);
5314 *total
= COSTS_N_INSNS (3);
5320 *total
= COSTS_N_INSNS (4);
5325 *total
= COSTS_N_INSNS (5);
5328 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5331 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5334 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
5337 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5338 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5343 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5345 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5346 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5349 switch (INTVAL (XEXP (x
, 1)))
5355 *total
= COSTS_N_INSNS (3);
5360 *total
= COSTS_N_INSNS (4);
5363 *total
= COSTS_N_INSNS (6);
5366 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5369 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5370 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5377 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5384 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5386 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5387 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5391 val
= INTVAL (XEXP (x
, 1));
5393 *total
= COSTS_N_INSNS (4);
5395 *total
= COSTS_N_INSNS (2);
5396 else if (val
>= 0 && val
<= 7)
5397 *total
= COSTS_N_INSNS (val
);
5399 *total
= COSTS_N_INSNS (1);
5404 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5406 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5407 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5410 switch (INTVAL (XEXP (x
, 1)))
5416 *total
= COSTS_N_INSNS (2);
5419 *total
= COSTS_N_INSNS (3);
5425 *total
= COSTS_N_INSNS (4);
5429 *total
= COSTS_N_INSNS (5);
5432 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5435 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5439 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5442 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5443 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5448 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5450 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5451 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5454 switch (INTVAL (XEXP (x
, 1)))
5460 *total
= COSTS_N_INSNS (4);
5465 *total
= COSTS_N_INSNS (6);
5468 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5471 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
5474 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5475 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5482 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5489 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5491 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5492 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5496 val
= INTVAL (XEXP (x
, 1));
5498 *total
= COSTS_N_INSNS (3);
5499 else if (val
>= 0 && val
<= 7)
5500 *total
= COSTS_N_INSNS (val
);
5502 *total
= COSTS_N_INSNS (1);
5507 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5509 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5510 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5513 switch (INTVAL (XEXP (x
, 1)))
5520 *total
= COSTS_N_INSNS (2);
5523 *total
= COSTS_N_INSNS (3);
5528 *total
= COSTS_N_INSNS (4);
5532 *total
= COSTS_N_INSNS (5);
5538 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5541 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5545 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5548 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5549 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5554 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5556 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5557 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5560 switch (INTVAL (XEXP (x
, 1)))
5566 *total
= COSTS_N_INSNS (4);
5569 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5574 *total
= COSTS_N_INSNS (4);
5577 *total
= COSTS_N_INSNS (6);
5580 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5581 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5588 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
/* Comparison costs, keyed on the mode of operand 0.  */
5592 switch (GET_MODE (XEXP (x
, 0)))
5595 *total
= COSTS_N_INSNS (1);
5596 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5597 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5601 *total
= COSTS_N_INSNS (2);
5602 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5603 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5604 else if (INTVAL (XEXP (x
, 1)) != 0)
5605 *total
+= COSTS_N_INSNS (1);
5609 *total
= COSTS_N_INSNS (4);
5610 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5611 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5612 else if (INTVAL (XEXP (x
, 1)) != 0)
5613 *total
+= COSTS_N_INSNS (3);
5619 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5628 /* Calculate the cost of a memory address. */
5631 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
5633 if (GET_CODE (x
) == PLUS
5634 && GET_CODE (XEXP (x
,1)) == CONST_INT
5635 && (REG_P (XEXP (x
,0)) || GET_CODE (XEXP (x
,0)) == SUBREG
)
5636 && INTVAL (XEXP (x
,1)) >= 61)
5638 if (CONSTANT_ADDRESS_P (x
))
5640 if (optimize
> 0 && io_address_operand (x
, QImode
))
5647 /* Test for extra memory constraint 'Q'.
5648 It's a memory address based on Y or Z pointer with valid displacement. */
5651 extra_constraint_Q (rtx x
)
5653 if (GET_CODE (XEXP (x
,0)) == PLUS
5654 && REG_P (XEXP (XEXP (x
,0), 0))
5655 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
5656 && (INTVAL (XEXP (XEXP (x
,0), 1))
5657 <= MAX_LD_OFFSET (GET_MODE (x
))))
5659 rtx xx
= XEXP (XEXP (x
,0), 0);
5660 int regno
= REGNO (xx
);
5661 if (TARGET_ALL_DEBUG
)
5663 fprintf (stderr
, ("extra_constraint:\n"
5664 "reload_completed: %d\n"
5665 "reload_in_progress: %d\n"),
5666 reload_completed
, reload_in_progress
);
5669 if (regno
>= FIRST_PSEUDO_REGISTER
)
5670 return 1; /* allocate pseudos */
5671 else if (regno
== REG_Z
|| regno
== REG_Y
)
5672 return 1; /* strictly check */
5673 else if (xx
== frame_pointer_rtx
5674 || xx
== arg_pointer_rtx
)
5675 return 1; /* XXX frame & arg pointer checks */
5680 /* Convert condition code CONDITION to the valid AVR condition code. */
5683 avr_normalize_condition (RTX_CODE condition
)
5700 /* This function optimizes conditional jumps. */
5707 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5709 if (! (GET_CODE (insn
) == INSN
5710 || GET_CODE (insn
) == CALL_INSN
5711 || GET_CODE (insn
) == JUMP_INSN
)
5712 || !single_set (insn
))
5715 pattern
= PATTERN (insn
);
5717 if (GET_CODE (pattern
) == PARALLEL
)
5718 pattern
= XVECEXP (pattern
, 0, 0);
5719 if (GET_CODE (pattern
) == SET
5720 && SET_DEST (pattern
) == cc0_rtx
5721 && compare_diff_p (insn
))
5723 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
5725 /* Now we work under compare insn. */
5727 pattern
= SET_SRC (pattern
);
5728 if (true_regnum (XEXP (pattern
,0)) >= 0
5729 && true_regnum (XEXP (pattern
,1)) >= 0 )
5731 rtx x
= XEXP (pattern
,0);
5732 rtx next
= next_real_insn (insn
);
5733 rtx pat
= PATTERN (next
);
5734 rtx src
= SET_SRC (pat
);
5735 rtx t
= XEXP (src
,0);
5736 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5737 XEXP (pattern
,0) = XEXP (pattern
,1);
5738 XEXP (pattern
,1) = x
;
5739 INSN_CODE (next
) = -1;
5741 else if (true_regnum (XEXP (pattern
, 0)) >= 0
5742 && XEXP (pattern
, 1) == const0_rtx
)
5744 /* This is a tst insn, we can reverse it. */
5745 rtx next
= next_real_insn (insn
);
5746 rtx pat
= PATTERN (next
);
5747 rtx src
= SET_SRC (pat
);
5748 rtx t
= XEXP (src
,0);
5750 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5751 XEXP (pattern
, 1) = XEXP (pattern
, 0);
5752 XEXP (pattern
, 0) = const0_rtx
;
5753 INSN_CODE (next
) = -1;
5754 INSN_CODE (insn
) = -1;
5756 else if (true_regnum (XEXP (pattern
,0)) >= 0
5757 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
5759 rtx x
= XEXP (pattern
,1);
5760 rtx next
= next_real_insn (insn
);
5761 rtx pat
= PATTERN (next
);
5762 rtx src
= SET_SRC (pat
);
5763 rtx t
= XEXP (src
,0);
5764 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
5766 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
5768 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
5769 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
5770 INSN_CODE (next
) = -1;
5771 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.
   NOTE(review): the body was dropped by the extraction; 24 (r24, the base
   of the AVR return-value registers) is reconstructed from the backend's
   calling convention -- verify against upstream avr.c.  */

int
avr_ret_register (void)
{
  return 24;
}
5787 /* Create an RTX representing the place where a
5788 library function returns a value of mode MODE. */
5791 avr_libcall_value (enum machine_mode mode
)
5793 int offs
= GET_MODE_SIZE (mode
);
5796 return gen_rtx_REG (mode
, RET_REGISTER
+ 2 - offs
);
5799 /* Create an RTX representing the place where a
5800 function returns a value of data type VALTYPE. */
5803 avr_function_value (const_tree type
,
5804 const_tree func ATTRIBUTE_UNUSED
,
5805 bool outgoing ATTRIBUTE_UNUSED
)
5809 if (TYPE_MODE (type
) != BLKmode
)
5810 return avr_libcall_value (TYPE_MODE (type
));
5812 offs
= int_size_in_bytes (type
);
5815 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
5816 offs
= GET_MODE_SIZE (SImode
);
5817 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
5818 offs
= GET_MODE_SIZE (DImode
);
5820 return gen_rtx_REG (BLKmode
, RET_REGISTER
+ 2 - offs
);
5823 /* Places additional restrictions on the register class to
5824 use when it is necessary to copy value X into a register
5828 preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class rclass
)
5834 test_hard_reg_class (enum reg_class rclass
, rtx x
)
5836 int regno
= true_regnum (x
);
5840 if (TEST_HARD_REG_CLASS (rclass
, regno
))
5848 jump_over_one_insn_p (rtx insn
, rtx dest
)
5850 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
5853 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
5854 int dest_addr
= INSN_ADDRESSES (uid
);
5855 return dest_addr
- jump_addr
== get_attr_length (insn
) + 1;
5858 /* Returns 1 if a value of mode MODE can be stored starting with hard
5859 register number REGNO. On the enhanced core, anything larger than
5860 1 byte must start in even numbered register for "movw" to work
5861 (this way we don't have to check for odd registers everywhere). */
5864 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
5866 /* Disallow QImode in stack pointer regs. */
5867 if ((regno
== REG_SP
|| regno
== (REG_SP
+ 1)) && mode
== QImode
)
5870 /* The only thing that can go into registers r28:r29 is a Pmode. */
5871 if (regno
== REG_Y
&& mode
== Pmode
)
5874 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5875 if (regno
<= (REG_Y
+ 1) && (regno
+ GET_MODE_SIZE (mode
)) >= (REG_Y
+ 1))
5881 /* Modes larger than QImode occupy consecutive registers. */
5882 if (regno
+ GET_MODE_SIZE (mode
) > FIRST_PSEUDO_REGISTER
)
5885 /* All modes larger than QImode should start in an even register. */
5886 return !(regno
& 1);
5890 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5896 if (GET_CODE (operands
[1]) == CONST_INT
)
5898 int val
= INTVAL (operands
[1]);
5899 if ((val
& 0xff) == 0)
5902 return (AS2 (mov
,%A0
,__zero_reg__
) CR_TAB
5903 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5906 else if ((val
& 0xff00) == 0)
5909 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5910 AS2 (mov
,%A0
,%2) CR_TAB
5911 AS2 (mov
,%B0
,__zero_reg__
));
5913 else if ((val
& 0xff) == ((val
& 0xff00) >> 8))
5916 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5917 AS2 (mov
,%A0
,%2) CR_TAB
5922 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5923 AS2 (mov
,%A0
,%2) CR_TAB
5924 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5930 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5932 rtx src
= operands
[1];
5933 int cnst
= (GET_CODE (src
) == CONST_INT
);
5938 *len
= 4 + ((INTVAL (src
) & 0xff) != 0)
5939 + ((INTVAL (src
) & 0xff00) != 0)
5940 + ((INTVAL (src
) & 0xff0000) != 0)
5941 + ((INTVAL (src
) & 0xff000000) != 0);
5948 if (cnst
&& ((INTVAL (src
) & 0xff) == 0))
5949 output_asm_insn (AS2 (mov
, %A0
, __zero_reg__
), operands
);
5952 output_asm_insn (AS2 (ldi
, %2, lo8(%1)), operands
);
5953 output_asm_insn (AS2 (mov
, %A0
, %2), operands
);
5955 if (cnst
&& ((INTVAL (src
) & 0xff00) == 0))
5956 output_asm_insn (AS2 (mov
, %B0
, __zero_reg__
), operands
);
5959 output_asm_insn (AS2 (ldi
, %2, hi8(%1)), operands
);
5960 output_asm_insn (AS2 (mov
, %B0
, %2), operands
);
5962 if (cnst
&& ((INTVAL (src
) & 0xff0000) == 0))
5963 output_asm_insn (AS2 (mov
, %C0
, __zero_reg__
), operands
);
5966 output_asm_insn (AS2 (ldi
, %2, hlo8(%1)), operands
);
5967 output_asm_insn (AS2 (mov
, %C0
, %2), operands
);
5969 if (cnst
&& ((INTVAL (src
) & 0xff000000) == 0))
5970 output_asm_insn (AS2 (mov
, %D0
, __zero_reg__
), operands
);
5973 output_asm_insn (AS2 (ldi
, %2, hhi8(%1)), operands
);
5974 output_asm_insn (AS2 (mov
, %D0
, %2), operands
);
5980 avr_output_bld (rtx operands
[], int bit_nr
)
5982 static char s
[] = "bld %A0,0";
5984 s
[5] = 'A' + (bit_nr
>> 3);
5985 s
[8] = '0' + (bit_nr
& 7);
5986 output_asm_insn (s
, operands
);
5990 avr_output_addr_vec_elt (FILE *stream
, int value
)
5992 switch_to_section (progmem_section
);
5993 if (AVR_HAVE_JMP_CALL
)
5994 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
5996 fprintf (stream
, "\trjmp .L%d\n", value
);
5999 /* Returns true if SCRATCH are safe to be allocated as a scratch
6000 registers (for a define_peephole2) in the current function. */
6003 avr_hard_regno_scratch_ok (unsigned int regno
)
6005 /* Interrupt functions can only use registers that have already been saved
6006 by the prologue, even if they would normally be call-clobbered. */
6008 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6009 && !df_regs_ever_live_p (regno
))
6015 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6018 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
6019 unsigned int new_reg
)
6021 /* Interrupt functions can only use registers that have already been
6022 saved by the prologue, even if they would normally be
6025 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6026 && !df_regs_ever_live_p (new_reg
))
6032 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
6033 or memory location in the I/O space (QImode only).
6035 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6036 Operand 1: register operand to test, or CONST_INT memory address.
6037 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
6038 Operand 3: label to jump to if the test is true. */
6041 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
6043 enum rtx_code comp
= GET_CODE (operands
[0]);
6044 int long_jump
= (get_attr_length (insn
) >= 4);
6045 int reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
6049 else if (comp
== LT
)
6053 comp
= reverse_condition (comp
);
6055 if (GET_CODE (operands
[1]) == CONST_INT
)
6057 if (INTVAL (operands
[1]) < 0x40)
6060 output_asm_insn (AS2 (sbis
,%1-0x20,%2), operands
);
6062 output_asm_insn (AS2 (sbic
,%1-0x20,%2), operands
);
6066 output_asm_insn (AS2 (in
,__tmp_reg__
,%1-0x20), operands
);
6068 output_asm_insn (AS2 (sbrs
,__tmp_reg__
,%2), operands
);
6070 output_asm_insn (AS2 (sbrc
,__tmp_reg__
,%2), operands
);
6073 else /* GET_CODE (operands[1]) == REG */
6075 if (GET_MODE (operands
[1]) == QImode
)
6078 output_asm_insn (AS2 (sbrs
,%1,%2), operands
);
6080 output_asm_insn (AS2 (sbrc
,%1,%2), operands
);
6082 else /* HImode or SImode */
6084 static char buf
[] = "sbrc %A1,0";
6085 int bit_nr
= exact_log2 (INTVAL (operands
[2])
6086 & GET_MODE_MASK (GET_MODE (operands
[1])));
6088 buf
[3] = (comp
== EQ
) ? 's' : 'c';
6089 buf
[6] = 'A' + (bit_nr
>> 3);
6090 buf
[9] = '0' + (bit_nr
& 7);
6091 output_asm_insn (buf
, operands
);
6096 return (AS1 (rjmp
,.+4) CR_TAB
6099 return AS1 (rjmp
,%3);
6103 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6106 avr_asm_out_ctor (rtx symbol
, int priority
)
6108 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
6109 default_ctor_section_asm_out_constructor (symbol
, priority
);
6112 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6115 avr_asm_out_dtor (rtx symbol
, int priority
)
6117 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
6118 default_dtor_section_asm_out_destructor (symbol
, priority
);
6121 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6124 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
6126 if (TYPE_MODE (type
) == BLKmode
)
6128 HOST_WIDE_INT size
= int_size_in_bytes (type
);
6129 return (size
== -1 || size
> 8);
6135 /* Worker function for CASE_VALUES_THRESHOLD. */
6137 unsigned int avr_case_values_threshold (void)
6139 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;