PR target/52261
[official-gcc.git] / gcc / config / avr / avr.c
blob8959553f0fa191f88ea01c7dc0da200378c2e55f
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
40 #include "obstack.h"
41 #include "function.h"
42 #include "recog.h"
43 #include "optabs.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "params.h"
50 #include "df.h"
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.
   NOTE(review): PREFIX is evaluated twice (once by strncmp, once by
   strlen) -- only pass side-effect-free arguments.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in Flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Bug fix: the macro body referenced lowercase `sym' instead of the
   parameter SYM, so it only expanded correctly when the actual argument
   happened to be a variable literally named `sym'.  Use the parameter.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)
/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Bug fix: the macro body referenced lowercase `sym' instead of the
   parameter SYM; use the parameter so the macro works for any argument.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).
   NOTE(review): field meanings inferred from the initializers below --
   confirm against the avr_addrspace_t definition in avr.h:
   { address-space id, located-in-flash flag, pointer size in bytes,
     keyword name, 64 KiB flash segment number }.  */

const avr_addrspace_t avr_addrspace[] =
{
  { ADDR_SPACE_RAM,    0, 2, ""        , 0 },
  { ADDR_SPACE_FLASH,  1, 2, "__flash" , 0 },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
  { ADDR_SPACE_MEMX,   1, 3, "__memx"  , 0 },
  { 0              ,   0, 0, NULL      , 0 }   /* sentinel */
};
/* Map 64-k Flash segment to section prefix.
   Index is the segment number used in avr_addrspace[] above.  */

static const char* const progmem_section_prefix[6] =
{
  ".progmem.data",
  ".progmem1.data",
  ".progmem2.data",
  ".progmem3.data",
  ".progmem4.data",
  ".progmem5.data"
};
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* Filled in by avr_option_override from the selected device's SFR offset.  */
static avr_addr_t avr_addr;
132 /* Prototypes for local helper functions. */
134 static const char* out_movqi_r_mr (rtx, rtx[], int*);
135 static const char* out_movhi_r_mr (rtx, rtx[], int*);
136 static const char* out_movsi_r_mr (rtx, rtx[], int*);
137 static const char* out_movqi_mr_r (rtx, rtx[], int*);
138 static const char* out_movhi_mr_r (rtx, rtx[], int*);
139 static const char* out_movsi_mr_r (rtx, rtx[], int*);
141 static int avr_naked_function_p (tree);
142 static int interrupt_function_p (tree);
143 static int signal_function_p (tree);
144 static int avr_OS_task_function_p (tree);
145 static int avr_OS_main_function_p (tree);
146 static int avr_regs_to_save (HARD_REG_SET *);
147 static int get_sequence_length (rtx insns);
148 static int sequent_regs_live (void);
149 static const char *ptrreg_to_str (int);
150 static const char *cond_string (enum rtx_code);
151 static int avr_num_arg_regs (enum machine_mode, const_tree);
152 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
153 int, bool);
154 static void output_reload_in_const (rtx*, rtx, int*, bool);
155 static struct machine_function * avr_init_machine_status (void);
158 /* Prototypes for hook implementors if needed before their implementation. */
160 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
163 /* Allocate registers from r25 to r8 for parameters for function calls. */
164 #define FIRST_CUM_REG 26
166 /* Implicit target register of LPM instruction (R0) */
167 extern GTY(()) rtx lpm_reg_rtx;
168 rtx lpm_reg_rtx;
170 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
171 extern GTY(()) rtx lpm_addr_reg_rtx;
172 rtx lpm_addr_reg_rtx;
174 /* Temporary register RTX (reg:QI TMP_REGNO) */
175 extern GTY(()) rtx tmp_reg_rtx;
176 rtx tmp_reg_rtx;
178 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
179 extern GTY(()) rtx zero_reg_rtx;
180 rtx zero_reg_rtx;
182 /* RTXs for all general purpose registers as QImode */
183 extern GTY(()) rtx all_regs_rtx[32];
184 rtx all_regs_rtx[32];
186 /* SREG, the processor status */
187 extern GTY(()) rtx sreg_rtx;
188 rtx sreg_rtx;
190 /* RAMP* special function registers */
191 extern GTY(()) rtx rampd_rtx;
192 extern GTY(()) rtx rampx_rtx;
193 extern GTY(()) rtx rampy_rtx;
194 extern GTY(()) rtx rampz_rtx;
195 rtx rampd_rtx;
196 rtx rampx_rtx;
197 rtx rampy_rtx;
198 rtx rampz_rtx;
200 /* RTX containing the strings "" and "e", respectively */
201 static GTY(()) rtx xstring_empty;
202 static GTY(()) rtx xstring_e;
204 /* Preprocessor macros to define depending on MCU type. */
205 const char *avr_extra_arch_macro;
207 /* Current architecture. */
208 const struct base_arch_s *avr_current_arch;
210 /* Current device. */
211 const struct mcu_type_s *avr_current_device;
213 /* Section to put switch tables in. */
214 static GTY(()) section *progmem_swtable_section;
216 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
217 or to address space __flash*. */
218 static GTY(()) section *progmem_section[6];
220 /* Condition for insns/expanders from avr-dimode.md. */
221 bool avr_have_dimode = true;
223 /* To track if code will use .bss and/or .data. */
224 bool avr_need_clear_bss_p = false;
225 bool avr_need_copy_data_p = false;
228 /* Initialize the GCC target structure. */
229 #undef TARGET_ASM_ALIGNED_HI_OP
230 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
231 #undef TARGET_ASM_ALIGNED_SI_OP
232 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
233 #undef TARGET_ASM_UNALIGNED_HI_OP
234 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
235 #undef TARGET_ASM_UNALIGNED_SI_OP
236 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
237 #undef TARGET_ASM_INTEGER
238 #define TARGET_ASM_INTEGER avr_assemble_integer
239 #undef TARGET_ASM_FILE_START
240 #define TARGET_ASM_FILE_START avr_file_start
241 #undef TARGET_ASM_FILE_END
242 #define TARGET_ASM_FILE_END avr_file_end
244 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
245 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
246 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
247 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
249 #undef TARGET_FUNCTION_VALUE
250 #define TARGET_FUNCTION_VALUE avr_function_value
251 #undef TARGET_LIBCALL_VALUE
252 #define TARGET_LIBCALL_VALUE avr_libcall_value
253 #undef TARGET_FUNCTION_VALUE_REGNO_P
254 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
256 #undef TARGET_ATTRIBUTE_TABLE
257 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
258 #undef TARGET_INSERT_ATTRIBUTES
259 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
260 #undef TARGET_SECTION_TYPE_FLAGS
261 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
263 #undef TARGET_ASM_NAMED_SECTION
264 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
265 #undef TARGET_ASM_INIT_SECTIONS
266 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
267 #undef TARGET_ENCODE_SECTION_INFO
268 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
269 #undef TARGET_ASM_SELECT_SECTION
270 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
272 #undef TARGET_REGISTER_MOVE_COST
273 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
274 #undef TARGET_MEMORY_MOVE_COST
275 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
276 #undef TARGET_RTX_COSTS
277 #define TARGET_RTX_COSTS avr_rtx_costs
278 #undef TARGET_ADDRESS_COST
279 #define TARGET_ADDRESS_COST avr_address_cost
280 #undef TARGET_MACHINE_DEPENDENT_REORG
281 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
282 #undef TARGET_FUNCTION_ARG
283 #define TARGET_FUNCTION_ARG avr_function_arg
284 #undef TARGET_FUNCTION_ARG_ADVANCE
285 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
287 #undef TARGET_RETURN_IN_MEMORY
288 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
290 #undef TARGET_STRICT_ARGUMENT_NAMING
291 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
293 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
294 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
296 #undef TARGET_HARD_REGNO_SCRATCH_OK
297 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
298 #undef TARGET_CASE_VALUES_THRESHOLD
299 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
301 #undef TARGET_FRAME_POINTER_REQUIRED
302 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
303 #undef TARGET_CAN_ELIMINATE
304 #define TARGET_CAN_ELIMINATE avr_can_eliminate
306 #undef TARGET_CLASS_LIKELY_SPILLED_P
307 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
309 #undef TARGET_OPTION_OVERRIDE
310 #define TARGET_OPTION_OVERRIDE avr_option_override
312 #undef TARGET_CANNOT_MODIFY_JUMPS_P
313 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
315 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
316 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
318 #undef TARGET_INIT_BUILTINS
319 #define TARGET_INIT_BUILTINS avr_init_builtins
321 #undef TARGET_EXPAND_BUILTIN
322 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
324 #undef TARGET_FOLD_BUILTIN
325 #define TARGET_FOLD_BUILTIN avr_fold_builtin
327 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
328 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
330 #undef TARGET_SCALAR_MODE_SUPPORTED_P
331 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
333 #undef TARGET_ADDR_SPACE_SUBSET_P
334 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
336 #undef TARGET_ADDR_SPACE_CONVERT
337 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
339 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
340 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
342 #undef TARGET_ADDR_SPACE_POINTER_MODE
343 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
345 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
346 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
348 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
349 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
351 #undef TARGET_PRINT_OPERAND
352 #define TARGET_PRINT_OPERAND avr_print_operand
353 #undef TARGET_PRINT_OPERAND_ADDRESS
354 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
355 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
356 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
/* Return the number of 1-bits in VAL (population count).  */

static inline int
avr_popcount (unsigned int val)
{
  int count = 0;

  /* Kernighan's trick: each iteration clears the lowest set bit,
     so the loop runs once per 1-bit.  */
  for (; val != 0; val &= val - 1)
    count++;

  return count;
}
/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  enum machine_mode mode = GET_MODE (xval);

  /* CONST_INT carries VOIDmode; fall back to SImode for the subreg walk.  */
  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      /* Extract byte I of XVAL and mask it down to 8 bits.  */
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Reject as soon as one byte's popcount is outside POP_MASK.  */
      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
/* Implement `TARGET_OPTION_OVERRIDE'.  Validate/adjust option flags and
   cache the selected device/architecture and its SFR addresses.  */

static void
avr_option_override (void)
{
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];
  avr_extra_arch_macro = avr_current_device->macro;

  /* RAM addresses of some SFRs common to all Devices in respective Arch. */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
  avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
/* Function to set up the backend function structure.
   Returns a freshly GC-allocated, zero-initialized machine_function;
   fields are filled in later (e.g. by expand_prologue).  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton: it builds the RTXes for the
   fixed registers and compiler-known SFRs exactly once per run.  */

void
avr_init_expanders (void)
{
  int regno;

  /* One QImode REG rtx for each of the 32 general purpose registers.  */
  for (regno = 0; regno < 32; regno ++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  /* MEM rtxes for the SFR addresses computed in avr_option_override.  */
  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
}
/* Return register class for register R.  */

enum reg_class
avr_regno_reg_class (int r)
{
  /* One entry per hard register 0..33: the 32 GPRs plus SPL/SPH.  */
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, 27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  /* Anything beyond the hard registers covered by the table.  */
  return ALL_REGS;
}
529 static bool
530 avr_scalar_mode_supported_p (enum machine_mode mode)
532 if (PSImode == mode)
533 return true;
535 return default_scalar_mode_supported_p (mode);
539 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
541 static bool
542 avr_decl_flash_p (tree decl)
544 if (TREE_CODE (decl) != VAR_DECL
545 || TREE_TYPE (decl) == error_mark_node)
547 return false;
550 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
554 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
555 address space and FALSE, otherwise. */
557 static bool
558 avr_decl_memx_p (tree decl)
560 if (TREE_CODE (decl) != VAR_DECL
561 || TREE_TYPE (decl) == error_mark_node)
563 return false;
566 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
570 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
572 bool
573 avr_mem_flash_p (rtx x)
575 return (MEM_P (x)
576 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
580 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
581 address space and FALSE, otherwise. */
583 bool
584 avr_mem_memx_p (rtx x)
586 return (MEM_P (x)
587 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
/* A helper for the subsequent function attribute used to dig for
   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */

static inline int
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      /* Not found on the decl itself -- fall through and search the
         attributes of the decl's type instead.  */
      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}
/* Return nonzero if FUNC is a naked function, i.e. carries the
   "naked" attribute on its decl or type.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}
/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}
/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}
/* Return nonzero if FUNC is an OS_task function, i.e. carries the
   "OS_task" attribute.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}
/* Return nonzero if FUNC is an OS_main function, i.e. carries the
   "OS_main" attribute.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
/* Implement `ACCUMULATE_OUTGOING_ARGS'.
   Disable accumulation for functions using setjmp or nonlocal labels,
   where the frame offsets are ambiguous (see FIXME below).  */

int
avr_accumulate_outgoing_args (void)
{
  /* Called before cfun is set up, fall back to the raw target flag.  */
  if (!cfun)
    return TARGET_ACCUMULATE_OUTGOING_ARGS;

  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
     what offset is correct.  In some cases it is relative to
     virtual_outgoing_args_rtx and in others it is relative to
     virtual_stack_vars_rtx.  For example code see
         gcc.c-torture/execute/built-in-setjmp.c
         gcc.c-torture/execute/builtins/sprintf-chk.c   */

  return (TARGET_ACCUMULATE_OUTGOING_ARGS
          && !(cfun->calls_setjmp
               || cfun->has_nonlocal_label));
}
/* Report contribution of accumulated outgoing arguments to stack size.
   Zero when outgoing args are pushed rather than accumulated.  */

static inline int
avr_outgoing_args_size (void)
{
  return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
}
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  The extra 1 accounts for
   the frame pointer pointing one below the frame (post-decrement pushes).  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.
   SET may be NULL if only the count is wanted.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  /* ISRs must also preserve call-clobbered registers they touch.  */
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
741 /* Return true if register FROM can be eliminated via register TO. */
743 static bool
744 avr_can_eliminate (const int from, const int to)
746 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
747 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
748 || ((from == FRAME_POINTER_REGNUM
749 || from == FRAME_POINTER_REGNUM + 1)
750 && !frame_pointer_needed));
/* Compute offset between arg_pointer and frame_pointer.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      /* Two bytes for the saved frame pointer, if it is pushed.  */
      int offset = frame_pointer_needed ? 2 : 0;
      /* Size of the return address on the stack: 3 bytes on devices
         with EIJMP/EICALL, otherwise 2.  */
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return (get_frame_size () + avr_outgoing_args_size()
              + avr_pc_size + 1 + offset);
    }
}
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
}
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
   This is return address of function.  COUNT must be 0 (only the current
   frame is supported); TEM is the base address (frame pointer) rtx.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      /* Skip the top (third) byte of the 3-byte PC; only 16 bits fit.  */
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "'builtin_return_address' contains only 2 bytes of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* NOTE(review): the ROTATE by 8 swaps the two bytes of the HImode
     value -- presumably to correct the byte order of the on-stack
     return address; confirm against the call/push convention.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
/* Return 1 if the function epilogue is just a single "ret":
   no frame, no saved registers, no special function attributes.  */

int
avr_simple_epilogue (void)
{
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_outgoing_args_size() == 0
          && avr_regs_to_save (NULL) == 0
          && ! interrupt_function_p (current_function_decl)
          && ! signal_function_p (current_function_decl)
          && ! avr_naked_function_p (current_function_decl)
          && ! TREE_THIS_VOLATILE (current_function_decl));
}
/* This function checks sequence of live registers.
   Returns the length of the live sequence ending at r17 (plus the frame
   pointer pair) if the live registers form one contiguous run suitable
   for the __prologue_saves__ helper, and 0 otherwise.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq=0;   /* total live registers counted */
  int cur_seq=0;    /* length of the current contiguous run */

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;   /* gap breaks the run */
        }
    }

  if (!frame_pointer_needed)
    {
      /* Y (r28/r29) only counts when it is actually live.  */
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* Frame pointer pair is always saved.  */
      cur_seq += 2;
      live_seq += 2;
    }

  /* Valid only if ALL live registers form one sequence.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
/* Obtain the length sequence of insns.
   Sums the "length" attribute over the insn list INSNS.  */

static int
get_sequence_length (rtx insns)
{
  rtx insn;
  int length;

  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);

  return length;
}
/* Implement INCOMING_RETURN_ADDR_RTX.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
}
/* Helper for expand_prologue.  Emit a push of a byte register.
   REGNO is the hard register to push; FRAME_RELATED_P marks the insn
   for dwarf2 CFI generation.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg, insn;

  /* A push is a QImode store through post-decrement of SP.  */
  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* Account for the one byte of stack consumed by the push.  */
  cfun->machine->stack_usage++;
}
/* Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
   SFR is a MEM representing the memory location of the SFR.
   If CLR_P then clear the SFR after the push using zero_reg.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
/* Helper for expand_prologue.  Save the registers in SET and set up a
   stack frame of SIZE bytes.  Either uses the __prologue_saves__ library
   helper (-mcall-prologues) or explicit pushes plus whichever of two
   frame-setup methods produces the shorter insn sequence.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  /* Use the __prologue_saves__ helper only for plain functions whose
     live registers form one contiguous sequence.  */
  bool minimize = (TARGET_CALL_PROLOGUES
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* Frame size is passed to the helper in X.  */
      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = 18 - (live_seq - 2);

      /* Emit one REG_CFA_OFFSET note per saved register, walking the
         save order: r29, r28, then r17 down to FIRST_REG.  */
      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      /* Explicitly push every register recorded in SET.  */
      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /* Creating a frame can be done by direct manipulation of the
             stack or via the frame pointer. These two methods are:
                 fp = sp
                 fp -= size
                 sp = fp
             or
                 sp -= size
                 fp = sp    (*)
             the optimum method depends on function type, stack and
             frame size.  To avoid a complex logic, both methods are
             tested and shortest is selected.

             There is also the case where SIZE != 0 and no frame pointer is
             needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
             In that case, insn (*) is not needed in that case.
             We use the X register as scratch. This is safe because in X
             is call-clobbered.
             In an interrupt routine, the case of SIZE != 0 together with
             !frame_pointer_needed can only occur if the function is not a
             leaf function and thus X has already been saved.  */

          int irq_state = -1;
          rtx fp_plus_insns, fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !current_function_is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp,
                                         plus_constant (fp, -size)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (stack_pointer_rtx, -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1188 /* Output function prologue. */
1190 void
1191 expand_prologue (void)
1193 HARD_REG_SET set;
1194 HOST_WIDE_INT size;
1196 size = get_frame_size() + avr_outgoing_args_size();
1198 /* Init cfun->machine. */
1199 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1200 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1201 cfun->machine->is_signal = signal_function_p (current_function_decl);
1202 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1203 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1204 cfun->machine->stack_usage = 0;
1206 /* Prologue: naked. */
1207 if (cfun->machine->is_naked)
1209 return;
1212 avr_regs_to_save (&set);
1214 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1216 /* Enable interrupts. */
1217 if (cfun->machine->is_interrupt)
1218 emit_insn (gen_enable_interrupt ());
1220 /* Push zero reg. */
1221 emit_push_byte (ZERO_REGNO, true);
1223 /* Push tmp reg. */
1224 emit_push_byte (TMP_REGNO, true);
1226 /* Push SREG. */
1227 /* ??? There's no dwarf2 column reserved for SREG. */
1228 emit_push_sfr (sreg_rtx, false, false /* clr */);
1230 /* Clear zero reg. */
1231 emit_move_insn (zero_reg_rtx, const0_rtx);
1233 /* Prevent any attempt to delete the setting of ZERO_REG! */
1234 emit_use (zero_reg_rtx);
1236 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1237 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1239 if (AVR_HAVE_RAMPD)
1240 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1242 if (AVR_HAVE_RAMPX
1243 && TEST_HARD_REG_BIT (set, REG_X)
1244 && TEST_HARD_REG_BIT (set, REG_X + 1))
1246 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1249 if (AVR_HAVE_RAMPY
1250 && (frame_pointer_needed
1251 || (TEST_HARD_REG_BIT (set, REG_Y)
1252 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1254 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1257 if (AVR_HAVE_RAMPZ
1258 && TEST_HARD_REG_BIT (set, REG_Z)
1259 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1261 emit_push_sfr (rampz_rtx, false /* frame-related */, true /* clr */);
1263 } /* is_interrupt is_signal */
1265 avr_prologue_setup_frame (size, set);
1267 if (flag_stack_usage_info)
1268 current_function_static_stack_size = cfun->machine->stack_usage;
1271 /* Output summary at end of function prologue. */
1273 static void
1274 avr_asm_function_end_prologue (FILE *file)
1276 if (cfun->machine->is_naked)
1278 fputs ("/* prologue: naked */\n", file);
1280 else
1282 if (cfun->machine->is_interrupt)
1284 fputs ("/* prologue: Interrupt */\n", file);
1286 else if (cfun->machine->is_signal)
1288 fputs ("/* prologue: Signal */\n", file);
1290 else
1291 fputs ("/* prologue: function */\n", file);
1294 if (ACCUMULATE_OUTGOING_ARGS)
1295 fprintf (file, "/* outgoing args size = %d */\n",
1296 avr_outgoing_args_size());
1298 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1299 get_frame_size());
1300 fprintf (file, "/* stack size = %d */\n",
1301 cfun->machine->stack_usage);
1302 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1303 usage for offset so that SP + .L__stack_offset = return address. */
1304 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1308 /* Implement EPILOGUE_USES. */
1311 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1313 if (reload_completed
1314 && cfun->machine
1315 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1316 return 1;
1317 return 0;
1320 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1322 static void
1323 emit_pop_byte (unsigned regno)
1325 rtx mem, reg;
1327 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1328 mem = gen_frame_mem (QImode, mem);
1329 reg = gen_rtx_REG (QImode, regno);
1331 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1334 /* Output RTL epilogue. */
1336 void
1337 expand_epilogue (bool sibcall_p)
1339 int reg;
1340 int live_seq;
1341 HARD_REG_SET set;
1342 int minimize;
1343 HOST_WIDE_INT size;
1344 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1346 size = get_frame_size() + avr_outgoing_args_size();
1348 /* epilogue: naked */
1349 if (cfun->machine->is_naked)
1351 gcc_assert (!sibcall_p);
1353 emit_jump_insn (gen_return ());
1354 return;
1357 avr_regs_to_save (&set);
1358 live_seq = sequent_regs_live ();
1360 minimize = (TARGET_CALL_PROLOGUES
1361 && live_seq
1362 && !isr_p
1363 && !cfun->machine->is_OS_task
1364 && !cfun->machine->is_OS_main);
1366 if (minimize
1367 && (live_seq > 4
1368 || frame_pointer_needed
1369 || size))
1371 /* Get rid of frame. */
1373 if (!frame_pointer_needed)
1375 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1378 if (size)
1380 emit_move_insn (frame_pointer_rtx,
1381 plus_constant (frame_pointer_rtx, size));
1384 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1385 return;
1388 if (size)
1390 /* Try two methods to adjust stack and select shortest. */
1392 int irq_state = -1;
1393 rtx fp, my_fp;
1394 rtx fp_plus_insns;
1396 gcc_assert (frame_pointer_needed
1397 || !isr_p
1398 || !current_function_is_leaf);
1400 fp = my_fp = (frame_pointer_needed
1401 ? frame_pointer_rtx
1402 : gen_rtx_REG (Pmode, REG_X));
1404 if (AVR_HAVE_8BIT_SP)
1406 /* The high byte (r29) does not change:
1407 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1409 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1412 /********** Method 1: Adjust fp register **********/
1414 start_sequence ();
1416 if (!frame_pointer_needed)
1417 emit_move_insn (fp, stack_pointer_rtx);
1419 emit_move_insn (my_fp, plus_constant (my_fp, size));
1421 /* Copy to stack pointer. */
1423 if (TARGET_NO_INTERRUPTS)
1424 irq_state = 0;
1426 if (AVR_HAVE_8BIT_SP)
1427 irq_state = 2;
1429 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1430 GEN_INT (irq_state)));
1432 fp_plus_insns = get_insns ();
1433 end_sequence ();
1435 /********** Method 2: Adjust Stack pointer **********/
1437 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1439 rtx sp_plus_insns;
1441 start_sequence ();
1443 emit_move_insn (stack_pointer_rtx,
1444 plus_constant (stack_pointer_rtx, size));
1446 sp_plus_insns = get_insns ();
1447 end_sequence ();
1449 /************ Use shortest method ************/
1451 emit_insn (get_sequence_length (sp_plus_insns)
1452 < get_sequence_length (fp_plus_insns)
1453 ? sp_plus_insns
1454 : fp_plus_insns);
1456 else
1457 emit_insn (fp_plus_insns);
1458 } /* size != 0 */
1460 if (frame_pointer_needed
1461 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1463 /* Restore previous frame_pointer. See expand_prologue for
1464 rationale for not using pophi. */
1466 emit_pop_byte (REG_Y + 1);
1467 emit_pop_byte (REG_Y);
1470 /* Restore used registers. */
1472 for (reg = 31; reg >= 0; --reg)
1473 if (TEST_HARD_REG_BIT (set, reg))
1474 emit_pop_byte (reg);
1476 if (isr_p)
1478 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1479 The conditions to restore them must be tha same as in prologue. */
1481 if (AVR_HAVE_RAMPX
1482 && TEST_HARD_REG_BIT (set, REG_X)
1483 && TEST_HARD_REG_BIT (set, REG_X + 1))
1485 emit_pop_byte (TMP_REGNO);
1486 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1489 if (AVR_HAVE_RAMPY
1490 && (frame_pointer_needed
1491 || (TEST_HARD_REG_BIT (set, REG_Y)
1492 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1494 emit_pop_byte (TMP_REGNO);
1495 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1498 if (AVR_HAVE_RAMPZ
1499 && TEST_HARD_REG_BIT (set, REG_Z)
1500 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1502 emit_pop_byte (TMP_REGNO);
1503 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1506 if (AVR_HAVE_RAMPD)
1508 emit_pop_byte (TMP_REGNO);
1509 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1512 /* Restore SREG using tmp_reg as scratch. */
1514 emit_pop_byte (TMP_REGNO);
1515 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1517 /* Restore tmp REG. */
1518 emit_pop_byte (TMP_REGNO);
1520 /* Restore zero REG. */
1521 emit_pop_byte (ZERO_REGNO);
1524 if (!sibcall_p)
1525 emit_jump_insn (gen_return ());
/* Print a marker comment at the beginning of the function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
1537 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1539 static bool
1540 avr_cannot_modify_jumps_p (void)
1543 /* Naked Functions must not have any instructions after
1544 their epilogue, see PR42240 */
1546 if (reload_completed
1547 && cfun->machine
1548 && cfun->machine->is_naked)
1550 return true;
1553 return false;
1557 /* Helper function for `avr_legitimate_address_p'. */
1559 static inline bool
1560 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1561 RTX_CODE outer_code, bool strict)
1563 return (REG_P (reg)
1564 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1565 as, outer_code, UNKNOWN)
1566 || (!strict
1567 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1571 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1572 machine for a memory operand of mode MODE. */
1574 static bool
1575 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1577 bool ok = CONSTANT_ADDRESS_P (x);
1579 switch (GET_CODE (x))
1581 case REG:
1582 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1583 MEM, strict);
1585 if (strict
1586 && DImode == mode
1587 && REG_X == REGNO (x))
1589 ok = false;
1591 break;
1593 case POST_INC:
1594 case PRE_DEC:
1595 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1596 GET_CODE (x), strict);
1597 break;
1599 case PLUS:
1601 rtx reg = XEXP (x, 0);
1602 rtx op1 = XEXP (x, 1);
1604 if (REG_P (reg)
1605 && CONST_INT_P (op1)
1606 && INTVAL (op1) >= 0)
1608 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1610 if (fit)
1612 ok = (! strict
1613 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1614 PLUS, strict));
1616 if (reg == frame_pointer_rtx
1617 || reg == arg_pointer_rtx)
1619 ok = true;
1622 else if (frame_pointer_needed
1623 && reg == frame_pointer_rtx)
1625 ok = true;
1629 break;
1631 default:
1632 break;
1635 if (avr_log.legitimate_address_p)
1637 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1638 "reload_completed=%d reload_in_progress=%d %s:",
1639 ok, mode, strict, reload_completed, reload_in_progress,
1640 reg_renumber ? "(reg_renumber)" : "");
1642 if (GET_CODE (x) == PLUS
1643 && REG_P (XEXP (x, 0))
1644 && CONST_INT_P (XEXP (x, 1))
1645 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1646 && reg_renumber)
1648 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1649 true_regnum (XEXP (x, 0)));
1652 avr_edump ("\n%r\n", x);
1655 return ok;
1659 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1660 now only a helper for avr_addr_space_legitimize_address. */
1661 /* Attempts to replace X with a valid
1662 memory address for an operand of mode MODE */
1664 static rtx
1665 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1667 bool big_offset_p = false;
1669 x = oldx;
1671 if (GET_CODE (oldx) == PLUS
1672 && REG_P (XEXP (oldx, 0)))
1674 if (REG_P (XEXP (oldx, 1)))
1675 x = force_reg (GET_MODE (oldx), oldx);
1676 else if (CONST_INT_P (XEXP (oldx, 1)))
1678 int offs = INTVAL (XEXP (oldx, 1));
1679 if (frame_pointer_rtx != XEXP (oldx, 0)
1680 && offs > MAX_LD_OFFSET (mode))
1682 big_offset_p = true;
1683 x = force_reg (GET_MODE (oldx), oldx);
1688 if (avr_log.legitimize_address)
1690 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1692 if (x != oldx)
1693 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1696 return x;
1700 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1701 /* This will allow register R26/27 to be used where it is no worse than normal
1702 base pointers R28/29 or R30/31. For example, if base offset is greater
1703 than 63 bytes or for R++ or --R addressing. */
1706 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1707 int opnum, int type, int addr_type,
1708 int ind_levels ATTRIBUTE_UNUSED,
1709 rtx (*mk_memloc)(rtx,int))
1711 rtx x = *px;
1713 if (avr_log.legitimize_reload_address)
1714 avr_edump ("\n%?:%m %r\n", mode, x);
1716 if (1 && (GET_CODE (x) == POST_INC
1717 || GET_CODE (x) == PRE_DEC))
1719 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1720 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1721 opnum, RELOAD_OTHER);
1723 if (avr_log.legitimize_reload_address)
1724 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1725 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1727 return x;
1730 if (GET_CODE (x) == PLUS
1731 && REG_P (XEXP (x, 0))
1732 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1733 && CONST_INT_P (XEXP (x, 1))
1734 && INTVAL (XEXP (x, 1)) >= 1)
1736 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1738 if (fit)
1740 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1742 int regno = REGNO (XEXP (x, 0));
1743 rtx mem = mk_memloc (x, regno);
1745 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1746 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1747 1, addr_type);
1749 if (avr_log.legitimize_reload_address)
1750 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1751 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1753 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1754 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1755 opnum, type);
1757 if (avr_log.legitimize_reload_address)
1758 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1759 BASE_POINTER_REGS, mem, NULL_RTX);
1761 return x;
1764 else if (! (frame_pointer_needed
1765 && XEXP (x, 0) == frame_pointer_rtx))
1767 push_reload (x, NULL_RTX, px, NULL,
1768 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1769 opnum, type);
1771 if (avr_log.legitimize_reload_address)
1772 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1773 POINTER_REGS, x, NULL_RTX);
1775 return x;
1779 return NULL_RTX;
1783 /* Helper function to print assembler resp. track instruction
1784 sequence lengths. Always return "".
1786 If PLEN == NULL:
1787 Output assembler code from template TPL with operands supplied
1788 by OPERANDS. This is just forwarding to output_asm_insn.
1790 If PLEN != NULL:
1791 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1792 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1793 Don't output anything.
1796 static const char*
1797 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1799 if (NULL == plen)
1801 output_asm_insn (tpl, operands);
1803 else
1805 if (n_words < 0)
1806 *plen = -n_words;
1807 else
1808 *plen += n_words;
1811 return "";
1815 /* Return a pointer register name as a string. */
1817 static const char *
1818 ptrreg_to_str (int regno)
1820 switch (regno)
1822 case REG_X: return "X";
1823 case REG_Y: return "Y";
1824 case REG_Z: return "Z";
1825 default:
1826 output_operand_lossage ("address operand requires constraint for"
1827 " X, Y, or Z register");
1829 return NULL;
1832 /* Return the condition name as a string.
1833 Used in conditional jump constructing */
1835 static const char *
1836 cond_string (enum rtx_code code)
1838 switch (code)
1840 case NE:
1841 return "ne";
1842 case EQ:
1843 return "eq";
1844 case GE:
1845 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1846 return "pl";
1847 else
1848 return "ge";
1849 case LT:
1850 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1851 return "mi";
1852 else
1853 return "lt";
1854 case GEU:
1855 return "sh";
1856 case LTU:
1857 return "lo";
1858 default:
1859 gcc_unreachable ();
1862 return "";
1866 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1867 /* Output ADDR to FILE as address. */
1869 static void
1870 avr_print_operand_address (FILE *file, rtx addr)
1872 switch (GET_CODE (addr))
1874 case REG:
1875 fprintf (file, ptrreg_to_str (REGNO (addr)));
1876 break;
1878 case PRE_DEC:
1879 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1880 break;
1882 case POST_INC:
1883 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1884 break;
1886 default:
1887 if (CONSTANT_ADDRESS_P (addr)
1888 && text_segment_operand (addr, VOIDmode))
1890 rtx x = addr;
1891 if (GET_CODE (x) == CONST)
1892 x = XEXP (x, 0);
1893 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1895 /* Assembler gs() will implant word address. Make offset
1896 a byte offset inside gs() for assembler. This is
1897 needed because the more logical (constant+gs(sym)) is not
1898 accepted by gas. For 128K and lower devices this is ok.
1899 For large devices it will create a Trampoline to offset
1900 from symbol which may not be what the user really wanted. */
1901 fprintf (file, "gs(");
1902 output_addr_const (file, XEXP (x,0));
1903 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1904 2 * INTVAL (XEXP (x, 1)));
1905 if (AVR_3_BYTE_PC)
1906 if (warning (0, "pointer offset from symbol maybe incorrect"))
1908 output_addr_const (stderr, addr);
1909 fprintf(stderr,"\n");
1912 else
1914 fprintf (file, "gs(");
1915 output_addr_const (file, addr);
1916 fprintf (file, ")");
1919 else
1920 output_addr_const (file, addr);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.
   Only '~' and '!' are valid punctuation codes for %-operands.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '~':
    case '!':
      return true;
    default:
      return false;
    }
}
1934 /* Implement `TARGET_PRINT_OPERAND'. */
1935 /* Output X as assembler operand to file FILE.
1936 For a description of supported %-codes, see top of avr.md. */
1938 static void
1939 avr_print_operand (FILE *file, rtx x, int code)
1941 int abcd = 0;
1943 if (code >= 'A' && code <= 'D')
1944 abcd = code - 'A';
1946 if (code == '~')
1948 if (!AVR_HAVE_JMP_CALL)
1949 fputc ('r', file);
1951 else if (code == '!')
1953 if (AVR_HAVE_EIJMP_EICALL)
1954 fputc ('e', file);
1956 else if (code == 't'
1957 || code == 'T')
1959 static int t_regno = -1;
1960 static int t_nbits = -1;
1962 if (REG_P (x) && t_regno < 0 && code == 'T')
1964 t_regno = REGNO (x);
1965 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1967 else if (CONST_INT_P (x) && t_regno >= 0
1968 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1970 int bpos = INTVAL (x);
1972 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1973 if (code == 'T')
1974 fprintf (file, ",%d", bpos % 8);
1976 t_regno = -1;
1978 else
1979 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1981 else if (REG_P (x))
1983 if (x == zero_reg_rtx)
1984 fprintf (file, "__zero_reg__");
1985 else
1986 fprintf (file, reg_names[true_regnum (x) + abcd]);
1988 else if (CONST_INT_P (x))
1990 HOST_WIDE_INT ival = INTVAL (x);
1992 if ('i' != code)
1993 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1994 else if (low_io_address_operand (x, VOIDmode)
1995 || high_io_address_operand (x, VOIDmode))
1997 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
1998 fprintf (file, "__RAMPZ__");
1999 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2000 fprintf (file, "__RAMPY__");
2001 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2002 fprintf (file, "__RAMPX__");
2003 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2004 fprintf (file, "__RAMPD__");
2005 else if (AVR_XMEGA && ival == avr_addr.ccp)
2006 fprintf (file, "__CCP__");
2007 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2008 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2009 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2010 else
2012 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2013 ival - avr_current_arch->sfr_offset);
2016 else
2017 fatal_insn ("bad address, not an I/O address:", x);
2019 else if (MEM_P (x))
2021 rtx addr = XEXP (x, 0);
2023 if (code == 'm')
2025 if (!CONSTANT_P (addr))
2026 fatal_insn ("bad address, not a constant:", addr);
2027 /* Assembler template with m-code is data - not progmem section */
2028 if (text_segment_operand (addr, VOIDmode))
2029 if (warning (0, "accessing data memory with"
2030 " program memory address"))
2032 output_addr_const (stderr, addr);
2033 fprintf(stderr,"\n");
2035 output_addr_const (file, addr);
2037 else if (code == 'i')
2039 avr_print_operand (file, addr, 'i');
2041 else if (code == 'o')
2043 if (GET_CODE (addr) != PLUS)
2044 fatal_insn ("bad address, not (reg+disp):", addr);
2046 avr_print_operand (file, XEXP (addr, 1), 0);
2048 else if (code == 'p' || code == 'r')
2050 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2051 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2053 if (code == 'p')
2054 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2055 else
2056 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2058 else if (GET_CODE (addr) == PLUS)
2060 avr_print_operand_address (file, XEXP (addr,0));
2061 if (REGNO (XEXP (addr, 0)) == REG_X)
2062 fatal_insn ("internal compiler error. Bad address:"
2063 ,addr);
2064 fputc ('+', file);
2065 avr_print_operand (file, XEXP (addr,1), code);
2067 else
2068 avr_print_operand_address (file, addr);
2070 else if (code == 'i')
2072 fatal_insn ("bad address, not an I/O address:", x);
2074 else if (code == 'x')
2076 /* Constant progmem address - like used in jmp or call */
2077 if (0 == text_segment_operand (x, VOIDmode))
2078 if (warning (0, "accessing program memory"
2079 " with data memory address"))
2081 output_addr_const (stderr, x);
2082 fprintf(stderr,"\n");
2084 /* Use normal symbol for direct address no linker trampoline needed */
2085 output_addr_const (file, x);
2087 else if (GET_CODE (x) == CONST_DOUBLE)
2089 long val;
2090 REAL_VALUE_TYPE rv;
2091 if (GET_MODE (x) != SFmode)
2092 fatal_insn ("internal compiler error. Unknown mode:", x);
2093 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2094 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2095 fprintf (file, "0x%lx", val);
2097 else if (GET_CODE (x) == CONST_STRING)
2098 fputs (XSTR (x, 0), file);
2099 else if (code == 'j')
2100 fputs (cond_string (GET_CODE (x)), file);
2101 else if (code == 'k')
2102 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2103 else
2104 avr_print_operand_address (file, x);
2107 /* Update the condition code in the INSN. */
2109 void
2110 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2112 rtx set;
2113 enum attr_cc cc = get_attr_cc (insn);
2115 switch (cc)
2117 default:
2118 break;
2120 case CC_OUT_PLUS:
2121 case CC_OUT_PLUS_NOCLOBBER:
2122 case CC_LDI:
2124 rtx *op = recog_data.operand;
2125 int len_dummy, icc;
2127 /* Extract insn's operands. */
2128 extract_constrain_insn_cached (insn);
2130 switch (cc)
2132 default:
2133 gcc_unreachable();
2135 case CC_OUT_PLUS:
2136 avr_out_plus (op, &len_dummy, &icc);
2137 cc = (enum attr_cc) icc;
2138 break;
2140 case CC_OUT_PLUS_NOCLOBBER:
2141 avr_out_plus_noclobber (op, &len_dummy, &icc);
2142 cc = (enum attr_cc) icc;
2143 break;
2145 case CC_LDI:
2147 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2148 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2149 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2150 ? CC_CLOBBER
2151 /* Any other "r,rL" combination does not alter cc0. */
2152 : CC_NONE;
2154 break;
2155 } /* inner switch */
2157 break;
2159 } /* outer swicth */
2161 switch (cc)
2163 default:
2164 /* Special values like CC_OUT_PLUS from above have been
2165 mapped to "standard" CC_* values so we never come here. */
2167 gcc_unreachable();
2168 break;
2170 case CC_NONE:
2171 /* Insn does not affect CC at all. */
2172 break;
2174 case CC_SET_N:
2175 CC_STATUS_INIT;
2176 break;
2178 case CC_SET_ZN:
2179 set = single_set (insn);
2180 CC_STATUS_INIT;
2181 if (set)
2183 cc_status.flags |= CC_NO_OVERFLOW;
2184 cc_status.value1 = SET_DEST (set);
2186 break;
2188 case CC_SET_CZN:
2189 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2190 The V flag may or may not be known but that's ok because
2191 alter_cond will change tests to use EQ/NE. */
2192 set = single_set (insn);
2193 CC_STATUS_INIT;
2194 if (set)
2196 cc_status.value1 = SET_DEST (set);
2197 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2199 break;
2201 case CC_COMPARE:
2202 set = single_set (insn);
2203 CC_STATUS_INIT;
2204 if (set)
2205 cc_status.value1 = SET_SRC (set);
2206 break;
2208 case CC_CLOBBER:
2209 /* Insn doesn't leave CC in a usable state. */
2210 CC_STATUS_INIT;
2211 break;
2215 /* Choose mode for jump insn:
2216 1 - relative jump in range -63 <= x <= 62 ;
2217 2 - relative jump in range -2046 <= x <= 2045 ;
2218 3 - absolute jump (only for ATmega[16]03). */
2221 avr_jump_mode (rtx x, rtx insn)
2223 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2224 ? XEXP (x, 0) : x));
2225 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2226 int jump_distance = cur_addr - dest_addr;
2228 if (-63 <= jump_distance && jump_distance <= 62)
2229 return 1;
2230 else if (-2046 <= jump_distance && jump_distance <= 2045)
2231 return 2;
2232 else if (AVR_HAVE_JMP_CALL)
2233 return 3;
2235 return 2;
2238 /* return an AVR condition jump commands.
2239 X is a comparison RTX.
2240 LEN is a number returned by avr_jump_mode function.
2241 if REVERSE nonzero then condition code in X must be reversed. */
2243 const char *
2244 ret_cond_branch (rtx x, int len, int reverse)
2246 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2248 switch (cond)
2250 case GT:
2251 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2252 return (len == 1 ? ("breq .+2" CR_TAB
2253 "brpl %0") :
2254 len == 2 ? ("breq .+4" CR_TAB
2255 "brmi .+2" CR_TAB
2256 "rjmp %0") :
2257 ("breq .+6" CR_TAB
2258 "brmi .+4" CR_TAB
2259 "jmp %0"));
2261 else
2262 return (len == 1 ? ("breq .+2" CR_TAB
2263 "brge %0") :
2264 len == 2 ? ("breq .+4" CR_TAB
2265 "brlt .+2" CR_TAB
2266 "rjmp %0") :
2267 ("breq .+6" CR_TAB
2268 "brlt .+4" CR_TAB
2269 "jmp %0"));
2270 case GTU:
2271 return (len == 1 ? ("breq .+2" CR_TAB
2272 "brsh %0") :
2273 len == 2 ? ("breq .+4" CR_TAB
2274 "brlo .+2" CR_TAB
2275 "rjmp %0") :
2276 ("breq .+6" CR_TAB
2277 "brlo .+4" CR_TAB
2278 "jmp %0"));
2279 case LE:
2280 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2281 return (len == 1 ? ("breq %0" CR_TAB
2282 "brmi %0") :
2283 len == 2 ? ("breq .+2" CR_TAB
2284 "brpl .+2" CR_TAB
2285 "rjmp %0") :
2286 ("breq .+2" CR_TAB
2287 "brpl .+4" CR_TAB
2288 "jmp %0"));
2289 else
2290 return (len == 1 ? ("breq %0" CR_TAB
2291 "brlt %0") :
2292 len == 2 ? ("breq .+2" CR_TAB
2293 "brge .+2" CR_TAB
2294 "rjmp %0") :
2295 ("breq .+2" CR_TAB
2296 "brge .+4" CR_TAB
2297 "jmp %0"));
2298 case LEU:
2299 return (len == 1 ? ("breq %0" CR_TAB
2300 "brlo %0") :
2301 len == 2 ? ("breq .+2" CR_TAB
2302 "brsh .+2" CR_TAB
2303 "rjmp %0") :
2304 ("breq .+2" CR_TAB
2305 "brsh .+4" CR_TAB
2306 "jmp %0"));
2307 default:
2308 if (reverse)
2310 switch (len)
2312 case 1:
2313 return "br%k1 %0";
2314 case 2:
2315 return ("br%j1 .+2" CR_TAB
2316 "rjmp %0");
2317 default:
2318 return ("br%j1 .+4" CR_TAB
2319 "jmp %0");
2322 else
2324 switch (len)
2326 case 1:
2327 return "br%j1 %0";
2328 case 2:
2329 return ("br%k1 .+2" CR_TAB
2330 "rjmp %0");
2331 default:
2332 return ("br%k1 .+4" CR_TAB
2333 "jmp %0");
2337 return "";
2340 /* Output insn cost for next insn. */
2342 void
2343 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2344 int num_operands ATTRIBUTE_UNUSED)
2346 if (avr_log.rtx_costs)
2348 rtx set = single_set (insn);
2350 if (set)
2351 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2352 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2353 else
2354 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2355 rtx_cost (PATTERN (insn), INSN, 0,
2356 optimize_insn_for_speed_p()));
2360 /* Return 0 if undefined, 1 if always true or always false. */
2363 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2365 unsigned int max = (mode == QImode ? 0xff :
2366 mode == HImode ? 0xffff :
2367 mode == PSImode ? 0xffffff :
2368 mode == SImode ? 0xffffffff : 0);
2369 if (max && op && GET_CODE (x) == CONST_INT)
2371 if (unsigned_condition (op) != op)
2372 max >>= 1;
2374 if (max != (INTVAL (x) & max)
2375 && INTVAL (x) != 0xff)
2376 return 1;
2378 return 0;
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed
   (r8 ... r25 on AVR).  */

int
function_arg_regno_p(int r)
{
  return r >= 8 && r <= 25;
}
2391 /* Initializing the variable cum for the state at the beginning
2392 of the argument list. */
2394 void
2395 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2396 tree fndecl ATTRIBUTE_UNUSED)
2398 cum->nregs = 18;
2399 cum->regno = FIRST_CUM_REG;
2400 if (!libname && stdarg_p (fntype))
2401 cum->nregs = 0;
2403 /* Assume the calle may be tail called */
2405 cfun->machine->sibcall_fails = 0;
2408 /* Returns the number of registers to allocate for a function argument. */
2410 static int
2411 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2413 int size;
2415 if (mode == BLKmode)
2416 size = int_size_in_bytes (type);
2417 else
2418 size = GET_MODE_SIZE (mode);
2420 /* Align all function arguments to start in even-numbered registers.
2421 Odd-sized arguments leave holes above them. */
2423 return (size + 1) & ~1;
2426 /* Controls whether a function argument is passed
2427 in a register, and which register. */
2429 static rtx
2430 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2431 const_tree type, bool named ATTRIBUTE_UNUSED)
2433 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2434 int bytes = avr_num_arg_regs (mode, type);
2436 if (cum->nregs && bytes <= cum->nregs)
2437 return gen_rtx_REG (mode, cum->regno - bytes);
2439 return NULL_RTX;
2442 /* Update the summarizer variable CUM to advance past an argument
2443 in the argument list. */
2445 static void
2446 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2447 const_tree type, bool named ATTRIBUTE_UNUSED)
2449 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2450 int bytes = avr_num_arg_regs (mode, type);
/* Argument registers are assigned downwards: avr_function_arg returned
   REG (CUM->regno - BYTES), so step both counters down past this arg.  */
2452 cum->nregs -= bytes;
2453 cum->regno -= bytes;
2455 /* A parameter is being passed in a call-saved register. As the original
2456 contents of these regs has to be restored before leaving the function,
2457 a function must not pass arguments in call-saved regs in order to get
2458 tail-called. */
2460 if (cum->regno >= 8
2461 && cum->nregs >= 0
2462 && !call_used_regs[cum->regno])
2464 /* FIXME: We ship info on failing tail-call in struct machine_function.
2465 This uses internals of calls.c:expand_call() and the way args_so_far
2466 is used. targetm.function_ok_for_sibcall() needs to be extended to
2467 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2468 dependent so that such an extension is not wanted. */
2470 cfun->machine->sibcall_fails = 1;
2473 /* Test if all registers needed by the ABI are actually available. If the
2474 user has fixed a GPR needed to pass an argument, an (implicit) function
2475 call will clobber that fixed register. See PR45099 for an example. */
2477 if (cum->regno >= 8
2478 && cum->nregs >= 0)
2480 int regno;
/* Diagnose each register this argument occupies that the user made
   fixed via -ffixed-<reg>.  */
2482 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2483 if (fixed_regs[regno])
2484 warning (0, "fixed register %s used to pass parameter to function",
2485 reg_names[regno]);
/* Argument registers exhausted: clamp the bookkeeping so subsequent
   arguments are consistently treated as not register-passed.  */
2488 if (cum->nregs <= 0)
2490 cum->nregs = 0;
2491 cum->regno = FIRST_CUM_REG;
2495 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2496 /* Decide whether we can make a sibling call to a function. DECL is the
2497 declaration of the function being targeted by the call and EXP is the
2498 CALL_EXPR representing the call. */
2500 static bool
2501 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2503 tree fntype_callee;
2505 /* Tail-calling must fail if callee-saved regs are used to pass
2506 function args. We must not tail-call when `epilogue_restores'
2507 is used. Unfortunately, we cannot tell at this point if that
2508 actually will happen or not, and we cannot step back from
2509 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2511 if (cfun->machine->sibcall_fails
2512 || TARGET_CALL_PROLOGUES)
2514 return false;
2517 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2519 if (decl_callee)
2521 decl_callee = TREE_TYPE (decl_callee);
2523 else
2525 decl_callee = fntype_callee;
2527 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2528 && METHOD_TYPE != TREE_CODE (decl_callee))
2530 decl_callee = TREE_TYPE (decl_callee);
2534 /* Ensure that caller and callee have compatible epilogues */
2536 if (interrupt_function_p (current_function_decl)
2537 || signal_function_p (current_function_decl)
2538 || avr_naked_function_p (decl_callee)
2539 || avr_naked_function_p (current_function_decl)
2540 /* FIXME: For OS_task and OS_main, we are over-conservative.
2541 This is due to missing documentation of these attributes
2542 and what they actually should do and should not do. */
2543 || (avr_OS_task_function_p (decl_callee)
2544 != avr_OS_task_function_p (current_function_decl))
2545 || (avr_OS_main_function_p (decl_callee)
2546 != avr_OS_main_function_p (current_function_decl)))
2548 return false;
2551 return true;
2554 /***********************************************************************
2555 Functions for outputting various mov's for a various modes
2556 ************************************************************************/
2558 /* Return true if a value of mode MODE is read from flash by
2559 __load_* function from libgcc. */
2561 bool
2562 avr_load_libgcc_p (rtx op)
2564 enum machine_mode mode = GET_MODE (op);
2565 int n_bytes = GET_MODE_SIZE (mode);
2567 return (n_bytes > 2
2568 && !AVR_HAVE_LPMX
2569 && avr_mem_flash_p (op));
2572 /* Return true if a value of mode MODE is read by __xload_* function. */
2574 bool
2575 avr_xload_libgcc_p (enum machine_mode mode)
2577 int n_bytes = GET_MODE_SIZE (mode);
2579 return (n_bytes > 1
2580 || avr_current_arch->n_segments > 1);
2584 /* Find an unused d-register to be used as scratch in INSN.
2585 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2586 is a register, skip all possible return values that overlap EXCLUDE.
2587 The policy for the returned register is similar to that of
2588 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2589 of INSN.
2591 Return a QImode d-register or NULL_RTX if nothing found. */
2593 static rtx
2594 avr_find_unused_d_reg (rtx insn, rtx exclude)
2596 int regno;
2597 bool isr_p = (interrupt_function_p (current_function_decl)
2598 || signal_function_p (current_function_decl));
2600 for (regno = 16; regno < 32; regno++)
2602 rtx reg = all_regs_rtx[regno];
2604 if ((exclude
2605 && reg_overlap_mentioned_p (exclude, reg))
2606 || fixed_regs[regno])
2608 continue;
2611 /* Try non-live register */
2613 if (!df_regs_ever_live_p (regno)
2614 && (TREE_THIS_VOLATILE (current_function_decl)
2615 || cfun->machine->is_OS_task
2616 || cfun->machine->is_OS_main
2617 || (!isr_p && call_used_regs[regno])))
2619 return reg;
2622 /* Any live register can be used if it is unused after.
2623 Prologue/epilogue will care for it as needed. */
2625 if (df_regs_ever_live_p (regno)
2626 && reg_unused_after (insn, reg))
2628 return reg;
2632 return NULL_RTX;
2636 /* Helper function for the next function in the case where only restricted
2637 version of LPM instruction is available. */
/* XOP[] layout is set up by the caller (avr_out_lpm): XOP[0] destination
   register, XOP[1] address (plain Z register or POST_INC of Z), XOP[2] the
   Z address register, XOP[4] "" or "e" selecting LPM vs. ELPM.
   If PLEN != NULL, only accumulate the length in words into *PLEN.
   Returns "" (or the final template via avr_asm_len).  */
2639 static const char*
2640 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2642 rtx dest = xop[0];
2643 rtx addr = xop[1];
2644 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2645 int regno_dest;
2647 regno_dest = REGNO (dest);
2649 /* The implicit target register of LPM. */
2650 xop[3] = lpm_reg_rtx;
/* Only plain Z and Z-post-increment addresses can occur here, as
   asserted in the respective cases below.  */
2652 switch (GET_CODE (addr))
2654 default:
2655 gcc_unreachable();
2657 case REG:
2659 gcc_assert (REG_Z == REGNO (addr));
2661 switch (n_bytes)
2663 default:
2664 gcc_unreachable();
2666 case 1:
2667 avr_asm_len ("%4lpm", xop, plen, 1);
2669 if (regno_dest != LPM_REGNO)
2670 avr_asm_len ("mov %0,%3", xop, plen, 1);
2672 return "";
2674 case 2:
/* Destination overlaps the Z address register itself: save the first
   byte on the stack so the second LPM still sees a valid address.  */
2675 if (REGNO (dest) == REG_Z)
2676 return avr_asm_len ("%4lpm" CR_TAB
2677 "push %3" CR_TAB
2678 "adiw %2,1" CR_TAB
2679 "%4lpm" CR_TAB
2680 "mov %B0,%3" CR_TAB
2681 "pop %A0", xop, plen, 6);
2683 avr_asm_len ("%4lpm" CR_TAB
2684 "mov %A0,%3" CR_TAB
2685 "adiw %2,1" CR_TAB
2686 "%4lpm" CR_TAB
2687 "mov %B0,%3", xop, plen, 5);
/* Undo the address increment if Z is still needed afterwards.  */
2689 if (!reg_unused_after (insn, addr))
2690 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2692 break; /* 2 */
2695 break; /* REG */
2697 case POST_INC:
2699 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2700 && n_bytes <= 4);
/* Post-increment form: Z is bumped after every byte, so no final
   restore is needed.  Byte 0 can go straight to LPM's implicit
   target register when that is also the destination.  */
2702 if (regno_dest == LPM_REGNO)
2703 avr_asm_len ("%4lpm" CR_TAB
2704 "adiw %2,1", xop, plen, 2);
2705 else
2706 avr_asm_len ("%4lpm" CR_TAB
2707 "mov %A0,%3" CR_TAB
2708 "adiw %2,1", xop, plen, 3);
2710 if (n_bytes >= 2)
2711 avr_asm_len ("%4lpm" CR_TAB
2712 "mov %B0,%3" CR_TAB
2713 "adiw %2,1", xop, plen, 3);
2715 if (n_bytes >= 3)
2716 avr_asm_len ("%4lpm" CR_TAB
2717 "mov %C0,%3" CR_TAB
2718 "adiw %2,1", xop, plen, 3);
2720 if (n_bytes >= 4)
2721 avr_asm_len ("%4lpm" CR_TAB
2722 "mov %D0,%3" CR_TAB
2723 "adiw %2,1", xop, plen, 3);
2725 break; /* POST_INC */
2727 } /* switch CODE (addr) */
2729 return "";
2733 /* If PLEN == NULL: Output instructions to load a value from a memory location
2734 OP[1] in AS1 to register OP[0].
2735 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2736 Return "". */
2738 static const char*
2739 avr_out_lpm (rtx insn, rtx *op, int *plen)
2741 rtx xop[6];
2742 rtx dest = op[0];
2743 rtx src = SET_SRC (single_set (insn));
2744 rtx addr;
2745 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2746 int regno_dest;
2747 int segment;
2748 RTX_CODE code;
2749 addr_space_t as = MEM_ADDR_SPACE (src);
2751 if (plen)
2752 *plen = 0;
/* Only loads from flash are supported; a store into a flash address
   space cannot be expressed on AVR.  */
2754 if (MEM_P (dest))
2756 warning (0, "writing to address space %qs not supported",
2757 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2759 return "";
2762 addr = XEXP (src, 0);
2763 code = GET_CODE (addr);
2765 gcc_assert (REG_P (dest));
2766 gcc_assert (REG == code || POST_INC == code);
/* Operand vector shared with avr_out_lpm_no_lpmx:
   %0 destination, %1 address, %2 Z register, %3 scratch (set later),
   %4 LPM/ELPM selector string, %5 tmp register.  */
2768 xop[0] = dest;
2769 xop[1] = addr;
2770 xop[2] = lpm_addr_reg_rtx;
2771 xop[4] = xstring_empty;
2772 xop[5] = tmp_reg_rtx;
2774 regno_dest = REGNO (dest);
2776 /* Cut down segment number to a number the device actually supports.
2777 We do this late to preserve the address space's name for diagnostics. */
2779 segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
2781 /* Set RAMPZ as needed. */
2783 if (segment)
2785 xop[4] = GEN_INT (segment);
/* Comma expression: assign the scratch d-register and test it in
   one condition.  Prefer LDI into a free d-reg; otherwise build the
   segment value without one.  */
2787 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2788 xop[3])
2790 avr_asm_len ("ldi %3,%4" CR_TAB
2791 "out __RAMPZ__,%3", xop, plen, 2);
2793 else if (segment == 1)
2795 avr_asm_len ("clr %5" CR_TAB
2796 "inc %5" CR_TAB
2797 "out __RAMPZ__,%5", xop, plen, 3);
2799 else
2801 avr_asm_len ("mov %5,%2" CR_TAB
2802 "ldi %2,%4" CR_TAB
2803 "out __RAMPZ__,%2" CR_TAB
2804 "mov %2,%5", xop, plen, 4);
/* Segment != 0: use ELPM ("e" prefix in the templates below).  */
2807 xop[4] = xstring_e;
/* Without the extended [E]LPMX forms, defer to the helper that only
   uses plain LPM/ELPM through the implicit r0 target.  */
2809 if (!AVR_HAVE_ELPMX)
2810 return avr_out_lpm_no_lpmx (insn, xop, plen);
2812 else if (!AVR_HAVE_LPMX)
2814 return avr_out_lpm_no_lpmx (insn, xop, plen);
2817 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2819 switch (GET_CODE (addr))
2821 default:
2822 gcc_unreachable();
2824 case REG:
2826 gcc_assert (REG_Z == REGNO (addr));
2828 switch (n_bytes)
2830 default:
2831 gcc_unreachable();
2833 case 1:
2834 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2836 case 2:
/* Destination overlaps Z: read the low byte into tmp first so the
   second LPM still sees a valid high address byte.  */
2837 if (REGNO (dest) == REG_Z)
2838 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2839 "%4lpm %B0,%a2" CR_TAB
2840 "mov %A0,%5", xop, plen, 3);
2841 else
2843 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2844 "%4lpm %B0,%a2", xop, plen, 2);
2846 if (!reg_unused_after (insn, addr))
2847 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2850 break; /* 2 */
2852 case 3:
2854 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2855 "%4lpm %B0,%a2+" CR_TAB
2856 "%4lpm %C0,%a2", xop, plen, 3);
2858 if (!reg_unused_after (insn, addr))
2859 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2861 break; /* 3 */
2863 case 4:
2865 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2866 "%4lpm %B0,%a2+", xop, plen, 2);
/* Upper word of destination overlaps Z (dest is r28..r31 range):
   route byte 3 through tmp to avoid clobbering the address.  */
2868 if (REGNO (dest) == REG_Z - 2)
2869 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2870 "%4lpm %C0,%a2" CR_TAB
2871 "mov %D0,%5", xop, plen, 3);
2872 else
2874 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2875 "%4lpm %D0,%a2", xop, plen, 2);
2877 if (!reg_unused_after (insn, addr))
2878 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2881 break; /* 4 */
2882 } /* n_bytes */
2884 break; /* REG */
2886 case POST_INC:
2888 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2889 && n_bytes <= 4);
2891 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2892 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2893 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2894 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2896 break; /* POST_INC */
2898 } /* switch CODE (addr) */
2900 return "";
2904 /* Worker function for xload_8 insn. */
2906 const char*
2907 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2909 rtx xop[4];
2911 xop[0] = op[0];
2912 xop[1] = op[1];
2913 xop[2] = lpm_addr_reg_rtx;
2914 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2916 if (plen)
2917 *plen = 0;
2919 avr_asm_len ("ld %3,%a2" CR_TAB
2920 "sbrs %1,7", xop, plen, 2);
2922 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2924 if (REGNO (xop[0]) != REGNO (xop[3]))
2925 avr_asm_len ("mov %0,%3", xop, plen, 1);
2927 return "";
2931 const char *
2932 output_movqi (rtx insn, rtx operands[], int *l)
2934 int dummy;
2935 rtx dest = operands[0];
2936 rtx src = operands[1];
2937 int *real_l = l;
2939 if (avr_mem_flash_p (src)
2940 || avr_mem_flash_p (dest))
2942 return avr_out_lpm (insn, operands, real_l);
2945 if (!l)
2946 l = &dummy;
2948 *l = 1;
2950 if (register_operand (dest, QImode))
2952 if (register_operand (src, QImode)) /* mov r,r */
2954 if (test_hard_reg_class (STACK_REG, dest))
2955 return "out %0,%1";
2956 else if (test_hard_reg_class (STACK_REG, src))
2957 return "in %0,%1";
2959 return "mov %0,%1";
2961 else if (CONSTANT_P (src))
2963 output_reload_in_const (operands, NULL_RTX, real_l, false);
2964 return "";
2966 else if (GET_CODE (src) == MEM)
2967 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2969 else if (GET_CODE (dest) == MEM)
2971 rtx xop[2];
2973 xop[0] = dest;
2974 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2976 return out_movqi_mr_r (insn, xop, real_l);
2978 return "";
/* Output a 16-bit (HImode) move for INSN with operands XOP[].
   If PLEN != NULL, only the instruction-word count is accumulated into
   *PLEN.  NOTE(review): the negative length arguments below appear to
   (re)set *PLEN rather than add -- confirm against avr_asm_len.  */
2982 const char *
2983 output_movhi (rtx insn, rtx xop[], int *plen)
2985 rtx dest = xop[0];
2986 rtx src = xop[1];
2988 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2990 if (avr_mem_flash_p (src)
2991 || avr_mem_flash_p (dest))
2993 return avr_out_lpm (insn, xop, plen);
2996 if (REG_P (dest))
2998 if (REG_P (src)) /* mov r,r */
3000 if (test_hard_reg_class (STACK_REG, dest))
3002 if (AVR_HAVE_8BIT_SP)
3003 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3005 if (AVR_XMEGA)
3006 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3007 "out __SP_H__,%B1", xop, plen, -2);
3009 /* Use simple load of SP if no interrupts are used. */
3011 return TARGET_NO_INTERRUPTS
3012 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3013 "out __SP_L__,%A1", xop, plen, -2)
/* SPH/SPL cannot be written atomically: mask interrupts around the
   two writes and restore the original SREG afterwards.  */
3015 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3016 "cli" CR_TAB
3017 "out __SP_H__,%B1" CR_TAB
3018 "out __SREG__,__tmp_reg__" CR_TAB
3019 "out __SP_L__,%A1", xop, plen, -5);
3021 else if (test_hard_reg_class (STACK_REG, src))
3023 return AVR_HAVE_8BIT_SP
3024 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3025 "clr %B0", xop, plen, -2)
3027 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3028 "in %B0,__SP_H__", xop, plen, -2);
3031 return AVR_HAVE_MOVW
3032 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3034 : avr_asm_len ("mov %A0,%A1" CR_TAB
3035 "mov %B0,%B1", xop, plen, -2);
3036 } /* REG_P (src) */
3037 else if (CONSTANT_P (src))
3039 return output_reload_inhi (xop, NULL, plen);
3041 else if (MEM_P (src))
3043 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3046 else if (MEM_P (dest))
3048 rtx xop[2];
/* Storing zero uses the zero register instead of a literal.  */
3050 xop[0] = dest;
3051 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
3053 return out_movhi_mr_r (insn, xop, plen);
3056 fatal_insn ("invalid insn:", insn);
3058 return "";
/* Output a QImode load from memory OP[1] into register OP[0] for INSN.
   Dispatches on the address form: absolute (IN/LDS), base+displacement
   off Y or X, or plain register-indirect.  If PLEN != NULL, only count
   instruction words.  */
3061 static const char*
3062 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
3064 rtx dest = op[0];
3065 rtx src = op[1];
3066 rtx x = XEXP (src, 0);
3068 if (CONSTANT_ADDRESS_P (x))
/* I/O addresses can use the shorter IN when optimizing.  */
3070 return optimize > 0 && io_address_operand (x, QImode)
3071 ? avr_asm_len ("in %0,%i1", op, plen, -1)
3072 : avr_asm_len ("lds %0,%m1", op, plen, -2);
3074 else if (GET_CODE (x) == PLUS
3075 && REG_P (XEXP (x, 0))
3076 && CONST_INT_P (XEXP (x, 1)))
3078 /* memory access by reg+disp */
3080 int disp = INTVAL (XEXP (x, 1));
/* Displacement too big for LDD: temporarily adjust Y (r28/r29),
   load, then restore it.  */
3082 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
3084 if (REGNO (XEXP (x, 0)) != REG_Y)
3085 fatal_insn ("incorrect insn:",insn);
3087 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3088 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3089 "ldd %0,Y+63" CR_TAB
3090 "sbiw r28,%o1-63", op, plen, -3);
3092 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3093 "sbci r29,hi8(-%o1)" CR_TAB
3094 "ld %0,Y" CR_TAB
3095 "subi r28,lo8(%o1)" CR_TAB
3096 "sbci r29,hi8(%o1)", op, plen, -5);
3098 else if (REGNO (XEXP (x, 0)) == REG_X)
3100 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3101 it but I have this situation with extremal optimizing options. */
3103 avr_asm_len ("adiw r26,%o1" CR_TAB
3104 "ld %0,X", op, plen, -2);
/* Restore X only if it was not clobbered by the load and is
   still needed afterwards.  */
3106 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3107 && !reg_unused_after (insn, XEXP (x,0)))
3109 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3112 return "";
3115 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3118 return avr_asm_len ("ld %0,%1", op, plen, -1);
/* Output a HImode (2-byte) load from memory OP[1] into register OP[0]
   for INSN.  If PLEN != NULL, only count instruction words.  */
3121 static const char*
3122 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
3124 rtx dest = op[0];
3125 rtx src = op[1];
3126 rtx base = XEXP (src, 0);
3127 int reg_dest = true_regnum (dest);
3128 int reg_base = true_regnum (base);
3129 /* "volatile" forces reading low byte first, even if less efficient,
3130 for correct operation with 16-bit I/O registers. */
3131 int mem_volatile_p = MEM_VOLATILE_P (src);
3133 if (reg_base > 0)
/* Destination overlaps the base register: go through tmp so the
   address survives until the second load.  */
3135 if (reg_dest == reg_base) /* R = (R) */
3136 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3137 "ld %B0,%1" CR_TAB
3138 "mov %A0,__tmp_reg__", op, plen, -3);
3140 if (reg_base != REG_X)
3141 return avr_asm_len ("ld %A0,%1" CR_TAB
3142 "ldd %B0,%1+1", op, plen, -2);
/* X has no displacement mode: post-increment, then undo if X is
   still live afterwards.  */
3144 avr_asm_len ("ld %A0,X+" CR_TAB
3145 "ld %B0,X", op, plen, -2);
3147 if (!reg_unused_after (insn, base))
3148 avr_asm_len ("sbiw r26,1", op, plen, 1);
3150 return "";
3152 else if (GET_CODE (base) == PLUS) /* (R + i) */
3154 int disp = INTVAL (XEXP (base, 1));
/* NOTE: deliberately shadows the outer reg_base for this branch.  */
3155 int reg_base = true_regnum (XEXP (base, 0));
3157 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3159 if (REGNO (XEXP (base, 0)) != REG_Y)
3160 fatal_insn ("incorrect insn:",insn);
3162 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3163 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3164 "ldd %A0,Y+62" CR_TAB
3165 "ldd %B0,Y+63" CR_TAB
3166 "sbiw r28,%o1-62", op, plen, -4)
3168 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3169 "sbci r29,hi8(-%o1)" CR_TAB
3170 "ld %A0,Y" CR_TAB
3171 "ldd %B0,Y+1" CR_TAB
3172 "subi r28,lo8(%o1)" CR_TAB
3173 "sbci r29,hi8(%o1)", op, plen, -6);
3176 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3177 it but I have this situation with extremal
3178 optimization options. */
3180 if (reg_base == REG_X)
3181 return reg_base == reg_dest
3182 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3183 "ld __tmp_reg__,X+" CR_TAB
3184 "ld %B0,X" CR_TAB
3185 "mov %A0,__tmp_reg__", op, plen, -4)
3187 : avr_asm_len ("adiw r26,%o1" CR_TAB
3188 "ld %A0,X+" CR_TAB
3189 "ld %B0,X" CR_TAB
3190 "sbiw r26,%o1+1", op, plen, -4);
3192 return reg_base == reg_dest
3193 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3194 "ldd %B0,%B1" CR_TAB
3195 "mov %A0,__tmp_reg__", op, plen, -3)
3197 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3198 "ldd %B0,%B1", op, plen, -2);
3200 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3202 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3203 fatal_insn ("incorrect insn:", insn);
3205 if (!mem_volatile_p)
3206 return avr_asm_len ("ld %B0,%1" CR_TAB
3207 "ld %A0,%1", op, plen, -2);
/* Volatile: must read the low byte first (see comment above), so
   pre-adjust the pointer and read upwards.  */
3209 return REGNO (XEXP (base, 0)) == REG_X
3210 ? avr_asm_len ("sbiw r26,2" CR_TAB
3211 "ld %A0,X+" CR_TAB
3212 "ld %B0,X" CR_TAB
3213 "sbiw r26,1", op, plen, -4)
3215 : avr_asm_len ("sbiw %r1,2" CR_TAB
3216 "ld %A0,%p1" CR_TAB
3217 "ldd %B0,%p1+1", op, plen, -3);
3219 else if (GET_CODE (base) == POST_INC) /* (R++) */
3221 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3222 fatal_insn ("incorrect insn:", insn);
3224 return avr_asm_len ("ld %A0,%1" CR_TAB
3225 "ld %B0,%1", op, plen, -2);
3227 else if (CONSTANT_ADDRESS_P (base))
3229 return optimize > 0 && io_address_operand (base, HImode)
3230 ? avr_asm_len ("in %A0,%i1" CR_TAB
3231 "in %B0,%i1+1", op, plen, -2)
3233 : avr_asm_len ("lds %A0,%m1" CR_TAB
3234 "lds %B0,%m1+1", op, plen, -4);
3237 fatal_insn ("unknown move insn:",insn);
3238 return "";
/* Output a 4-byte (SImode/SFmode) load from memory OP[1] into register
   OP[0] for INSN.  L, if non-NULL, receives the length in words; the
   comma-expression idiom "*l=N, (template)" sets the length and yields
   the template in one return statement.  */
3241 static const char*
3242 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3244 rtx dest = op[0];
3245 rtx src = op[1];
3246 rtx base = XEXP (src, 0);
3247 int reg_dest = true_regnum (dest);
3248 int reg_base = true_regnum (base);
3249 int tmp;
3251 if (!l)
3252 l = &tmp;
3254 if (reg_base > 0)
3256 if (reg_base == REG_X) /* (R26) */
/* Destination is X itself: load top-down, routing r27's byte via
   tmp, so the address stays valid as long as needed.  */
3258 if (reg_dest == REG_X)
3259 /* "ld r26,-X" is undefined */
3260 return *l=7, ("adiw r26,3" CR_TAB
3261 "ld r29,X" CR_TAB
3262 "ld r28,-X" CR_TAB
3263 "ld __tmp_reg__,-X" CR_TAB
3264 "sbiw r26,1" CR_TAB
3265 "ld r26,X" CR_TAB
3266 "mov r27,__tmp_reg__");
3267 else if (reg_dest == REG_X - 2)
3268 return *l=5, ("ld %A0,X+" CR_TAB
3269 "ld %B0,X+" CR_TAB
3270 "ld __tmp_reg__,X+" CR_TAB
3271 "ld %D0,X" CR_TAB
3272 "mov %C0,__tmp_reg__");
3273 else if (reg_unused_after (insn, base))
3274 return *l=4, ("ld %A0,X+" CR_TAB
3275 "ld %B0,X+" CR_TAB
3276 "ld %C0,X+" CR_TAB
3277 "ld %D0,X");
3278 else
3279 return *l=5, ("ld %A0,X+" CR_TAB
3280 "ld %B0,X+" CR_TAB
3281 "ld %C0,X+" CR_TAB
3282 "ld %D0,X" CR_TAB
3283 "sbiw r26,3");
3285 else
/* Partial overlaps between destination and base register are
   resolved by ordering the loads and using tmp.  */
3287 if (reg_dest == reg_base)
3288 return *l=5, ("ldd %D0,%1+3" CR_TAB
3289 "ldd %C0,%1+2" CR_TAB
3290 "ldd __tmp_reg__,%1+1" CR_TAB
3291 "ld %A0,%1" CR_TAB
3292 "mov %B0,__tmp_reg__");
3293 else if (reg_base == reg_dest + 2)
3294 return *l=5, ("ld %A0,%1" CR_TAB
3295 "ldd %B0,%1+1" CR_TAB
3296 "ldd __tmp_reg__,%1+2" CR_TAB
3297 "ldd %D0,%1+3" CR_TAB
3298 "mov %C0,__tmp_reg__");
3299 else
3300 return *l=4, ("ld %A0,%1" CR_TAB
3301 "ldd %B0,%1+1" CR_TAB
3302 "ldd %C0,%1+2" CR_TAB
3303 "ldd %D0,%1+3");
3306 else if (GET_CODE (base) == PLUS) /* (R + i) */
3308 int disp = INTVAL (XEXP (base, 1));
3310 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3312 if (REGNO (XEXP (base, 0)) != REG_Y)
3313 fatal_insn ("incorrect insn:",insn);
3315 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3316 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3317 "ldd %A0,Y+60" CR_TAB
3318 "ldd %B0,Y+61" CR_TAB
3319 "ldd %C0,Y+62" CR_TAB
3320 "ldd %D0,Y+63" CR_TAB
3321 "sbiw r28,%o1-60");
3323 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3324 "sbci r29,hi8(-%o1)" CR_TAB
3325 "ld %A0,Y" CR_TAB
3326 "ldd %B0,Y+1" CR_TAB
3327 "ldd %C0,Y+2" CR_TAB
3328 "ldd %D0,Y+3" CR_TAB
3329 "subi r28,lo8(%o1)" CR_TAB
3330 "sbci r29,hi8(%o1)");
3333 reg_base = true_regnum (XEXP (base, 0));
3334 if (reg_base == REG_X)
3336 /* R = (X + d) */
3337 if (reg_dest == REG_X)
3339 *l = 7;
3340 /* "ld r26,-X" is undefined */
3341 return ("adiw r26,%o1+3" CR_TAB
3342 "ld r29,X" CR_TAB
3343 "ld r28,-X" CR_TAB
3344 "ld __tmp_reg__,-X" CR_TAB
3345 "sbiw r26,1" CR_TAB
3346 "ld r26,X" CR_TAB
3347 "mov r27,__tmp_reg__");
3349 *l = 6;
3350 if (reg_dest == REG_X - 2)
3351 return ("adiw r26,%o1" CR_TAB
3352 "ld r24,X+" CR_TAB
3353 "ld r25,X+" CR_TAB
3354 "ld __tmp_reg__,X+" CR_TAB
3355 "ld r27,X" CR_TAB
3356 "mov r26,__tmp_reg__");
3358 return ("adiw r26,%o1" CR_TAB
3359 "ld %A0,X+" CR_TAB
3360 "ld %B0,X+" CR_TAB
3361 "ld %C0,X+" CR_TAB
3362 "ld %D0,X" CR_TAB
3363 "sbiw r26,%o1+3");
3365 if (reg_dest == reg_base)
3366 return *l=5, ("ldd %D0,%D1" CR_TAB
3367 "ldd %C0,%C1" CR_TAB
3368 "ldd __tmp_reg__,%B1" CR_TAB
3369 "ldd %A0,%A1" CR_TAB
3370 "mov %B0,__tmp_reg__");
3371 else if (reg_dest == reg_base - 2)
3372 return *l=5, ("ldd %A0,%A1" CR_TAB
3373 "ldd %B0,%B1" CR_TAB
3374 "ldd __tmp_reg__,%C1" CR_TAB
3375 "ldd %D0,%D1" CR_TAB
3376 "mov %C0,__tmp_reg__");
3377 return *l=4, ("ldd %A0,%A1" CR_TAB
3378 "ldd %B0,%B1" CR_TAB
3379 "ldd %C0,%C1" CR_TAB
3380 "ldd %D0,%D1");
3382 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3383 return *l=4, ("ld %D0,%1" CR_TAB
3384 "ld %C0,%1" CR_TAB
3385 "ld %B0,%1" CR_TAB
3386 "ld %A0,%1");
3387 else if (GET_CODE (base) == POST_INC) /* (R++) */
3388 return *l=4, ("ld %A0,%1" CR_TAB
3389 "ld %B0,%1" CR_TAB
3390 "ld %C0,%1" CR_TAB
3391 "ld %D0,%1");
3392 else if (CONSTANT_ADDRESS_P (base))
3393 return *l=8, ("lds %A0,%m1" CR_TAB
3394 "lds %B0,%m1+1" CR_TAB
3395 "lds %C0,%m1+2" CR_TAB
3396 "lds %D0,%m1+3");
3398 fatal_insn ("unknown move insn:",insn);
3399 return "";
/* Output a 4-byte (SImode/SFmode) store of register OP[1] into memory
   OP[0] for INSN.  L, if non-NULL, receives the length in words via the
   "*l=N, (template)" comma-expression idiom.  */
3402 static const char*
3403 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3405 rtx dest = op[0];
3406 rtx src = op[1];
3407 rtx base = XEXP (dest, 0);
3408 int reg_base = true_regnum (base);
3409 int reg_src = true_regnum (src);
3410 int tmp;
3412 if (!l)
3413 l = &tmp;
3415 if (CONSTANT_ADDRESS_P (base))
3416 return *l=8,("sts %m0,%A1" CR_TAB
3417 "sts %m0+1,%B1" CR_TAB
3418 "sts %m0+2,%C1" CR_TAB
3419 "sts %m0+3,%D1");
3420 if (reg_base > 0) /* (r) */
3422 if (reg_base == REG_X) /* (R26) */
/* Source overlaps the X address register: stash r27 in tmp and
   store byte-wise so the address is usable throughout.  */
3424 if (reg_src == REG_X)
3426 /* "st X+,r26" is undefined */
3427 if (reg_unused_after (insn, base))
3428 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3429 "st X,r26" CR_TAB
3430 "adiw r26,1" CR_TAB
3431 "st X+,__tmp_reg__" CR_TAB
3432 "st X+,r28" CR_TAB
3433 "st X,r29");
3434 else
3435 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3436 "st X,r26" CR_TAB
3437 "adiw r26,1" CR_TAB
3438 "st X+,__tmp_reg__" CR_TAB
3439 "st X+,r28" CR_TAB
3440 "st X,r29" CR_TAB
3441 "sbiw r26,3");
/* Upper half of the source overlaps X: save those two bytes in
   zero/tmp before the post-increment stores clobber them.  */
3443 else if (reg_base == reg_src + 2)
3445 if (reg_unused_after (insn, base))
3446 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3447 "mov __tmp_reg__,%D1" CR_TAB
3448 "st %0+,%A1" CR_TAB
3449 "st %0+,%B1" CR_TAB
3450 "st %0+,__zero_reg__" CR_TAB
3451 "st %0,__tmp_reg__" CR_TAB
3452 "clr __zero_reg__");
3453 else
3454 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3455 "mov __tmp_reg__,%D1" CR_TAB
3456 "st %0+,%A1" CR_TAB
3457 "st %0+,%B1" CR_TAB
3458 "st %0+,__zero_reg__" CR_TAB
3459 "st %0,__tmp_reg__" CR_TAB
3460 "clr __zero_reg__" CR_TAB
3461 "sbiw r26,3");
3463 return *l=5, ("st %0+,%A1" CR_TAB
3464 "st %0+,%B1" CR_TAB
3465 "st %0+,%C1" CR_TAB
3466 "st %0,%D1" CR_TAB
3467 "sbiw r26,3");
3469 else
3470 return *l=4, ("st %0,%A1" CR_TAB
3471 "std %0+1,%B1" CR_TAB
3472 "std %0+2,%C1" CR_TAB
3473 "std %0+3,%D1");
3475 else if (GET_CODE (base) == PLUS) /* (R + i) */
3477 int disp = INTVAL (XEXP (base, 1));
3478 reg_base = REGNO (XEXP (base, 0));
3479 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3481 if (reg_base != REG_Y)
3482 fatal_insn ("incorrect insn:",insn);
3484 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3485 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3486 "std Y+60,%A1" CR_TAB
3487 "std Y+61,%B1" CR_TAB
3488 "std Y+62,%C1" CR_TAB
3489 "std Y+63,%D1" CR_TAB
3490 "sbiw r28,%o0-60");
3492 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3493 "sbci r29,hi8(-%o0)" CR_TAB
3494 "st Y,%A1" CR_TAB
3495 "std Y+1,%B1" CR_TAB
3496 "std Y+2,%C1" CR_TAB
3497 "std Y+3,%D1" CR_TAB
3498 "subi r28,lo8(%o0)" CR_TAB
3499 "sbci r29,hi8(%o0)");
3501 if (reg_base == REG_X)
3503 /* (X + d) = R */
3504 if (reg_src == REG_X)
3506 *l = 9;
3507 return ("mov __tmp_reg__,r26" CR_TAB
3508 "mov __zero_reg__,r27" CR_TAB
3509 "adiw r26,%o0" CR_TAB
3510 "st X+,__tmp_reg__" CR_TAB
3511 "st X+,__zero_reg__" CR_TAB
3512 "st X+,r28" CR_TAB
3513 "st X,r29" CR_TAB
3514 "clr __zero_reg__" CR_TAB
3515 "sbiw r26,%o0+3");
3517 else if (reg_src == REG_X - 2)
3519 *l = 9;
3520 return ("mov __tmp_reg__,r26" CR_TAB
3521 "mov __zero_reg__,r27" CR_TAB
3522 "adiw r26,%o0" CR_TAB
3523 "st X+,r24" CR_TAB
3524 "st X+,r25" CR_TAB
3525 "st X+,__tmp_reg__" CR_TAB
3526 "st X,__zero_reg__" CR_TAB
3527 "clr __zero_reg__" CR_TAB
3528 "sbiw r26,%o0+3");
3530 *l = 6;
3531 return ("adiw r26,%o0" CR_TAB
3532 "st X+,%A1" CR_TAB
3533 "st X+,%B1" CR_TAB
3534 "st X+,%C1" CR_TAB
3535 "st X,%D1" CR_TAB
3536 "sbiw r26,%o0+3");
3538 return *l=4, ("std %A0,%A1" CR_TAB
3539 "std %B0,%B1" CR_TAB
3540 "std %C0,%C1" CR_TAB
3541 "std %D0,%D1");
3543 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3544 return *l=4, ("st %0,%D1" CR_TAB
3545 "st %0,%C1" CR_TAB
3546 "st %0,%B1" CR_TAB
3547 "st %0,%A1");
3548 else if (GET_CODE (base) == POST_INC) /* (R++) */
3549 return *l=4, ("st %0,%A1" CR_TAB
3550 "st %0,%B1" CR_TAB
3551 "st %0,%C1" CR_TAB
3552 "st %0,%D1");
3553 fatal_insn ("unknown move insn:",insn);
3554 return "";
/* Output a 4-byte (SImode/SFmode) move for INSN with OPERANDS[].
   If L != NULL, only the length in words is stored into *L.  */
3557 const char *
3558 output_movsisf (rtx insn, rtx operands[], int *l)
3560 int dummy;
3561 rtx dest = operands[0];
3562 rtx src = operands[1];
3563 int *real_l = l;
3565 if (avr_mem_flash_p (src)
3566 || avr_mem_flash_p (dest))
3568 return avr_out_lpm (insn, operands, real_l);
3571 if (!l)
3572 l = &dummy;
3574 if (register_operand (dest, VOIDmode))
3576 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy direction chosen so overlapping register ranges are not
   clobbered before they are read.  */
3578 if (true_regnum (dest) > true_regnum (src))
3580 if (AVR_HAVE_MOVW)
3582 *l = 2;
3583 return ("movw %C0,%C1" CR_TAB
3584 "movw %A0,%A1");
3586 *l = 4;
3587 return ("mov %D0,%D1" CR_TAB
3588 "mov %C0,%C1" CR_TAB
3589 "mov %B0,%B1" CR_TAB
3590 "mov %A0,%A1");
3592 else
3594 if (AVR_HAVE_MOVW)
3596 *l = 2;
3597 return ("movw %A0,%A1" CR_TAB
3598 "movw %C0,%C1");
3600 *l = 4;
3601 return ("mov %A0,%A1" CR_TAB
3602 "mov %B0,%B1" CR_TAB
3603 "mov %C0,%C1" CR_TAB
3604 "mov %D0,%D1");
3607 else if (CONSTANT_P (src))
3609 return output_reload_insisf (operands, NULL_RTX, real_l);
3611 else if (GET_CODE (src) == MEM)
3612 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3614 else if (GET_CODE (dest) == MEM)
3616 const char *templ;
/* Storing zero uses the zero register; OPERANDS[1] is restored
   below because callers may reuse the operand array.  */
3618 if (src == CONST0_RTX (GET_MODE (dest)))
3619 operands[1] = zero_reg_rtx;
3621 templ = out_movsi_mr_r (insn, operands, real_l);
3623 if (!real_l)
3624 output_asm_insn (templ, operands);
3626 operands[1] = src;
3627 return "";
3629 fatal_insn ("invalid insn:", insn);
3630 return "";
3634 /* Handle loads of 24-bit types from memory to register. */
/* OP[0] is the destination register, OP[1] the memory source.
   If PLEN != NULL, only accumulate the length in words into *PLEN.  */
3636 static const char*
3637 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3639 rtx dest = op[0];
3640 rtx src = op[1];
3641 rtx base = XEXP (src, 0);
3642 int reg_dest = true_regnum (dest);
3643 int reg_base = true_regnum (base);
3645 if (reg_base > 0)
3647 if (reg_base == REG_X) /* (R26) */
3649 if (reg_dest == REG_X)
3650 /* "ld r26,-X" is undefined */
3651 return avr_asm_len ("adiw r26,2" CR_TAB
3652 "ld r28,X" CR_TAB
3653 "ld __tmp_reg__,-X" CR_TAB
3654 "sbiw r26,1" CR_TAB
3655 "ld r26,X" CR_TAB
3656 "mov r27,__tmp_reg__", op, plen, -6);
3657 else
3659 avr_asm_len ("ld %A0,X+" CR_TAB
3660 "ld %B0,X+" CR_TAB
3661 "ld %C0,X", op, plen, -3);
/* Restore X unless it was clobbered by the load itself or is
   dead after this insn.  */
3663 if (reg_dest != REG_X - 2
3664 && !reg_unused_after (insn, base))
3666 avr_asm_len ("sbiw r26,2", op, plen, 1);
3669 return "";
3672 else /* reg_base != REG_X */
3674 if (reg_dest == reg_base)
3675 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3676 "ldd __tmp_reg__,%1+1" CR_TAB
3677 "ld %A0,%1" CR_TAB
3678 "mov %B0,__tmp_reg__", op, plen, -4);
3679 else
3680 return avr_asm_len ("ld %A0,%1" CR_TAB
3681 "ldd %B0,%1+1" CR_TAB
3682 "ldd %C0,%1+2", op, plen, -3);
3685 else if (GET_CODE (base) == PLUS) /* (R + i) */
3687 int disp = INTVAL (XEXP (base, 1));
3689 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3691 if (REGNO (XEXP (base, 0)) != REG_Y)
3692 fatal_insn ("incorrect insn:",insn);
/* Adjust Y by disp-61, read at Y+61..Y+63, then undo exactly
   the same adjustment.  */
3694 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3695 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3696 "ldd %A0,Y+61" CR_TAB
3697 "ldd %B0,Y+62" CR_TAB
3698 "ldd %C0,Y+63" CR_TAB
3699 "sbiw r28,%o1-61", op, plen, -5);
3701 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3702 "sbci r29,hi8(-%o1)" CR_TAB
3703 "ld %A0,Y" CR_TAB
3704 "ldd %B0,Y+1" CR_TAB
3705 "ldd %C0,Y+2" CR_TAB
3706 "subi r28,lo8(%o1)" CR_TAB
3707 "sbci r29,hi8(%o1)", op, plen, -7);
3710 reg_base = true_regnum (XEXP (base, 0));
3711 if (reg_base == REG_X)
3713 /* R = (X + d) */
3714 if (reg_dest == REG_X)
3716 /* "ld r26,-X" is undefined */
3717 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3718 "ld r28,X" CR_TAB
3719 "ld __tmp_reg__,-X" CR_TAB
3720 "sbiw r26,1" CR_TAB
3721 "ld r26,X" CR_TAB
3722 "mov r27,__tmp_reg__", op, plen, -6);
3725 avr_asm_len ("adiw r26,%o1" CR_TAB
3726 "ld r24,X+" CR_TAB
3727 "ld r25,X+" CR_TAB
3728 "ld r26,X", op, plen, -4);
3730 if (reg_dest != REG_X - 2)
3731 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3733 return "";
3736 if (reg_dest == reg_base)
3737 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3738 "ldd __tmp_reg__,%B1" CR_TAB
3739 "ldd %A0,%A1" CR_TAB
3740 "mov %B0,__tmp_reg__", op, plen, -4);
3742 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3743 "ldd %B0,%B1" CR_TAB
3744 "ldd %C0,%C1", op, plen, -3);
3746 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3747 return avr_asm_len ("ld %C0,%1" CR_TAB
3748 "ld %B0,%1" CR_TAB
3749 "ld %A0,%1", op, plen, -3);
3750 else if (GET_CODE (base) == POST_INC) /* (R++) */
3751 return avr_asm_len ("ld %A0,%1" CR_TAB
3752 "ld %B0,%1" CR_TAB
3753 "ld %C0,%1", op, plen, -3);
3755 else if (CONSTANT_ADDRESS_P (base))
3756 return avr_asm_len ("lds %A0,%m1" CR_TAB
3757 "lds %B0,%m1+1" CR_TAB
3758 "lds %C0,%m1+2", op, plen , -6);
3760 fatal_insn ("unknown move insn:",insn);
3761 return "";
/* Handle store of 24-bit type from register or zero to memory.

   INSN is the store insn being output.  OP[0] is the MEM destination,
   OP[1] the PSImode source register (the zero register when storing 0).
   PLEN == NULL: output instructions.
   PLEN != NULL: don't output anything, only accumulate the sequence
   length (in words) in *PLEN.
   NOTE(review): the leading negative word counts handed to avr_asm_len
   appear to reset the accumulated length — confirm against avr_asm_len.
   Always returns "".  */

static const char*
avr_out_store_psi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("sts %m0,%A1" CR_TAB
                        "sts %m0+1,%B1" CR_TAB
                        "sts %m0+2,%C1", op, plen, -6);

  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1" CR_TAB
                       "st %0+,%B1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          /* Restore X if a later insn still needs the base address.  */
          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

          return "";
        }
      else
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1" CR_TAB
                            "std %0+2,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement too large for STD; only Y can be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1" CR_TAB
                                "std Y+62,%B1" CR_TAB
                                "std Y+63,%C1" CR_TAB
                                "sbiw r28,%o0-60", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%A1" CR_TAB
                              "std Y+1,%B1" CR_TAB
                              "std Y+2,%C1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X+,%B1" CR_TAB
                       "st X,%C1", op, plen, -4);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement addressing stores bytes high-to-low.  */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Move around 24-bit stuff.

   Dispatch an output request for a PSImode move insn INSN with operands
   OP[] (OP[0] = destination, OP[1] = source) to the appropriate worker:
   flash reads go through avr_out_lpm, register loads/stores through
   avr_out_load_psi / avr_out_store_psi, constants through
   avr_out_reload_inpsi.  PLEN semantics as in those workers:
   NULL = emit code, non-NULL = only compute length in words.  */

const char *
avr_out_movpsi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, op, plen);
    }

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy in an order that never clobbers a source byte before
             it is read, depending on which register number is higher.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              avr_asm_len ("mov %C0,%C1", op, plen, -1);

              if (AVR_HAVE_MOVW)
                return avr_asm_len ("movw %A0,%A1", op, plen, 1);
              else
                return avr_asm_len ("mov %B0,%B1" CR_TAB
                                    "mov %A0,%A1", op, plen, 2);
            }
          else
            {
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %A0,%A1", op, plen, -1);
              else
                avr_asm_len ("mov %A0,%A1" CR_TAB
                             "mov %B0,%B1", op, plen, -2);

              return avr_asm_len ("mov %C0,%C1", op, plen, 1);
            }
        }
      else if (CONSTANT_P (src))
        {
          return avr_out_reload_inpsi (op, NULL_RTX, plen);
        }
      else if (MEM_P (src))
        return avr_out_load_psi (insn, op, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      xop[0] = dest;
      /* Storing constant 0 is rewritten as a store of the zero register.  */
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return avr_out_store_psi (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);
  return "";
}
/* Output a QImode store of register OP[1] to memory OP[0] for insn INSN.
   PLEN == NULL: emit the assembler sequence; PLEN != NULL: only compute
   its length in words into *PLEN.  Always returns "".  */

static const char*
out_movqi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* OUT is one word shorter than STS and usable for I/O addresses
         when optimizing.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          /* Displacement exceeds the STD range; only Y may be adjusted.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X has no displacement addressing; adjust X around the store.
             If the source overlaps X, save it in __tmp_reg__ first.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x,0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.

   Output an HImode store of OP[1] to memory OP[0] for insn INSN on
   XMEGA cores.  PLEN == NULL: emit code; PLEN != NULL: only compute
   the length in words into *PLEN.  Always returns "".  */

static const char*
avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0,%A1" CR_TAB
                     "out %i0+1,%B1", op, plen, -2)

      : avr_asm_len ("sts %m0,%A1" CR_TAB
                     "sts %m0+1,%B1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26" CR_TAB
                     "adiw r26,1" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Restore X unless no one uses the base register afterwards.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Out of STD range; only Y may be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R
         If the source is X itself, park it in __tmp_reg__/__zero_reg__
         first because storing X through X is undefined.  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      /* Volatile: rewrite so the low byte is written first anyway.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Output an HImode store of register OP[1] to memory OP[0] for insn
   INSN.  On XMEGA this delegates to avr_out_movhi_mr_r_xmega.
   PLEN == NULL: emit code; PLEN != NULL: only compute the length in
   words into *PLEN.  Always returns "".  */

static const char*
out_movhi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                     "out %i0,%A1", op, plen, -2)

      : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                     "sts %m0,%A1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "sbiw r26,1" CR_TAB
                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Out of STD range; only Y may be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "st Y,%A1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      /* Volatile: write the high byte first while still post-incrementing
         the base register by 2 overall.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4215 /* Return 1 if frame pointer for current function required. */
4217 static bool
4218 avr_frame_pointer_required_p (void)
4220 return (cfun->calls_alloca
4221 || cfun->calls_setjmp
4222 || cfun->has_nonlocal_label
4223 || crtl->args.info.nregs == 0
4224 || get_frame_size () > 0);
/* Returns the condition of compare insn INSN, or UNKNOWN.

   Peeks at the next real insn: if it is a conditional jump
   (a SET whose source is an IF_THEN_ELSE), return the RTX code of
   its comparison; otherwise return UNKNOWN.  */

static RTX_CODE
compare_condition (rtx insn)
{
  rtx next = next_real_insn (insn);

  if (next && JUMP_P (next))
    {
      rtx pat = PATTERN (next);
      rtx src = SET_SRC (pat);

      if (IF_THEN_ELSE == GET_CODE (src))
        return GET_CODE (XEXP (src, 0));
    }

  return UNKNOWN;
}
4247 /* Returns true iff INSN is a tst insn that only tests the sign. */
4249 static bool
4250 compare_sign_p (rtx insn)
4252 RTX_CODE cond = compare_condition (insn);
4253 return (cond == GE || cond == LT);
4257 /* Returns true iff the next insn is a JUMP_INSN with a condition
4258 that needs to be swapped (GT, GTU, LE, LEU). */
4260 static bool
4261 compare_diff_p (rtx insn)
4263 RTX_CODE cond = compare_condition (insn);
4264 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4267 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4269 static bool
4270 compare_eq_p (rtx insn)
4272 RTX_CODE cond = compare_condition (insn);
4273 return (cond == EQ || cond == NE);
/* Output compare instruction

       compare (XOP[0], XOP[1])

   for an HI/SI register XOP[0] and an integer XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.  */

const char*
avr_out_compare (rtx insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against.  */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison.  */
  enum machine_mode mode = GET_MODE (xreg);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
  int clobber_val = -1;

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that DImode comparisons are always against reg:DI 18
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              /* SBIW handles two bytes at once; skip the next byte.  */
              avr_asm_len ("sbiw %0,%1", xop, plen, 1);
              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* For EQ/NE, comparing with a small negative value is the
                 same as checking reg - val == 0, i.e. reg + |val| == 0.  */
              return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy.  */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              /* SBCI clobbers the register, which is fine if it's dead.  */
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      /* Avoid reloading the scratch if it already holds this byte value.  */
      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
4432 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4434 const char*
4435 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4437 rtx xop[3];
4439 xop[0] = gen_rtx_REG (DImode, 18);
4440 xop[1] = op[0];
4441 xop[2] = op[1];
4443 return avr_out_compare (insn, xop, plen);
4446 /* Output test instruction for HImode. */
4448 const char*
4449 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4451 if (compare_sign_p (insn))
4453 avr_asm_len ("tst %B0", op, plen, -1);
4455 else if (reg_unused_after (insn, op[0])
4456 && compare_eq_p (insn))
4458 /* Faster than sbiw if we can clobber the operand. */
4459 avr_asm_len ("or %A0,%B0", op, plen, -1);
4461 else
4463 avr_out_compare (insn, op, plen);
4466 return "";
4470 /* Output test instruction for PSImode. */
4472 const char*
4473 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4475 if (compare_sign_p (insn))
4477 avr_asm_len ("tst %C0", op, plen, -1);
4479 else if (reg_unused_after (insn, op[0])
4480 && compare_eq_p (insn))
4482 /* Faster than sbiw if we can clobber the operand. */
4483 avr_asm_len ("or %A0,%B0" CR_TAB
4484 "or %A0,%C0", op, plen, -2);
4486 else
4488 avr_out_compare (insn, op, plen);
4491 return "";
4495 /* Output test instruction for SImode. */
4497 const char*
4498 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4500 if (compare_sign_p (insn))
4502 avr_asm_len ("tst %D0", op, plen, -1);
4504 else if (reg_unused_after (insn, op[0])
4505 && compare_eq_p (insn))
4507 /* Faster than sbiw if we can clobber the operand. */
4508 avr_asm_len ("or %A0,%B0" CR_TAB
4509 "or %A0,%C0" CR_TAB
4510 "or %A0,%D0", op, plen, -3);
4512 else
4514 avr_out_compare (insn, op, plen);
4517 return "";
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.

   PLEN == NULL: emit code; PLEN != NULL: only accumulate the sequence
   length in words.  For non-constant counts a loop labeled "1:" is
   emitted; a leading "rjmp 2f" (SECOND_LABEL) makes the loop tolerate
   a count of 0 by testing the counter before the first shift.  */

void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
                    int *plen, int t_len)
{
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* The count is a known positive constant, so the loop body always
         runs at least once; no need for the pre-test jump.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Count in memory: load it into __tmp_reg__ first.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count if decrementing it in place would clobber a live
         register or one that overlaps the shifted operand.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
/* 8bit shift left ((char)x << i)

   OPERANDS[0] is the QImode register to shift in place, OPERANDS[2]
   the shift count.  LEN == NULL: emit code; LEN != NULL: only store
   the sequence length in words.  Constant counts get hand-optimized
   sequences; everything else goes through out_shift_with_cnt.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shifts of 8 or more clear the register entirely.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* SWAP + ANDI beats four LSLs, but ANDI needs an upper reg.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* Rotate bit 0 into carry, clear, rotate carry into bit 7.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
/* 16bit shift left ((short)x << i)

   OPERANDS[0] is the HImode register pair, OPERANDS[2] the count and
   OPERANDS[3] an optional scratch (present when the insn PATTERN is a
   PARALLEL).  LEN == NULL: emit code; LEN != NULL: only store the
   length in words.  Counts not handled by an optimized case below
   (including 1..3) fall through to out_shift_with_cnt.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shifts of 16 or more clear both bytes.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble-swap both bytes, then merge via EOR.  */
              *len = 6;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              /* Shift by one, then use the 4-bit swap trick.  */
              *len = 8;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift right by 2 into __tmp_reg__, then byte-swap: net << 6.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "ror %A0");

        case 8:
          /* Plain byte move.  */
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "andi %B0,0xe0");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 0x20 == << 5, result's high bits land in r0.  */
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the multiplier 0x20 in r1 via SET/BLD.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,5" CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Compact loop: shift the high byte six times.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0" CR_TAB
                      "dec %A0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* << 14 == >> 2 followed by a byte swap.  */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          /* Only bit 0 survives, rotated into bit 7 of the high byte.  */
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
/* 24-bit shift left

   OP[0] is the PSImode destination, OP[1] the source and OP[2] the
   count.  PLEN == NULL: emit code; PLEN != NULL: only accumulate the
   length in words.  Counts 8, 16, 23 and >= 24 get dedicated
   sequences; everything else goes through out_shift_with_cnt.  */

const char*
avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Shifts of 24 or more clear everything.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* Pick a copy order that never overwrites a source byte
               before it was read.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* If the source's low byte is not already the destination's
               high byte, move it there.  */
            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 survives, rotated into bit 7 of the top byte.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
/* 32bit shift left ((long)x << i)

   OPERANDS[0] is the SImode destination, OPERANDS[1] the source and
   OPERANDS[2] the count.  LEN == NULL: emit code; LEN != NULL: only
   store the length in words.  Whole-byte counts (8/16/24), 31 and
   >= 32 get dedicated sequences; everything else falls through to
   out_shift_with_cnt.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shifts of 32 or more clear everything.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Pick a copy order that never overwrites a source byte
               before it was read.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* Source low word already sits in the destination high word.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 survives, rotated into bit 7 of the top byte.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
/* 8bit arithmetic shift right ((signed char)x >> i)

   OPERANDS[0] is the QImode register shifted in place, OPERANDS[2]
   the count.  LEN == NULL: emit code; LEN != NULL: only store the
   length in words.  Counts >= 7 reduce to a sign-fill; non-constant
   counts go through out_shift_with_cnt.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* Save bit 6, fill with the sign via SBC, restore bit 6
             into bit 0.  */
          *len = 4;
          return ("bst %0,6" CR_TAB
                  "lsl %0" CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* Result is all sign bits: shift the sign into carry and
             subtract-with-carry from itself (0x00 or 0xff).  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
5222 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Output assembler for a 16-bit arithmetic shift right:
   operands[0] = operands[1] >> operands[2] (signed).
   If LEN is non-NULL, no code is printed; only the instruction count is
   stored through LEN.  Shift counts without a special-cased sequence fall
   through to the generic loop emitted by out_shift_with_cnt.  */
5224 const char *
5225 ashrhi3_out (rtx insn, rtx operands[], int *len)
5227 if (GET_CODE (operands[2]) == CONST_INT)
/* scratch: nonzero when the insn pattern is a PARALLEL — presumably the
   variant that supplies a scratch register; verify against avr.md.  */
5229 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5230 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5231 int k;
/* Remember the caller's LEN (possibly NULL) so it can be restored before
   the generic fallback below; the cases here need a non-NULL pointer.  */
5232 int *t = len;
5234 if (!len)
5235 len = &k;
5237 switch (INTVAL (operands[2]))
5239 case 4:
5240 case 5:
5241 /* XXX try to optimize this too? */
5242 break;
5244 case 6:
5245 if (optimize_size)
5246 break; /* scratch ? 5 : 6 */
5247 *len = 8;
5248 return ("mov __tmp_reg__,%A0" CR_TAB
5249 "mov %A0,%B0" CR_TAB
5250 "lsl __tmp_reg__" CR_TAB
5251 "rol %A0" CR_TAB
5252 "sbc %B0,%B0" CR_TAB
5253 "lsl __tmp_reg__" CR_TAB
5254 "rol %A0" CR_TAB
5255 "rol %B0");
5257 case 7:
5258 *len = 4;
5259 return ("lsl %A0" CR_TAB
5260 "mov %A0,%B0" CR_TAB
5261 "rol %A0" CR_TAB
5262 "sbc %B0,%B0");
5264 case 8:
5266 int reg0 = true_regnum (operands[0]);
5267 int reg1 = true_regnum (operands[1]);
5269 if (reg0 == reg1)
5270 return *len = 3, ("mov %A0,%B0" CR_TAB
5271 "lsl %B0" CR_TAB
5272 "sbc %B0,%B0");
5273 else
5274 return *len = 4, ("mov %A0,%B1" CR_TAB
5275 "clr %B0" CR_TAB
5276 "sbrc %A0,7" CR_TAB
5277 "dec %B0");
5280 case 9:
5281 *len = 4;
5282 return ("mov %A0,%B0" CR_TAB
5283 "lsl %B0" CR_TAB
5284 "sbc %B0,%B0" CR_TAB
5285 "asr %A0");
5287 case 10:
5288 *len = 5;
5289 return ("mov %A0,%B0" CR_TAB
5290 "lsl %B0" CR_TAB
5291 "sbc %B0,%B0" CR_TAB
5292 "asr %A0" CR_TAB
5293 "asr %A0");
5295 case 11:
5296 if (AVR_HAVE_MUL && ldi_ok)
5298 *len = 5;
/* Signed multiply by 0x20 == 1 << 5 shifts left by 5; the high byte
   (r1) then holds the value arithmetically shifted right by 11.  */
5299 return ("ldi %A0,0x20" CR_TAB
5300 "muls %B0,%A0" CR_TAB
5301 "mov %A0,r1" CR_TAB
5302 "sbc %B0,%B0" CR_TAB
5303 "clr __zero_reg__");
5305 if (optimize_size && scratch)
5306 break; /* 5 */
5307 *len = 6;
5308 return ("mov %A0,%B0" CR_TAB
5309 "lsl %B0" CR_TAB
5310 "sbc %B0,%B0" CR_TAB
5311 "asr %A0" CR_TAB
5312 "asr %A0" CR_TAB
5313 "asr %A0");
5315 case 12:
5316 if (AVR_HAVE_MUL && ldi_ok)
5318 *len = 5;
5319 return ("ldi %A0,0x10" CR_TAB
5320 "muls %B0,%A0" CR_TAB
5321 "mov %A0,r1" CR_TAB
5322 "sbc %B0,%B0" CR_TAB
5323 "clr __zero_reg__");
5325 if (optimize_size && scratch)
5326 break; /* 5 */
5327 *len = 7;
5328 return ("mov %A0,%B0" CR_TAB
5329 "lsl %B0" CR_TAB
5330 "sbc %B0,%B0" CR_TAB
5331 "asr %A0" CR_TAB
5332 "asr %A0" CR_TAB
5333 "asr %A0" CR_TAB
5334 "asr %A0");
5336 case 13:
5337 if (AVR_HAVE_MUL && ldi_ok)
5339 *len = 5;
5340 return ("ldi %A0,0x08" CR_TAB
5341 "muls %B0,%A0" CR_TAB
5342 "mov %A0,r1" CR_TAB
5343 "sbc %B0,%B0" CR_TAB
5344 "clr __zero_reg__");
5346 if (optimize_size)
5347 break; /* scratch ? 5 : 7 */
5348 *len = 8;
5349 return ("mov %A0,%B0" CR_TAB
5350 "lsl %B0" CR_TAB
5351 "sbc %B0,%B0" CR_TAB
5352 "asr %A0" CR_TAB
5353 "asr %A0" CR_TAB
5354 "asr %A0" CR_TAB
5355 "asr %A0" CR_TAB
5356 "asr %A0");
5358 case 14:
5359 *len = 5;
5360 return ("lsl %B0" CR_TAB
5361 "sbc %A0,%A0" CR_TAB
5362 "lsl %B0" CR_TAB
5363 "mov %B0,%A0" CR_TAB
5364 "rol %A0");
5366 default:
5367 if (INTVAL (operands[2]) < 16)
5368 break;
5370 /* fall through */
5372 case 15:
/* Shift >= 15: the result is all copies of the sign bit.  */
5373 return *len = 3, ("lsl %B0" CR_TAB
5374 "sbc %A0,%A0" CR_TAB
5375 "mov %B0,%A0");
/* Restore the caller's (possibly NULL) LEN before the generic path.  */
5377 len = t;
5379 out_shift_with_cnt ("asr %B0" CR_TAB
5380 "ror %A0", insn, operands, len, 2);
5381 return "";
5385 /* 24-bit arithmetic shift right */
/* Output assembler for a 24-bit (PSImode) arithmetic shift right:
   op[0] = op[1] >> op[2] (signed).  If PLEN is non-NULL it is reset to 0
   and accumulated by avr_asm_len instead of printing instructions.  */
5387 const char*
5388 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5390 int dest = REGNO (op[0]);
5391 int src = REGNO (op[1]);
5393 if (CONST_INT_P (op[2]))
5395 if (plen)
5396 *plen = 0;
5398 switch (INTVAL (op[2]))
5400 case 8:
/* Copy direction chosen by register-number comparison — presumably so
   overlapping source/destination registers are not clobbered; confirm
   against the constraint alternatives in avr.md.  */
5401 if (dest <= src)
5402 return avr_asm_len ("mov %A0,%B1" CR_TAB
5403 "mov %B0,%C1" CR_TAB
5404 "clr %C0" CR_TAB
5405 "sbrc %B0,7" CR_TAB
5406 "dec %C0", op, plen, 5);
5407 else
5408 return avr_asm_len ("clr %C0" CR_TAB
5409 "sbrc %C1,7" CR_TAB
5410 "dec %C0" CR_TAB
5411 "mov %B0,%C1" CR_TAB
5412 "mov %A0,%B1", op, plen, 5);
5414 case 16:
5415 if (dest != src + 2)
5416 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5418 return avr_asm_len ("clr %B0" CR_TAB
5419 "sbrc %A0,7" CR_TAB
5420 "com %B0" CR_TAB
5421 "mov %C0,%B0", op, plen, 4);
5423 default:
5424 if (INTVAL (op[2]) < 24)
5425 break;
5427 /* fall through */
5429 case 23:
/* Shift >= 23: result is the sign bit replicated into all 24 bits.  */
5430 return avr_asm_len ("lsl %C0" CR_TAB
5431 "sbc %A0,%A0" CR_TAB
5432 "mov %B0,%A0" CR_TAB
5433 "mov %C0,%A0", op, plen, 4);
5434 } /* switch */
5437 out_shift_with_cnt ("asr %C0" CR_TAB
5438 "ror %B0" CR_TAB
5439 "ror %A0", insn, op, plen, 3);
5440 return "";
5444 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Output assembler for a 32-bit arithmetic shift right.
   Same LEN convention as ashrhi3_out: non-NULL LEN means only count
   instructions.  Byte-multiple shifts are special-cased; everything else
   goes through out_shift_with_cnt.  */
5446 const char *
5447 ashrsi3_out (rtx insn, rtx operands[], int *len)
5449 if (GET_CODE (operands[2]) == CONST_INT)
5451 int k;
5452 int *t = len;
5454 if (!len)
5455 len = &k;
5457 switch (INTVAL (operands[2]))
5459 case 8:
5461 int reg0 = true_regnum (operands[0]);
5462 int reg1 = true_regnum (operands[1]);
5463 *len=6;
/* Ascending vs. descending byte copies depending on register order —
   presumably to handle overlapping operands; TODO confirm.  */
5464 if (reg0 <= reg1)
5465 return ("mov %A0,%B1" CR_TAB
5466 "mov %B0,%C1" CR_TAB
5467 "mov %C0,%D1" CR_TAB
5468 "clr %D0" CR_TAB
5469 "sbrc %C0,7" CR_TAB
5470 "dec %D0");
5471 else
5472 return ("clr %D0" CR_TAB
5473 "sbrc %D1,7" CR_TAB
5474 "dec %D0" CR_TAB
5475 "mov %C0,%D1" CR_TAB
5476 "mov %B0,%C1" CR_TAB
5477 "mov %A0,%B1");
5480 case 16:
5482 int reg0 = true_regnum (operands[0]);
5483 int reg1 = true_regnum (operands[1]);
5485 if (reg0 == reg1 + 2)
5486 return *len = 4, ("clr %D0" CR_TAB
5487 "sbrc %B0,7" CR_TAB
5488 "com %D0" CR_TAB
5489 "mov %C0,%D0");
5490 if (AVR_HAVE_MOVW)
5491 return *len = 5, ("movw %A0,%C1" CR_TAB
5492 "clr %D0" CR_TAB
5493 "sbrc %B0,7" CR_TAB
5494 "com %D0" CR_TAB
5495 "mov %C0,%D0");
5496 else
5497 return *len = 6, ("mov %B0,%D1" CR_TAB
5498 "mov %A0,%C1" CR_TAB
5499 "clr %D0" CR_TAB
5500 "sbrc %B0,7" CR_TAB
5501 "com %D0" CR_TAB
5502 "mov %C0,%D0");
5505 case 24:
5506 return *len = 6, ("mov %A0,%D1" CR_TAB
5507 "clr %D0" CR_TAB
5508 "sbrc %A0,7" CR_TAB
5509 "com %D0" CR_TAB
5510 "mov %B0,%D0" CR_TAB
5511 "mov %C0,%D0");
5513 default:
5514 if (INTVAL (operands[2]) < 32)
5515 break;
5517 /* fall through */
5519 case 31:
/* Shift >= 31: all four bytes become copies of the sign bit.  */
5520 if (AVR_HAVE_MOVW)
5521 return *len = 4, ("lsl %D0" CR_TAB
5522 "sbc %A0,%A0" CR_TAB
5523 "mov %B0,%A0" CR_TAB
5524 "movw %C0,%A0");
5525 else
5526 return *len = 5, ("lsl %D0" CR_TAB
5527 "sbc %A0,%A0" CR_TAB
5528 "mov %B0,%A0" CR_TAB
5529 "mov %C0,%A0" CR_TAB
5530 "mov %D0,%A0");
/* Restore the caller's (possibly NULL) LEN before the generic path.  */
5532 len = t;
5534 out_shift_with_cnt ("asr %D0" CR_TAB
5535 "ror %C0" CR_TAB
5536 "ror %B0" CR_TAB
5537 "ror %A0", insn, operands, len, 4);
5538 return "";
5541 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Output assembler for an 8-bit logical shift right.
   Non-NULL LEN: only store instruction count.  A non-CONST_INT but
   CONSTANT_P shift count is an internal error; a register count falls
   through to out_shift_with_cnt.  */
5543 const char *
5544 lshrqi3_out (rtx insn, rtx operands[], int *len)
5546 if (GET_CODE (operands[2]) == CONST_INT)
5548 int k;
5550 if (!len)
5551 len = &k;
5553 switch (INTVAL (operands[2]))
5555 default:
5556 if (INTVAL (operands[2]) < 8)
5557 break;
/* Shift >= 8 of an 8-bit value: result is zero.  */
5559 *len = 1;
5560 return "clr %0";
5562 case 1:
5563 *len = 1;
5564 return "lsr %0";
5566 case 2:
5567 *len = 2;
5568 return ("lsr %0" CR_TAB
5569 "lsr %0");
5570 case 3:
5571 *len = 3;
5572 return ("lsr %0" CR_TAB
5573 "lsr %0" CR_TAB
5574 "lsr %0");
5576 case 4:
/* LD_REGS (r16..r31) accept immediates, enabling swap+andi tricks.  */
5577 if (test_hard_reg_class (LD_REGS, operands[0]))
5579 *len=2;
5580 return ("swap %0" CR_TAB
5581 "andi %0,0x0f");
5583 *len = 4;
5584 return ("lsr %0" CR_TAB
5585 "lsr %0" CR_TAB
5586 "lsr %0" CR_TAB
5587 "lsr %0");
5589 case 5:
5590 if (test_hard_reg_class (LD_REGS, operands[0]))
5592 *len = 3;
5593 return ("swap %0" CR_TAB
5594 "lsr %0" CR_TAB
5595 "andi %0,0x7");
5597 *len = 5;
5598 return ("lsr %0" CR_TAB
5599 "lsr %0" CR_TAB
5600 "lsr %0" CR_TAB
5601 "lsr %0" CR_TAB
5602 "lsr %0");
5604 case 6:
5605 if (test_hard_reg_class (LD_REGS, operands[0]))
5607 *len = 4;
5608 return ("swap %0" CR_TAB
5609 "lsr %0" CR_TAB
5610 "lsr %0" CR_TAB
5611 "andi %0,0x3");
5613 *len = 6;
5614 return ("lsr %0" CR_TAB
5615 "lsr %0" CR_TAB
5616 "lsr %0" CR_TAB
5617 "lsr %0" CR_TAB
5618 "lsr %0" CR_TAB
5619 "lsr %0");
5621 case 7:
/* Shift by 7: rotate bit 7 into carry, clear, rotate carry into bit 0.  */
5622 *len = 3;
5623 return ("rol %0" CR_TAB
5624 "clr %0" CR_TAB
5625 "rol %0");
5628 else if (CONSTANT_P (operands[2]))
5629 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5631 out_shift_with_cnt ("lsr %0",
5632 insn, operands, len, 1);
5633 return "";
5636 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Output assembler for a 16-bit logical shift right.
   Non-NULL LEN: only store instruction count.  SCRATCH reflects a
   PARALLEL pattern (a scratch register operand %3 is available);
   LDI_OK means the destination is in LD_REGS and accepts immediates.  */
5638 const char *
5639 lshrhi3_out (rtx insn, rtx operands[], int *len)
5641 if (GET_CODE (operands[2]) == CONST_INT)
5643 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5644 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5645 int k;
/* Remember the caller's LEN (possibly NULL) for the fallback below.  */
5646 int *t = len;
5648 if (!len)
5649 len = &k;
5651 switch (INTVAL (operands[2]))
5653 default:
5654 if (INTVAL (operands[2]) < 16)
5655 break;
/* Shift >= 16: result is zero.  */
5657 *len = 2;
5658 return ("clr %B0" CR_TAB
5659 "clr %A0");
5661 case 4:
5662 if (optimize_size && scratch)
5663 break; /* 5 */
5664 if (ldi_ok)
5666 *len = 6;
5667 return ("swap %B0" CR_TAB
5668 "swap %A0" CR_TAB
5669 "andi %A0,0x0f" CR_TAB
5670 "eor %A0,%B0" CR_TAB
5671 "andi %B0,0x0f" CR_TAB
5672 "eor %A0,%B0");
5674 if (scratch)
5676 *len = 7;
5677 return ("swap %B0" CR_TAB
5678 "swap %A0" CR_TAB
5679 "ldi %3,0x0f" CR_TAB
5680 "and %A0,%3" CR_TAB
5681 "eor %A0,%B0" CR_TAB
5682 "and %B0,%3" CR_TAB
5683 "eor %A0,%B0");
5685 break; /* optimize_size ? 6 : 8 */
5687 case 5:
5688 if (optimize_size)
5689 break; /* scratch ? 5 : 6 */
5690 if (ldi_ok)
5692 *len = 8;
5693 return ("lsr %B0" CR_TAB
5694 "ror %A0" CR_TAB
5695 "swap %B0" CR_TAB
5696 "swap %A0" CR_TAB
5697 "andi %A0,0x0f" CR_TAB
5698 "eor %A0,%B0" CR_TAB
5699 "andi %B0,0x0f" CR_TAB
5700 "eor %A0,%B0");
5702 if (scratch)
5704 *len = 9;
5705 return ("lsr %B0" CR_TAB
5706 "ror %A0" CR_TAB
5707 "swap %B0" CR_TAB
5708 "swap %A0" CR_TAB
5709 "ldi %3,0x0f" CR_TAB
5710 "and %A0,%3" CR_TAB
5711 "eor %A0,%B0" CR_TAB
5712 "and %B0,%3" CR_TAB
5713 "eor %A0,%B0");
5715 break; /* 10 */
5717 case 6:
5718 if (optimize_size)
5719 break; /* scratch ? 5 : 6 */
5720 *len = 9;
5721 return ("clr __tmp_reg__" CR_TAB
5722 "lsl %A0" CR_TAB
5723 "rol %B0" CR_TAB
5724 "rol __tmp_reg__" CR_TAB
5725 "lsl %A0" CR_TAB
5726 "rol %B0" CR_TAB
5727 "rol __tmp_reg__" CR_TAB
5728 "mov %A0,%B0" CR_TAB
5729 "mov %B0,__tmp_reg__");
5731 case 7:
5732 *len = 5;
5733 return ("lsl %A0" CR_TAB
5734 "mov %A0,%B0" CR_TAB
5735 "rol %A0" CR_TAB
5736 "sbc %B0,%B0" CR_TAB
5737 "neg %B0");
5739 case 8:
5740 return *len = 2, ("mov %A0,%B1" CR_TAB
5741 "clr %B0");
5743 case 9:
5744 *len = 3;
5745 return ("mov %A0,%B0" CR_TAB
5746 "clr %B0" CR_TAB
5747 "lsr %A0");
5749 case 10:
5750 *len = 4;
5751 return ("mov %A0,%B0" CR_TAB
5752 "clr %B0" CR_TAB
5753 "lsr %A0" CR_TAB
5754 "lsr %A0");
5756 case 11:
5757 *len = 5;
5758 return ("mov %A0,%B0" CR_TAB
5759 "clr %B0" CR_TAB
5760 "lsr %A0" CR_TAB
5761 "lsr %A0" CR_TAB
5762 "lsr %A0");
5764 case 12:
5765 if (ldi_ok)
5767 *len = 4;
5768 return ("mov %A0,%B0" CR_TAB
5769 "clr %B0" CR_TAB
5770 "swap %A0" CR_TAB
5771 "andi %A0,0x0f");
5773 if (scratch)
5775 *len = 5;
5776 return ("mov %A0,%B0" CR_TAB
5777 "clr %B0" CR_TAB
5778 "swap %A0" CR_TAB
5779 "ldi %3,0x0f" CR_TAB
5780 "and %A0,%3");
5782 *len = 6;
5783 return ("mov %A0,%B0" CR_TAB
5784 "clr %B0" CR_TAB
5785 "lsr %A0" CR_TAB
5786 "lsr %A0" CR_TAB
5787 "lsr %A0" CR_TAB
5788 "lsr %A0");
5790 case 13:
5791 if (ldi_ok)
5793 *len = 5;
5794 return ("mov %A0,%B0" CR_TAB
5795 "clr %B0" CR_TAB
5796 "swap %A0" CR_TAB
5797 "lsr %A0" CR_TAB
5798 "andi %A0,0x07");
5800 if (AVR_HAVE_MUL && scratch)
5802 *len = 5;
/* Multiply by 8 == 1 << 3; the product's high byte (r1) is the
   16-bit value shifted right by 13.  */
5803 return ("ldi %3,0x08" CR_TAB
5804 "mul %B0,%3" CR_TAB
5805 "mov %A0,r1" CR_TAB
5806 "clr %B0" CR_TAB
5807 "clr __zero_reg__");
5809 if (optimize_size && scratch)
5810 break; /* 5 */
5811 if (scratch)
5813 *len = 6;
5814 return ("mov %A0,%B0" CR_TAB
5815 "clr %B0" CR_TAB
5816 "swap %A0" CR_TAB
5817 "lsr %A0" CR_TAB
5818 "ldi %3,0x07" CR_TAB
5819 "and %A0,%3");
5821 if (AVR_HAVE_MUL)
5823 *len = 6;
/* Build the constant 8 in r1 via T flag, then multiply.  */
5824 return ("set" CR_TAB
5825 "bld r1,3" CR_TAB
5826 "mul %B0,r1" CR_TAB
5827 "mov %A0,r1" CR_TAB
5828 "clr %B0" CR_TAB
5829 "clr __zero_reg__");
5831 *len = 7;
5832 return ("mov %A0,%B0" CR_TAB
5833 "clr %B0" CR_TAB
5834 "lsr %A0" CR_TAB
5835 "lsr %A0" CR_TAB
5836 "lsr %A0" CR_TAB
5837 "lsr %A0" CR_TAB
5838 "lsr %A0");
5840 case 14:
5841 if (AVR_HAVE_MUL && ldi_ok)
5843 *len = 5;
5844 return ("ldi %A0,0x04" CR_TAB
5845 "mul %B0,%A0" CR_TAB
5846 "mov %A0,r1" CR_TAB
5847 "clr %B0" CR_TAB
5848 "clr __zero_reg__");
5850 if (AVR_HAVE_MUL && scratch)
5852 *len = 5;
5853 return ("ldi %3,0x04" CR_TAB
5854 "mul %B0,%3" CR_TAB
5855 "mov %A0,r1" CR_TAB
5856 "clr %B0" CR_TAB
5857 "clr __zero_reg__");
5859 if (optimize_size && ldi_ok)
5861 *len = 5;
/* Small loop: 6 iterations of lsr, counted down in %B0.  */
5862 return ("mov %A0,%B0" CR_TAB
5863 "ldi %B0,6" "\n1:\t"
5864 "lsr %A0" CR_TAB
5865 "dec %B0" CR_TAB
5866 "brne 1b");
5868 if (optimize_size && scratch)
5869 break; /* 5 */
5870 *len = 6;
5871 return ("clr %A0" CR_TAB
5872 "lsl %B0" CR_TAB
5873 "rol %A0" CR_TAB
5874 "lsl %B0" CR_TAB
5875 "rol %A0" CR_TAB
5876 "clr %B0");
5878 case 15:
/* Shift by 15: only the former bit 15 survives, in bit 0.  */
5879 *len = 4;
5880 return ("clr %A0" CR_TAB
5881 "lsl %B0" CR_TAB
5882 "rol %A0" CR_TAB
5883 "clr %B0");
/* Restore the caller's (possibly NULL) LEN before the generic path.  */
5885 len = t;
5887 out_shift_with_cnt ("lsr %B0" CR_TAB
5888 "ror %A0", insn, operands, len, 2);
5889 return "";
5893 /* 24-bit logic shift right */
/* Output assembler for a 24-bit (PSImode) logical shift right:
   op[0] = op[1] >> op[2] (unsigned).  PLEN follows the avr_asm_len
   convention: non-NULL means count instructions only.  */
5895 const char*
5896 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5898 int dest = REGNO (op[0]);
5899 int src = REGNO (op[1]);
5901 if (CONST_INT_P (op[2]))
5903 if (plen)
5904 *plen = 0;
5906 switch (INTVAL (op[2]))
5908 case 8:
/* Copy order depends on register ordering — presumably to survive
   overlapping source/destination; TODO confirm against avr.md.  */
5909 if (dest <= src)
5910 return avr_asm_len ("mov %A0,%B1" CR_TAB
5911 "mov %B0,%C1" CR_TAB
5912 "clr %C0", op, plen, 3);
5913 else
5914 return avr_asm_len ("clr %C0" CR_TAB
5915 "mov %B0,%C1" CR_TAB
5916 "mov %A0,%B1", op, plen, 3);
5918 case 16:
5919 if (dest != src + 2)
5920 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5922 return avr_asm_len ("clr %B0" CR_TAB
5923 "clr %C0", op, plen, 2);
5925 default:
5926 if (INTVAL (op[2]) < 24)
5927 break;
5929 /* fall through */
5931 case 23:
/* Shift >= 23: only the former bit 23 can survive, in bit 0.  */
5932 return avr_asm_len ("clr %A0" CR_TAB
5933 "sbrc %C0,7" CR_TAB
5934 "inc %A0" CR_TAB
5935 "clr %B0" CR_TAB
5936 "clr %C0", op, plen, 5);
5937 } /* switch */
5940 out_shift_with_cnt ("lsr %C0" CR_TAB
5941 "ror %B0" CR_TAB
5942 "ror %A0", insn, op, plen, 3);
5943 return "";
5947 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Output assembler for a 32-bit logical shift right.
   Non-NULL LEN: only store instruction count.  Byte-multiple shifts and
   count 31 are special-cased; others use out_shift_with_cnt.  */
5949 const char *
5950 lshrsi3_out (rtx insn, rtx operands[], int *len)
5952 if (GET_CODE (operands[2]) == CONST_INT)
5954 int k;
5955 int *t = len;
5957 if (!len)
5958 len = &k;
5960 switch (INTVAL (operands[2]))
5962 default:
5963 if (INTVAL (operands[2]) < 32)
5964 break;
/* Shift >= 32: result is zero.  */
5966 if (AVR_HAVE_MOVW)
5967 return *len = 3, ("clr %D0" CR_TAB
5968 "clr %C0" CR_TAB
5969 "movw %A0,%C0");
5970 *len = 4;
5971 return ("clr %D0" CR_TAB
5972 "clr %C0" CR_TAB
5973 "clr %B0" CR_TAB
5974 "clr %A0");
5976 case 8:
5978 int reg0 = true_regnum (operands[0]);
5979 int reg1 = true_regnum (operands[1]);
5980 *len = 4;
/* Byte-copy direction chosen by register order — presumably for
   overlapping operands; TODO confirm.  */
5981 if (reg0 <= reg1)
5982 return ("mov %A0,%B1" CR_TAB
5983 "mov %B0,%C1" CR_TAB
5984 "mov %C0,%D1" CR_TAB
5985 "clr %D0");
5986 else
5987 return ("clr %D0" CR_TAB
5988 "mov %C0,%D1" CR_TAB
5989 "mov %B0,%C1" CR_TAB
5990 "mov %A0,%B1");
5993 case 16:
5995 int reg0 = true_regnum (operands[0]);
5996 int reg1 = true_regnum (operands[1]);
5998 if (reg0 == reg1 + 2)
5999 return *len = 2, ("clr %C0" CR_TAB
6000 "clr %D0");
6001 if (AVR_HAVE_MOVW)
6002 return *len = 3, ("movw %A0,%C1" CR_TAB
6003 "clr %C0" CR_TAB
6004 "clr %D0");
6005 else
6006 return *len = 4, ("mov %B0,%D1" CR_TAB
6007 "mov %A0,%C1" CR_TAB
6008 "clr %C0" CR_TAB
6009 "clr %D0");
6012 case 24:
6013 return *len = 4, ("mov %A0,%D1" CR_TAB
6014 "clr %B0" CR_TAB
6015 "clr %C0" CR_TAB
6016 "clr %D0");
6018 case 31:
/* Shift by 31: only the former sign bit survives, in bit 0.  */
6019 *len = 6;
6020 return ("clr %A0" CR_TAB
6021 "sbrc %D0,7" CR_TAB
6022 "inc %A0" CR_TAB
6023 "clr %B0" CR_TAB
6024 "clr %C0" CR_TAB
6025 "clr %D0");
/* Restore the caller's (possibly NULL) LEN before the generic path.  */
6027 len = t;
6029 out_shift_with_cnt ("lsr %D0" CR_TAB
6030 "ror %C0" CR_TAB
6031 "ror %B0" CR_TAB
6032 "ror %A0", insn, operands, len, 4);
6033 return "";
6037 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6039 XOP[0] = XOP[0] + XOP[2]
6041 and return "". If PLEN == NULL, print assembler instructions to perform the
6042 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6043 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
6044 CODE == PLUS: perform addition by using ADD instructions.
6045 CODE == MINUS: perform addition by using SUB instructions.
6046 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
6048 static void
6049 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
6051 /* MODE of the operation. */
6052 enum machine_mode mode = GET_MODE (xop[0]);
6054 /* Number of bytes to operate on. */
6055 int i, n_bytes = GET_MODE_SIZE (mode);
6057 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6058 int clobber_val = -1;
6060 /* op[0]: 8-bit destination register
6061 op[1]: 8-bit const int
6062 op[2]: 8-bit scratch register */
6063 rtx op[3];
6065 /* Started the operation? Before starting the operation we may skip
6066 adding 0. This is no more true after the operation started because
6067 carry must be taken into account. */
6068 bool started = false;
6070 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6071 rtx xval = xop[2];
6073 /* Except in the case of ADIW with 16-bit register (see below)
6074 addition does not set cc0 in a usable way. */
6076 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
/* For MINUS, negate the constant so the same byte loop applies.  */
6078 if (MINUS == code)
6079 xval = simplify_unary_operation (NEG, mode, xval, mode);
6081 op[2] = xop[3];
6083 if (plen)
6084 *plen = 0;
/* Process the constant byte by byte, least significant first, so the
   carry chain matches the hardware's ADD/ADC (or SUB/SBC) order.  */
6086 for (i = 0; i < n_bytes; i++)
6088 /* We operate byte-wise on the destination. */
6089 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6090 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
6092 /* 8-bit value to operate with this byte. */
6093 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6095 /* Registers R16..R31 can operate with immediate. */
6096 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6098 op[0] = reg8;
6099 op[1] = gen_int_mode (val8, QImode);
6101 /* To get usable cc0 no low-bytes must have been skipped. */
6103 if (i && !started)
6104 *pcc = CC_CLOBBER;
6106 if (!started
6107 && i % 2 == 0
6108 && i + 2 <= n_bytes
6109 && test_hard_reg_class (ADDW_REGS, reg8))
6111 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
6112 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
6114 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6115 i.e. operate word-wise. */
6117 if (val16 < 64)
6119 if (val16 != 0)
6121 started = true;
6122 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
6123 op, plen, 1);
6125 if (n_bytes == 2 && PLUS == code)
6126 *pcc = CC_SET_ZN;
/* A word was consumed; skip the odd byte of this pair.  */
6129 i++;
6130 continue;
6134 if (val8 == 0)
6136 if (started)
6137 avr_asm_len (code == PLUS
6138 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6139 op, plen, 1);
6140 continue;
6142 else if ((val8 == 1 || val8 == 0xff)
6143 && !started
6144 && i == n_bytes - 1)
/* Only the top byte changes by +/-1: a single INC or DEC does it.  */
6146 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
6147 op, plen, 1);
6148 break;
6151 switch (code)
6153 case PLUS:
/* No ADDI on AVR: immediates must go through the scratch register.  */
6155 gcc_assert (plen != NULL || REG_P (op[2]));
6157 if (clobber_val != (int) val8)
6158 avr_asm_len ("ldi %2,%1", op, plen, 1);
6159 clobber_val = (int) val8;
6161 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
6163 break; /* PLUS */
6165 case MINUS:
6167 if (ld_reg_p)
6168 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
6169 else
6171 gcc_assert (plen != NULL || REG_P (op[2]));
6173 if (clobber_val != (int) val8)
6174 avr_asm_len ("ldi %2,%1", op, plen, 1);
6175 clobber_val = (int) val8;
6177 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
6180 break; /* MINUS */
6182 default:
6183 /* Unknown code */
6184 gcc_unreachable();
6187 started = true;
6189 } /* for all sub-bytes */
6191 /* No output doesn't change cc0. */
6193 if (plen && *plen == 0)
6194 *pcc = CC_NONE;
6198 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6200 XOP[0] = XOP[0] + XOP[2]
6202 and return "". If PLEN == NULL, print assembler instructions to perform the
6203 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6204 words) printed with PLEN == NULL.
6205 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
6206 condition code (with respect to XOP[0]). */
6208 const char*
6209 avr_out_plus (rtx *xop, int *plen, int *pcc)
6211 int len_plus, len_minus;
6212 int cc_plus, cc_minus, cc_dummy;
6214 if (!pcc)
6215 pcc = &cc_dummy;
6217 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6219 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
6220 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
6222 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6224 if (plen)
6226 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6227 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6229 else if (len_minus <= len_plus)
6230 avr_out_plus_1 (xop, NULL, MINUS, pcc);
6231 else
6232 avr_out_plus_1 (xop, NULL, PLUS, pcc);
6234 return "";
6238 /* Same as above but XOP has just 3 entries.
6239 Supply a dummy 4th operand. */
6241 const char*
6242 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6244 rtx op[4];
6246 op[0] = xop[0];
6247 op[1] = xop[1];
6248 op[2] = xop[2];
6249 op[3] = NULL_RTX;
6251 return avr_out_plus (op, plen, pcc);
6255 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6257 const char*
6258 avr_out_plus64 (rtx addend, int *plen)
6260 int cc_dummy;
6261 rtx op[4];
6263 op[0] = gen_rtx_REG (DImode, 18);
6264 op[1] = op[0];
6265 op[2] = addend;
6266 op[3] = NULL_RTX;
6268 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6270 return "";
6273 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6274 time constant XOP[2]:
6276 XOP[0] = XOP[0] <op> XOP[2]
6278 and return "". If PLEN == NULL, print assembler instructions to perform the
6279 operation; otherwise, set *PLEN to the length of the instruction sequence
6280 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6281 register or SCRATCH if no clobber register is needed for the operation. */
6283 const char*
6284 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6286 /* CODE and MODE of the operation. */
6287 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6288 enum machine_mode mode = GET_MODE (xop[0]);
6290 /* Number of bytes to operate on. */
6291 int i, n_bytes = GET_MODE_SIZE (mode);
6293 /* Value of T-flag (0 or 1) or -1 if unknown. */
6294 int set_t = -1;
6296 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6297 int clobber_val = -1;
6299 /* op[0]: 8-bit destination register
6300 op[1]: 8-bit const int
6301 op[2]: 8-bit clobber register or SCRATCH
6302 op[3]: 8-bit register containing 0xff or NULL_RTX */
6303 rtx op[4];
6305 op[2] = xop[3];
6306 op[3] = NULL_RTX;
6308 if (plen)
6309 *plen = 0;
/* Handle the constant one byte at a time; each byte picks the cheapest
   instruction based on its popcount and the destination register class.  */
6311 for (i = 0; i < n_bytes; i++)
6313 /* We operate byte-wise on the destination. */
6314 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6315 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6317 /* 8-bit value to operate with this byte. */
6318 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6320 /* Number of bits set in the current byte of the constant. */
6321 int pop8 = avr_popcount (val8);
6323 /* Registers R16..R31 can operate with immediate. */
6324 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6326 op[0] = reg8;
6327 op[1] = GEN_INT (val8);
6329 switch (code)
6331 case IOR:
6333 if (0 == pop8)
6334 continue;
6335 else if (ld_reg_p)
6336 avr_asm_len ("ori %0,%1", op, plen, 1);
6337 else if (1 == pop8)
/* Exactly one bit to set: use the T flag and BLD.  */
6339 if (set_t != 1)
6340 avr_asm_len ("set", op, plen, 1);
6341 set_t = 1;
6343 op[1] = GEN_INT (exact_log2 (val8));
6344 avr_asm_len ("bld %0,%1", op, plen, 1);
6346 else if (8 == pop8)
/* Byte becomes 0xff; reuse a register already holding 0xff if any.  */
6348 if (op[3] != NULL_RTX)
6349 avr_asm_len ("mov %0,%3", op, plen, 1);
6350 else
6351 avr_asm_len ("clr %0" CR_TAB
6352 "dec %0", op, plen, 2);
6354 op[3] = op[0];
6356 else
6358 if (clobber_val != (int) val8)
6359 avr_asm_len ("ldi %2,%1", op, plen, 1);
6360 clobber_val = (int) val8;
6362 avr_asm_len ("or %0,%2", op, plen, 1);
6365 continue; /* IOR */
6367 case AND:
6369 if (8 == pop8)
6370 continue;
6371 else if (0 == pop8)
6372 avr_asm_len ("clr %0", op, plen, 1);
6373 else if (ld_reg_p)
6374 avr_asm_len ("andi %0,%1", op, plen, 1);
6375 else if (7 == pop8)
/* Exactly one bit to clear: use a cleared T flag and BLD.  */
6377 if (set_t != 0)
6378 avr_asm_len ("clt", op, plen, 1);
6379 set_t = 0;
6381 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6382 avr_asm_len ("bld %0,%1", op, plen, 1);
6384 else
6386 if (clobber_val != (int) val8)
6387 avr_asm_len ("ldi %2,%1", op, plen, 1);
6388 clobber_val = (int) val8;
6390 avr_asm_len ("and %0,%2", op, plen, 1);
6393 continue; /* AND */
6395 case XOR:
6397 if (0 == pop8)
6398 continue;
6399 else if (8 == pop8)
6400 avr_asm_len ("com %0", op, plen, 1);
6401 else if (ld_reg_p && val8 == (1 << 7))
/* XOR with 0x80 flips only the sign bit: SUBI 0x80 does the same.  */
6402 avr_asm_len ("subi %0,%1", op, plen, 1);
6403 else
6405 if (clobber_val != (int) val8)
6406 avr_asm_len ("ldi %2,%1", op, plen, 1);
6407 clobber_val = (int) val8;
6409 avr_asm_len ("eor %0,%2", op, plen, 1);
6412 continue; /* XOR */
6414 default:
6415 /* Unknown rtx_code */
6416 gcc_unreachable();
6418 } /* for all sub-bytes */
6420 return "";
6424 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6425 PLEN != NULL: Set *PLEN to the length of that sequence.
6426 Return "". */
6428 const char*
6429 avr_out_addto_sp (rtx *op, int *plen)
6431 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6432 int addend = INTVAL (op[0]);
6434 if (plen)
6435 *plen = 0;
6437 if (addend < 0)
6439 if (flag_verbose_asm || flag_print_asm_name)
6440 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6442 while (addend <= -pc_len)
6444 addend += pc_len;
6445 avr_asm_len ("rcall .", op, plen, 1);
6448 while (addend++ < 0)
6449 avr_asm_len ("push __zero_reg__", op, plen, 1);
6451 else if (addend > 0)
6453 if (flag_verbose_asm || flag_print_asm_name)
6454 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6456 while (addend-- > 0)
6457 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6460 return "";
6464 /* Create RTL split patterns for byte sized rotate expressions. This
6465 produces a series of move instructions and considers overlap situations.
6466 Overlapping non-HImode operands need a scratch register. */
6468 bool
6469 avr_rotate_bytes (rtx operands[])
6471 int i, j;
6472 enum machine_mode mode = GET_MODE (operands[0]);
6473 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6474 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6475 int num = INTVAL (operands[2]);
6476 rtx scratch = operands[3];
6477 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6478 Word move if no scratch is needed, otherwise use size of scratch. */
6479 enum machine_mode move_mode = QImode;
6480 int move_size, offset, size;
6482 if (num & 0xf)
6483 move_mode = QImode;
6484 else if ((mode == SImode && !same_reg) || !overlapped)
6485 move_mode = HImode;
6486 else
6487 move_mode = GET_MODE (scratch);
6489 /* Force DI rotate to use QI moves since other DI moves are currently split
6490 into QI moves so forward propagation works better. */
6491 if (mode == DImode)
6492 move_mode = QImode;
6493 /* Make scratch smaller if needed. */
6494 if (SCRATCH != GET_CODE (scratch)
6495 && HImode == GET_MODE (scratch)
6496 && QImode == move_mode)
6497 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6499 move_size = GET_MODE_SIZE (move_mode);
6500 /* Number of bytes/words to rotate. */
6501 offset = (num >> 3) / move_size;
6502 /* Number of moves needed. */
6503 size = GET_MODE_SIZE (mode) / move_size;
6504 /* HImode byte swap is a special case to avoid a scratch register. */
6505 if (mode == HImode && same_reg)
6507 /* HImode byte swap, using xor. This is as quick as using scratch. */
6508 rtx src, dst;
6509 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6510 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6511 if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap of the two bytes; no temporary needed.  */
6513 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6514 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6515 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6518 else
6520 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6521 /* Create linked list of moves to determine move order. */
6522 struct {
6523 rtx src, dst;
6524 int links;
6525 } move[MAX_SIZE + 8];
6526 int blocked, moves;
6528 gcc_assert (size <= MAX_SIZE);
6529 /* Generate list of subreg moves. */
6530 for (i = 0; i < size; i++)
6532 int from = i;
6533 int to = (from + offset) % size;
6534 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6535 mode, from * move_size);
6536 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6537 mode, to * move_size);
6538 move[i].links = -1;
6540 /* Mark dependence where a dst of one move is the src of another move.
6541 The first move is a conflict as it must wait until second is
6542 performed. We ignore moves to self - we catch this later. */
6543 if (overlapped)
6544 for (i = 0; i < size; i++)
6545 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6546 for (j = 0; j < size; j++)
6547 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6549 /* The dst of move i is the src of move j. */
6550 move[i].links = j;
6551 break;
6554 blocked = -1;
6555 moves = 0;
6556 /* Go through move list and perform non-conflicting moves. As each
6557 non-overlapping move is made, it may remove other conflicts
6558 so the process is repeated until no conflicts remain. */
6561 blocked = -1;
6562 moves = 0;
6563 /* Emit move where dst is not also a src or we have used that
6564 src already. */
6565 for (i = 0; i < size; i++)
6566 if (move[i].src != NULL_RTX)
6568 if (move[i].links == -1
6569 || move[move[i].links].src == NULL_RTX)
6571 moves++;
6572 /* Ignore NOP moves to self. */
6573 if (!rtx_equal_p (move[i].dst, move[i].src))
6574 emit_move_insn (move[i].dst, move[i].src);
6576 /* Remove conflict from list. */
6577 move[i].src = NULL_RTX;
6579 else
6580 blocked = i;
6583 /* Check for deadlock. This is when no moves occurred and we have
6584 at least one blocked move. */
6585 if (moves == 0 && blocked != -1)
6587 /* Need to use scratch register to break deadlock.
6588 Add move to put dst of blocked move into scratch.
6589 When this move occurs, it will break chain deadlock.
6590 The scratch register is substituted for real move. */
6592 gcc_assert (SCRATCH != GET_CODE (scratch));
6594 move[size].src = move[blocked].dst;
6595 move[size].dst = scratch;
6596 /* Scratch move is never blocked. */
6597 move[size].links = -1;
6598 /* Make sure we have valid link. */
6599 gcc_assert (move[blocked].links != -1);
6600 /* Replace src of blocking move with scratch reg. */
6601 move[move[blocked].links].src = scratch;
6602 /* Make dependent on scratch move occurring. */
6603 move[blocked].links = size;
6604 size=size+1;
/* Loop until no move remained blocked in the last pass.  */
6607 while (blocked != -1);
6609 return true;
6612 /* Modifies the length assigned to instruction INSN
6613 LEN is the initially computed length of the insn. */
/* Dispatcher for the "adjust_len" insn attribute: re-runs the relevant
   output function in length-query mode and returns the corrected length.
   Insns with adjust_len == "no" (or unrecognizable insns) keep LEN.  */
6616 adjust_insn_length (rtx insn, int len)
6618 rtx *op = recog_data.operand;
6619 enum attr_adjust_len adjust_len;
6621 /* Some complex insns don't need length adjustment and therefore
6622 the length need not/must not be adjusted for these insns.
6623 It is easier to state this in an insn attribute "adjust_len" than
6624 to clutter up code here... */
6626 if (-1 == recog_memoized (insn))
6628 return len;
6631 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6633 adjust_len = get_attr_adjust_len (insn);
6635 if (adjust_len == ADJUST_LEN_NO)
6637 /* Nothing to adjust: The length from attribute "length" is fine.
6638 This is the default. */
6640 return len;
6643 /* Extract insn's operands. */
6645 extract_constrain_insn_cached (insn);
6647 /* Dispatch to right function. */
/* Each case calls the corresponding output function with a non-NULL
   length pointer, so it only counts instructions into LEN.  */
6649 switch (adjust_len)
6651 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6652 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6653 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6655 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6657 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6658 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6659 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6660 avr_out_plus_noclobber (op, &len, NULL); break;
6662 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6664 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6665 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6666 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6667 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6668 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6669 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6671 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6672 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6673 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6674 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6675 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6677 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6678 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6679 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6681 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6682 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6683 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6685 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6686 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6687 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6689 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6690 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6691 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6693 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6695 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
6697 default:
6698 gcc_unreachable();
6701 return len;
6704 /* Return nonzero if register REG dead after INSN. */
6707 reg_unused_after (rtx insn, rtx reg)
6709 return (dead_or_set_p (insn, reg)
6710 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6713 /* Return nonzero if REG is not used after INSN.
6714 We assume REG is a reload reg, and therefore does
6715 not live past labels. It may live past calls or jumps though. */
6718 _reg_unused_after (rtx insn, rtx reg)
6720 enum rtx_code code;
6721 rtx set;
6723 /* If the reg is set by this instruction, then it is safe for our
6724 case. Disregard the case where this is a store to memory, since
6725 we are checking a register used in the store address. */
6726 set = single_set (insn);
6727 if (set && GET_CODE (SET_DEST (set)) != MEM
6728 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6729 return 1;
6731 while ((insn = NEXT_INSN (insn)))
6733 rtx set;
6734 code = GET_CODE (insn);
6736 #if 0
6737 /* If this is a label that existed before reload, then the register
6738 if dead here. However, if this is a label added by reorg, then
6739 the register may still be live here. We can't tell the difference,
6740 so we just ignore labels completely. */
6741 if (code == CODE_LABEL)
6742 return 1;
6743 /* else */
6744 #endif
6746 if (!INSN_P (insn))
6747 continue;
6749 if (code == JUMP_INSN)
6750 return 0;
6752 /* If this is a sequence, we must handle them all at once.
6753 We could have for instance a call that sets the target register,
6754 and an insn in a delay slot that uses the register. In this case,
6755 we must return 0. */
6756 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6758 int i;
6759 int retval = 0;
6761 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6763 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6764 rtx set = single_set (this_insn);
6766 if (GET_CODE (this_insn) == CALL_INSN)
6767 code = CALL_INSN;
6768 else if (GET_CODE (this_insn) == JUMP_INSN)
6770 if (INSN_ANNULLED_BRANCH_P (this_insn))
6771 return 0;
6772 code = JUMP_INSN;
6775 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6776 return 0;
6777 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6779 if (GET_CODE (SET_DEST (set)) != MEM)
6780 retval = 1;
6781 else
6782 return 0;
6784 if (set == 0
6785 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6786 return 0;
6788 if (retval == 1)
6789 return 1;
6790 else if (code == JUMP_INSN)
6791 return 0;
6794 if (code == CALL_INSN)
6796 rtx tem;
6797 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6798 if (GET_CODE (XEXP (tem, 0)) == USE
6799 && REG_P (XEXP (XEXP (tem, 0), 0))
6800 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6801 return 0;
6802 if (call_used_regs[REGNO (reg)])
6803 return 1;
6806 set = single_set (insn);
6808 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6809 return 0;
6810 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6811 return GET_CODE (SET_DEST (set)) != MEM;
6812 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6813 return 0;
6815 return 1;
6819 /* Return RTX that represents the lower 16 bits of a constant address.
6820 Unfortunately, simplify_gen_subreg does not handle this case. */
6822 static rtx
6823 avr_const_address_lo16 (rtx x)
6825 rtx lo16;
6827 switch (GET_CODE (x))
6829 default:
6830 break;
6832 case CONST:
6833 if (PLUS == GET_CODE (XEXP (x, 0))
6834 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6835 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6837 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6838 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6840 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6841 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6843 return lo16;
6846 break;
6848 case SYMBOL_REF:
6850 const char *name = XSTR (x, 0);
6852 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6856 avr_edump ("\n%?: %r\n", x);
6857 gcc_unreachable();
6861 /* Target hook for assembling integer objects. The AVR version needs
6862 special handling for references to certain labels. */
6864 static bool
6865 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6867 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6868 && text_segment_operand (x, VOIDmode) )
6870 fputs ("\t.word\tgs(", asm_out_file);
6871 output_addr_const (asm_out_file, x);
6872 fputs (")\n", asm_out_file);
6874 return true;
6876 else if (GET_MODE (x) == PSImode)
6878 default_assemble_integer (avr_const_address_lo16 (x),
6879 GET_MODE_SIZE (HImode), aligned_p);
6881 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6882 " extension for hh8(", asm_out_file);
6883 output_addr_const (asm_out_file, x);
6884 fputs (")\"\n", asm_out_file);
6886 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6887 output_addr_const (asm_out_file, x);
6888 fputs (")\n", asm_out_file);
6890 return true;
6893 return default_assemble_integer (x, size, aligned_p);
6897 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6899 void
6900 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6903 /* If the function has the 'signal' or 'interrupt' attribute, test to
6904 make sure that the name of the function is "__vector_NN" so as to
6905 catch when the user misspells the interrupt vector name. */
6907 if (cfun->machine->is_interrupt)
6909 if (!STR_PREFIX_P (name, "__vector"))
6911 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6912 "%qs appears to be a misspelled interrupt handler",
6913 name);
6916 else if (cfun->machine->is_signal)
6918 if (!STR_PREFIX_P (name, "__vector"))
6920 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6921 "%qs appears to be a misspelled signal handler",
6922 name);
6926 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6927 ASM_OUTPUT_LABEL (file, name);
6931 /* Return value is nonzero if pseudos that have been
6932 assigned to registers of class CLASS would likely be spilled
6933 because registers of CLASS are needed for spill registers. */
6935 static bool
6936 avr_class_likely_spilled_p (reg_class_t c)
6938 return (c != ALL_REGS && c != ADDW_REGS);
6941 /* Valid attributes:
6942 progmem - put data to program memory;
6943 signal - make a function to be hardware interrupt. After function
6944 prologue interrupts are disabled;
6945 interrupt - make a function to be hardware interrupt. After function
6946 prologue interrupts are enabled;
6947 naked - don't generate function prologue/epilogue and `ret' command.
6949 Only `progmem' attribute valid for type. */
6951 /* Handle a "progmem" attribute; arguments as in
6952 struct attribute_spec.handler. */
6953 static tree
6954 avr_handle_progmem_attribute (tree *node, tree name,
6955 tree args ATTRIBUTE_UNUSED,
6956 int flags ATTRIBUTE_UNUSED,
6957 bool *no_add_attrs)
6959 if (DECL_P (*node))
6961 if (TREE_CODE (*node) == TYPE_DECL)
6963 /* This is really a decl attribute, not a type attribute,
6964 but try to handle it for GCC 3.0 backwards compatibility. */
6966 tree type = TREE_TYPE (*node);
6967 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6968 tree newtype = build_type_attribute_variant (type, attr);
6970 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6971 TREE_TYPE (*node) = newtype;
6972 *no_add_attrs = true;
6974 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6976 *no_add_attrs = false;
6978 else
6980 warning (OPT_Wattributes, "%qE attribute ignored",
6981 name);
6982 *no_add_attrs = true;
6986 return NULL_TREE;
6989 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6990 struct attribute_spec.handler. */
6992 static tree
6993 avr_handle_fndecl_attribute (tree *node, tree name,
6994 tree args ATTRIBUTE_UNUSED,
6995 int flags ATTRIBUTE_UNUSED,
6996 bool *no_add_attrs)
6998 if (TREE_CODE (*node) != FUNCTION_DECL)
7000 warning (OPT_Wattributes, "%qE attribute only applies to functions",
7001 name);
7002 *no_add_attrs = true;
7005 return NULL_TREE;
7008 static tree
7009 avr_handle_fntype_attribute (tree *node, tree name,
7010 tree args ATTRIBUTE_UNUSED,
7011 int flags ATTRIBUTE_UNUSED,
7012 bool *no_add_attrs)
7014 if (TREE_CODE (*node) != FUNCTION_TYPE)
7016 warning (OPT_Wattributes, "%qE attribute only applies to functions",
7017 name);
7018 *no_add_attrs = true;
7021 return NULL_TREE;
7025 /* AVR attributes. */
7026 static const struct attribute_spec
7027 avr_attribute_table[] =
7029 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7030 affects_type_identity } */
7031 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
7032 false },
7033 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
7034 false },
7035 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
7036 false },
7037 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
7038 false },
7039 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
7040 false },
7041 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
7042 false },
7043 { NULL, 0, 0, false, false, false, NULL, false }
7047 /* Look if DECL shall be placed in program memory space by
7048 means of attribute `progmem' or some address-space qualifier.
7049 Return non-zero if DECL is data that must end up in Flash and
7050 zero if the data lives in RAM (.bss, .data, .rodata, ...).
7052 Return 2 if DECL is located in 24-bit flash address-space
7053 Return 1 if DECL is located in 16-bit flash address-space
7054 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
7055 Return 0 otherwise */
7058 avr_progmem_p (tree decl, tree attributes)
7060 tree a;
7062 if (TREE_CODE (decl) != VAR_DECL)
7063 return 0;
7065 if (avr_decl_memx_p (decl))
7066 return 2;
7068 if (avr_decl_flash_p (decl))
7069 return 1;
7071 if (NULL_TREE
7072 != lookup_attribute ("progmem", attributes))
7073 return -1;
7075 a = decl;
7078 a = TREE_TYPE(a);
7079 while (TREE_CODE (a) == ARRAY_TYPE);
7081 if (a == error_mark_node)
7082 return 0;
7084 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
7085 return -1;
7087 return 0;
7091 /* Scan type TYP for pointer references to address space ASn.
7092 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
7093 the AS are also declared to be CONST.
7094 Otherwise, return the respective addres space, i.e. a value != 0. */
7096 static addr_space_t
7097 avr_nonconst_pointer_addrspace (tree typ)
7099 while (ARRAY_TYPE == TREE_CODE (typ))
7100 typ = TREE_TYPE (typ);
7102 if (POINTER_TYPE_P (typ))
7104 tree target = TREE_TYPE (typ);
7106 /* Pointer to function: Test the function's return type. */
7108 if (FUNCTION_TYPE == TREE_CODE (target))
7109 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
7111 /* "Ordinary" pointers... */
7113 while (TREE_CODE (target) == ARRAY_TYPE)
7114 target = TREE_TYPE (target);
7116 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
7117 && !TYPE_READONLY (target))
7119 /* Pointers to non-generic address space must be const. */
7121 return TYPE_ADDR_SPACE (target);
7124 /* Scan pointer's target type. */
7126 return avr_nonconst_pointer_addrspace (target);
7129 return ADDR_SPACE_GENERIC;
7133 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
7134 go along with CONST qualifier. Writing to these address spaces should
7135 be detected and complained about as early as possible. */
7137 static bool
7138 avr_pgm_check_var_decl (tree node)
7140 const char *reason = NULL;
7142 addr_space_t as = ADDR_SPACE_GENERIC;
7144 gcc_assert (as == 0);
7146 if (avr_log.progmem)
7147 avr_edump ("%?: %t\n", node);
7149 switch (TREE_CODE (node))
7151 default:
7152 break;
7154 case VAR_DECL:
7155 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7156 reason = "variable";
7157 break;
7159 case PARM_DECL:
7160 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7161 reason = "function parameter";
7162 break;
7164 case FIELD_DECL:
7165 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7166 reason = "structure field";
7167 break;
7169 case FUNCTION_DECL:
7170 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
7172 reason = "return type of function";
7173 break;
7175 case POINTER_TYPE:
7176 if (as = avr_nonconst_pointer_addrspace (node), as)
7177 reason = "pointer";
7178 break;
7181 if (reason)
7183 if (TYPE_P (node))
7184 error ("pointer targeting address space %qs must be const in %qT",
7185 avr_addrspace[as].name, node);
7186 else
7187 error ("pointer targeting address space %qs must be const in %s %q+D",
7188 avr_addrspace[as].name, reason, node);
7191 return reason == NULL;
7195 /* Add the section attribute if the variable is in progmem. */
7197 static void
7198 avr_insert_attributes (tree node, tree *attributes)
7200 avr_pgm_check_var_decl (node);
7202 if (TREE_CODE (node) == VAR_DECL
7203 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
7204 && avr_progmem_p (node, *attributes))
7206 tree node0 = node;
7208 /* For C++, we have to peel arrays in order to get correct
7209 determination of readonlyness. */
7212 node0 = TREE_TYPE (node0);
7213 while (TREE_CODE (node0) == ARRAY_TYPE);
7215 if (error_mark_node == node0)
7216 return;
7218 if (!TYPE_READONLY (node0)
7219 && !TREE_READONLY (node))
7221 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
7222 const char *reason = "__attribute__((progmem))";
7224 if (!ADDR_SPACE_GENERIC_P (as))
7225 reason = avr_addrspace[as].name;
7227 if (avr_log.progmem)
7228 avr_edump ("\n%?: %t\n%t\n", node, node0);
7230 error ("variable %q+D must be const in order to be put into"
7231 " read-only section by means of %qs", node, reason);
7237 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7238 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7239 /* Track need of __do_clear_bss. */
7241 void
7242 avr_asm_output_aligned_decl_common (FILE * stream,
7243 const_tree decl ATTRIBUTE_UNUSED,
7244 const char *name,
7245 unsigned HOST_WIDE_INT size,
7246 unsigned int align, bool local_p)
7248 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7249 There is no need to trigger __do_clear_bss code for them. */
7251 if (!STR_PREFIX_P (name, "__gnu_lto"))
7252 avr_need_clear_bss_p = true;
7254 if (local_p)
7255 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7256 else
7257 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7261 /* Unnamed section callback for data_section
7262 to track need of __do_copy_data. */
7264 static void
7265 avr_output_data_section_asm_op (const void *data)
7267 avr_need_copy_data_p = true;
7269 /* Dispatch to default. */
7270 output_section_asm_op (data);
7274 /* Unnamed section callback for bss_section
7275 to track need of __do_clear_bss. */
7277 static void
7278 avr_output_bss_section_asm_op (const void *data)
7280 avr_need_clear_bss_p = true;
7282 /* Dispatch to default. */
7283 output_section_asm_op (data);
7287 /* Unnamed section callback for progmem*.data sections. */
7289 static void
7290 avr_output_progmem_section_asm_op (const void *data)
7292 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7293 (const char*) data);
7297 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7299 static void
7300 avr_asm_init_sections (void)
7302 unsigned int n;
7304 /* Set up a section for jump tables. Alignment is handled by
7305 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7307 if (AVR_HAVE_JMP_CALL)
7309 progmem_swtable_section
7310 = get_unnamed_section (0, output_section_asm_op,
7311 "\t.section\t.progmem.gcc_sw_table"
7312 ",\"a\",@progbits");
7314 else
7316 progmem_swtable_section
7317 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7318 "\t.section\t.progmem.gcc_sw_table"
7319 ",\"ax\",@progbits");
7322 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7324 progmem_section[n]
7325 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7326 progmem_section_prefix[n]);
7329 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7330 resp. `avr_need_copy_data_p'. */
7332 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7333 data_section->unnamed.callback = avr_output_data_section_asm_op;
7334 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7338 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7340 static section*
7341 avr_asm_function_rodata_section (tree decl)
7343 /* If a function is unused and optimized out by -ffunction-sections
7344 and --gc-sections, ensure that the same will happen for its jump
7345 tables by putting them into individual sections. */
7347 unsigned int flags;
7348 section * frodata;
7350 /* Get the frodata section from the default function in varasm.c
7351 but treat function-associated data-like jump tables as code
7352 rather than as user defined data. AVR has no constant pools. */
7354 int fdata = flag_data_sections;
7356 flag_data_sections = flag_function_sections;
7357 frodata = default_function_rodata_section (decl);
7358 flag_data_sections = fdata;
7359 flags = frodata->common.flags;
7362 if (frodata != readonly_data_section
7363 && flags & SECTION_NAMED)
7365 /* Adjust section flags and replace section name prefix. */
7367 unsigned int i;
7369 static const char* const prefix[] =
7371 ".rodata", ".progmem.gcc_sw_table",
7372 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7375 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7377 const char * old_prefix = prefix[i];
7378 const char * new_prefix = prefix[i+1];
7379 const char * name = frodata->named.name;
7381 if (STR_PREFIX_P (name, old_prefix))
7383 const char *rname = ACONCAT ((new_prefix,
7384 name + strlen (old_prefix), NULL));
7385 flags &= ~SECTION_CODE;
7386 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7388 return get_section (rname, flags, frodata->named.decl);
7393 return progmem_swtable_section;
7397 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7398 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7400 static void
7401 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7403 if (flags & AVR_SECTION_PROGMEM)
7405 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7406 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7407 const char *old_prefix = ".rodata";
7408 const char *new_prefix = progmem_section_prefix[segment];
7410 if (STR_PREFIX_P (name, old_prefix))
7412 const char *sname = ACONCAT ((new_prefix,
7413 name + strlen (old_prefix), NULL));
7414 default_elf_asm_named_section (sname, flags, decl);
7415 return;
7418 default_elf_asm_named_section (new_prefix, flags, decl);
7419 return;
7422 if (!avr_need_copy_data_p)
7423 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7424 || STR_PREFIX_P (name, ".rodata")
7425 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7427 if (!avr_need_clear_bss_p)
7428 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7430 default_elf_asm_named_section (name, flags, decl);
7433 static unsigned int
7434 avr_section_type_flags (tree decl, const char *name, int reloc)
7436 unsigned int flags = default_section_type_flags (decl, name, reloc);
7438 if (STR_PREFIX_P (name, ".noinit"))
7440 if (decl && TREE_CODE (decl) == VAR_DECL
7441 && DECL_INITIAL (decl) == NULL_TREE)
7442 flags |= SECTION_BSS; /* @nobits */
7443 else
7444 warning (0, "only uninitialized variables can be placed in the "
7445 ".noinit section");
7448 if (decl && DECL_P (decl)
7449 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7451 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7453 /* Attribute progmem puts data in generic address space.
7454 Set section flags as if it was in __flash to get the right
7455 section prefix in the remainder. */
7457 if (ADDR_SPACE_GENERIC_P (as))
7458 as = ADDR_SPACE_FLASH;
7460 flags |= as * SECTION_MACH_DEP;
7461 flags &= ~SECTION_WRITE;
7462 flags &= ~SECTION_BSS;
7465 return flags;
7469 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7471 static void
7472 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7474 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7475 readily available, see PR34734. So we postpone the warning
7476 about uninitialized data in program memory section until here. */
7478 if (new_decl_p
7479 && decl && DECL_P (decl)
7480 && NULL_TREE == DECL_INITIAL (decl)
7481 && !DECL_EXTERNAL (decl)
7482 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7484 warning (OPT_Wuninitialized,
7485 "uninitialized variable %q+D put into "
7486 "program memory area", decl);
7489 default_encode_section_info (decl, rtl, new_decl_p);
7491 if (decl && DECL_P (decl)
7492 && TREE_CODE (decl) != FUNCTION_DECL
7493 && MEM_P (rtl)
7494 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7496 rtx sym = XEXP (rtl, 0);
7497 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7499 /* PSTR strings are in generic space but located in flash:
7500 patch address space. */
7502 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7503 as = ADDR_SPACE_FLASH;
7505 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7510 /* Implement `TARGET_ASM_SELECT_SECTION' */
7512 static section *
7513 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7515 section * sect = default_elf_select_section (decl, reloc, align);
7517 if (decl && DECL_P (decl)
7518 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7520 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7521 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7523 if (sect->common.flags & SECTION_NAMED)
7525 const char * name = sect->named.name;
7526 const char * old_prefix = ".rodata";
7527 const char * new_prefix = progmem_section_prefix[segment];
7529 if (STR_PREFIX_P (name, old_prefix))
7531 const char *sname = ACONCAT ((new_prefix,
7532 name + strlen (old_prefix), NULL));
7533 return get_section (sname, sect->common.flags, sect->named.decl);
7537 return progmem_section[segment];
7540 return sect;
7543 /* Implement `TARGET_ASM_FILE_START'. */
7544 /* Outputs some text at the start of each assembler file. */
7546 static void
7547 avr_file_start (void)
7549 int sfr_offset = avr_current_arch->sfr_offset;
7551 if (avr_current_arch->asm_only)
7552 error ("MCU %qs supported for assembler only", avr_current_device->name);
7554 default_file_start ();
7556 /* Print I/O addresses of some SFRs used with IN and OUT. */
7558 if (!AVR_HAVE_8BIT_SP)
7559 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
7561 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
7562 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
7563 if (AVR_HAVE_RAMPZ)
7564 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
7565 if (AVR_HAVE_RAMPY)
7566 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
7567 if (AVR_HAVE_RAMPX)
7568 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
7569 if (AVR_HAVE_RAMPD)
7570 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
7571 if (AVR_XMEGA)
7572 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
7573 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
7574 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
7578 /* Implement `TARGET_ASM_FILE_END'. */
7579 /* Outputs to the stdio stream FILE some
7580 appropriate text to go at the end of an assembler file. */
7582 static void
7583 avr_file_end (void)
7585 /* Output these only if there is anything in the
7586 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7587 input section(s) - some code size can be saved by not
7588 linking in the initialization code from libgcc if resp.
7589 sections are empty. */
7591 if (avr_need_copy_data_p)
7592 fputs (".global __do_copy_data\n", asm_out_file);
7594 if (avr_need_clear_bss_p)
7595 fputs (".global __do_clear_bss\n", asm_out_file);
7598 /* Choose the order in which to allocate hard registers for
7599 pseudo-registers local to a basic block.
7601 Store the desired register order in the array `reg_alloc_order'.
7602 Element 0 should be the register to allocate first; element 1, the
7603 next register; and so on. */
7605 void
7606 order_regs_for_local_alloc (void)
7608 unsigned int i;
7609 static const int order_0[] = {
7610 24,25,
7611 18,19,
7612 20,21,
7613 22,23,
7614 30,31,
7615 26,27,
7616 28,29,
7617 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7618 0,1,
7619 32,33,34,35
7621 static const int order_1[] = {
7622 18,19,
7623 20,21,
7624 22,23,
7625 24,25,
7626 30,31,
7627 26,27,
7628 28,29,
7629 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7630 0,1,
7631 32,33,34,35
7633 static const int order_2[] = {
7634 25,24,
7635 23,22,
7636 21,20,
7637 19,18,
7638 30,31,
7639 26,27,
7640 28,29,
7641 17,16,
7642 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7643 1,0,
7644 32,33,34,35
7647 const int *order = (TARGET_ORDER_1 ? order_1 :
7648 TARGET_ORDER_2 ? order_2 :
7649 order_0);
7650 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7651 reg_alloc_order[i] = order[i];
7655 /* Implement `TARGET_REGISTER_MOVE_COST' */
7657 static int
7658 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7659 reg_class_t from, reg_class_t to)
7661 return (from == STACK_REG ? 6
7662 : to == STACK_REG ? 12
7663 : 2);
7667 /* Implement `TARGET_MEMORY_MOVE_COST' */
7669 static int
7670 avr_memory_move_cost (enum machine_mode mode,
7671 reg_class_t rclass ATTRIBUTE_UNUSED,
7672 bool in ATTRIBUTE_UNUSED)
7674 return (mode == QImode ? 2
7675 : mode == HImode ? 4
7676 : mode == SImode ? 8
7677 : mode == SFmode ? 8
7678 : 16);
7682 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7683 cost of an RTX operand given its context. X is the rtx of the
7684 operand, MODE is its mode, and OUTER is the rtx_code of this
7685 operand's parent operator. */
7687 static int
7688 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7689 int opno, bool speed)
7691 enum rtx_code code = GET_CODE (x);
7692 int total;
7694 switch (code)
7696 case REG:
7697 case SUBREG:
7698 return 0;
7700 case CONST_INT:
7701 case CONST_DOUBLE:
7702 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7704 default:
7705 break;
7708 total = 0;
7709 avr_rtx_costs (x, code, outer, opno, &total, speed);
7710 return total;
7713 /* Worker function for AVR backend's rtx_cost function.
7714 X is rtx expression whose cost is to be calculated.
7715 Return true if the complete cost has been computed.
7716 Return false if subexpressions should be scanned.
7717 In either case, *TOTAL contains the cost result. */
7719 static bool
7720 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7721 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7723 enum rtx_code code = (enum rtx_code) codearg;
7724 enum machine_mode mode = GET_MODE (x);
7725 HOST_WIDE_INT val;
7727 switch (code)
7729 case CONST_INT:
7730 case CONST_DOUBLE:
7731 case SYMBOL_REF:
7732 case CONST:
7733 case LABEL_REF:
7734 /* Immediate constants are as cheap as registers. */
7735 *total = 0;
7736 return true;
7738 case MEM:
7739 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7740 return true;
7742 case NEG:
7743 switch (mode)
7745 case QImode:
7746 case SFmode:
7747 *total = COSTS_N_INSNS (1);
7748 break;
7750 case HImode:
7751 case PSImode:
7752 case SImode:
7753 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7754 break;
7756 default:
7757 return false;
7759 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7760 return true;
7762 case ABS:
7763 switch (mode)
7765 case QImode:
7766 case SFmode:
7767 *total = COSTS_N_INSNS (1);
7768 break;
7770 default:
7771 return false;
7773 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7774 return true;
7776 case NOT:
7777 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7778 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7779 return true;
7781 case ZERO_EXTEND:
7782 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7783 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7784 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7785 return true;
7787 case SIGN_EXTEND:
7788 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7789 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7790 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7791 return true;
7793 case PLUS:
7794 switch (mode)
7796 case QImode:
7797 if (AVR_HAVE_MUL
7798 && MULT == GET_CODE (XEXP (x, 0))
7799 && register_operand (XEXP (x, 1), QImode))
7801 /* multiply-add */
7802 *total = COSTS_N_INSNS (speed ? 4 : 3);
7803 /* multiply-add with constant: will be split and load constant. */
7804 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7805 *total = COSTS_N_INSNS (1) + *total;
7806 return true;
7808 *total = COSTS_N_INSNS (1);
7809 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7810 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7811 break;
7813 case HImode:
7814 if (AVR_HAVE_MUL
7815 && (MULT == GET_CODE (XEXP (x, 0))
7816 || ASHIFT == GET_CODE (XEXP (x, 0)))
7817 && register_operand (XEXP (x, 1), HImode)
7818 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7819 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7821 /* multiply-add */
7822 *total = COSTS_N_INSNS (speed ? 5 : 4);
7823 /* multiply-add with constant: will be split and load constant. */
7824 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7825 *total = COSTS_N_INSNS (1) + *total;
7826 return true;
7828 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7830 *total = COSTS_N_INSNS (2);
7831 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7832 speed);
7834 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7835 *total = COSTS_N_INSNS (1);
7836 else
7837 *total = COSTS_N_INSNS (2);
7838 break;
7840 case PSImode:
7841 if (!CONST_INT_P (XEXP (x, 1)))
7843 *total = COSTS_N_INSNS (3);
7844 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7845 speed);
7847 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7848 *total = COSTS_N_INSNS (2);
7849 else
7850 *total = COSTS_N_INSNS (3);
7851 break;
7853 case SImode:
7854 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7856 *total = COSTS_N_INSNS (4);
7857 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7858 speed);
7860 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7861 *total = COSTS_N_INSNS (1);
7862 else
7863 *total = COSTS_N_INSNS (4);
7864 break;
7866 default:
7867 return false;
7869 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7870 return true;
7872 case MINUS:
7873 if (AVR_HAVE_MUL
7874 && QImode == mode
7875 && register_operand (XEXP (x, 0), QImode)
7876 && MULT == GET_CODE (XEXP (x, 1)))
7878 /* multiply-sub */
7879 *total = COSTS_N_INSNS (speed ? 4 : 3);
7880 /* multiply-sub with constant: will be split and load constant. */
7881 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7882 *total = COSTS_N_INSNS (1) + *total;
7883 return true;
7885 if (AVR_HAVE_MUL
7886 && HImode == mode
7887 && register_operand (XEXP (x, 0), HImode)
7888 && (MULT == GET_CODE (XEXP (x, 1))
7889 || ASHIFT == GET_CODE (XEXP (x, 1)))
7890 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7891 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7893 /* multiply-sub */
7894 *total = COSTS_N_INSNS (speed ? 5 : 4);
7895 /* multiply-sub with constant: will be split and load constant. */
7896 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7897 *total = COSTS_N_INSNS (1) + *total;
7898 return true;
7900 /* FALLTHRU */
7901 case AND:
7902 case IOR:
7903 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7904 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7905 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7906 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7907 return true;
7909 case XOR:
7910 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7911 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7912 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7913 return true;
7915 case MULT:
7916 switch (mode)
7918 case QImode:
7919 if (AVR_HAVE_MUL)
7920 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7921 else if (!speed)
7922 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7923 else
7924 return false;
7925 break;
7927 case HImode:
7928 if (AVR_HAVE_MUL)
7930 rtx op0 = XEXP (x, 0);
7931 rtx op1 = XEXP (x, 1);
7932 enum rtx_code code0 = GET_CODE (op0);
7933 enum rtx_code code1 = GET_CODE (op1);
7934 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7935 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7937 if (ex0
7938 && (u8_operand (op1, HImode)
7939 || s8_operand (op1, HImode)))
7941 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7942 return true;
7944 if (ex0
7945 && register_operand (op1, HImode))
7947 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7948 return true;
7950 else if (ex0 || ex1)
7952 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7953 return true;
7955 else if (register_operand (op0, HImode)
7956 && (u8_operand (op1, HImode)
7957 || s8_operand (op1, HImode)))
7959 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7960 return true;
7962 else
7963 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7965 else if (!speed)
7966 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7967 else
7968 return false;
7969 break;
7971 case PSImode:
7972 if (!speed)
7973 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7974 else
7975 *total = 10;
7976 break;
7978 case SImode:
7979 if (AVR_HAVE_MUL)
7981 if (!speed)
7983 /* Add some additional costs besides CALL like moves etc. */
7985 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7987 else
7989 /* Just a rough estimate. Even with -O2 we don't want bulky
7990 code expanded inline. */
7992 *total = COSTS_N_INSNS (25);
7995 else
7997 if (speed)
7998 *total = COSTS_N_INSNS (300);
7999 else
8000 /* Add some additional costs besides CALL like moves etc. */
8001 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
8004 return true;
8006 default:
8007 return false;
8009 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8010 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8011 return true;
8013 case DIV:
8014 case MOD:
8015 case UDIV:
8016 case UMOD:
8017 if (!speed)
8018 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8019 else
8020 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
8021 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8022 /* For div/mod with const-int divisor we have at least the cost of
8023 loading the divisor. */
8024 if (CONST_INT_P (XEXP (x, 1)))
8025 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
8026 /* Add some overall penaly for clobbering and moving around registers */
8027 *total += COSTS_N_INSNS (2);
8028 return true;
8030 case ROTATE:
8031 switch (mode)
8033 case QImode:
8034 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
8035 *total = COSTS_N_INSNS (1);
8037 break;
8039 case HImode:
8040 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
8041 *total = COSTS_N_INSNS (3);
8043 break;
8045 case SImode:
8046 if (CONST_INT_P (XEXP (x, 1)))
8047 switch (INTVAL (XEXP (x, 1)))
8049 case 8:
8050 case 24:
8051 *total = COSTS_N_INSNS (5);
8052 break;
8053 case 16:
8054 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
8055 break;
8057 break;
8059 default:
8060 return false;
8062 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8063 return true;
8065 case ASHIFT:
8066 switch (mode)
8068 case QImode:
8069 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8071 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8072 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8073 speed);
8075 else
8077 val = INTVAL (XEXP (x, 1));
8078 if (val == 7)
8079 *total = COSTS_N_INSNS (3);
8080 else if (val >= 0 && val <= 7)
8081 *total = COSTS_N_INSNS (val);
8082 else
8083 *total = COSTS_N_INSNS (1);
8085 break;
8087 case HImode:
8088 if (AVR_HAVE_MUL)
8090 if (const_2_to_7_operand (XEXP (x, 1), HImode)
8091 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
8092 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
8094 *total = COSTS_N_INSNS (!speed ? 4 : 6);
8095 return true;
8099 if (const1_rtx == (XEXP (x, 1))
8100 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
8102 *total = COSTS_N_INSNS (2);
8103 return true;
8106 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8108 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8109 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8110 speed);
8112 else
8113 switch (INTVAL (XEXP (x, 1)))
8115 case 0:
8116 *total = 0;
8117 break;
8118 case 1:
8119 case 8:
8120 *total = COSTS_N_INSNS (2);
8121 break;
8122 case 9:
8123 *total = COSTS_N_INSNS (3);
8124 break;
8125 case 2:
8126 case 3:
8127 case 10:
8128 case 15:
8129 *total = COSTS_N_INSNS (4);
8130 break;
8131 case 7:
8132 case 11:
8133 case 12:
8134 *total = COSTS_N_INSNS (5);
8135 break;
8136 case 4:
8137 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8138 break;
8139 case 6:
8140 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8141 break;
8142 case 5:
8143 *total = COSTS_N_INSNS (!speed ? 5 : 10);
8144 break;
8145 default:
8146 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8147 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8148 speed);
8150 break;
8152 case PSImode:
8153 if (!CONST_INT_P (XEXP (x, 1)))
8155 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8157 else
8158 switch (INTVAL (XEXP (x, 1)))
8160 case 0:
8161 *total = 0;
8162 break;
8163 case 1:
8164 case 8:
8165 case 16:
8166 *total = COSTS_N_INSNS (3);
8167 break;
8168 case 23:
8169 *total = COSTS_N_INSNS (5);
8170 break;
8171 default:
8172 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8173 break;
8175 break;
8177 case SImode:
8178 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8180 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8181 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8182 speed);
8184 else
8185 switch (INTVAL (XEXP (x, 1)))
8187 case 0:
8188 *total = 0;
8189 break;
8190 case 24:
8191 *total = COSTS_N_INSNS (3);
8192 break;
8193 case 1:
8194 case 8:
8195 case 16:
8196 *total = COSTS_N_INSNS (4);
8197 break;
8198 case 31:
8199 *total = COSTS_N_INSNS (6);
8200 break;
8201 case 2:
8202 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8203 break;
8204 default:
8205 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8206 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8207 speed);
8209 break;
8211 default:
8212 return false;
8214 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8215 return true;
8217 case ASHIFTRT:
8218 switch (mode)
8220 case QImode:
8221 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8223 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8224 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8225 speed);
8227 else
8229 val = INTVAL (XEXP (x, 1));
8230 if (val == 6)
8231 *total = COSTS_N_INSNS (4);
8232 else if (val == 7)
8233 *total = COSTS_N_INSNS (2);
8234 else if (val >= 0 && val <= 7)
8235 *total = COSTS_N_INSNS (val);
8236 else
8237 *total = COSTS_N_INSNS (1);
8239 break;
8241 case HImode:
8242 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8244 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8245 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8246 speed);
8248 else
8249 switch (INTVAL (XEXP (x, 1)))
8251 case 0:
8252 *total = 0;
8253 break;
8254 case 1:
8255 *total = COSTS_N_INSNS (2);
8256 break;
8257 case 15:
8258 *total = COSTS_N_INSNS (3);
8259 break;
8260 case 2:
8261 case 7:
8262 case 8:
8263 case 9:
8264 *total = COSTS_N_INSNS (4);
8265 break;
8266 case 10:
8267 case 14:
8268 *total = COSTS_N_INSNS (5);
8269 break;
8270 case 11:
8271 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8272 break;
8273 case 12:
8274 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8275 break;
8276 case 6:
8277 case 13:
8278 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8279 break;
8280 default:
8281 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8282 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8283 speed);
8285 break;
8287 case PSImode:
8288 if (!CONST_INT_P (XEXP (x, 1)))
8290 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8292 else
8293 switch (INTVAL (XEXP (x, 1)))
8295 case 0:
8296 *total = 0;
8297 break;
8298 case 1:
8299 *total = COSTS_N_INSNS (3);
8300 break;
8301 case 16:
8302 case 8:
8303 *total = COSTS_N_INSNS (5);
8304 break;
8305 case 23:
8306 *total = COSTS_N_INSNS (4);
8307 break;
8308 default:
8309 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8310 break;
8312 break;
8314 case SImode:
8315 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8317 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8318 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8319 speed);
8321 else
8322 switch (INTVAL (XEXP (x, 1)))
8324 case 0:
8325 *total = 0;
8326 break;
8327 case 1:
8328 *total = COSTS_N_INSNS (4);
8329 break;
8330 case 8:
8331 case 16:
8332 case 24:
8333 *total = COSTS_N_INSNS (6);
8334 break;
8335 case 2:
8336 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8337 break;
8338 case 31:
8339 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8340 break;
8341 default:
8342 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8343 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8344 speed);
8346 break;
8348 default:
8349 return false;
8351 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8352 return true;
8354 case LSHIFTRT:
8355 switch (mode)
8357 case QImode:
8358 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8360 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8361 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8362 speed);
8364 else
8366 val = INTVAL (XEXP (x, 1));
8367 if (val == 7)
8368 *total = COSTS_N_INSNS (3);
8369 else if (val >= 0 && val <= 7)
8370 *total = COSTS_N_INSNS (val);
8371 else
8372 *total = COSTS_N_INSNS (1);
8374 break;
8376 case HImode:
8377 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8379 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8380 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8381 speed);
8383 else
8384 switch (INTVAL (XEXP (x, 1)))
8386 case 0:
8387 *total = 0;
8388 break;
8389 case 1:
8390 case 8:
8391 *total = COSTS_N_INSNS (2);
8392 break;
8393 case 9:
8394 *total = COSTS_N_INSNS (3);
8395 break;
8396 case 2:
8397 case 10:
8398 case 15:
8399 *total = COSTS_N_INSNS (4);
8400 break;
8401 case 7:
8402 case 11:
8403 *total = COSTS_N_INSNS (5);
8404 break;
8405 case 3:
8406 case 12:
8407 case 13:
8408 case 14:
8409 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8410 break;
8411 case 4:
8412 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8413 break;
8414 case 5:
8415 case 6:
8416 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8417 break;
8418 default:
8419 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8420 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8421 speed);
8423 break;
8425 case PSImode:
8426 if (!CONST_INT_P (XEXP (x, 1)))
8428 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8430 else
8431 switch (INTVAL (XEXP (x, 1)))
8433 case 0:
8434 *total = 0;
8435 break;
8436 case 1:
8437 case 8:
8438 case 16:
8439 *total = COSTS_N_INSNS (3);
8440 break;
8441 case 23:
8442 *total = COSTS_N_INSNS (5);
8443 break;
8444 default:
8445 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8446 break;
8448 break;
8450 case SImode:
8451 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8453 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8454 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8455 speed);
8457 else
8458 switch (INTVAL (XEXP (x, 1)))
8460 case 0:
8461 *total = 0;
8462 break;
8463 case 1:
8464 *total = COSTS_N_INSNS (4);
8465 break;
8466 case 2:
8467 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8468 break;
8469 case 8:
8470 case 16:
8471 case 24:
8472 *total = COSTS_N_INSNS (4);
8473 break;
8474 case 31:
8475 *total = COSTS_N_INSNS (6);
8476 break;
8477 default:
8478 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8479 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8480 speed);
8482 break;
8484 default:
8485 return false;
8487 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8488 return true;
8490 case COMPARE:
8491 switch (GET_MODE (XEXP (x, 0)))
8493 case QImode:
8494 *total = COSTS_N_INSNS (1);
8495 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8496 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8497 break;
8499 case HImode:
8500 *total = COSTS_N_INSNS (2);
8501 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8502 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8503 else if (INTVAL (XEXP (x, 1)) != 0)
8504 *total += COSTS_N_INSNS (1);
8505 break;
8507 case PSImode:
8508 *total = COSTS_N_INSNS (3);
8509 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8510 *total += COSTS_N_INSNS (2);
8511 break;
8513 case SImode:
8514 *total = COSTS_N_INSNS (4);
8515 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8516 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8517 else if (INTVAL (XEXP (x, 1)) != 0)
8518 *total += COSTS_N_INSNS (3);
8519 break;
8521 default:
8522 return false;
8524 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8525 return true;
8527 case TRUNCATE:
8528 if (AVR_HAVE_MUL
8529 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8530 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8531 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8533 if (QImode == mode || HImode == mode)
8535 *total = COSTS_N_INSNS (2);
8536 return true;
8539 break;
8541 default:
8542 break;
8544 return false;
8548 /* Implement `TARGET_RTX_COSTS'. */
8550 static bool
8551 avr_rtx_costs (rtx x, int codearg, int outer_code,
8552 int opno, int *total, bool speed)
8554 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8555 opno, total, speed);
8557 if (avr_log.rtx_costs)
8559 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8560 done, speed ? "speed" : "size", *total, outer_code, x);
8563 return done;
8567 /* Implement `TARGET_ADDRESS_COST'. */
8569 static int
8570 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8572 int cost = 4;
8574 if (GET_CODE (x) == PLUS
8575 && CONST_INT_P (XEXP (x, 1))
8576 && (REG_P (XEXP (x, 0))
8577 || GET_CODE (XEXP (x, 0)) == SUBREG))
8579 if (INTVAL (XEXP (x, 1)) >= 61)
8580 cost = 18;
8582 else if (CONSTANT_ADDRESS_P (x))
8584 if (optimize > 0
8585 && io_address_operand (x, QImode))
8586 cost = 2;
8589 if (avr_log.address_cost)
8590 avr_edump ("\n%?: %d = %r\n", cost, x);
8592 return cost;
8595 /* Test for extra memory constraint 'Q'.
8596 It's a memory address based on Y or Z pointer with valid displacement. */
8599 extra_constraint_Q (rtx x)
8601 int ok = 0;
8603 if (GET_CODE (XEXP (x,0)) == PLUS
8604 && REG_P (XEXP (XEXP (x,0), 0))
8605 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8606 && (INTVAL (XEXP (XEXP (x,0), 1))
8607 <= MAX_LD_OFFSET (GET_MODE (x))))
8609 rtx xx = XEXP (XEXP (x,0), 0);
8610 int regno = REGNO (xx);
8612 ok = (/* allocate pseudos */
8613 regno >= FIRST_PSEUDO_REGISTER
8614 /* strictly check */
8615 || regno == REG_Z || regno == REG_Y
8616 /* XXX frame & arg pointer checks */
8617 || xx == frame_pointer_rtx
8618 || xx == arg_pointer_rtx);
8620 if (avr_log.constraints)
8621 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8622 ok, reload_completed, reload_in_progress, x);
8625 return ok;
8628 /* Convert condition code CONDITION to the valid AVR condition code. */
8630 RTX_CODE
8631 avr_normalize_condition (RTX_CODE condition)
8633 switch (condition)
8635 case GT:
8636 return GE;
8637 case GTU:
8638 return GEU;
8639 case LE:
8640 return LT;
8641 case LEU:
8642 return LTU;
8643 default:
8644 gcc_unreachable ();
8648 /* Helper function for `avr_reorg'. */
8650 static rtx
8651 avr_compare_pattern (rtx insn)
8653 rtx pattern = single_set (insn);
8655 if (pattern
8656 && NONJUMP_INSN_P (insn)
8657 && SET_DEST (pattern) == cc0_rtx
8658 && GET_CODE (SET_SRC (pattern)) == COMPARE
8659 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8660 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8662 return pattern;
8665 return NULL_RTX;
8668 /* Helper function for `avr_reorg'. */
8670 /* Expansion of switch/case decision trees leads to code like
8672 cc0 = compare (Reg, Num)
8673 if (cc0 == 0)
8674 goto L1
8676 cc0 = compare (Reg, Num)
8677 if (cc0 > 0)
8678 goto L2
8680 The second comparison is superfluous and can be deleted.
8681 The second jump condition can be transformed from a
8682 "difficult" one to a "simple" one because "cc0 > 0" and
8683 "cc0 >= 0" will have the same effect here.
   This function relies on the way switch/case is being expanded
8686 as binary decision tree. For example code see PR 49903.
8688 Return TRUE if optimization performed.
8689 Return FALSE if nothing changed.
8691 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8693 We don't want to do this in text peephole because it is
8694 tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.
8697 RTL peephole won't do because peephole2 does not scan across
8698 basic blocks. */
static bool
avr_reorg_remove_redundant_compare (rtx insn1)
{
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for: compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both compares must be identical and both jumps must be single
     SETs of the PC whose source is an IF_THEN_ELSE.  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */

  /* The first branch must test cc0 for equality against a CONST_INT
     compare, and both branches must jump to a label in their "then"
     arm, i.e. fall through in the "else" arm.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                               (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT: case LTU:
    case GE: case GEU:
      break;

    case LE: case LEU:
    case GT: case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder. */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it. */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
/* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
/* Optimize conditional jumps. */

static void
avr_reorg (void)
{
  rtx insn = get_insns();

  /* Walk all real insns looking for cc0 compare insns as identified
     by avr_compare_pattern.  */

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      /* First try to delete a redundant second compare of a
         switch/case decision tree, see the comment before
         avr_reorg_remove_redundant_compare.  */

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
        {
          /* Now we work under compare insn with difficult branch.  */

          rtx next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              /* Both operands are registers: swap them and reverse
                 the condition in the following branch so the compare
                 becomes a "simple" one.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              /* Register compared against a constant: if comparing
                 against VAL+1 with a normalized condition is
                 equivalent, use that instead.  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
/* Returns register number for function return value.
   avr_libcall_value places an N-byte return value so that it
   ends at R25, starting at this register + 2 - N.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
8920 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8922 static bool
8923 avr_function_value_regno_p (const unsigned int regno)
8925 return (regno == avr_ret_register ());
8928 /* Create an RTX representing the place where a
8929 library function returns a value of mode MODE. */
8931 static rtx
8932 avr_libcall_value (enum machine_mode mode,
8933 const_rtx func ATTRIBUTE_UNUSED)
8935 int offs = GET_MODE_SIZE (mode);
8937 if (offs <= 4)
8938 offs = (offs + 1) & ~1;
8940 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
/* Create an RTX representing the place where a
   function returns a value of data type VALTYPE.  */

static rtx
avr_function_value (const_tree type,
                    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
                    bool outgoing ATTRIBUTE_UNUSED)
{
  unsigned int offs;

  /* Non-BLKmode values use the same convention as libcalls.  */
  if (TYPE_MODE (type) != BLKmode)
    return avr_libcall_value (TYPE_MODE (type), NULL_RTX);

  /* BLKmode: round the size up to 2, 4 or 8 bytes so the value
     is placed like an HI/SI/DI value ending at R25.  */
  offs = int_size_in_bytes (type);
  if (offs < 2)
    offs = 2;
  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
    offs = GET_MODE_SIZE (SImode);
  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
    offs = GET_MODE_SIZE (DImode);

  return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
}
8968 test_hard_reg_class (enum reg_class rclass, rtx x)
8970 int regno = true_regnum (x);
8971 if (regno < 0)
8972 return 0;
8974 if (TEST_HARD_REG_CLASS (rclass, regno))
8975 return 1;
8977 return 0;
/* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc.  */

static bool
avr_2word_insn_p (rtx insn)
{
  /* On devices with the skip-bug erratum (errata_skip set) we must not
     skip 2-word instructions at all; also bail out if INSN is missing
     or is not 2 words long.  */

  if (avr_current_device->errata_skip
      || !insn
      || 2 != get_attr_length (insn))
    {
      return false;
    }

  switch (INSN_CODE (insn))
    {
    default:
      return false;

    case CODE_FOR_movqi_insn:
      {
        rtx set  = single_set (insn);
        rtx src  = SET_SRC (set);
        rtx dest = SET_DEST (set);

        /* Factor out LDS and STS from movqi_insn.  */

        if (MEM_P (dest)
            && (REG_P (src) || src == const0_rtx))
          {
            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
          }
        else if (REG_P (dest)
                 && MEM_P (src))
          {
            return CONSTANT_ADDRESS_P (XEXP (src, 0));
          }

        return false;
      }

    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
      return true;
    }
}
9029 jump_over_one_insn_p (rtx insn, rtx dest)
9031 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
9032 ? XEXP (dest, 0)
9033 : dest);
9034 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
9035 int dest_addr = INSN_ADDRESSES (uid);
9036 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
9038 return (jump_offset == 1
9039 || (jump_offset == 2
9040 && avr_2word_insn_p (next_active_insn (insn))));
9043 /* Returns 1 if a value of mode MODE can be stored starting with hard
9044 register number REGNO. On the enhanced core, anything larger than
9045 1 byte must start in even numbered register for "movw" to work
9046 (this way we don't have to check for odd registers everywhere). */
9049 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
9051 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9052 Disallowing QI et al. in these regs might lead to code like
9053 (set (subreg:QI (reg:HI 28) n) ...)
9054 which will result in wrong code because reload does not
9055 handle SUBREGs of hard regsisters like this.
9056 This could be fixed in reload. However, it appears
9057 that fixing reload is not wanted by reload people. */
9059 /* Any GENERAL_REGS register can hold 8-bit values. */
9061 if (GET_MODE_SIZE (mode) == 1)
9062 return 1;
9064 /* FIXME: Ideally, the following test is not needed.
9065 However, it turned out that it can reduce the number
9066 of spill fails. AVR and it's poor endowment with
9067 address registers is extreme stress test for reload. */
9069 if (GET_MODE_SIZE (mode) >= 4
9070 && regno >= REG_X)
9071 return 0;
9073 /* All modes larger than 8 bits should start in an even register. */
9075 return !(regno & 1);
9079 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
9081 reg_class_t
9082 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
9083 addr_space_t as, RTX_CODE outer_code,
9084 RTX_CODE index_code ATTRIBUTE_UNUSED)
9086 if (!ADDR_SPACE_GENERIC_P (as))
9088 return POINTER_Z_REGS;
9091 if (!avr_strict_X)
9092 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
9094 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
9098 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9100 bool
9101 avr_regno_mode_code_ok_for_base_p (int regno,
9102 enum machine_mode mode ATTRIBUTE_UNUSED,
9103 addr_space_t as ATTRIBUTE_UNUSED,
9104 RTX_CODE outer_code,
9105 RTX_CODE index_code ATTRIBUTE_UNUSED)
9107 bool ok = false;
9109 if (!ADDR_SPACE_GENERIC_P (as))
9111 if (regno < FIRST_PSEUDO_REGISTER
9112 && regno == REG_Z)
9114 return true;
9117 if (reg_renumber)
9119 regno = reg_renumber[regno];
9121 if (regno == REG_Z)
9123 return true;
9127 return false;
9130 if (regno < FIRST_PSEUDO_REGISTER
9131 && (regno == REG_X
9132 || regno == REG_Y
9133 || regno == REG_Z
9134 || regno == ARG_POINTER_REGNUM))
9136 ok = true;
9138 else if (reg_renumber)
9140 regno = reg_renumber[regno];
9142 if (regno == REG_X
9143 || regno == REG_Y
9144 || regno == REG_Z
9145 || regno == ARG_POINTER_REGNUM)
9147 ok = true;
9151 if (avr_strict_X
9152 && PLUS == outer_code
9153 && regno == REG_X)
9155 ok = false;
9158 return ok;
9162 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9163 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9164 CLOBBER_REG is a QI clobber register or NULL_RTX.
9165 LEN == NULL: output instructions.
9166 LEN != NULL: set *LEN to the length of the instruction sequence
9167 (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to Zero already.
9169 If CLEAR_P is false, nothing is known about OP[0].
9171 The effect on cc0 is as follows:
9173 Load 0 to any register except ZERO_REG : NONE
9174 Load ld register with any value : NONE
9175 Anything else: : CLOBBER */
9177 static void
9178 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
9180 rtx src = op[1];
9181 rtx dest = op[0];
9182 rtx xval, xdest[4];
9183 int ival[4];
9184 int clobber_val = 1234;
9185 bool cooked_clobber_p = false;
9186 bool set_p = false;
9187 enum machine_mode mode = GET_MODE (dest);
9188 int n, n_bytes = GET_MODE_SIZE (mode);
9190 gcc_assert (REG_P (dest)
9191 && CONSTANT_P (src));
9193 if (len)
9194 *len = 0;
9196 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9197 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9199 if (REGNO (dest) < 16
9200 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
9202 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
9205 /* We might need a clobber reg but don't have one. Look at the value to
9206 be loaded more closely. A clobber is only needed if it is a symbol
9207 or contains a byte that is neither 0, -1 or a power of 2. */
9209 if (NULL_RTX == clobber_reg
9210 && !test_hard_reg_class (LD_REGS, dest)
9211 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
9212 || !avr_popcount_each_byte (src, n_bytes,
9213 (1 << 0) | (1 << 1) | (1 << 8))))
9215 /* We have no clobber register but need one. Cook one up.
9216 That's cheaper than loading from constant pool. */
9218 cooked_clobber_p = true;
9219 clobber_reg = all_regs_rtx[REG_Z + 1];
9220 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
9223 /* Now start filling DEST from LSB to MSB. */
9225 for (n = 0; n < n_bytes; n++)
9227 int ldreg_p;
9228 bool done_byte = false;
9229 int j;
9230 rtx xop[3];
9232 /* Crop the n-th destination byte. */
9234 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
9235 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
9237 if (!CONST_INT_P (src)
9238 && !CONST_DOUBLE_P (src))
9240 static const char* const asm_code[][2] =
9242 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
9243 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
9244 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
9245 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
9248 xop[0] = xdest[n];
9249 xop[1] = src;
9250 xop[2] = clobber_reg;
9252 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
9254 continue;
9257 /* Crop the n-th source byte. */
9259 xval = simplify_gen_subreg (QImode, src, mode, n);
9260 ival[n] = INTVAL (xval);
9262 /* Look if we can reuse the low word by means of MOVW. */
9264 if (n == 2
9265 && n_bytes >= 4
9266 && AVR_HAVE_MOVW)
9268 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9269 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9271 if (INTVAL (lo16) == INTVAL (hi16))
9273 if (0 != INTVAL (lo16)
9274 || !clear_p)
9276 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9279 break;
9283 /* Don't use CLR so that cc0 is set as expected. */
9285 if (ival[n] == 0)
9287 if (!clear_p)
9288 avr_asm_len (ldreg_p ? "ldi %0,0"
9289 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9290 : "mov %0,__zero_reg__",
9291 &xdest[n], len, 1);
9292 continue;
9295 if (clobber_val == ival[n]
9296 && REGNO (clobber_reg) == REGNO (xdest[n]))
9298 continue;
9301 /* LD_REGS can use LDI to move a constant value */
9303 if (ldreg_p)
9305 xop[0] = xdest[n];
9306 xop[1] = xval;
9307 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9308 continue;
9311 /* Try to reuse value already loaded in some lower byte. */
9313 for (j = 0; j < n; j++)
9314 if (ival[j] == ival[n])
9316 xop[0] = xdest[n];
9317 xop[1] = xdest[j];
9319 avr_asm_len ("mov %0,%1", xop, len, 1);
9320 done_byte = true;
9321 break;
9324 if (done_byte)
9325 continue;
9327 /* Need no clobber reg for -1: Use CLR/DEC */
9329 if (-1 == ival[n])
9331 if (!clear_p)
9332 avr_asm_len ("clr %0", &xdest[n], len, 1);
9334 avr_asm_len ("dec %0", &xdest[n], len, 1);
9335 continue;
9337 else if (1 == ival[n])
9339 if (!clear_p)
9340 avr_asm_len ("clr %0", &xdest[n], len, 1);
9342 avr_asm_len ("inc %0", &xdest[n], len, 1);
9343 continue;
9346 /* Use T flag or INC to manage powers of 2 if we have
9347 no clobber reg. */
9349 if (NULL_RTX == clobber_reg
9350 && single_one_operand (xval, QImode))
9352 xop[0] = xdest[n];
9353 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9355 gcc_assert (constm1_rtx != xop[1]);
9357 if (!set_p)
9359 set_p = true;
9360 avr_asm_len ("set", xop, len, 1);
9363 if (!clear_p)
9364 avr_asm_len ("clr %0", xop, len, 1);
9366 avr_asm_len ("bld %0,%1", xop, len, 1);
9367 continue;
9370 /* We actually need the LD_REGS clobber reg. */
9372 gcc_assert (NULL_RTX != clobber_reg);
9374 xop[0] = xdest[n];
9375 xop[1] = xval;
9376 xop[2] = clobber_reg;
9377 clobber_val = ival[n];
9379 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9380 "mov %0,%2", xop, len, 2);
9383 /* If we cooked up a clobber reg above, restore it. */
9385 if (cooked_clobber_p)
9387 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9392 /* Reload the constant OP[1] into the HI register OP[0].
9393 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9394 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9395 need a clobber reg or have to cook one up.
9397 PLEN == NULL: Output instructions.
9398 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9399 by the insns printed.
9401 Return "". */
9403 const char*
9404 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9406 output_reload_in_const (op, clobber_reg, plen, false);
9407 return "";
9411 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9412 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9413 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9414 need a clobber reg or have to cook one up.
9416 LEN == NULL: Output instructions.
9418 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9419 by the insns printed.
9421 Return "". */
9423 const char *
9424 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9426 if (AVR_HAVE_MOVW
9427 && !test_hard_reg_class (LD_REGS, op[0])
9428 && (CONST_INT_P (op[1])
9429 || CONST_DOUBLE_P (op[1])))
9431 int len_clr, len_noclr;
9433 /* In some cases it is better to clear the destination beforehand, e.g.
9435 CLR R2 CLR R3 MOVW R4,R2 INC R2
9437 is shorther than
9439 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9441 We find it too tedious to work that out in the print function.
9442 Instead, we call the print function twice to get the lengths of
9443 both methods and use the shortest one. */
9445 output_reload_in_const (op, clobber_reg, &len_clr, true);
9446 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9448 if (len_noclr - len_clr == 4)
9450 /* Default needs 4 CLR instructions: clear register beforehand. */
9452 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9453 "mov %B0,__zero_reg__" CR_TAB
9454 "movw %C0,%A0", &op[0], len, 3);
9456 output_reload_in_const (op, clobber_reg, len, true);
9458 if (len)
9459 *len += 3;
9461 return "";
9465 /* Default: destination not pre-cleared. */
9467 output_reload_in_const (op, clobber_reg, len, false);
9468 return "";
9471 const char *
9472 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9474 output_reload_in_const (op, clobber_reg, len, false);
9475 return "";
9478 void
9479 avr_output_bld (rtx operands[], int bit_nr)
9481 static char s[] = "bld %A0,0";
9483 s[5] = 'A' + (bit_nr >> 3);
9484 s[8] = '0' + (bit_nr & 7);
9485 output_asm_insn (s, operands);
9488 void
9489 avr_output_addr_vec_elt (FILE *stream, int value)
9491 if (AVR_HAVE_JMP_CALL)
9492 fprintf (stream, "\t.word gs(.L%d)\n", value);
9493 else
9494 fprintf (stream, "\trjmp .L%d\n", value);
9497 /* Returns true if SCRATCH are safe to be allocated as a scratch
9498 registers (for a define_peephole2) in the current function. */
9500 static bool
9501 avr_hard_regno_scratch_ok (unsigned int regno)
9503 /* Interrupt functions can only use registers that have already been saved
9504 by the prologue, even if they would normally be call-clobbered. */
9506 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9507 && !df_regs_ever_live_p (regno))
9508 return false;
9510 /* Don't allow hard registers that might be part of the frame pointer.
9511 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9512 and don't care for a frame pointer that spans more than one register. */
9514 if ((!reload_completed || frame_pointer_needed)
9515 && (regno == REG_Y || regno == REG_Y + 1))
9517 return false;
9520 return true;
9523 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9526 avr_hard_regno_rename_ok (unsigned int old_reg,
9527 unsigned int new_reg)
9529 /* Interrupt functions can only use registers that have already been
9530 saved by the prologue, even if they would normally be
9531 call-clobbered. */
9533 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9534 && !df_regs_ever_live_p (new_reg))
9535 return 0;
9537 /* Don't allow hard registers that might be part of the frame pointer.
9538 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9539 and don't care for a frame pointer that spans more than one register. */
9541 if ((!reload_completed || frame_pointer_needed)
9542 && (old_reg == REG_Y || old_reg == REG_Y + 1
9543 || new_reg == REG_Y || new_reg == REG_Y + 1))
9545 return 0;
9548 return 1;
9551 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9552 or memory location in the I/O space (QImode only).
9554 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9555 Operand 1: register operand to test, or CONST_INT memory address.
9556 Operand 2: bit number.
9557 Operand 3: label to jump to if the test is true. */
9559 const char *
9560 avr_out_sbxx_branch (rtx insn, rtx operands[])
9562 enum rtx_code comp = GET_CODE (operands[0]);
9563 bool long_jump = get_attr_length (insn) >= 4;
9564 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9566 if (comp == GE)
9567 comp = EQ;
9568 else if (comp == LT)
9569 comp = NE;
9571 if (reverse)
9572 comp = reverse_condition (comp);
9574 switch (GET_CODE (operands[1]))
9576 default:
9577 gcc_unreachable();
9579 case CONST_INT:
9581 if (low_io_address_operand (operands[1], QImode))
9583 if (comp == EQ)
9584 output_asm_insn ("sbis %i1,%2", operands);
9585 else
9586 output_asm_insn ("sbic %i1,%2", operands);
9588 else
9590 output_asm_insn ("in __tmp_reg__,%i1", operands);
9591 if (comp == EQ)
9592 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9593 else
9594 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9597 break; /* CONST_INT */
9599 case REG:
9601 if (GET_MODE (operands[1]) == QImode)
9603 if (comp == EQ)
9604 output_asm_insn ("sbrs %1,%2", operands);
9605 else
9606 output_asm_insn ("sbrc %1,%2", operands);
9608 else /* HImode, PSImode or SImode */
9610 static char buf[] = "sbrc %A1,0";
9611 unsigned int bit_nr = UINTVAL (operands[2]);
9613 buf[3] = (comp == EQ) ? 's' : 'c';
9614 buf[6] = 'A' + (bit_nr / 8);
9615 buf[9] = '0' + (bit_nr % 8);
9616 output_asm_insn (buf, operands);
9619 break; /* REG */
9620 } /* switch */
9622 if (long_jump)
9623 return ("rjmp .+4" CR_TAB
9624 "jmp %x3");
9626 if (!reverse)
9627 return "rjmp %x3";
9629 return "";
9632 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9634 static void
9635 avr_asm_out_ctor (rtx symbol, int priority)
9637 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9638 default_ctor_section_asm_out_constructor (symbol, priority);
9641 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9643 static void
9644 avr_asm_out_dtor (rtx symbol, int priority)
9646 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9647 default_dtor_section_asm_out_destructor (symbol, priority);
9650 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9652 static bool
9653 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9655 if (TYPE_MODE (type) == BLKmode)
9657 HOST_WIDE_INT size = int_size_in_bytes (type);
9658 return (size == -1 || size > 8);
9660 else
9661 return false;
9664 /* Worker function for CASE_VALUES_THRESHOLD. */
9666 static unsigned int
9667 avr_case_values_threshold (void)
9669 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9673 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9675 static enum machine_mode
9676 avr_addr_space_address_mode (addr_space_t as)
9678 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9682 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9684 static enum machine_mode
9685 avr_addr_space_pointer_mode (addr_space_t as)
9687 return avr_addr_space_address_mode (as);
9691 /* Helper for following function. */
9693 static bool
9694 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9696 gcc_assert (REG_P (reg));
9698 if (strict)
9700 return REGNO (reg) == REG_Z;
9703 /* Avoid combine to propagate hard regs. */
9705 if (can_create_pseudo_p()
9706 && REGNO (reg) < REG_Z)
9708 return false;
9711 return true;
9715 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9717 static bool
9718 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9719 bool strict, addr_space_t as)
9721 bool ok = false;
9723 switch (as)
9725 default:
9726 gcc_unreachable();
9728 case ADDR_SPACE_GENERIC:
9729 return avr_legitimate_address_p (mode, x, strict);
9731 case ADDR_SPACE_FLASH:
9732 case ADDR_SPACE_FLASH1:
9733 case ADDR_SPACE_FLASH2:
9734 case ADDR_SPACE_FLASH3:
9735 case ADDR_SPACE_FLASH4:
9736 case ADDR_SPACE_FLASH5:
9738 switch (GET_CODE (x))
9740 case REG:
9741 ok = avr_reg_ok_for_pgm_addr (x, strict);
9742 break;
9744 case POST_INC:
9745 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9746 break;
9748 default:
9749 break;
9752 break; /* FLASH */
9754 case ADDR_SPACE_MEMX:
9755 if (REG_P (x))
9756 ok = (!strict
9757 && can_create_pseudo_p());
9759 if (LO_SUM == GET_CODE (x))
9761 rtx hi = XEXP (x, 0);
9762 rtx lo = XEXP (x, 1);
9764 ok = (REG_P (hi)
9765 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9766 && REG_P (lo)
9767 && REGNO (lo) == REG_Z);
9770 break; /* MEMX */
9773 if (avr_log.legitimate_address_p)
9775 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9776 "reload_completed=%d reload_in_progress=%d %s:",
9777 ok, mode, strict, reload_completed, reload_in_progress,
9778 reg_renumber ? "(reg_renumber)" : "");
9780 if (GET_CODE (x) == PLUS
9781 && REG_P (XEXP (x, 0))
9782 && CONST_INT_P (XEXP (x, 1))
9783 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9784 && reg_renumber)
9786 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9787 true_regnum (XEXP (x, 0)));
9790 avr_edump ("\n%r\n", x);
9793 return ok;
9797 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9799 static rtx
9800 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9801 enum machine_mode mode, addr_space_t as)
9803 if (ADDR_SPACE_GENERIC_P (as))
9804 return avr_legitimize_address (x, old_x, mode);
9806 if (avr_log.legitimize_address)
9808 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9811 return old_x;
9815 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9817 static rtx
9818 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9820 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9821 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9823 if (avr_log.progmem)
9824 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9825 src, type_from, type_to);
9827 /* Up-casting from 16-bit to 24-bit pointer. */
9829 if (as_from != ADDR_SPACE_MEMX
9830 && as_to == ADDR_SPACE_MEMX)
9832 int msb;
9833 rtx sym = src;
9834 rtx reg = gen_reg_rtx (PSImode);
9836 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9837 sym = XEXP (sym, 0);
9839 /* Look at symbol flags: avr_encode_section_info set the flags
9840 also if attribute progmem was seen so that we get the right
9841 promotion for, e.g. PSTR-like strings that reside in generic space
9842 but are located in flash. In that case we patch the incoming
9843 address space. */
9845 if (SYMBOL_REF == GET_CODE (sym)
9846 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9848 as_from = ADDR_SPACE_FLASH;
9851 /* Linearize memory: RAM has bit 23 set. */
9853 msb = ADDR_SPACE_GENERIC_P (as_from)
9854 ? 0x80
9855 : avr_addrspace[as_from].segment % avr_current_arch->n_segments;
9857 src = force_reg (Pmode, src);
9859 emit_insn (msb == 0
9860 ? gen_zero_extendhipsi2 (reg, src)
9861 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9863 return reg;
9866 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9868 if (as_from == ADDR_SPACE_MEMX
9869 && as_to != ADDR_SPACE_MEMX)
9871 rtx new_src = gen_reg_rtx (Pmode);
9873 src = force_reg (PSImode, src);
9875 emit_move_insn (new_src,
9876 simplify_gen_subreg (Pmode, src, PSImode, 0));
9877 return new_src;
9880 return src;
9884 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9886 static bool
9887 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9888 addr_space_t superset ATTRIBUTE_UNUSED)
9890 /* Allow any kind of pointer mess. */
9892 return true;
9896 /* Worker function for movmemhi expander.
9897 XOP[0] Destination as MEM:BLK
9898 XOP[1] Source " "
9899 XOP[2] # Bytes to copy
9901 Return TRUE if the expansion is accomplished.
9902 Return FALSE if the operand compination is not supported. */
9904 bool
9905 avr_emit_movmemhi (rtx *xop)
9907 HOST_WIDE_INT count;
9908 enum machine_mode loop_mode;
9909 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9910 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9911 rtx a_hi8 = NULL_RTX;
9913 if (avr_mem_flash_p (xop[0]))
9914 return false;
9916 if (!CONST_INT_P (xop[2]))
9917 return false;
9919 count = INTVAL (xop[2]);
9920 if (count <= 0)
9921 return false;
9923 a_src = XEXP (xop[1], 0);
9924 a_dest = XEXP (xop[0], 0);
9926 if (PSImode == GET_MODE (a_src))
9928 gcc_assert (as == ADDR_SPACE_MEMX);
9930 loop_mode = (count < 0x100) ? QImode : HImode;
9931 loop_reg = gen_rtx_REG (loop_mode, 24);
9932 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9934 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9935 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9937 else
9939 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
9941 if (segment
9942 && avr_current_arch->n_segments > 1)
9944 a_hi8 = GEN_INT (segment);
9945 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9947 else if (!ADDR_SPACE_GENERIC_P (as))
9949 as = ADDR_SPACE_FLASH;
9952 addr1 = a_src;
9954 loop_mode = (count <= 0x100) ? QImode : HImode;
9955 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9958 xas = GEN_INT (as);
9960 /* FIXME: Register allocator might come up with spill fails if it is left
9961 on its own. Thus, we allocate the pointer registers by hand:
9962 Z = source address
9963 X = destination address */
9965 emit_move_insn (lpm_addr_reg_rtx, addr1);
9966 addr1 = lpm_addr_reg_rtx;
9968 reg_x = gen_rtx_REG (HImode, REG_X);
9969 emit_move_insn (reg_x, a_dest);
9970 addr0 = reg_x;
9972 /* FIXME: Register allocator does a bad job and might spill address
9973 register(s) inside the loop leading to additional move instruction
9974 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9975 load and store as seperate insns. Instead, we perform the copy
9976 by means of one monolithic insn. */
9978 gcc_assert (TMP_REGNO == LPM_REGNO);
9980 if (as != ADDR_SPACE_MEMX)
9982 /* Load instruction ([E]LPM or LD) is known at compile time:
9983 Do the copy-loop inline. */
9985 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9986 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9988 insn = fun (addr0, addr1, xas, loop_reg,
9989 addr0, addr1, tmp_reg_rtx, loop_reg);
9991 else
9993 rtx loop_reg16 = gen_rtx_REG (HImode, 24);
9994 rtx r23 = gen_rtx_REG (QImode, 23);
9995 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9996 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
9998 emit_move_insn (r23, a_hi8);
10000 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
10001 lpm_reg_rtx, loop_reg16, r23, r23, GEN_INT (avr_addr.rampz));
10004 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
10005 emit_insn (insn);
10007 return true;
10011 /* Print assembler for movmem_qi, movmem_hi insns...
10012 $0, $4 : & dest
10013 $1, $5 : & src
10014 $2 : Address Space
10015 $3, $7 : Loop register
10016 $6 : Scratch register
10018 ...and movmem_qi_elpm, movmem_hi_elpm insns.
10020 $8, $9 : hh8 (& src)
10021 $10 : RAMPZ_ADDR
10024 const char*
10025 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
10027 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
10028 enum machine_mode loop_mode = GET_MODE (xop[3]);
10030 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
10032 gcc_assert (REG_X == REGNO (xop[0])
10033 && REG_Z == REGNO (xop[1]));
10035 if (plen)
10036 *plen = 0;
10038 /* Loop label */
10040 avr_asm_len ("0:", xop, plen, 0);
10042 /* Load with post-increment */
10044 switch (as)
10046 default:
10047 gcc_unreachable();
10049 case ADDR_SPACE_GENERIC:
10051 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
10052 break;
10054 case ADDR_SPACE_FLASH:
10056 if (AVR_HAVE_LPMX)
10057 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
10058 else
10059 avr_asm_len ("lpm" CR_TAB
10060 "adiw %1,1", xop, plen, 2);
10061 break;
10063 case ADDR_SPACE_FLASH1:
10064 case ADDR_SPACE_FLASH2:
10065 case ADDR_SPACE_FLASH3:
10066 case ADDR_SPACE_FLASH4:
10067 case ADDR_SPACE_FLASH5:
10069 if (AVR_HAVE_ELPMX)
10070 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
10071 else
10072 avr_asm_len ("elpm" CR_TAB
10073 "adiw %1,1", xop, plen, 2);
10074 break;
10077 /* Store with post-increment */
10079 avr_asm_len ("st %a0+,%6", xop, plen, 1);
10081 /* Decrement loop-counter and set Z-flag */
10083 if (QImode == loop_mode)
10085 avr_asm_len ("dec %3", xop, plen, 1);
10087 else if (sbiw_p)
10089 avr_asm_len ("sbiw %3,1", xop, plen, 1);
10091 else
10093 avr_asm_len ("subi %A3,1" CR_TAB
10094 "sbci %B3,0", xop, plen, 2);
10097 /* Loop until zero */
10099 return avr_asm_len ("brne 0b", xop, plen, 1);
10104 /* Helper for __builtin_avr_delay_cycles */
10106 static void
10107 avr_expand_delay_cycles (rtx operands0)
10109 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
10110 unsigned HOST_WIDE_INT cycles_used;
10111 unsigned HOST_WIDE_INT loop_count;
10113 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
10115 loop_count = ((cycles - 9) / 6) + 1;
10116 cycles_used = ((loop_count - 1) * 6) + 9;
10117 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
10118 cycles -= cycles_used;
10121 if (IN_RANGE (cycles, 262145, 83886081))
10123 loop_count = ((cycles - 7) / 5) + 1;
10124 if (loop_count > 0xFFFFFF)
10125 loop_count = 0xFFFFFF;
10126 cycles_used = ((loop_count - 1) * 5) + 7;
10127 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
10128 cycles -= cycles_used;
10131 if (IN_RANGE (cycles, 768, 262144))
10133 loop_count = ((cycles - 5) / 4) + 1;
10134 if (loop_count > 0xFFFF)
10135 loop_count = 0xFFFF;
10136 cycles_used = ((loop_count - 1) * 4) + 5;
10137 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
10138 cycles -= cycles_used;
10141 if (IN_RANGE (cycles, 6, 767))
10143 loop_count = cycles / 3;
10144 if (loop_count > 255)
10145 loop_count = 255;
10146 cycles_used = loop_count * 3;
10147 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
10148 cycles -= cycles_used;
10151 while (cycles >= 2)
10153 emit_insn (gen_nopv (GEN_INT(2)));
10154 cycles -= 2;
10157 if (cycles == 1)
10159 emit_insn (gen_nopv (GEN_INT(1)));
10160 cycles--;
10165 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
10167 static double_int
10168 avr_double_int_push_digit (double_int val, int base,
10169 unsigned HOST_WIDE_INT digit)
10171 val = 0 == base
10172 ? double_int_lshift (val, 32, 64, false)
10173 : double_int_mul (val, uhwi_to_double_int (base));
10175 return double_int_add (val, uhwi_to_double_int (digit));
10179 /* Compute the image of x under f, i.e. perform x --> f(x) */
10181 static int
10182 avr_map (double_int f, int x)
10184 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
10188 /* Return some metrics of map A. */
10190 enum
10192 /* Number of fixed points in { 0 ... 7 } */
10193 MAP_FIXED_0_7,
10195 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10196 MAP_NONFIXED_0_7,
10198 /* Mask representing the fixed points in { 0 ... 7 } */
10199 MAP_MASK_FIXED_0_7,
10201 /* Size of the preimage of { 0 ... 7 } */
10202 MAP_PREIMAGE_0_7,
10204 /* Mask that represents the preimage of { f } */
10205 MAP_MASK_PREIMAGE_F
10208 static unsigned
10209 avr_map_metric (double_int a, int mode)
10211 unsigned i, metric = 0;
10213 for (i = 0; i < 8; i++)
10215 unsigned ai = avr_map (a, i);
10217 if (mode == MAP_FIXED_0_7)
10218 metric += ai == i;
10219 else if (mode == MAP_NONFIXED_0_7)
10220 metric += ai < 8 && ai != i;
10221 else if (mode == MAP_MASK_FIXED_0_7)
10222 metric |= ((unsigned) (ai == i)) << i;
10223 else if (mode == MAP_PREIMAGE_0_7)
10224 metric += ai < 8;
10225 else if (mode == MAP_MASK_PREIMAGE_F)
10226 metric |= ((unsigned) (ai == 0xf)) << i;
10227 else
10228 gcc_unreachable();
10231 return metric;
10235 /* Return true if IVAL has a 0xf in its hexadecimal representation
10236 and false, otherwise. Only nibbles 0..7 are taken into account.
10237 Used as constraint helper for C0f and Cxf. */
10239 bool
10240 avr_has_nibble_0xf (rtx ival)
10242 return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
10246 /* We have a set of bits that are mapped by a function F.
10247 Try to decompose F by means of a second function G so that
10249 F = F o G^-1 o G
10253 cost (F o G^-1) + cost (G) < cost (F)
10255 Example: Suppose builtin insert_bits supplies us with the map
10256 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10257 nibble of the result, we can just as well rotate the bits before inserting
10258 them and use the map 0x7654ffff which is cheaper than the original map.
10259 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
10261 typedef struct
10263 /* tree code of binary function G */
10264 enum tree_code code;
10266 /* The constant second argument of G */
10267 int arg;
10269 /* G^-1, the inverse of G (*, arg) */
10270 unsigned ginv;
10272 /* The cost of appplying G (*, arg) */
10273 int cost;
10275 /* The composition F o G^-1 (*, arg) for some function F */
10276 double_int map;
10278 /* For debug purpose only */
10279 const char *str;
10280 } avr_map_op_t;
10282 static const avr_map_op_t avr_map_op[] =
10284 { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
10285 { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10286 { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10287 { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10288 { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10289 { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10290 { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10291 { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10292 { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10293 { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10294 { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10295 { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10296 { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10297 { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10298 { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10302 /* Try to decompose F as F = (F o G^-1) o G as described above.
10303 The result is a struct representing F o G^-1 and G.
10304 If result.cost < 0 then such a decomposition does not exist. */
10306 static avr_map_op_t
10307 avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
10309 int i;
10310 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
10311 avr_map_op_t f_ginv = *g;
10312 double_int ginv = uhwi_to_double_int (g->ginv);
10314 f_ginv.cost = -1;
10316 /* Step 1: Computing F o G^-1 */
10318 for (i = 7; i >= 0; i--)
10320 int x = avr_map (f, i);
10322 if (x <= 7)
10324 x = avr_map (ginv, x);
10326 /* The bit is no element of the image of G: no avail (cost = -1) */
10328 if (x > 7)
10329 return f_ginv;
10332 f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
10335 /* Step 2: Compute the cost of the operations.
10336 The overall cost of doing an operation prior to the insertion is
10337 the cost of the insertion plus the cost of the operation. */
10339 /* Step 2a: Compute cost of F o G^-1 */
10341 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
10343 /* The mapping consists only of fixed points and can be folded
10344 to AND/OR logic in the remainder. Reasonable cost is 3. */
10346 f_ginv.cost = 2 + (val_used_p && !val_const_p);
10348 else
10350 rtx xop[4];
10352 /* Get the cost of the insn by calling the output worker with some
10353 fake values. Mimic effect of reloading xop[3]: Unused operands
10354 are mapped to 0 and used operands are reloaded to xop[0]. */
10356 xop[0] = all_regs_rtx[24];
10357 xop[1] = gen_int_mode (double_int_to_uhwi (f_ginv.map), SImode);
10358 xop[2] = all_regs_rtx[25];
10359 xop[3] = val_used_p ? xop[0] : const0_rtx;
10361 avr_out_insert_bits (xop, &f_ginv.cost);
10363 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
10366 /* Step 2b: Add cost of G */
10368 f_ginv.cost += g->cost;
10370 if (avr_log.builtin)
10371 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
10373 return f_ginv;
10377 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10378 XOP[0] and XOP[1] don't overlap.
10379 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10380 If FIXP_P = false: Just move the bit if its position in the destination
10381 is different to its source position. */
10383 static void
10384 avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
10386 int bit_dest, b;
10388 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10389 int t_bit_src = -1;
10391 /* We order the operations according to the requested source bit b. */
10393 for (b = 0; b < 8; b++)
10394 for (bit_dest = 0; bit_dest < 8; bit_dest++)
10396 int bit_src = avr_map (map, bit_dest);
10398 if (b != bit_src
10399 || bit_src >= 8
10400 /* Same position: No need to copy as requested by FIXP_P. */
10401 || (bit_dest == bit_src && !fixp_p))
10402 continue;
10404 if (t_bit_src != bit_src)
10406 /* Source bit is not yet in T: Store it to T. */
10408 t_bit_src = bit_src;
10410 xop[3] = GEN_INT (bit_src);
10411 avr_asm_len ("bst %T1%T3", xop, plen, 1);
10414 /* Load destination bit with T. */
10416 xop[3] = GEN_INT (bit_dest);
10417 avr_asm_len ("bld %T0%T3", xop, plen, 1);
10422 /* PLEN == 0: Print assembler code for `insert_bits'.
10423 PLEN != 0: Compute code length in bytes.
10425 OP[0]: Result
10426 OP[1]: The mapping composed of nibbles. If nibble no. N is
10427 0: Bit N of result is copied from bit OP[2].0
10428 ... ...
10429 7: Bit N of result is copied from bit OP[2].7
10430 0xf: Bit N of result is copied from bit OP[3].N
10431 OP[2]: Bits to be inserted
10432 OP[3]: Target value */
10434 const char*
10435 avr_out_insert_bits (rtx *op, int *plen)
10437 double_int map = rtx_to_double_int (op[1]);
10438 unsigned mask_fixed;
10439 bool fixp_p = true;
10440 rtx xop[4];
10442 xop[0] = op[0];
10443 xop[1] = op[2];
10444 xop[2] = op[3];
10446 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
10448 if (plen)
10449 *plen = 0;
10450 else if (flag_print_asm_name)
10451 fprintf (asm_out_file,
10452 ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
10453 double_int_to_uhwi (map) & GET_MODE_MASK (SImode));
10455 /* If MAP has fixed points it might be better to initialize the result
10456 with the bits to be inserted instead of moving all bits by hand. */
10458 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
10460 if (REGNO (xop[0]) == REGNO (xop[1]))
10462 /* Avoid early-clobber conflicts */
10464 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10465 xop[1] = tmp_reg_rtx;
10466 fixp_p = false;
10469 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10471 /* XOP[2] is used and reloaded to XOP[0] already */
10473 int n_fix = 0, n_nofix = 0;
10475 gcc_assert (REG_P (xop[2]));
10477 /* Get the code size of the bit insertions; once with all bits
10478 moved and once with fixed points omitted. */
10480 avr_move_bits (xop, map, true, &n_fix);
10481 avr_move_bits (xop, map, false, &n_nofix);
10483 if (fixp_p && n_fix - n_nofix > 3)
10485 xop[3] = gen_int_mode (~mask_fixed, QImode);
10487 avr_asm_len ("eor %0,%1" CR_TAB
10488 "andi %0,%3" CR_TAB
10489 "eor %0,%1", xop, plen, 3);
10490 fixp_p = false;
10493 else
10495 /* XOP[2] is unused */
10497 if (fixp_p && mask_fixed)
10499 avr_asm_len ("mov %0,%1", xop, plen, 1);
10500 fixp_p = false;
10504 /* Move/insert remaining bits. */
10506 avr_move_bits (xop, map, fixp_p, plen);
10508 return "";
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
    AVR_BUILTIN_NOP,
    AVR_BUILTIN_SEI,
    AVR_BUILTIN_CLI,
    AVR_BUILTIN_WDR,
    AVR_BUILTIN_SLEEP,
    AVR_BUILTIN_SWAP,
    AVR_BUILTIN_INSERT_BITS,
    AVR_BUILTIN_FMUL,
    AVR_BUILTIN_FMULS,
    AVR_BUILTIN_FMULSU,
    AVR_BUILTIN_DELAY_CYCLES
  };
10529 static void
10530 avr_init_builtin_int24 (void)
10532 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10533 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10535 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10536 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10539 #define DEF_BUILTIN(NAME, TYPE, CODE) \
10540 do \
10542 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10543 NULL, NULL_TREE); \
10544 } while (0)
10547 /* Implement `TARGET_INIT_BUILTINS' */
10548 /* Set up all builtin functions for this target. */
10550 static void
10551 avr_init_builtins (void)
10553 tree void_ftype_void
10554 = build_function_type_list (void_type_node, NULL_TREE);
10555 tree uchar_ftype_uchar
10556 = build_function_type_list (unsigned_char_type_node,
10557 unsigned_char_type_node,
10558 NULL_TREE);
10559 tree uint_ftype_uchar_uchar
10560 = build_function_type_list (unsigned_type_node,
10561 unsigned_char_type_node,
10562 unsigned_char_type_node,
10563 NULL_TREE);
10564 tree int_ftype_char_char
10565 = build_function_type_list (integer_type_node,
10566 char_type_node,
10567 char_type_node,
10568 NULL_TREE);
10569 tree int_ftype_char_uchar
10570 = build_function_type_list (integer_type_node,
10571 char_type_node,
10572 unsigned_char_type_node,
10573 NULL_TREE);
10574 tree void_ftype_ulong
10575 = build_function_type_list (void_type_node,
10576 long_unsigned_type_node,
10577 NULL_TREE);
10579 tree uchar_ftype_ulong_uchar_uchar
10580 = build_function_type_list (unsigned_char_type_node,
10581 long_unsigned_type_node,
10582 unsigned_char_type_node,
10583 unsigned_char_type_node,
10584 NULL_TREE);
10586 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
10587 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
10588 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
10589 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
10590 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
10591 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
10592 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
10593 AVR_BUILTIN_DELAY_CYCLES);
10595 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
10596 AVR_BUILTIN_FMUL);
10597 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
10598 AVR_BUILTIN_FMULS);
10599 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
10600 AVR_BUILTIN_FMULSU);
10602 DEF_BUILTIN ("__builtin_avr_insert_bits", uchar_ftype_ulong_uchar_uchar,
10603 AVR_BUILTIN_INSERT_BITS);
10605 avr_init_builtin_int24 ();
10608 #undef DEF_BUILTIN
/* Description of a builtin that is expanded by mapping it directly
   onto a single named insn pattern; used by the bdesc_* tables and
   the generic expanders below.  */

struct avr_builtin_description
{
  /* The insn pattern used to expand this builtin.  */
  const enum insn_code icode;

  /* User-visible name, e.g. "__builtin_avr_fmul".  */
  const char *const name;

  /* Function code from enum avr_builtin_id used for the lookup.  */
  const enum avr_builtin_id id;
};
/* Builtins taking one operand that map directly onto one insn;
   expanded by avr_expand_unop_builtin.  */

static const struct avr_builtin_description
bdesc_1arg[] =
  {
    { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
  };
/* Builtins taking two operands that map directly onto one insn;
   expanded by avr_expand_binop_builtin.  */

static const struct avr_builtin_description
bdesc_2arg[] =
  {
    { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
    { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
    { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
  };
/* Builtins taking three operands that map directly onto one insn;
   expanded by avr_expand_triop_builtin.  */

static const struct avr_builtin_description
bdesc_3arg[] =
  {
    { CODE_FOR_insert_bits, "__builtin_avr_insert_bits",
      AVR_BUILTIN_INSERT_BITS }
  };
10638 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10640 static rtx
10641 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10642 rtx target)
10644 rtx pat;
10645 tree arg0 = CALL_EXPR_ARG (exp, 0);
10646 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10647 enum machine_mode op0mode = GET_MODE (op0);
10648 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10649 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10651 if (! target
10652 || GET_MODE (target) != tmode
10653 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10655 target = gen_reg_rtx (tmode);
10658 if (op0mode == SImode && mode0 == HImode)
10660 op0mode = HImode;
10661 op0 = gen_lowpart (HImode, op0);
10664 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10666 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10667 op0 = copy_to_mode_reg (mode0, op0);
10669 pat = GEN_FCN (icode) (target, op0);
10670 if (! pat)
10671 return 0;
10673 emit_insn (pat);
10675 return target;
10679 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10681 static rtx
10682 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10684 rtx pat;
10685 tree arg0 = CALL_EXPR_ARG (exp, 0);
10686 tree arg1 = CALL_EXPR_ARG (exp, 1);
10687 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10688 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10689 enum machine_mode op0mode = GET_MODE (op0);
10690 enum machine_mode op1mode = GET_MODE (op1);
10691 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10692 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10693 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10695 if (! target
10696 || GET_MODE (target) != tmode
10697 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10699 target = gen_reg_rtx (tmode);
10702 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10704 op0mode = HImode;
10705 op0 = gen_lowpart (HImode, op0);
10708 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10710 op1mode = HImode;
10711 op1 = gen_lowpart (HImode, op1);
10714 /* In case the insn wants input operands in modes different from
10715 the result, abort. */
10717 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10718 && (op1mode == mode1 || op1mode == VOIDmode));
10720 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10721 op0 = copy_to_mode_reg (mode0, op0);
10723 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10724 op1 = copy_to_mode_reg (mode1, op1);
10726 pat = GEN_FCN (icode) (target, op0, op1);
10728 if (! pat)
10729 return 0;
10731 emit_insn (pat);
10732 return target;
10735 /* Subroutine of avr_expand_builtin to take care of 3-operand insns. */
10737 static rtx
10738 avr_expand_triop_builtin (enum insn_code icode, tree exp, rtx target)
10740 rtx pat;
10741 tree arg0 = CALL_EXPR_ARG (exp, 0);
10742 tree arg1 = CALL_EXPR_ARG (exp, 1);
10743 tree arg2 = CALL_EXPR_ARG (exp, 2);
10744 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10745 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10746 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10747 enum machine_mode op0mode = GET_MODE (op0);
10748 enum machine_mode op1mode = GET_MODE (op1);
10749 enum machine_mode op2mode = GET_MODE (op2);
10750 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10751 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10752 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10753 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
10755 if (! target
10756 || GET_MODE (target) != tmode
10757 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10759 target = gen_reg_rtx (tmode);
10762 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10764 op0mode = HImode;
10765 op0 = gen_lowpart (HImode, op0);
10768 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10770 op1mode = HImode;
10771 op1 = gen_lowpart (HImode, op1);
10774 if ((op2mode == SImode || op2mode == VOIDmode) && mode2 == HImode)
10776 op2mode = HImode;
10777 op2 = gen_lowpart (HImode, op2);
10780 /* In case the insn wants input operands in modes different from
10781 the result, abort. */
10783 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10784 && (op1mode == mode1 || op1mode == VOIDmode)
10785 && (op2mode == mode2 || op2mode == VOIDmode));
10787 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10788 op0 = copy_to_mode_reg (mode0, op0);
10790 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10791 op1 = copy_to_mode_reg (mode1, op1);
10793 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
10794 op2 = copy_to_mode_reg (mode2, op2);
10796 pat = GEN_FCN (icode) (target, op0, op1, op2);
10798 if (! pat)
10799 return 0;
10801 emit_insn (pat);
10802 return target;
10806 /* Expand an expression EXP that calls a built-in function,
10807 with result going to TARGET if that's convenient
10808 (and in mode MODE if that's convenient).
10809 SUBTARGET may be used as the target for computing one of EXP's operands.
10810 IGNORE is nonzero if the value is to be ignored. */
10812 static rtx
10813 avr_expand_builtin (tree exp, rtx target,
10814 rtx subtarget ATTRIBUTE_UNUSED,
10815 enum machine_mode mode ATTRIBUTE_UNUSED,
10816 int ignore ATTRIBUTE_UNUSED)
10818 size_t i;
10819 const struct avr_builtin_description *d;
10820 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10821 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10822 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10823 tree arg0;
10824 rtx op0;
10826 switch (id)
10828 case AVR_BUILTIN_NOP:
10829 emit_insn (gen_nopv (GEN_INT(1)));
10830 return 0;
10832 case AVR_BUILTIN_SEI:
10833 emit_insn (gen_enable_interrupt ());
10834 return 0;
10836 case AVR_BUILTIN_CLI:
10837 emit_insn (gen_disable_interrupt ());
10838 return 0;
10840 case AVR_BUILTIN_WDR:
10841 emit_insn (gen_wdr ());
10842 return 0;
10844 case AVR_BUILTIN_SLEEP:
10845 emit_insn (gen_sleep ());
10846 return 0;
10848 case AVR_BUILTIN_DELAY_CYCLES:
10850 arg0 = CALL_EXPR_ARG (exp, 0);
10851 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10853 if (! CONST_INT_P (op0))
10854 error ("%s expects a compile time integer constant", bname);
10856 avr_expand_delay_cycles (op0);
10857 return 0;
10860 case AVR_BUILTIN_INSERT_BITS:
10862 arg0 = CALL_EXPR_ARG (exp, 0);
10863 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10865 if (!CONST_INT_P (op0))
10867 error ("%s expects a compile time long integer constant"
10868 " as first argument", bname);
10869 return target;
10874 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10875 if (d->id == id)
10876 return avr_expand_unop_builtin (d->icode, exp, target);
10878 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10879 if (d->id == id)
10880 return avr_expand_binop_builtin (d->icode, exp, target);
10882 for (i = 0, d = bdesc_3arg; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
10883 if (d->id == id)
10884 return avr_expand_triop_builtin (d->icode, exp, target);
10886 gcc_unreachable ();
/* Implement `TARGET_FOLD_BUILTIN'.
   Currently only __builtin_avr_insert_bits is folded: constant bit
   patterns are reduced to plain AND/OR arithmetic, and expensive maps
   are decomposed into a cheaper map plus a rotate/shift of the second
   argument.  Returns the folded tree or NULL_TREE if nothing can be
   done.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  /* Folding is purely an optimization here; skip it at -O0.  */
  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        /* MAP describes, nibble-wise, where each result bit comes from;
           presumably 0..7 select a bit of ARG[1] and 0xf selects the
           corresponding bit of ARG[2] -- see avr_map / avr_map_metric.  */
        double_int map = tree_to_double_int (arg[0]);
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        tmap = double_int_to_tree (map_type, map);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    /* Result bit I is bit MI of the constant BITS:
                       set it via IOR or clear it via AND.  */
                    if (bits & (1 << mi)) mask_ior |= (1 << i);
                    else mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (BITS ^ VAL) & ~F-mask ^ VAL selects BITS where the map
               passes bits through and VAL elsewhere.  */
            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try to decomposing map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %X\n%?: ROL cost: ", map);

        /* Search all candidate operations for the cheapest decomposition
           MAP = MAP' o G; 1000 acts as an "infinite" initial cost.  */

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = double_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
/* Instantiate the target hook vector from the TARGET_* macros defined
   earlier in this file.  */

struct gcc_target targetm = TARGET_INITIALIZER;
11035 #include "gt-avr.h"