* config/avr/avr.c: Resolve all AS1 and AS2 macros.
[official-gcc.git] / gcc / config / avr / avr.c
blob6ff8b791363339c32dd725e7350572da03d7ccf1
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
40 #include "obstack.h"
41 #include "function.h"
42 #include "recog.h"
43 #include "optabs.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "params.h"
50 #include "df.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
71 do { \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
74 } while (0)
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0 },
93 { 0 , 0, 0, NULL, 0 }
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix[6] =
99 ".progmem.data",
100 ".progmem1.data",
101 ".progmem2.data",
102 ".progmem3.data",
103 ".progmem4.data",
104 ".progmem5.data"
108 /* Prototypes for local helper functions. */
110 static const char* out_movqi_r_mr (rtx, rtx[], int*);
111 static const char* out_movhi_r_mr (rtx, rtx[], int*);
112 static const char* out_movsi_r_mr (rtx, rtx[], int*);
113 static const char* out_movqi_mr_r (rtx, rtx[], int*);
114 static const char* out_movhi_mr_r (rtx, rtx[], int*);
115 static const char* out_movsi_mr_r (rtx, rtx[], int*);
117 static int avr_naked_function_p (tree);
118 static int interrupt_function_p (tree);
119 static int signal_function_p (tree);
120 static int avr_OS_task_function_p (tree);
121 static int avr_OS_main_function_p (tree);
122 static int avr_regs_to_save (HARD_REG_SET *);
123 static int get_sequence_length (rtx insns);
124 static int sequent_regs_live (void);
125 static const char *ptrreg_to_str (int);
126 static const char *cond_string (enum rtx_code);
127 static int avr_num_arg_regs (enum machine_mode, const_tree);
128 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
129 int, bool);
130 static void output_reload_in_const (rtx*, rtx, int*, bool);
131 static struct machine_function * avr_init_machine_status (void);
134 /* Prototypes for hook implementors if needed before their implementation. */
136 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
139 /* Allocate registers from r25 to r8 for parameters for function calls. */
140 #define FIRST_CUM_REG 26
142 /* Implicit target register of LPM instruction (R0) */
143 extern GTY(()) rtx lpm_reg_rtx;
144 rtx lpm_reg_rtx;
146 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
147 extern GTY(()) rtx lpm_addr_reg_rtx;
148 rtx lpm_addr_reg_rtx;
150 /* Temporary register RTX (reg:QI TMP_REGNO) */
151 extern GTY(()) rtx tmp_reg_rtx;
152 rtx tmp_reg_rtx;
154 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
155 extern GTY(()) rtx zero_reg_rtx;
156 rtx zero_reg_rtx;
158 /* RTXs for all general purpose registers as QImode */
159 extern GTY(()) rtx all_regs_rtx[32];
160 rtx all_regs_rtx[32];
162 /* RAMPZ special function register */
163 extern GTY(()) rtx rampz_rtx;
164 rtx rampz_rtx;
166 /* RTX containing the strings "" and "e", respectively */
167 static GTY(()) rtx xstring_empty;
168 static GTY(()) rtx xstring_e;
170 /* Preprocessor macros to define depending on MCU type. */
171 const char *avr_extra_arch_macro;
173 /* Current architecture. */
174 const struct base_arch_s *avr_current_arch;
176 /* Current device. */
177 const struct mcu_type_s *avr_current_device;
179 /* Section to put switch tables in. */
180 static GTY(()) section *progmem_swtable_section;
182 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
183 or to address space __flash*. */
184 static GTY(()) section *progmem_section[6];
186 /* Condition for insns/expanders from avr-dimode.md. */
187 bool avr_have_dimode = true;
189 /* To track if code will use .bss and/or .data. */
190 bool avr_need_clear_bss_p = false;
191 bool avr_need_copy_data_p = false;
194 /* Initialize the GCC target structure. */
195 #undef TARGET_ASM_ALIGNED_HI_OP
196 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
197 #undef TARGET_ASM_ALIGNED_SI_OP
198 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
199 #undef TARGET_ASM_UNALIGNED_HI_OP
200 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
201 #undef TARGET_ASM_UNALIGNED_SI_OP
202 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
203 #undef TARGET_ASM_INTEGER
204 #define TARGET_ASM_INTEGER avr_assemble_integer
205 #undef TARGET_ASM_FILE_START
206 #define TARGET_ASM_FILE_START avr_file_start
207 #undef TARGET_ASM_FILE_END
208 #define TARGET_ASM_FILE_END avr_file_end
210 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
211 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
212 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
213 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
215 #undef TARGET_FUNCTION_VALUE
216 #define TARGET_FUNCTION_VALUE avr_function_value
217 #undef TARGET_LIBCALL_VALUE
218 #define TARGET_LIBCALL_VALUE avr_libcall_value
219 #undef TARGET_FUNCTION_VALUE_REGNO_P
220 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
222 #undef TARGET_ATTRIBUTE_TABLE
223 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
224 #undef TARGET_INSERT_ATTRIBUTES
225 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
226 #undef TARGET_SECTION_TYPE_FLAGS
227 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
229 #undef TARGET_ASM_NAMED_SECTION
230 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
231 #undef TARGET_ASM_INIT_SECTIONS
232 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
233 #undef TARGET_ENCODE_SECTION_INFO
234 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
235 #undef TARGET_ASM_SELECT_SECTION
236 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
238 #undef TARGET_REGISTER_MOVE_COST
239 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
240 #undef TARGET_MEMORY_MOVE_COST
241 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
242 #undef TARGET_RTX_COSTS
243 #define TARGET_RTX_COSTS avr_rtx_costs
244 #undef TARGET_ADDRESS_COST
245 #define TARGET_ADDRESS_COST avr_address_cost
246 #undef TARGET_MACHINE_DEPENDENT_REORG
247 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
248 #undef TARGET_FUNCTION_ARG
249 #define TARGET_FUNCTION_ARG avr_function_arg
250 #undef TARGET_FUNCTION_ARG_ADVANCE
251 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
253 #undef TARGET_RETURN_IN_MEMORY
254 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
256 #undef TARGET_STRICT_ARGUMENT_NAMING
257 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
259 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
260 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
262 #undef TARGET_HARD_REGNO_SCRATCH_OK
263 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
264 #undef TARGET_CASE_VALUES_THRESHOLD
265 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
267 #undef TARGET_FRAME_POINTER_REQUIRED
268 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
269 #undef TARGET_CAN_ELIMINATE
270 #define TARGET_CAN_ELIMINATE avr_can_eliminate
272 #undef TARGET_CLASS_LIKELY_SPILLED_P
273 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
275 #undef TARGET_OPTION_OVERRIDE
276 #define TARGET_OPTION_OVERRIDE avr_option_override
278 #undef TARGET_CANNOT_MODIFY_JUMPS_P
279 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
281 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
282 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
284 #undef TARGET_INIT_BUILTINS
285 #define TARGET_INIT_BUILTINS avr_init_builtins
287 #undef TARGET_EXPAND_BUILTIN
288 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
290 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
291 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
293 #undef TARGET_SCALAR_MODE_SUPPORTED_P
294 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
296 #undef TARGET_ADDR_SPACE_SUBSET_P
297 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
299 #undef TARGET_ADDR_SPACE_CONVERT
300 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
302 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
303 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
305 #undef TARGET_ADDR_SPACE_POINTER_MODE
306 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
308 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
309 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
311 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
312 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
314 #undef TARGET_PRINT_OPERAND
315 #define TARGET_PRINT_OPERAND avr_print_operand
316 #undef TARGET_PRINT_OPERAND_ADDRESS
317 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
318 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
319 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
323 /* Custom function to count number of set bits. */
/* Count the number of 1-bits in VAL.  */

static inline int
avr_popcount (unsigned int val)
{
  int count = 0;

  /* Kernighan's method: each step clears the lowest set bit,
     so the loop runs once per set bit.  */
  for (; val != 0; val &= val - 1)
    count++;

  return count;
}
340 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
341 Return true if the least significant N_BYTES bytes of XVAL all have a
342 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
343 of integers which contains an integer N iff bit N of POP_MASK is set. */
345 bool
346 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
348 int i;
350 enum machine_mode mode = GET_MODE (xval);
352 if (VOIDmode == mode)
353 mode = SImode;
355 for (i = 0; i < n_bytes; i++)
357 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
358 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
360 if (0 == (pop_mask & (1 << avr_popcount (val8))))
361 return false;
364 return true;
/* Implement `TARGET_OPTION_OVERRIDE': adjust global option flags for AVR
   after command-line processing.  */

static void
avr_option_override (void)
{
  /* NOTE(review): address 0 is presumably a valid object address on AVR,
     so deleting NULL-pointer checks would be wrong here — confirm.  */
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries so save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might leads to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  /* Resolve the selected MCU into the current device/architecture
     descriptors and the preprocessor macro for this device.  */
  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];
  avr_extra_arch_macro = avr_current_device->macro;

  /* Hook for allocating the per-function machine_function struct.  */
  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  /* GC-allocated and zero-initialized, so all is_* flags start false
     and stack_usage starts at 0.  */
  return ggc_alloc_cleared_machine_function ();
}
412 /* Implement `INIT_EXPANDERS'. */
413 /* The function works like a singleton. */
415 void
416 avr_init_expanders (void)
418 int regno;
420 static bool done = false;
422 if (done)
423 return;
424 else
425 done = true;
427 for (regno = 0; regno < 32; regno ++)
428 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
430 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
431 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
432 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
434 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
436 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
438 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
439 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
443 /* Return register class for register R. */
445 enum reg_class
446 avr_regno_reg_class (int r)
448 static const enum reg_class reg_class_tab[] =
450 R0_REG,
451 /* r1 - r15 */
452 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
453 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
454 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
455 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
456 /* r16 - r23 */
457 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
458 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
459 /* r24, r25 */
460 ADDW_REGS, ADDW_REGS,
461 /* X: r26, 27 */
462 POINTER_X_REGS, POINTER_X_REGS,
463 /* Y: r28, r29 */
464 POINTER_Y_REGS, POINTER_Y_REGS,
465 /* Z: r30, r31 */
466 POINTER_Z_REGS, POINTER_Z_REGS,
467 /* SP: SPL, SPH */
468 STACK_REG, STACK_REG
471 if (r <= 33)
472 return reg_class_tab[r];
474 return ALL_REGS;
478 static bool
479 avr_scalar_mode_supported_p (enum machine_mode mode)
481 if (PSImode == mode)
482 return true;
484 return default_scalar_mode_supported_p (mode);
488 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
490 static bool
491 avr_decl_flash_p (tree decl)
493 if (TREE_CODE (decl) != VAR_DECL
494 || TREE_TYPE (decl) == error_mark_node)
496 return false;
499 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
503 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
504 address space and FALSE, otherwise. */
506 static bool
507 avr_decl_memx_p (tree decl)
509 if (TREE_CODE (decl) != VAR_DECL
510 || TREE_TYPE (decl) == error_mark_node)
512 return false;
515 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
519 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
521 bool
522 avr_mem_flash_p (rtx x)
524 return (MEM_P (x)
525 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
529 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
530 address space and FALSE, otherwise. */
532 bool
533 avr_mem_memx_p (rtx x)
535 return (MEM_P (x)
536 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
540 /* A helper for the subsequent function attribute used to dig for
541 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
543 static inline int
544 avr_lookup_function_attribute1 (const_tree func, const char *name)
546 if (FUNCTION_DECL == TREE_CODE (func))
548 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
550 return true;
553 func = TREE_TYPE (func);
556 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
557 || TREE_CODE (func) == METHOD_TYPE);
559 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
/* Return nonzero if FUNC is a naked function, i.e. carries the
   "naked" attribute on its decl or type.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}
/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}
/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}
/* Return nonzero if FUNC is an OS_task function, i.e. carries the
   "OS_task" attribute.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}
/* Return nonzero if FUNC is an OS_main function, i.e. carries the
   "OS_main" attribute.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
605 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
606 bool
607 avr_accumulate_outgoing_args (void)
609 if (!cfun)
610 return TARGET_ACCUMULATE_OUTGOING_ARGS;
612 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
613 what offset is correct. In some cases it is relative to
614 virtual_outgoing_args_rtx and in others it is relative to
615 virtual_stack_vars_rtx. For example code see
616 gcc.c-torture/execute/built-in-setjmp.c
617 gcc.c-torture/execute/builtins/sprintf-chk.c */
619 return (TARGET_ACCUMULATE_OUTGOING_ARGS
620 && !(cfun->calls_setjmp
621 || cfun->has_nonlocal_label));
625 /* Report contribution of accumulated outgoing arguments to stack size. */
627 static inline int
628 avr_outgoing_args_size (void)
630 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  /* The fixed 1 presumably accounts for the frame pointer addressing the
     byte below the frame (AVR pushes post-decrement) — confirm.  On top of
     that, skip any accumulated outgoing-argument area.  */
  return 1 + avr_outgoing_args_size ();
}
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  /* Interrupt/signal handlers must preserve even call-used registers.  */
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      /* Save a register if (a) this is a non-leaf ISR and the register is
         call-used (a callee might clobber it), or (b) it is live across
         the function and either we are in an ISR or it is call-saved.  */
      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
689 /* Return true if register FROM can be eliminated via register TO. */
691 static bool
692 avr_can_eliminate (const int from, const int to)
694 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
695 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
696 || ((from == FRAME_POINTER_REGNUM
697 || from == FRAME_POINTER_REGNUM + 1)
698 && !frame_pointer_needed));
701 /* Compute offset between arg_pointer and frame_pointer. */
704 avr_initial_elimination_offset (int from, int to)
706 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
707 return 0;
708 else
710 int offset = frame_pointer_needed ? 2 : 0;
711 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
713 offset += avr_regs_to_save (NULL);
714 return (get_frame_size () + avr_outgoing_args_size()
715 + avr_pc_size + 1 + offset);
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  /* Undo STARTING_FRAME_OFFSET so the stored value is the raw frame
     pointer rather than the first frame slot.  */
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
}
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
   This is return address of function.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      /* Only the low 2 of the 3 PC bytes are retrievable here.  */
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "'builtin_return_address' contains only 2 bytes of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  /* Address = TEM (frame pointer) + .L__stack_usage + 1 or 2.  */
  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* The address is stored big-endian on the stack; swap the two bytes.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
756 /* Return 1 if the function epilogue is just a single "ret". */
759 avr_simple_epilogue (void)
761 return (! frame_pointer_needed
762 && get_frame_size () == 0
763 && avr_outgoing_args_size() == 0
764 && avr_regs_to_save (NULL) == 0
765 && ! interrupt_function_p (current_function_decl)
766 && ! signal_function_p (current_function_decl)
767 && ! avr_naked_function_p (current_function_decl)
768 && ! TREE_THIS_VOLATILE (current_function_decl));
/* This function checks sequence of live registers.
   Returns the length of a contiguous run of live call-saved registers
   (ending at r17, plus Y), or 0 if the live registers do not form such a
   run — used to decide whether the __prologue_saves__ helper applies.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;   /* Total live registers counted.  */
  int cur_seq = 0;    /* Length of the current contiguous run.  */

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            /* A dead register breaks the run.  */
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      /* Y (r28/r29) participates only if actually live.  */
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* With a frame pointer, Y is always saved.  */
      cur_seq += 2;
      live_seq += 2;
    }

  /* Only a single unbroken run qualifies.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
831 /* Obtain the length sequence of insns. */
834 get_sequence_length (rtx insns)
836 rtx insn;
837 int length;
839 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
840 length += get_attr_length (insn);
842 return length;
/* Implement INCOMING_RETURN_ADDR_RTX.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
}
855 /* Helper for expand_prologue. Emit a push of a byte register. */
857 static void
858 emit_push_byte (unsigned regno, bool frame_related_p)
860 rtx mem, reg, insn;
862 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
863 mem = gen_frame_mem (QImode, mem);
864 reg = gen_rtx_REG (QImode, regno);
866 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
867 if (frame_related_p)
868 RTX_FRAME_RELATED_P (insn) = 1;
870 cfun->machine->stack_usage++;
/* Helper for expand_prologue: save the registers in SET and allocate a
   frame of SIZE bytes, either via the __prologue_saves__ library helper
   (when -mcall-prologues applies) or via individual pushes plus a stack
   adjustment.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  /* The library helper is only usable for plain functions whose live
     call-saved registers form one contiguous run.  */
  bool minimize = (TARGET_CALL_PROLOGUES
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* __prologue_saves__ expects the frame size in X.  */
      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = 18 - (live_seq - 2);

      /* Emit one REG_CFA_OFFSET note per saved register, walking
         r29, r28, r17, r16, ... down to FIRST_REG.  */
      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      /* Push every register recorded in SET.  */
      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /*  Creating a frame can be done by direct manipulation of the
              stack or via the frame pointer.  These two methods are:
                  fp = sp
                  fp -= size
                  sp = fp
              or
                  sp -= size
                  fp = sp    (*)
              the optimum method depends on function type, stack and
              frame size.  To avoid a complex logic, both methods are
              tested and shortest is selected.

              There is also the case where SIZE != 0 and no frame pointer is
              needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
              In that case, insn (*) is not needed in that case.
              We use the X register as scratch.  This is save because in X
              is call-clobbered.
              In an interrupt routine, the case of SIZE != 0 together with
              !frame_pointer_needed can only occur if the function is not a
              leaf function and thus X has already been saved.  */

          rtx fp_plus_insns, fp, my_fp;
          rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !current_function_is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (!frame_pointer_needed)
            RTX_FRAME_RELATED_P (insn) = 1;

          insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
          RTX_FRAME_RELATED_P (insn) = 1;

          if (frame_pointer_needed)
            {
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, sp_minus_size));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.  */

          if (AVR_HAVE_8BIT_SP)
            {
              insn = emit_move_insn (stack_pointer_rtx, fp);
            }
          else if (TARGET_NO_INTERRUPTS
                   || isr_p
                   || cfun->machine->is_OS_main)
            {
              rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);

              insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                                fp, irqs_are_on));
            }
          else
            {
              insn = emit_move_insn (stack_pointer_rtx, fp);
            }

          if (!frame_pointer_needed)
            RTX_FRAME_RELATED_P (insn) = 1;

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
              RTX_FRAME_RELATED_P (insn) = 1;

              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
/* Output function prologue.  */

void
expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  /* Init cfun->machine.  */
  cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
  cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
  cfun->machine->is_signal = signal_function_p (current_function_decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  Only done for "interrupt", not for "signal".  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
      emit_push_byte (TMP_REGNO, false);

      /* Push RAMPZ.  Only saved when the handler actually uses Z.  */
      /* ??? There's no dwarf2 column reserved for RAMPZ.  */
      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_move_insn (tmp_reg_rtx, rampz_rtx);
          emit_push_byte (TMP_REGNO, false);
        }

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);
    }

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
1162 /* Output summary at end of function prologue. */
1164 static void
1165 avr_asm_function_end_prologue (FILE *file)
1167 if (cfun->machine->is_naked)
1169 fputs ("/* prologue: naked */\n", file);
1171 else
1173 if (cfun->machine->is_interrupt)
1175 fputs ("/* prologue: Interrupt */\n", file);
1177 else if (cfun->machine->is_signal)
1179 fputs ("/* prologue: Signal */\n", file);
1181 else
1182 fputs ("/* prologue: function */\n", file);
1185 if (ACCUMULATE_OUTGOING_ARGS)
1186 fprintf (file, "/* outgoing args size = %d */\n",
1187 avr_outgoing_args_size());
1189 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1190 get_frame_size());
1191 fprintf (file, "/* stack size = %d */\n",
1192 cfun->machine->stack_usage);
1193 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1194 usage for offset so that SP + .L__stack_offset = return address. */
1195 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1199 /* Implement EPILOGUE_USES. */
1202 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1204 if (reload_completed
1205 && cfun->machine
1206 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1207 return 1;
1208 return 0;
1211 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1213 static void
1214 emit_pop_byte (unsigned regno)
1216 rtx mem, reg;
1218 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1219 mem = gen_frame_mem (QImode, mem);
1220 reg = gen_rtx_REG (QImode, regno);
1222 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1225 /* Output RTL epilogue. */
1227 void
1228 expand_epilogue (bool sibcall_p)
1230 int reg;
1231 int live_seq;
1232 HARD_REG_SET set;
1233 int minimize;
1234 HOST_WIDE_INT size;
1235 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1237 size = get_frame_size() + avr_outgoing_args_size();
1239 /* epilogue: naked */
1240 if (cfun->machine->is_naked)
1242 gcc_assert (!sibcall_p);
1244 emit_jump_insn (gen_return ());
1245 return;
1248 avr_regs_to_save (&set);
1249 live_seq = sequent_regs_live ();
1251 minimize = (TARGET_CALL_PROLOGUES
1252 && live_seq
1253 && !isr_p
1254 && !cfun->machine->is_OS_task
1255 && !cfun->machine->is_OS_main);
1257 if (minimize
1258 && (live_seq > 4
1259 || frame_pointer_needed
1260 || size))
1262 /* Get rid of frame. */
1264 if (!frame_pointer_needed)
1266 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1269 if (size)
1271 emit_move_insn (frame_pointer_rtx,
1272 plus_constant (frame_pointer_rtx, size));
1275 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1276 return;
1279 if (size)
1281 /* Try two methods to adjust stack and select shortest. */
1283 rtx fp, my_fp;
1284 rtx fp_plus_insns;
1286 gcc_assert (frame_pointer_needed
1287 || !isr_p
1288 || !current_function_is_leaf);
1290 fp = my_fp = (frame_pointer_needed
1291 ? frame_pointer_rtx
1292 : gen_rtx_REG (Pmode, REG_X));
1294 if (AVR_HAVE_8BIT_SP)
1296 /* The high byte (r29) does not change:
1297 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1299 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1302 /********** Method 1: Adjust fp register **********/
1304 start_sequence ();
1306 if (!frame_pointer_needed)
1307 emit_move_insn (fp, stack_pointer_rtx);
1309 emit_move_insn (my_fp, plus_constant (my_fp, size));
1311 /* Copy to stack pointer. */
1313 if (AVR_HAVE_8BIT_SP)
1315 emit_move_insn (stack_pointer_rtx, fp);
1317 else if (TARGET_NO_INTERRUPTS
1318 || isr_p
1319 || cfun->machine->is_OS_main)
1321 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1323 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1325 else
1327 emit_move_insn (stack_pointer_rtx, fp);
1330 fp_plus_insns = get_insns ();
1331 end_sequence ();
1333 /********** Method 2: Adjust Stack pointer **********/
1335 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1337 rtx sp_plus_insns;
1339 start_sequence ();
1341 emit_move_insn (stack_pointer_rtx,
1342 plus_constant (stack_pointer_rtx, size));
1344 sp_plus_insns = get_insns ();
1345 end_sequence ();
1347 /************ Use shortest method ************/
1349 emit_insn (get_sequence_length (sp_plus_insns)
1350 < get_sequence_length (fp_plus_insns)
1351 ? sp_plus_insns
1352 : fp_plus_insns);
1354 else
1355 emit_insn (fp_plus_insns);
1356 } /* size != 0 */
1358 if (frame_pointer_needed
1359 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1361 /* Restore previous frame_pointer. See expand_prologue for
1362 rationale for not using pophi. */
1364 emit_pop_byte (REG_Y + 1);
1365 emit_pop_byte (REG_Y);
1368 /* Restore used registers. */
1370 for (reg = 31; reg >= 0; --reg)
1371 if (TEST_HARD_REG_BIT (set, reg))
1372 emit_pop_byte (reg);
1374 if (isr_p)
1376 /* Restore RAMPZ using tmp reg as scratch. */
1378 if (AVR_HAVE_RAMPZ
1379 && TEST_HARD_REG_BIT (set, REG_Z)
1380 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1382 emit_pop_byte (TMP_REGNO);
1383 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1386 /* Restore SREG using tmp reg as scratch. */
1388 emit_pop_byte (TMP_REGNO);
1389 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1390 tmp_reg_rtx);
1392 /* Restore tmp REG. */
1393 emit_pop_byte (TMP_REGNO);
1395 /* Restore zero REG. */
1396 emit_pop_byte (ZERO_REGNO);
1399 if (!sibcall_p)
1400 emit_jump_insn (gen_return ());
/* Print a marker comment at the beginning of the function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1412 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1414 static bool
1415 avr_cannot_modify_jumps_p (void)
1418 /* Naked Functions must not have any instructions after
1419 their epilogue, see PR42240 */
1421 if (reload_completed
1422 && cfun->machine
1423 && cfun->machine->is_naked)
1425 return true;
1428 return false;
1432 /* Helper function for `avr_legitimate_address_p'. */
1434 static inline bool
1435 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1436 RTX_CODE outer_code, bool strict)
1438 return (REG_P (reg)
1439 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1440 as, outer_code, UNKNOWN)
1441 || (!strict
1442 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1446 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1447 machine for a memory operand of mode MODE. */
1449 static bool
1450 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1452 bool ok = CONSTANT_ADDRESS_P (x);
1454 switch (GET_CODE (x))
1456 case REG:
1457 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1458 MEM, strict);
1460 if (strict
1461 && DImode == mode
1462 && REG_X == REGNO (x))
1464 ok = false;
1466 break;
1468 case POST_INC:
1469 case PRE_DEC:
1470 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1471 GET_CODE (x), strict);
1472 break;
1474 case PLUS:
1476 rtx reg = XEXP (x, 0);
1477 rtx op1 = XEXP (x, 1);
1479 if (REG_P (reg)
1480 && CONST_INT_P (op1)
1481 && INTVAL (op1) >= 0)
1483 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1485 if (fit)
1487 ok = (! strict
1488 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1489 PLUS, strict));
1491 if (reg == frame_pointer_rtx
1492 || reg == arg_pointer_rtx)
1494 ok = true;
1497 else if (frame_pointer_needed
1498 && reg == frame_pointer_rtx)
1500 ok = true;
1504 break;
1506 default:
1507 break;
1510 if (avr_log.legitimate_address_p)
1512 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1513 "reload_completed=%d reload_in_progress=%d %s:",
1514 ok, mode, strict, reload_completed, reload_in_progress,
1515 reg_renumber ? "(reg_renumber)" : "");
1517 if (GET_CODE (x) == PLUS
1518 && REG_P (XEXP (x, 0))
1519 && CONST_INT_P (XEXP (x, 1))
1520 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1521 && reg_renumber)
1523 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1524 true_regnum (XEXP (x, 0)));
1527 avr_edump ("\n%r\n", x);
1530 return ok;
1534 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1535 now only a helper for avr_addr_space_legitimize_address. */
1536 /* Attempts to replace X with a valid
1537 memory address for an operand of mode MODE */
1539 static rtx
1540 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1542 bool big_offset_p = false;
1544 x = oldx;
1546 if (GET_CODE (oldx) == PLUS
1547 && REG_P (XEXP (oldx, 0)))
1549 if (REG_P (XEXP (oldx, 1)))
1550 x = force_reg (GET_MODE (oldx), oldx);
1551 else if (CONST_INT_P (XEXP (oldx, 1)))
1553 int offs = INTVAL (XEXP (oldx, 1));
1554 if (frame_pointer_rtx != XEXP (oldx, 0)
1555 && offs > MAX_LD_OFFSET (mode))
1557 big_offset_p = true;
1558 x = force_reg (GET_MODE (oldx), oldx);
1563 if (avr_log.legitimize_address)
1565 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1567 if (x != oldx)
1568 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1571 return x;
1575 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1576 /* This will allow register R26/27 to be used where it is no worse than normal
1577 base pointers R28/29 or R30/31. For example, if base offset is greater
1578 than 63 bytes or for R++ or --R addressing. */
1581 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1582 int opnum, int type, int addr_type,
1583 int ind_levels ATTRIBUTE_UNUSED,
1584 rtx (*mk_memloc)(rtx,int))
1586 rtx x = *px;
1588 if (avr_log.legitimize_reload_address)
1589 avr_edump ("\n%?:%m %r\n", mode, x);
1591 if (1 && (GET_CODE (x) == POST_INC
1592 || GET_CODE (x) == PRE_DEC))
1594 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1595 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1596 opnum, RELOAD_OTHER);
1598 if (avr_log.legitimize_reload_address)
1599 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1600 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1602 return x;
1605 if (GET_CODE (x) == PLUS
1606 && REG_P (XEXP (x, 0))
1607 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1608 && CONST_INT_P (XEXP (x, 1))
1609 && INTVAL (XEXP (x, 1)) >= 1)
1611 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1613 if (fit)
1615 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1617 int regno = REGNO (XEXP (x, 0));
1618 rtx mem = mk_memloc (x, regno);
1620 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1621 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1622 1, addr_type);
1624 if (avr_log.legitimize_reload_address)
1625 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1626 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1628 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1629 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1630 opnum, type);
1632 if (avr_log.legitimize_reload_address)
1633 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1634 BASE_POINTER_REGS, mem, NULL_RTX);
1636 return x;
1639 else if (! (frame_pointer_needed
1640 && XEXP (x, 0) == frame_pointer_rtx))
1642 push_reload (x, NULL_RTX, px, NULL,
1643 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1644 opnum, type);
1646 if (avr_log.legitimize_reload_address)
1647 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1648 POINTER_REGS, x, NULL_RTX);
1650 return x;
1654 return NULL_RTX;
1658 /* Helper function to print assembler resp. track instruction
1659 sequence lengths. Always return "".
1661 If PLEN == NULL:
1662 Output assembler code from template TPL with operands supplied
1663 by OPERANDS. This is just forwarding to output_asm_insn.
1665 If PLEN != NULL:
1666 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1667 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1668 Don't output anything.
1671 static const char*
1672 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1674 if (NULL == plen)
1676 output_asm_insn (tpl, operands);
1678 else
1680 if (n_words < 0)
1681 *plen = -n_words;
1682 else
1683 *plen += n_words;
1686 return "";
1690 /* Return a pointer register name as a string. */
1692 static const char *
1693 ptrreg_to_str (int regno)
1695 switch (regno)
1697 case REG_X: return "X";
1698 case REG_Y: return "Y";
1699 case REG_Z: return "Z";
1700 default:
1701 output_operand_lossage ("address operand requires constraint for"
1702 " X, Y, or Z register");
1704 return NULL;
1707 /* Return the condition name as a string.
1708 Used in conditional jump constructing */
1710 static const char *
1711 cond_string (enum rtx_code code)
1713 switch (code)
1715 case NE:
1716 return "ne";
1717 case EQ:
1718 return "eq";
1719 case GE:
1720 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1721 return "pl";
1722 else
1723 return "ge";
1724 case LT:
1725 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1726 return "mi";
1727 else
1728 return "lt";
1729 case GEU:
1730 return "sh";
1731 case LTU:
1732 return "lo";
1733 default:
1734 gcc_unreachable ();
1737 return "";
1741 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1742 /* Output ADDR to FILE as address. */
1744 static void
1745 avr_print_operand_address (FILE *file, rtx addr)
1747 switch (GET_CODE (addr))
1749 case REG:
1750 fprintf (file, ptrreg_to_str (REGNO (addr)));
1751 break;
1753 case PRE_DEC:
1754 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1755 break;
1757 case POST_INC:
1758 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1759 break;
1761 default:
1762 if (CONSTANT_ADDRESS_P (addr)
1763 && text_segment_operand (addr, VOIDmode))
1765 rtx x = addr;
1766 if (GET_CODE (x) == CONST)
1767 x = XEXP (x, 0);
1768 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1770 /* Assembler gs() will implant word address. Make offset
1771 a byte offset inside gs() for assembler. This is
1772 needed because the more logical (constant+gs(sym)) is not
1773 accepted by gas. For 128K and lower devices this is ok.
1774 For large devices it will create a Trampoline to offset
1775 from symbol which may not be what the user really wanted. */
1776 fprintf (file, "gs(");
1777 output_addr_const (file, XEXP (x,0));
1778 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1779 2 * INTVAL (XEXP (x, 1)));
1780 if (AVR_3_BYTE_PC)
1781 if (warning (0, "pointer offset from symbol maybe incorrect"))
1783 output_addr_const (stderr, addr);
1784 fprintf(stderr,"\n");
1787 else
1789 fprintf (file, "gs(");
1790 output_addr_const (file, addr);
1791 fprintf (file, ")");
1794 else
1795 output_addr_const (file, addr);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.
   Only '~' and '!' are valid punctuation codes for %-output.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  return '~' == code || '!' == code;
}
1809 /* Implement `TARGET_PRINT_OPERAND'. */
1810 /* Output X as assembler operand to file FILE.
1811 For a description of supported %-codes, see top of avr.md. */
1813 static void
1814 avr_print_operand (FILE *file, rtx x, int code)
1816 int abcd = 0;
1818 if (code >= 'A' && code <= 'D')
1819 abcd = code - 'A';
1821 if (code == '~')
1823 if (!AVR_HAVE_JMP_CALL)
1824 fputc ('r', file);
1826 else if (code == '!')
1828 if (AVR_HAVE_EIJMP_EICALL)
1829 fputc ('e', file);
1831 else if (code == 't'
1832 || code == 'T')
1834 static int t_regno = -1;
1835 static int t_nbits = -1;
1837 if (REG_P (x) && t_regno < 0 && code == 'T')
1839 t_regno = REGNO (x);
1840 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1842 else if (CONST_INT_P (x) && t_regno >= 0
1843 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1845 int bpos = INTVAL (x);
1847 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1848 if (code == 'T')
1849 fprintf (file, ",%d", bpos % 8);
1851 t_regno = -1;
1853 else
1854 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1856 else if (REG_P (x))
1858 if (x == zero_reg_rtx)
1859 fprintf (file, "__zero_reg__");
1860 else
1861 fprintf (file, reg_names[true_regnum (x) + abcd]);
1863 else if (CONST_INT_P (x))
1865 HOST_WIDE_INT ival = INTVAL (x);
1867 if ('i' != code)
1868 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1869 else if (low_io_address_operand (x, VOIDmode)
1870 || high_io_address_operand (x, VOIDmode))
1872 switch (ival)
1874 case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
1875 case SREG_ADDR: fprintf (file, "__SREG__"); break;
1876 case SP_ADDR: fprintf (file, "__SP_L__"); break;
1877 case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
1879 default:
1880 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1881 ival - avr_current_arch->sfr_offset);
1882 break;
1885 else
1886 fatal_insn ("bad address, not an I/O address:", x);
1888 else if (MEM_P (x))
1890 rtx addr = XEXP (x, 0);
1892 if (code == 'm')
1894 if (!CONSTANT_P (addr))
1895 fatal_insn ("bad address, not a constant:", addr);
1896 /* Assembler template with m-code is data - not progmem section */
1897 if (text_segment_operand (addr, VOIDmode))
1898 if (warning (0, "accessing data memory with"
1899 " program memory address"))
1901 output_addr_const (stderr, addr);
1902 fprintf(stderr,"\n");
1904 output_addr_const (file, addr);
1906 else if (code == 'i')
1908 avr_print_operand (file, addr, 'i');
1910 else if (code == 'o')
1912 if (GET_CODE (addr) != PLUS)
1913 fatal_insn ("bad address, not (reg+disp):", addr);
1915 avr_print_operand (file, XEXP (addr, 1), 0);
1917 else if (code == 'p' || code == 'r')
1919 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1920 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1922 if (code == 'p')
1923 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1924 else
1925 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1927 else if (GET_CODE (addr) == PLUS)
1929 avr_print_operand_address (file, XEXP (addr,0));
1930 if (REGNO (XEXP (addr, 0)) == REG_X)
1931 fatal_insn ("internal compiler error. Bad address:"
1932 ,addr);
1933 fputc ('+', file);
1934 avr_print_operand (file, XEXP (addr,1), code);
1936 else
1937 avr_print_operand_address (file, addr);
1939 else if (code == 'i')
1941 fatal_insn ("bad address, not an I/O address:", x);
1943 else if (code == 'x')
1945 /* Constant progmem address - like used in jmp or call */
1946 if (0 == text_segment_operand (x, VOIDmode))
1947 if (warning (0, "accessing program memory"
1948 " with data memory address"))
1950 output_addr_const (stderr, x);
1951 fprintf(stderr,"\n");
1953 /* Use normal symbol for direct address no linker trampoline needed */
1954 output_addr_const (file, x);
1956 else if (GET_CODE (x) == CONST_DOUBLE)
1958 long val;
1959 REAL_VALUE_TYPE rv;
1960 if (GET_MODE (x) != SFmode)
1961 fatal_insn ("internal compiler error. Unknown mode:", x);
1962 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1963 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1964 fprintf (file, "0x%lx", val);
1966 else if (GET_CODE (x) == CONST_STRING)
1967 fputs (XSTR (x, 0), file);
1968 else if (code == 'j')
1969 fputs (cond_string (GET_CODE (x)), file);
1970 else if (code == 'k')
1971 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1972 else
1973 avr_print_operand_address (file, x);
1976 /* Update the condition code in the INSN. */
1978 void
1979 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1981 rtx set;
1982 enum attr_cc cc = get_attr_cc (insn);
1984 switch (cc)
1986 default:
1987 break;
1989 case CC_OUT_PLUS:
1990 case CC_OUT_PLUS_NOCLOBBER:
1991 case CC_LDI:
1993 rtx *op = recog_data.operand;
1994 int len_dummy, icc;
1996 /* Extract insn's operands. */
1997 extract_constrain_insn_cached (insn);
1999 switch (cc)
2001 default:
2002 gcc_unreachable();
2004 case CC_OUT_PLUS:
2005 avr_out_plus (op, &len_dummy, &icc);
2006 cc = (enum attr_cc) icc;
2007 break;
2009 case CC_OUT_PLUS_NOCLOBBER:
2010 avr_out_plus_noclobber (op, &len_dummy, &icc);
2011 cc = (enum attr_cc) icc;
2012 break;
2014 case CC_LDI:
2016 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2017 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2018 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2019 ? CC_CLOBBER
2020 /* Any other "r,rL" combination does not alter cc0. */
2021 : CC_NONE;
2023 break;
2024 } /* inner switch */
2026 break;
2028 } /* outer swicth */
2030 switch (cc)
2032 default:
2033 /* Special values like CC_OUT_PLUS from above have been
2034 mapped to "standard" CC_* values so we never come here. */
2036 gcc_unreachable();
2037 break;
2039 case CC_NONE:
2040 /* Insn does not affect CC at all. */
2041 break;
2043 case CC_SET_N:
2044 CC_STATUS_INIT;
2045 break;
2047 case CC_SET_ZN:
2048 set = single_set (insn);
2049 CC_STATUS_INIT;
2050 if (set)
2052 cc_status.flags |= CC_NO_OVERFLOW;
2053 cc_status.value1 = SET_DEST (set);
2055 break;
2057 case CC_SET_CZN:
2058 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2059 The V flag may or may not be known but that's ok because
2060 alter_cond will change tests to use EQ/NE. */
2061 set = single_set (insn);
2062 CC_STATUS_INIT;
2063 if (set)
2065 cc_status.value1 = SET_DEST (set);
2066 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2068 break;
2070 case CC_COMPARE:
2071 set = single_set (insn);
2072 CC_STATUS_INIT;
2073 if (set)
2074 cc_status.value1 = SET_SRC (set);
2075 break;
2077 case CC_CLOBBER:
2078 /* Insn doesn't leave CC in a usable state. */
2079 CC_STATUS_INIT;
2080 break;
2084 /* Choose mode for jump insn:
2085 1 - relative jump in range -63 <= x <= 62 ;
2086 2 - relative jump in range -2046 <= x <= 2045 ;
2087 3 - absolute jump (only for ATmega[16]03). */
2090 avr_jump_mode (rtx x, rtx insn)
2092 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2093 ? XEXP (x, 0) : x));
2094 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2095 int jump_distance = cur_addr - dest_addr;
2097 if (-63 <= jump_distance && jump_distance <= 62)
2098 return 1;
2099 else if (-2046 <= jump_distance && jump_distance <= 2045)
2100 return 2;
2101 else if (AVR_HAVE_JMP_CALL)
2102 return 3;
2104 return 2;
2107 /* return an AVR condition jump commands.
2108 X is a comparison RTX.
2109 LEN is a number returned by avr_jump_mode function.
2110 if REVERSE nonzero then condition code in X must be reversed. */
2112 const char *
2113 ret_cond_branch (rtx x, int len, int reverse)
2115 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2117 switch (cond)
2119 case GT:
2120 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2121 return (len == 1 ? ("breq .+2" CR_TAB
2122 "brpl %0") :
2123 len == 2 ? ("breq .+4" CR_TAB
2124 "brmi .+2" CR_TAB
2125 "rjmp %0") :
2126 ("breq .+6" CR_TAB
2127 "brmi .+4" CR_TAB
2128 "jmp %0"));
2130 else
2131 return (len == 1 ? ("breq .+2" CR_TAB
2132 "brge %0") :
2133 len == 2 ? ("breq .+4" CR_TAB
2134 "brlt .+2" CR_TAB
2135 "rjmp %0") :
2136 ("breq .+6" CR_TAB
2137 "brlt .+4" CR_TAB
2138 "jmp %0"));
2139 case GTU:
2140 return (len == 1 ? ("breq .+2" CR_TAB
2141 "brsh %0") :
2142 len == 2 ? ("breq .+4" CR_TAB
2143 "brlo .+2" CR_TAB
2144 "rjmp %0") :
2145 ("breq .+6" CR_TAB
2146 "brlo .+4" CR_TAB
2147 "jmp %0"));
2148 case LE:
2149 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2150 return (len == 1 ? ("breq %0" CR_TAB
2151 "brmi %0") :
2152 len == 2 ? ("breq .+2" CR_TAB
2153 "brpl .+2" CR_TAB
2154 "rjmp %0") :
2155 ("breq .+2" CR_TAB
2156 "brpl .+4" CR_TAB
2157 "jmp %0"));
2158 else
2159 return (len == 1 ? ("breq %0" CR_TAB
2160 "brlt %0") :
2161 len == 2 ? ("breq .+2" CR_TAB
2162 "brge .+2" CR_TAB
2163 "rjmp %0") :
2164 ("breq .+2" CR_TAB
2165 "brge .+4" CR_TAB
2166 "jmp %0"));
2167 case LEU:
2168 return (len == 1 ? ("breq %0" CR_TAB
2169 "brlo %0") :
2170 len == 2 ? ("breq .+2" CR_TAB
2171 "brsh .+2" CR_TAB
2172 "rjmp %0") :
2173 ("breq .+2" CR_TAB
2174 "brsh .+4" CR_TAB
2175 "jmp %0"));
2176 default:
2177 if (reverse)
2179 switch (len)
2181 case 1:
2182 return "br%k1 %0";
2183 case 2:
2184 return ("br%j1 .+2" CR_TAB
2185 "rjmp %0");
2186 default:
2187 return ("br%j1 .+4" CR_TAB
2188 "jmp %0");
2191 else
2193 switch (len)
2195 case 1:
2196 return "br%j1 %0";
2197 case 2:
2198 return ("br%k1 .+2" CR_TAB
2199 "rjmp %0");
2200 default:
2201 return ("br%k1 .+4" CR_TAB
2202 "jmp %0");
2206 return "";
2209 /* Output insn cost for next insn. */
2211 void
2212 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2213 int num_operands ATTRIBUTE_UNUSED)
2215 if (avr_log.rtx_costs)
2217 rtx set = single_set (insn);
2219 if (set)
2220 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2221 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2222 else
2223 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2224 rtx_cost (PATTERN (insn), INSN, 0,
2225 optimize_insn_for_speed_p()));
2229 /* Return 0 if undefined, 1 if always true or always false. */
2232 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2234 unsigned int max = (mode == QImode ? 0xff :
2235 mode == HImode ? 0xffff :
2236 mode == PSImode ? 0xffffff :
2237 mode == SImode ? 0xffffffff : 0);
2238 if (max && op && GET_CODE (x) == CONST_INT)
2240 if (unsigned_condition (op) != op)
2241 max >>= 1;
2243 if (max != (INTVAL (x) & max)
2244 && INTVAL (x) != 0xff)
2245 return 1;
2247 return 0;
/* Returns nonzero if R is the number of a hard register in which
   function arguments are sometimes passed (r8..r25 on AVR).  */

int
function_arg_regno_p (int r)
{
  return 8 <= r && r <= 25;
}
2260 /* Initializing the variable cum for the state at the beginning
2261 of the argument list. */
2263 void
2264 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2265 tree fndecl ATTRIBUTE_UNUSED)
2267 cum->nregs = 18;
2268 cum->regno = FIRST_CUM_REG;
2269 if (!libname && stdarg_p (fntype))
2270 cum->nregs = 0;
2272 /* Assume the calle may be tail called */
2274 cfun->machine->sibcall_fails = 0;
2277 /* Returns the number of registers to allocate for a function argument. */
2279 static int
2280 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2282 int size;
2284 if (mode == BLKmode)
2285 size = int_size_in_bytes (type);
2286 else
2287 size = GET_MODE_SIZE (mode);
2289 /* Align all function arguments to start in even-numbered registers.
2290 Odd-sized arguments leave holes above them. */
2292 return (size + 1) & ~1;
2295 /* Controls whether a function argument is passed
2296 in a register, and which register. */
2298 static rtx
2299 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2300 const_tree type, bool named ATTRIBUTE_UNUSED)
2302 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2303 int bytes = avr_num_arg_regs (mode, type);
2305 if (cum->nregs && bytes <= cum->nregs)
2306 return gen_rtx_REG (mode, cum->regno - bytes);
2308 return NULL_RTX;
2311 /* Update the summarizer variable CUM to advance past an argument
2312 in the argument list. */
2314 static void
2315 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2316 const_tree type, bool named ATTRIBUTE_UNUSED)
2318 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2319 int bytes = avr_num_arg_regs (mode, type);
2321 cum->nregs -= bytes;
2322 cum->regno -= bytes;
2324 /* A parameter is being passed in a call-saved register. As the original
2325 contents of these regs has to be restored before leaving the function,
2326 a function must not pass arguments in call-saved regs in order to get
2327 tail-called. */
2329 if (cum->regno >= 8
2330 && cum->nregs >= 0
2331 && !call_used_regs[cum->regno])
2333 /* FIXME: We ship info on failing tail-call in struct machine_function.
2334 This uses internals of calls.c:expand_call() and the way args_so_far
2335 is used. targetm.function_ok_for_sibcall() needs to be extended to
2336 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2337 dependent so that such an extension is not wanted. */
2339 cfun->machine->sibcall_fails = 1;
2342 /* Test if all registers needed by the ABI are actually available. If the
2343 user has fixed a GPR needed to pass an argument, an (implicit) function
2344 call will clobber that fixed register. See PR45099 for an example. */
2346 if (cum->regno >= 8
2347 && cum->nregs >= 0)
2349 int regno;
2351 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2352 if (fixed_regs[regno])
2353 warning (0, "fixed register %s used to pass parameter to function",
2354 reg_names[regno]);
2357 if (cum->nregs <= 0)
2359 cum->nregs = 0;
2360 cum->regno = FIRST_CUM_REG;
2364 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2365 /* Decide whether we can make a sibling call to a function. DECL is the
2366 declaration of the function being targeted by the call and EXP is the
2367 CALL_EXPR representing the call. */
2369 static bool
2370 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2372 tree fntype_callee;
2374 /* Tail-calling must fail if callee-saved regs are used to pass
2375 function args. We must not tail-call when `epilogue_restores'
2376 is used. Unfortunately, we cannot tell at this point if that
2377 actually will happen or not, and we cannot step back from
2378 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2380 if (cfun->machine->sibcall_fails
2381 || TARGET_CALL_PROLOGUES)
2383 return false;
2386 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2388 if (decl_callee)
2390 decl_callee = TREE_TYPE (decl_callee);
2392 else
2394 decl_callee = fntype_callee;
2396 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2397 && METHOD_TYPE != TREE_CODE (decl_callee))
2399 decl_callee = TREE_TYPE (decl_callee);
2403 /* Ensure that caller and callee have compatible epilogues */
2405 if (interrupt_function_p (current_function_decl)
2406 || signal_function_p (current_function_decl)
2407 || avr_naked_function_p (decl_callee)
2408 || avr_naked_function_p (current_function_decl)
2409 /* FIXME: For OS_task and OS_main, we are over-conservative.
2410 This is due to missing documentation of these attributes
2411 and what they actually should do and should not do. */
2412 || (avr_OS_task_function_p (decl_callee)
2413 != avr_OS_task_function_p (current_function_decl))
2414 || (avr_OS_main_function_p (decl_callee)
2415 != avr_OS_main_function_p (current_function_decl)))
2417 return false;
2420 return true;
2423 /***********************************************************************
2424 Functions for outputting various mov's for a various modes
2425 ************************************************************************/
2427 /* Return true if a value of mode MODE is read from flash by
2428 __load_* function from libgcc. */
2430 bool
2431 avr_load_libgcc_p (rtx op)
2433 enum machine_mode mode = GET_MODE (op);
2434 int n_bytes = GET_MODE_SIZE (mode);
2436 return (n_bytes > 2
2437 && !AVR_HAVE_LPMX
2438 && avr_mem_flash_p (op));
2441 /* Return true if a value of mode MODE is read by __xload_* function. */
2443 bool
2444 avr_xload_libgcc_p (enum machine_mode mode)
2446 int n_bytes = GET_MODE_SIZE (mode);
2448 return (n_bytes > 1
2449 || avr_current_arch->n_segments > 1);
2453 /* Find an unused d-register to be used as scratch in INSN.
2454 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2455 is a register, skip all possible return values that overlap EXCLUDE.
2456 The policy for the returned register is similar to that of
2457 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2458 of INSN.
2460 Return a QImode d-register or NULL_RTX if nothing found. */
/* Scan the upper registers r16..r31 (the "d" registers usable with
   LDI/SUBI/ANDI etc.) for one that can serve as scratch in INSN.
   See the comment above for the EXCLUDE and return-value contract.  */
2462 static rtx
2463 avr_find_unused_d_reg (rtx insn, rtx exclude)
2465 int regno;
/* In an ISR, call-used registers are NOT free for the taking:
   the interrupted code may be using them.  */
2466 bool isr_p = (interrupt_function_p (current_function_decl)
2467 || signal_function_p (current_function_decl));
2469 for (regno = 16; regno < 32; regno++)
2471 rtx reg = all_regs_rtx[regno];
/* Skip registers overlapping EXCLUDE and fixed registers.  */
2473 if ((exclude
2474 && reg_overlap_mentioned_p (exclude, reg))
2475 || fixed_regs[regno])
2477 continue;
2480 /* Try non-live register */
2482 if (!df_regs_ever_live_p (regno)
2483 && (TREE_THIS_VOLATILE (current_function_decl)
2484 || cfun->machine->is_OS_task
2485 || cfun->machine->is_OS_main
2486 || (!isr_p && call_used_regs[regno])))
2488 return reg;
2491 /* Any live register can be used if it is unused after.
2492 Prologue/epilogue will care for it as needed. */
2494 if (df_regs_ever_live_p (regno)
2495 && reg_unused_after (insn, reg))
2497 return reg;
/* No suitable d-register found.  */
2501 return NULL_RTX;
2505 /* Helper function for the next function in the case where only restricted
2506 version of LPM instruction is available. */
/* Helper for avr_out_lpm for devices where only the restricted form of
   [E]LPM is available (implicit destination r0, no Z post-increment).
   INSN is the load insn; XOP is the operand array prepared by the
   caller: XOP[0] = destination, XOP[1] = address (REG or POST_INC of Z),
   XOP[2] = Z, XOP[4] = "e" for ELPM or empty.  If PLEN != NULL, only
   accumulate the length in words in *PLEN; otherwise emit assembler.
   Returns "".  */
2508 static const char*
2509 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2511 rtx dest = xop[0];
2512 rtx addr = xop[1];
2513 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2514 int regno_dest;
2516 regno_dest = REGNO (dest);
2518 /* The implicit target register of LPM. */
2519 xop[3] = lpm_reg_rtx;
2521 switch (GET_CODE (addr))
2523 default:
2524 gcc_unreachable();
2526 case REG:
2528 gcc_assert (REG_Z == REGNO (addr));
2530 switch (n_bytes)
2532 default:
2533 gcc_unreachable();
2535 case 1:
/* Load into r0 and copy to the destination unless r0 IS the
   destination.  */
2536 avr_asm_len ("%4lpm", xop, plen, 1);
2538 if (regno_dest != LPM_REGNO)
2539 avr_asm_len ("mov %0,%3", xop, plen, 1);
2541 return "";
2543 case 2:
/* If the destination overlaps Z, save the low byte on the stack
   so the second LPM still sees the incremented address.  */
2544 if (REGNO (dest) == REG_Z)
2545 return avr_asm_len ("%4lpm" CR_TAB
2546 "push %3" CR_TAB
2547 "adiw %2,1" CR_TAB
2548 "%4lpm" CR_TAB
2549 "mov %B0,%3" CR_TAB
2550 "pop %A0", xop, plen, 6);
2552 avr_asm_len ("%4lpm" CR_TAB
2553 "mov %A0,%3" CR_TAB
2554 "adiw %2,1" CR_TAB
2555 "%4lpm" CR_TAB
2556 "mov %B0,%3", xop, plen, 5);
/* Restore Z if it is still needed afterwards.  */
2558 if (!reg_unused_after (insn, addr))
2559 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2561 break; /* 2 */
2564 break; /* REG */
2566 case POST_INC:
2568 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2569 && n_bytes <= 4);
/* Byte A: skip the copy when r0 itself is the destination.  */
2571 if (regno_dest == LPM_REGNO)
2572 avr_asm_len ("%4lpm" CR_TAB
2573 "adiw %2,1", xop, plen, 2);
2574 else
2575 avr_asm_len ("%4lpm" CR_TAB
2576 "mov %A0,%3" CR_TAB
2577 "adiw %2,1", xop, plen, 3);
/* Bytes B..D: LPM into r0, copy, advance Z by hand.  */
2579 if (n_bytes >= 2)
2580 avr_asm_len ("%4lpm" CR_TAB
2581 "mov %B0,%3" CR_TAB
2582 "adiw %2,1", xop, plen, 3);
2584 if (n_bytes >= 3)
2585 avr_asm_len ("%4lpm" CR_TAB
2586 "mov %C0,%3" CR_TAB
2587 "adiw %2,1", xop, plen, 3);
2589 if (n_bytes >= 4)
2590 avr_asm_len ("%4lpm" CR_TAB
2591 "mov %D0,%3" CR_TAB
2592 "adiw %2,1", xop, plen, 3);
2594 break; /* POST_INC */
2596 } /* switch CODE (addr) */
2598 return "";
2602 /* If PLEN == NULL: Output instructions to load a value from a memory location
2603 OP[1] in AS1 to register OP[0].
2604 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2605 Return "". */
/* Operand layout used by the templates below:
   %0 = destination register, %1 = address, %2 = Z (r31:r30),
   %3 = scratch d-register (when one was found), %4 = "e" prefix to
   form ELPM (or the segment number while RAMPZ is being set up),
   %5 = __tmp_reg__.  */
2607 static const char*
2608 avr_out_lpm (rtx insn, rtx *op, int *plen)
2610 rtx xop[6];
2611 rtx dest = op[0];
2612 rtx src = SET_SRC (single_set (insn));
2613 rtx addr;
2614 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2615 int regno_dest;
2616 int segment;
2617 RTX_CODE code;
2618 addr_space_t as = MEM_ADDR_SPACE (src);
2620 if (plen)
2621 *plen = 0;
/* Stores to flash address spaces are not supported; diagnose and
   emit nothing.  */
2623 if (MEM_P (dest))
2625 warning (0, "writing to address space %qs not supported",
2626 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2628 return "";
2631 addr = XEXP (src, 0);
2632 code = GET_CODE (addr);
2634 gcc_assert (REG_P (dest));
2635 gcc_assert (REG == code || POST_INC == code);
2637 xop[0] = dest;
2638 xop[1] = addr;
2639 xop[2] = lpm_addr_reg_rtx;
2640 xop[4] = xstring_empty;
2641 xop[5] = tmp_reg_rtx;
2643 regno_dest = REGNO (dest);
2645 /* Cut down segment number to a number the device actually supports.
2646 We do this late to preserve the address space's name for diagnostics. */
2648 segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
2650 /* Set RAMPZ as needed. */
2652 if (segment)
2654 xop[4] = GEN_INT (segment);
/* Preferred: load the segment number via an unused d-register.  */
2656 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2657 xop[3])
2659 avr_asm_len ("ldi %3,%4" CR_TAB
2660 "out __RAMPZ__,%3", xop, plen, 2);
/* Segment 1 can be synthesized in __tmp_reg__ without LDI.  */
2662 else if (segment == 1)
2664 avr_asm_len ("clr %5" CR_TAB
2665 "inc %5" CR_TAB
2666 "out __RAMPZ__,%5", xop, plen, 3);
/* Last resort: temporarily repurpose ZH (%2) as the LDI target.  */
2668 else
2670 avr_asm_len ("mov %5,%2" CR_TAB
2671 "ldi %2,%4" CR_TAB
2672 "out __RAMPZ__,%2" CR_TAB
2673 "mov %2,%5", xop, plen, 4);
/* From now on %4 turns "lpm" into "elpm" in the templates.  */
2676 xop[4] = xstring_e;
2678 if (!AVR_HAVE_ELPMX)
2679 return avr_out_lpm_no_lpmx (insn, xop, plen);
2681 else if (!AVR_HAVE_LPMX)
2683 return avr_out_lpm_no_lpmx (insn, xop, plen);
2686 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2688 switch (GET_CODE (addr))
2690 default:
2691 gcc_unreachable();
2693 case REG:
2695 gcc_assert (REG_Z == REGNO (addr));
2697 switch (n_bytes)
2699 default:
2700 gcc_unreachable();
2702 case 1:
2703 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2705 case 2:
/* Destination overlapping Z: buffer the low byte in __tmp_reg__.  */
2706 if (REGNO (dest) == REG_Z)
2707 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2708 "%4lpm %B0,%a2" CR_TAB
2709 "mov %A0,%5", xop, plen, 3);
2710 else
2712 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2713 "%4lpm %B0,%a2", xop, plen, 2);
2715 if (!reg_unused_after (insn, addr))
2716 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2719 break; /* 2 */
2721 case 3:
2723 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2724 "%4lpm %B0,%a2+" CR_TAB
2725 "%4lpm %C0,%a2", xop, plen, 3);
2727 if (!reg_unused_after (insn, addr))
2728 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2730 break; /* 3 */
2732 case 4:
2734 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2735 "%4lpm %B0,%a2+", xop, plen, 2);
/* If the destination's upper word is Z, buffer byte C so the last
   read still sees the unclobbered address.  */
2737 if (REGNO (dest) == REG_Z - 2)
2738 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2739 "%4lpm %C0,%a2" CR_TAB
2740 "mov %D0,%5", xop, plen, 3);
2741 else
2743 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2744 "%4lpm %D0,%a2", xop, plen, 2);
2746 if (!reg_unused_after (insn, addr))
2747 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2750 break; /* 4 */
2751 } /* n_bytes */
2753 break; /* REG */
2755 case POST_INC:
2757 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2758 && n_bytes <= 4);
2760 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2761 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2762 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2763 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2765 break; /* POST_INC */
2767 } /* switch CODE (addr) */
2769 return "";
2773 /* Worker function for xload_8 insn. */
/* Worker for the xload_8 insn: load one byte from a 24-bit address.
   OP[0] = destination, OP[1] = register holding the high (RAM/flash
   discriminator) byte of the address; Z holds the low 16 bits.
   If PLEN != NULL, only accumulate the length in words in *PLEN.  */
2775 const char*
2776 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2778 rtx xop[4];
2780 xop[0] = op[0];
2781 xop[1] = op[1];
2782 xop[2] = lpm_addr_reg_rtx;
/* Without LPMX the implicit LPM destination r0 must be used.  */
2783 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2785 if (plen)
2786 *plen = 0;
/* Read from RAM, then skip the flash read if bit 7 of the high
   address byte says the address is in RAM.  */
2788 avr_asm_len ("ld %3,%a2" CR_TAB
2789 "sbrs %1,7", xop, plen, 2);
2791 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2793 if (REGNO (xop[0]) != REGNO (xop[3]))
2794 avr_asm_len ("mov %0,%3", xop, plen, 1);
2796 return "";
/* Output an 8-bit move: OPERANDS[0] = destination, OPERANDS[1] = source.
   If L != NULL, store the length in words in *L instead of relying on
   the returned template's length.  Returns the assembler template
   string (possibly "" when the code was already emitted).  */
2800 const char *
2801 output_movqi (rtx insn, rtx operands[], int *l)
2803 int dummy;
2804 rtx dest = operands[0];
2805 rtx src = operands[1];
2806 int *real_l = l;
/* Reads from flash are handled by the LPM machinery.  */
2808 if (avr_mem_flash_p (src)
2809 || avr_mem_flash_p (dest))
2811 return avr_out_lpm (insn, operands, real_l);
2814 if (!l)
2815 l = &dummy;
2817 *l = 1;
2819 if (register_operand (dest, QImode))
2821 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from SPL use IN/OUT instead of MOV.  */
2823 if (test_hard_reg_class (STACK_REG, dest))
2824 return "out %0,%1";
2825 else if (test_hard_reg_class (STACK_REG, src))
2826 return "in %0,%1";
2828 return "mov %0,%1";
2830 else if (CONSTANT_P (src))
2832 output_reload_in_const (operands, NULL_RTX, real_l, false);
2833 return "";
2835 else if (GET_CODE (src) == MEM)
2836 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2838 else if (GET_CODE (dest) == MEM)
2840 rtx xop[2];
/* Storing zero uses __zero_reg__ rather than materializing 0.  */
2842 xop[0] = dest;
2843 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2845 return out_movqi_mr_r (insn, xop, real_l);
2847 return "";
/* Output a 16-bit move: XOP[0] = destination, XOP[1] = source.
   If PLEN != NULL, accumulate the length in words in *PLEN instead of
   emitting assembler.  Returns the template or "".  */
2851 const char *
2852 output_movhi (rtx insn, rtx xop[], int *plen)
2854 rtx dest = xop[0];
2855 rtx src = xop[1];
2856 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2859 if (avr_mem_flash_p (src)
2860 || avr_mem_flash_p (dest))
2862 return avr_out_lpm (insn, xop, plen);
2865 if (REG_P (dest))
2867 if (REG_P (src)) /* mov r,r */
/* Writing the stack pointer: devices with an 8-bit SP only need
   SPL; otherwise interrupts must be blocked around the two OUTs
   unless -mno-interrupts guarantees none can occur.  */
2869 if (test_hard_reg_class (STACK_REG, dest))
2871 if (AVR_HAVE_8BIT_SP)
2872 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2874 /* Use simple load of SP if no interrupts are used. */
2876 return TARGET_NO_INTERRUPTS
2877 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2878 "out __SP_L__,%A1", xop, plen, -2)
2880 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2881 "cli" CR_TAB
2882 "out __SP_H__,%B1" CR_TAB
2883 "out __SREG__,__tmp_reg__" CR_TAB
2884 "out __SP_L__,%A1", xop, plen, -5);
2886 else if (test_hard_reg_class (STACK_REG, src))
2888 return AVR_HAVE_8BIT_SP
2889 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2890 "clr %B0", xop, plen, -2)
2892 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2893 "in %B0,__SP_H__", xop, plen, -2);
2896 return AVR_HAVE_MOVW
2897 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2899 : avr_asm_len ("mov %A0,%A1" CR_TAB
2900 "mov %B0,%B1", xop, plen, -2);
2901 } /* REG_P (src) */
2902 else if (CONSTANT_P (src))
2904 return output_reload_inhi (xop, NULL, plen);
2906 else if (MEM_P (src))
2908 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2911 else if (MEM_P (dest))
2913 rtx xop[2];
2915 xop[0] = dest;
2916 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2918 return out_movhi_mr_r (insn, xop, plen);
2921 fatal_insn ("invalid insn:", insn);
2923 return "";
/* Output a QImode load register <- memory.  OP[0] = destination
   register, OP[1] = memory source.  If PLEN != NULL, accumulate the
   length in words in *PLEN instead of emitting assembler.  */
2926 static const char*
2927 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2929 rtx dest = op[0];
2930 rtx src = op[1];
2931 rtx x = XEXP (src, 0);
/* Absolute address: prefer IN for I/O addresses when optimizing.  */
2933 if (CONSTANT_ADDRESS_P (x))
2935 return optimize > 0 && io_address_operand (x, QImode)
2936 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2937 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2939 else if (GET_CODE (x) == PLUS
2940 && REG_P (XEXP (x, 0))
2941 && CONST_INT_P (XEXP (x, 1)))
2943 /* memory access by reg+disp */
2945 int disp = INTVAL (XEXP (x, 1));
/* Displacement out of LDD range: adjust Y around the access.  */
2947 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2949 if (REGNO (XEXP (x, 0)) != REG_Y)
2950 fatal_insn ("incorrect insn:",insn);
2952 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2953 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2954 "ldd %0,Y+63" CR_TAB
2955 "sbiw r28,%o1-63", op, plen, -3);
2957 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2958 "sbci r29,hi8(-%o1)" CR_TAB
2959 "ld %0,Y" CR_TAB
2960 "subi r28,lo8(%o1)" CR_TAB
2961 "sbci r29,hi8(%o1)", op, plen, -5);
2963 else if (REGNO (XEXP (x, 0)) == REG_X)
2965 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2966 it but I have this situation with extremal optimizing options. */
2968 avr_asm_len ("adiw r26,%o1" CR_TAB
2969 "ld %0,X", op, plen, -2);
/* Restore X unless the destination overwrote it or it is dead.  */
2971 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2972 && !reg_unused_after (insn, XEXP (x,0)))
2974 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2977 return "";
2980 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2983 return avr_asm_len ("ld %0,%1", op, plen, -1);
/* Output a HImode load register <- memory.  OP[0] = destination,
   OP[1] = memory source.  If PLEN != NULL, accumulate the length in
   words in *PLEN instead of emitting assembler.  */
2986 static const char*
2987 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2989 rtx dest = op[0];
2990 rtx src = op[1];
2991 rtx base = XEXP (src, 0);
2992 int reg_dest = true_regnum (dest);
2993 int reg_base = true_regnum (base);
2994 /* "volatile" forces reading low byte first, even if less efficient,
2995 for correct operation with 16-bit I/O registers. */
2996 int mem_volatile_p = MEM_VOLATILE_P (src);
2998 if (reg_base > 0)
/* Destination overlaps base: go through __tmp_reg__.  */
3000 if (reg_dest == reg_base) /* R = (R) */
3001 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3002 "ld %B0,%1" CR_TAB
3003 "mov %A0,__tmp_reg__", op, plen, -3);
3005 if (reg_base != REG_X)
3006 return avr_asm_len ("ld %A0,%1" CR_TAB
3007 "ldd %B0,%1+1", op, plen, -2);
/* X has no displacement mode; post-increment, then undo if X lives.  */
3009 avr_asm_len ("ld %A0,X+" CR_TAB
3010 "ld %B0,X", op, plen, -2);
3012 if (!reg_unused_after (insn, base))
3013 avr_asm_len ("sbiw r26,1", op, plen, 1);
3015 return "";
3017 else if (GET_CODE (base) == PLUS) /* (R + i) */
3019 int disp = INTVAL (XEXP (base, 1));
3020 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement out of LDD range: adjust Y around the access.  */
3022 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3024 if (REGNO (XEXP (base, 0)) != REG_Y)
3025 fatal_insn ("incorrect insn:",insn);
3027 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3028 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3029 "ldd %A0,Y+62" CR_TAB
3030 "ldd %B0,Y+63" CR_TAB
3031 "sbiw r28,%o1-62", op, plen, -4)
3033 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3034 "sbci r29,hi8(-%o1)" CR_TAB
3035 "ld %A0,Y" CR_TAB
3036 "ldd %B0,Y+1" CR_TAB
3037 "subi r28,lo8(%o1)" CR_TAB
3038 "sbci r29,hi8(%o1)", op, plen, -6);
3041 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3042 it but I have this situation with extremal
3043 optimization options. */
3045 if (reg_base == REG_X)
3046 return reg_base == reg_dest
3047 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3048 "ld __tmp_reg__,X+" CR_TAB
3049 "ld %B0,X" CR_TAB
3050 "mov %A0,__tmp_reg__", op, plen, -4)
3052 : avr_asm_len ("adiw r26,%o1" CR_TAB
3053 "ld %A0,X+" CR_TAB
3054 "ld %B0,X" CR_TAB
3055 "sbiw r26,%o1+1", op, plen, -4);
3057 return reg_base == reg_dest
3058 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3059 "ldd %B0,%B1" CR_TAB
3060 "mov %A0,__tmp_reg__", op, plen, -3)
3062 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3063 "ldd %B0,%B1", op, plen, -2);
3065 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3067 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3068 fatal_insn ("incorrect insn:", insn);
3070 if (!mem_volatile_p)
3071 return avr_asm_len ("ld %B0,%1" CR_TAB
3072 "ld %A0,%1", op, plen, -2);
/* Volatile: low byte must be read first (see comment above).  */
3074 return REGNO (XEXP (base, 0)) == REG_X
3075 ? avr_asm_len ("sbiw r26,2" CR_TAB
3076 "ld %A0,X+" CR_TAB
3077 "ld %B0,X" CR_TAB
3078 "sbiw r26,1", op, plen, -4)
3080 : avr_asm_len ("sbiw %r1,2" CR_TAB
3081 "ld %A0,%p1" CR_TAB
3082 "ldd %B0,%p1+1", op, plen, -3);
3084 else if (GET_CODE (base) == POST_INC) /* (R++) */
3086 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3087 fatal_insn ("incorrect insn:", insn);
3089 return avr_asm_len ("ld %A0,%1" CR_TAB
3090 "ld %B0,%1", op, plen, -2);
3092 else if (CONSTANT_ADDRESS_P (base))
3094 return optimize > 0 && io_address_operand (base, HImode)
3095 ? avr_asm_len ("in %A0,%i1" CR_TAB
3096 "in %B0,%i1+1", op, plen, -2)
3098 : avr_asm_len ("lds %A0,%m1" CR_TAB
3099 "lds %B0,%m1+1", op, plen, -4);
3102 fatal_insn ("unknown move insn:",insn);
3103 return "";
/* Output an SImode (4-byte) load register <- memory.  OP[0] = dest,
   OP[1] = memory source.  If L != NULL, the length in words is stored
   in *L; the assembler template is returned.  */
3106 static const char*
3107 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3109 rtx dest = op[0];
3110 rtx src = op[1];
3111 rtx base = XEXP (src, 0);
3112 int reg_dest = true_regnum (dest);
3113 int reg_base = true_regnum (base);
3114 int tmp;
3116 if (!l)
3117 l = &tmp;
3119 if (reg_base > 0)
3121 if (reg_base == REG_X) /* (R26) */
/* Destination IS X: read bytes in an order that never clobbers
   the address before its last use.  */
3123 if (reg_dest == REG_X)
3124 /* "ld r26,-X" is undefined */
3125 return *l=7, ("adiw r26,3" CR_TAB
3126 "ld r29,X" CR_TAB
3127 "ld r28,-X" CR_TAB
3128 "ld __tmp_reg__,-X" CR_TAB
3129 "sbiw r26,1" CR_TAB
3130 "ld r26,X" CR_TAB
3131 "mov r27,__tmp_reg__");
/* Destination r24..r27 overlaps X in its upper word: buffer byte C.  */
3132 else if (reg_dest == REG_X - 2)
3133 return *l=5, ("ld %A0,X+" CR_TAB
3134 "ld %B0,X+" CR_TAB
3135 "ld __tmp_reg__,X+" CR_TAB
3136 "ld %D0,X" CR_TAB
3137 "mov %C0,__tmp_reg__");
3138 else if (reg_unused_after (insn, base))
3139 return *l=4, ("ld %A0,X+" CR_TAB
3140 "ld %B0,X+" CR_TAB
3141 "ld %C0,X+" CR_TAB
3142 "ld %D0,X");
3143 else
3144 return *l=5, ("ld %A0,X+" CR_TAB
3145 "ld %B0,X+" CR_TAB
3146 "ld %C0,X+" CR_TAB
3147 "ld %D0,X" CR_TAB
3148 "sbiw r26,3");
3150 else
/* Base is Y or Z: use LDD with displacements, buffering the byte
   that would clobber the base before its last use.  */
3152 if (reg_dest == reg_base)
3153 return *l=5, ("ldd %D0,%1+3" CR_TAB
3154 "ldd %C0,%1+2" CR_TAB
3155 "ldd __tmp_reg__,%1+1" CR_TAB
3156 "ld %A0,%1" CR_TAB
3157 "mov %B0,__tmp_reg__");
3158 else if (reg_base == reg_dest + 2)
3159 return *l=5, ("ld %A0,%1" CR_TAB
3160 "ldd %B0,%1+1" CR_TAB
3161 "ldd __tmp_reg__,%1+2" CR_TAB
3162 "ldd %D0,%1+3" CR_TAB
3163 "mov %C0,__tmp_reg__");
3164 else
3165 return *l=4, ("ld %A0,%1" CR_TAB
3166 "ldd %B0,%1+1" CR_TAB
3167 "ldd %C0,%1+2" CR_TAB
3168 "ldd %D0,%1+3");
3171 else if (GET_CODE (base) == PLUS) /* (R + i) */
3173 int disp = INTVAL (XEXP (base, 1));
/* Displacement out of LDD range: adjust Y around the access.  */
3175 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3177 if (REGNO (XEXP (base, 0)) != REG_Y)
3178 fatal_insn ("incorrect insn:",insn);
3180 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3181 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3182 "ldd %A0,Y+60" CR_TAB
3183 "ldd %B0,Y+61" CR_TAB
3184 "ldd %C0,Y+62" CR_TAB
3185 "ldd %D0,Y+63" CR_TAB
3186 "sbiw r28,%o1-60");
3188 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3189 "sbci r29,hi8(-%o1)" CR_TAB
3190 "ld %A0,Y" CR_TAB
3191 "ldd %B0,Y+1" CR_TAB
3192 "ldd %C0,Y+2" CR_TAB
3193 "ldd %D0,Y+3" CR_TAB
3194 "subi r28,lo8(%o1)" CR_TAB
3195 "sbci r29,hi8(%o1)");
3198 reg_base = true_regnum (XEXP (base, 0));
3199 if (reg_base == REG_X)
3201 /* R = (X + d) */
3202 if (reg_dest == REG_X)
3204 *l = 7;
3205 /* "ld r26,-X" is undefined */
3206 return ("adiw r26,%o1+3" CR_TAB
3207 "ld r29,X" CR_TAB
3208 "ld r28,-X" CR_TAB
3209 "ld __tmp_reg__,-X" CR_TAB
3210 "sbiw r26,1" CR_TAB
3211 "ld r26,X" CR_TAB
3212 "mov r27,__tmp_reg__");
3214 *l = 6;
3215 if (reg_dest == REG_X - 2)
3216 return ("adiw r26,%o1" CR_TAB
3217 "ld r24,X+" CR_TAB
3218 "ld r25,X+" CR_TAB
3219 "ld __tmp_reg__,X+" CR_TAB
3220 "ld r27,X" CR_TAB
3221 "mov r26,__tmp_reg__");
3223 return ("adiw r26,%o1" CR_TAB
3224 "ld %A0,X+" CR_TAB
3225 "ld %B0,X+" CR_TAB
3226 "ld %C0,X+" CR_TAB
3227 "ld %D0,X" CR_TAB
3228 "sbiw r26,%o1+3");
3230 if (reg_dest == reg_base)
3231 return *l=5, ("ldd %D0,%D1" CR_TAB
3232 "ldd %C0,%C1" CR_TAB
3233 "ldd __tmp_reg__,%B1" CR_TAB
3234 "ldd %A0,%A1" CR_TAB
3235 "mov %B0,__tmp_reg__");
3236 else if (reg_dest == reg_base - 2)
3237 return *l=5, ("ldd %A0,%A1" CR_TAB
3238 "ldd %B0,%B1" CR_TAB
3239 "ldd __tmp_reg__,%C1" CR_TAB
3240 "ldd %D0,%D1" CR_TAB
3241 "mov %C0,__tmp_reg__");
3242 return *l=4, ("ldd %A0,%A1" CR_TAB
3243 "ldd %B0,%B1" CR_TAB
3244 "ldd %C0,%C1" CR_TAB
3245 "ldd %D0,%D1");
3247 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3248 return *l=4, ("ld %D0,%1" CR_TAB
3249 "ld %C0,%1" CR_TAB
3250 "ld %B0,%1" CR_TAB
3251 "ld %A0,%1");
3252 else if (GET_CODE (base) == POST_INC) /* (R++) */
3253 return *l=4, ("ld %A0,%1" CR_TAB
3254 "ld %B0,%1" CR_TAB
3255 "ld %C0,%1" CR_TAB
3256 "ld %D0,%1");
3257 else if (CONSTANT_ADDRESS_P (base))
3258 return *l=8, ("lds %A0,%m1" CR_TAB
3259 "lds %B0,%m1+1" CR_TAB
3260 "lds %C0,%m1+2" CR_TAB
3261 "lds %D0,%m1+3");
3263 fatal_insn ("unknown move insn:",insn);
3264 return "";
/* Output an SImode (4-byte) store memory <- register.  OP[0] = memory
   destination, OP[1] = source register.  If L != NULL, the length in
   words is stored in *L; the assembler template is returned.  */
3267 static const char*
3268 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3270 rtx dest = op[0];
3271 rtx src = op[1];
3272 rtx base = XEXP (dest, 0);
3273 int reg_base = true_regnum (base);
3274 int reg_src = true_regnum (src);
3275 int tmp;
3277 if (!l)
3278 l = &tmp;
3280 if (CONSTANT_ADDRESS_P (base))
3281 return *l=8,("sts %m0,%A1" CR_TAB
3282 "sts %m0+1,%B1" CR_TAB
3283 "sts %m0+2,%C1" CR_TAB
3284 "sts %m0+3,%D1");
3285 if (reg_base > 0) /* (r) */
3287 if (reg_base == REG_X) /* (R26) */
/* Source IS X: store r26 before X is advanced past it.  */
3289 if (reg_src == REG_X)
3291 /* "st X+,r26" is undefined */
3292 if (reg_unused_after (insn, base))
3293 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3294 "st X,r26" CR_TAB
3295 "adiw r26,1" CR_TAB
3296 "st X+,__tmp_reg__" CR_TAB
3297 "st X+,r28" CR_TAB
3298 "st X,r29");
3299 else
3300 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3301 "st X,r26" CR_TAB
3302 "adiw r26,1" CR_TAB
3303 "st X+,__tmp_reg__" CR_TAB
3304 "st X+,r28" CR_TAB
3305 "st X,r29" CR_TAB
3306 "sbiw r26,3");
/* Source upper word overlaps X: park bytes C/D in the temp and
   zero registers before they get clobbered.  */
3308 else if (reg_base == reg_src + 2)
3310 if (reg_unused_after (insn, base))
3311 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3312 "mov __tmp_reg__,%D1" CR_TAB
3313 "st %0+,%A1" CR_TAB
3314 "st %0+,%B1" CR_TAB
3315 "st %0+,__zero_reg__" CR_TAB
3316 "st %0,__tmp_reg__" CR_TAB
3317 "clr __zero_reg__");
3318 else
3319 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3320 "mov __tmp_reg__,%D1" CR_TAB
3321 "st %0+,%A1" CR_TAB
3322 "st %0+,%B1" CR_TAB
3323 "st %0+,__zero_reg__" CR_TAB
3324 "st %0,__tmp_reg__" CR_TAB
3325 "clr __zero_reg__" CR_TAB
3326 "sbiw r26,3");
3328 return *l=5, ("st %0+,%A1" CR_TAB
3329 "st %0+,%B1" CR_TAB
3330 "st %0+,%C1" CR_TAB
3331 "st %0,%D1" CR_TAB
3332 "sbiw r26,3");
3334 else
3335 return *l=4, ("st %0,%A1" CR_TAB
3336 "std %0+1,%B1" CR_TAB
3337 "std %0+2,%C1" CR_TAB
3338 "std %0+3,%D1");
3340 else if (GET_CODE (base) == PLUS) /* (R + i) */
3342 int disp = INTVAL (XEXP (base, 1));
3343 reg_base = REGNO (XEXP (base, 0));
/* Displacement out of STD range: adjust Y around the access.  */
3344 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3346 if (reg_base != REG_Y)
3347 fatal_insn ("incorrect insn:",insn);
3349 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3350 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3351 "std Y+60,%A1" CR_TAB
3352 "std Y+61,%B1" CR_TAB
3353 "std Y+62,%C1" CR_TAB
3354 "std Y+63,%D1" CR_TAB
3355 "sbiw r28,%o0-60");
3357 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3358 "sbci r29,hi8(-%o0)" CR_TAB
3359 "st Y,%A1" CR_TAB
3360 "std Y+1,%B1" CR_TAB
3361 "std Y+2,%C1" CR_TAB
3362 "std Y+3,%D1" CR_TAB
3363 "subi r28,lo8(%o0)" CR_TAB
3364 "sbci r29,hi8(%o0)");
3366 if (reg_base == REG_X)
3368 /* (X + d) = R */
3369 if (reg_src == REG_X)
3371 *l = 9;
3372 return ("mov __tmp_reg__,r26" CR_TAB
3373 "mov __zero_reg__,r27" CR_TAB
3374 "adiw r26,%o0" CR_TAB
3375 "st X+,__tmp_reg__" CR_TAB
3376 "st X+,__zero_reg__" CR_TAB
3377 "st X+,r28" CR_TAB
3378 "st X,r29" CR_TAB
3379 "clr __zero_reg__" CR_TAB
3380 "sbiw r26,%o0+3");
3382 else if (reg_src == REG_X - 2)
3384 *l = 9;
3385 return ("mov __tmp_reg__,r26" CR_TAB
3386 "mov __zero_reg__,r27" CR_TAB
3387 "adiw r26,%o0" CR_TAB
3388 "st X+,r24" CR_TAB
3389 "st X+,r25" CR_TAB
3390 "st X+,__tmp_reg__" CR_TAB
3391 "st X,__zero_reg__" CR_TAB
3392 "clr __zero_reg__" CR_TAB
3393 "sbiw r26,%o0+3");
3395 *l = 6;
3396 return ("adiw r26,%o0" CR_TAB
3397 "st X+,%A1" CR_TAB
3398 "st X+,%B1" CR_TAB
3399 "st X+,%C1" CR_TAB
3400 "st X,%D1" CR_TAB
3401 "sbiw r26,%o0+3");
3403 return *l=4, ("std %A0,%A1" CR_TAB
3404 "std %B0,%B1" CR_TAB
3405 "std %C0,%C1" CR_TAB
3406 "std %D0,%D1");
3408 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3409 return *l=4, ("st %0,%D1" CR_TAB
3410 "st %0,%C1" CR_TAB
3411 "st %0,%B1" CR_TAB
3412 "st %0,%A1");
3413 else if (GET_CODE (base) == POST_INC) /* (R++) */
3414 return *l=4, ("st %0,%A1" CR_TAB
3415 "st %0,%B1" CR_TAB
3416 "st %0,%C1" CR_TAB
3417 "st %0,%D1");
3418 fatal_insn ("unknown move insn:",insn);
3419 return "";
/* Output a 32-bit (SImode or SFmode) move.  OPERANDS[0] = destination,
   OPERANDS[1] = source.  If L != NULL, the length in words is stored
   in *L; the assembler template (or "") is returned.  */
3422 const char *
3423 output_movsisf (rtx insn, rtx operands[], int *l)
3425 int dummy;
3426 rtx dest = operands[0];
3427 rtx src = operands[1];
3428 int *real_l = l;
3430 if (avr_mem_flash_p (src)
3431 || avr_mem_flash_p (dest))
3433 return avr_out_lpm (insn, operands, real_l);
3436 if (!l)
3437 l = &dummy;
3439 if (register_operand (dest, VOIDmode))
3441 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy high-to-low or low-to-high depending on overlap direction
   so no byte is clobbered before being read.  */
3443 if (true_regnum (dest) > true_regnum (src))
3445 if (AVR_HAVE_MOVW)
3447 *l = 2;
3448 return ("movw %C0,%C1" CR_TAB
3449 "movw %A0,%A1");
3451 *l = 4;
3452 return ("mov %D0,%D1" CR_TAB
3453 "mov %C0,%C1" CR_TAB
3454 "mov %B0,%B1" CR_TAB
3455 "mov %A0,%A1");
3457 else
3459 if (AVR_HAVE_MOVW)
3461 *l = 2;
3462 return ("movw %A0,%A1" CR_TAB
3463 "movw %C0,%C1");
3465 *l = 4;
3466 return ("mov %A0,%A1" CR_TAB
3467 "mov %B0,%B1" CR_TAB
3468 "mov %C0,%C1" CR_TAB
3469 "mov %D0,%D1");
3472 else if (CONSTANT_P (src))
3474 return output_reload_insisf (operands, NULL_RTX, real_l);
3476 else if (GET_CODE (src) == MEM)
3477 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3479 else if (GET_CODE (dest) == MEM)
3481 const char *templ;
/* Storing zero uses __zero_reg__; restore operands[1] afterwards
   because callers may still look at it.  */
3483 if (src == CONST0_RTX (GET_MODE (dest)))
3484 operands[1] = zero_reg_rtx;
3486 templ = out_movsi_mr_r (insn, operands, real_l);
3488 if (!real_l)
3489 output_asm_insn (templ, operands);
3491 operands[1] = src;
3492 return "";
3494 fatal_insn ("invalid insn:", insn);
3495 return "";
3499 /* Handle loads of 24-bit types from memory to register. */
/* Output a PSImode (24-bit) load register <- memory.  OP[0] = dest,
   OP[1] = memory source.  If PLEN != NULL, accumulate the length in
   words in *PLEN instead of emitting assembler.  Returns "".  */
3501 static const char*
3502 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3504 rtx dest = op[0];
3505 rtx src = op[1];
3506 rtx base = XEXP (src, 0);
3507 int reg_dest = true_regnum (dest);
3508 int reg_base = true_regnum (base);
3510 if (reg_base > 0)
3512 if (reg_base == REG_X) /* (R26) */
/* Destination IS X: read in an order that keeps the address valid
   until its last use.  */
3514 if (reg_dest == REG_X)
3515 /* "ld r26,-X" is undefined */
3516 return avr_asm_len ("adiw r26,2" CR_TAB
3517 "ld r28,X" CR_TAB
3518 "ld __tmp_reg__,-X" CR_TAB
3519 "sbiw r26,1" CR_TAB
3520 "ld r26,X" CR_TAB
3521 "mov r27,__tmp_reg__", op, plen, -6);
3522 else
3524 avr_asm_len ("ld %A0,X+" CR_TAB
3525 "ld %B0,X+" CR_TAB
3526 "ld %C0,X", op, plen, -3);
/* Restore X unless the load clobbered it or it is dead.  */
3528 if (reg_dest != REG_X - 2
3529 && !reg_unused_after (insn, base))
3531 avr_asm_len ("sbiw r26,2", op, plen, 1);
3534 return "";
3537 else /* reg_base != REG_X */
3539 if (reg_dest == reg_base)
3540 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3541 "ldd __tmp_reg__,%1+1" CR_TAB
3542 "ld %A0,%1" CR_TAB
3543 "mov %B0,__tmp_reg__", op, plen, -4);
3544 else
3545 return avr_asm_len ("ld %A0,%1" CR_TAB
3546 "ldd %B0,%1+1" CR_TAB
3547 "ldd %C0,%1+2", op, plen, -3);
3550 else if (GET_CODE (base) == PLUS) /* (R + i) */
3552 int disp = INTVAL (XEXP (base, 1));
/* Displacement out of LDD range: adjust Y around the access.  */
3554 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3556 if (REGNO (XEXP (base, 0)) != REG_Y)
3557 fatal_insn ("incorrect insn:",insn);
3559 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3560 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3561 "ldd %A0,Y+61" CR_TAB
3562 "ldd %B0,Y+62" CR_TAB
3563 "ldd %C0,Y+63" CR_TAB
3564 "sbiw r28,%o1-61", op, plen, -5);
3566 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3567 "sbci r29,hi8(-%o1)" CR_TAB
3568 "ld %A0,Y" CR_TAB
3569 "ldd %B0,Y+1" CR_TAB
3570 "ldd %C0,Y+2" CR_TAB
3571 "subi r28,lo8(%o1)" CR_TAB
3572 "sbci r29,hi8(%o1)", op, plen, -7);
3575 reg_base = true_regnum (XEXP (base, 0));
3576 if (reg_base == REG_X)
3578 /* R = (X + d) */
3579 if (reg_dest == REG_X)
3581 /* "ld r26,-X" is undefined */
3582 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3583 "ld r28,X" CR_TAB
3584 "ld __tmp_reg__,-X" CR_TAB
3585 "sbiw r26,1" CR_TAB
3586 "ld r26,X" CR_TAB
3587 "mov r27,__tmp_reg__", op, plen, -6);
3590 avr_asm_len ("adiw r26,%o1" CR_TAB
3591 "ld r24,X+" CR_TAB
3592 "ld r25,X+" CR_TAB
3593 "ld r26,X", op, plen, -4);
3595 if (reg_dest != REG_X - 2)
3596 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3598 return "";
3601 if (reg_dest == reg_base)
3602 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3603 "ldd __tmp_reg__,%B1" CR_TAB
3604 "ldd %A0,%A1" CR_TAB
3605 "mov %B0,__tmp_reg__", op, plen, -4);
3607 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3608 "ldd %B0,%B1" CR_TAB
3609 "ldd %C0,%C1", op, plen, -3);
3611 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3612 return avr_asm_len ("ld %C0,%1" CR_TAB
3613 "ld %B0,%1" CR_TAB
3614 "ld %A0,%1", op, plen, -3);
3615 else if (GET_CODE (base) == POST_INC) /* (R++) */
3616 return avr_asm_len ("ld %A0,%1" CR_TAB
3617 "ld %B0,%1" CR_TAB
3618 "ld %C0,%1", op, plen, -3);
3620 else if (CONSTANT_ADDRESS_P (base))
3621 return avr_asm_len ("lds %A0,%m1" CR_TAB
3622 "lds %B0,%m1+1" CR_TAB
3623 "lds %C0,%m1+2", op, plen , -6);
3625 fatal_insn ("unknown move insn:",insn);
3626 return "";
3629 /* Handle store of 24-bit type from register or zero to memory. */
3631 static const char*
3632 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3634 rtx dest = op[0];
3635 rtx src = op[1];
3636 rtx base = XEXP (dest, 0);
3637 int reg_base = true_regnum (base);
3639 if (CONSTANT_ADDRESS_P (base))
3640 return avr_asm_len ("sts %m0,%A1" CR_TAB
3641 "sts %m0+1,%B1" CR_TAB
3642 "sts %m0+2,%C1", op, plen, -6);
3644 if (reg_base > 0) /* (r) */
3646 if (reg_base == REG_X) /* (R26) */
3648 gcc_assert (!reg_overlap_mentioned_p (base, src));
3650 avr_asm_len ("st %0+,%A1" CR_TAB
3651 "st %0+,%B1" CR_TAB
3652 "st %0,%C1", op, plen, -3);
3654 if (!reg_unused_after (insn, base))
3655 avr_asm_len ("sbiw r26,2", op, plen, 1);
3657 return "";
3659 else
3660 return avr_asm_len ("st %0,%A1" CR_TAB
3661 "std %0+1,%B1" CR_TAB
3662 "std %0+2,%C1", op, plen, -3);
3664 else if (GET_CODE (base) == PLUS) /* (R + i) */
3666 int disp = INTVAL (XEXP (base, 1));
3667 reg_base = REGNO (XEXP (base, 0));
3669 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3671 if (reg_base != REG_Y)
3672 fatal_insn ("incorrect insn:",insn);
3674 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3675 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3676 "std Y+61,%A1" CR_TAB
3677 "std Y+62,%B1" CR_TAB
3678 "std Y+63,%C1" CR_TAB
3679 "sbiw r28,%o0-60", op, plen, -5);
3681 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3682 "sbci r29,hi8(-%o0)" CR_TAB
3683 "st Y,%A1" CR_TAB
3684 "std Y+1,%B1" CR_TAB
3685 "std Y+2,%C1" CR_TAB
3686 "subi r28,lo8(%o0)" CR_TAB
3687 "sbci r29,hi8(%o0)", op, plen, -7);
3689 if (reg_base == REG_X)
3691 /* (X + d) = R */
3692 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3694 avr_asm_len ("adiw r26,%o0" CR_TAB
3695 "st X+,%A1" CR_TAB
3696 "st X+,%B1" CR_TAB
3697 "st X,%C1", op, plen, -4);
3699 if (!reg_unused_after (insn, XEXP (base, 0)))
3700 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3702 return "";
3705 return avr_asm_len ("std %A0,%A1" CR_TAB
3706 "std %B0,%B1" CR_TAB
3707 "std %C0,%C1", op, plen, -3);
3709 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3710 return avr_asm_len ("st %0,%C1" CR_TAB
3711 "st %0,%B1" CR_TAB
3712 "st %0,%A1", op, plen, -3);
3713 else if (GET_CODE (base) == POST_INC) /* (R++) */
3714 return avr_asm_len ("st %0,%A1" CR_TAB
3715 "st %0,%B1" CR_TAB
3716 "st %0,%C1", op, plen, -3);
3718 fatal_insn ("unknown move insn:",insn);
3719 return "";
3723 /* Move around 24-bit stuff. */
/* Output a PSImode (24-bit) move.  OP[0] = destination, OP[1] = source.
   If PLEN != NULL, accumulate the length in words in *PLEN instead of
   emitting assembler.  Returns the template or "".  */
3725 const char *
3726 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3728 rtx dest = op[0];
3729 rtx src = op[1];
3731 if (avr_mem_flash_p (src)
3732 || avr_mem_flash_p (dest))
3734 return avr_out_lpm (insn, op, plen);
3737 if (register_operand (dest, VOIDmode))
3739 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy high-to-low or low-to-high depending on overlap direction
   so no byte is clobbered before being read.  */
3741 if (true_regnum (dest) > true_regnum (src))
3743 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3745 if (AVR_HAVE_MOVW)
3746 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3747 else
3748 return avr_asm_len ("mov %B0,%B1" CR_TAB
3749 "mov %A0,%A1", op, plen, 2);
3751 else
3753 if (AVR_HAVE_MOVW)
3754 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3755 else
3756 avr_asm_len ("mov %A0,%A1" CR_TAB
3757 "mov %B0,%B1", op, plen, -2);
3759 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3762 else if (CONSTANT_P (src))
3764 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3766 else if (MEM_P (src))
3767 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3769 else if (MEM_P (dest))
3771 rtx xop[2];
/* Storing zero uses __zero_reg__ rather than materializing 0.  */
3773 xop[0] = dest;
3774 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3776 return avr_out_store_psi (insn, xop, plen);
3779 fatal_insn ("invalid insn:", insn);
3780 return "";
/* Output a QImode store memory <- register.  OP[0] = memory
   destination, OP[1] = source register.  If PLEN != NULL, accumulate
   the length in words in *PLEN instead of emitting assembler.  */
3784 static const char*
3785 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3787 rtx dest = op[0];
3788 rtx src = op[1];
3789 rtx x = XEXP (dest, 0);
/* Absolute address: prefer OUT for I/O addresses when optimizing.  */
3791 if (CONSTANT_ADDRESS_P (x))
3793 return optimize > 0 && io_address_operand (x, QImode)
3794 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3795 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3797 else if (GET_CODE (x) == PLUS
3798 && REG_P (XEXP (x, 0))
3799 && CONST_INT_P (XEXP (x, 1)))
3801 /* memory access by reg+disp */
3803 int disp = INTVAL (XEXP (x, 1));
/* Displacement out of STD range: adjust Y around the access.  */
3805 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3807 if (REGNO (XEXP (x, 0)) != REG_Y)
3808 fatal_insn ("incorrect insn:",insn);
3810 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3811 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3812 "std Y+63,%1" CR_TAB
3813 "sbiw r28,%o0-63", op, plen, -3);
3815 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3816 "sbci r29,hi8(-%o0)" CR_TAB
3817 "st Y,%1" CR_TAB
3818 "subi r28,lo8(%o0)" CR_TAB
3819 "sbci r29,hi8(%o0)", op, plen, -5);
3821 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlapping X must be saved before X is adjusted.  */
3823 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3825 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3826 "adiw r26,%o0" CR_TAB
3827 "st X,__tmp_reg__", op, plen, -3);
3829 else
3831 avr_asm_len ("adiw r26,%o0" CR_TAB
3832 "st X,%1", op, plen, -2);
3835 if (!reg_unused_after (insn, XEXP (x,0)))
3836 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3838 return "";
3841 return avr_asm_len ("std %0,%1", op, plen, -1);
3844 return avr_asm_len ("st %0,%1", op, plen, -1);
/* Output an HImode store of register OP[1] to memory OP[0] for insn INSN.
   If PLEN is NULL, output the instructions; otherwise only accumulate the
   instruction count in *PLEN.  Returns "".  */

static const char*
out_movhi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  /* "volatile" forces writing high byte first, even if less efficient,
     for correct operation with 16-bit I/O registers. */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    /* Absolute address: high byte first (see volatile note above).  */
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                     "out %i0,%A1", op, plen, -2)
      : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                     "sts %m0,%A1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined. */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)
          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "sbiw r26,1" CR_TAB
                         "st X,r26", op, plen, -5);

      /* Store through X; post-increment order only when X is dead and the
         access is not volatile.  */
      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement too big for STD: only Y may be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)
            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "st Y,%A1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      /* When the source is X itself, its bytes must be parked in the fixed
         registers before X is adjusted; __zero_reg__ is cleared again.  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)
        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      /* Volatile: write high byte first while still ending with the
         pointer advanced by 2.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "adiw r26,2", op, plen, -4)
        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
3960 /* Return 1 if frame pointer for current function required. */
3962 static bool
3963 avr_frame_pointer_required_p (void)
3965 return (cfun->calls_alloca
3966 || cfun->calls_setjmp
3967 || cfun->has_nonlocal_label
3968 || crtl->args.info.nregs == 0
3969 || get_frame_size () > 0);
3972 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3974 static RTX_CODE
3975 compare_condition (rtx insn)
3977 rtx next = next_real_insn (insn);
3979 if (next && JUMP_P (next))
3981 rtx pat = PATTERN (next);
3982 rtx src = SET_SRC (pat);
3984 if (IF_THEN_ELSE == GET_CODE (src))
3985 return GET_CODE (XEXP (src, 0));
3988 return UNKNOWN;
3992 /* Returns true iff INSN is a tst insn that only tests the sign. */
3994 static bool
3995 compare_sign_p (rtx insn)
3997 RTX_CODE cond = compare_condition (insn);
3998 return (cond == GE || cond == LT);
4002 /* Returns true iff the next insn is a JUMP_INSN with a condition
4003 that needs to be swapped (GT, GTU, LE, LEU). */
4005 static bool
4006 compare_diff_p (rtx insn)
4008 RTX_CODE cond = compare_condition (insn);
4009 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4012 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4014 static bool
4015 compare_eq_p (rtx insn)
4017 RTX_CODE cond = compare_condition (insn);
4018 return (cond == EQ || cond == NE);
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for an HI/SI register XOP[0] and an integer XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL:  Output instructions.
   PLEN != NULL:  Set *PLEN to the length (in words) of the sequence.
                  Don't output anything.  */

const char*
avr_out_compare (rtx insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against. */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison. */
  enum machine_mode mode = GET_MODE (xreg);

  /* Number of bytes to operate on. */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
  int clobber_val = -1;

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that DImode comparisons are always against reg:DI 18
     and therefore don't use this.  Only applicable when the register
     dies here, because the sequence clobbers it.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* x == 1  <=>  (x - 1) | ... == 0  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* x == -1  <=>  ~(x & ...) == 0  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise. */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate. */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63. */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              /* SBIW handles the low 16 bits in one instruction;
                 skip the next byte as well.  */
              avr_asm_len ("sbiw %0,%1", xop, plen, 1);
              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* For == / != against a small negative value, adding the
                 negated value is equivalent and also one instruction.  */
              return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy. */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              /* SBCI clobbers the byte, which is fine when xreg dies.  */
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register. */

      gcc_assert (REG_P (xop[2]));

      /* Avoid reloading the same immediate into the scratch twice.  */
      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
4177 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4179 const char*
4180 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4182 rtx xop[3];
4184 xop[0] = gen_rtx_REG (DImode, 18);
4185 xop[1] = op[0];
4186 xop[2] = op[1];
4188 return avr_out_compare (insn, xop, plen);
4191 /* Output test instruction for HImode. */
4193 const char*
4194 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4196 if (compare_sign_p (insn))
4198 avr_asm_len ("tst %B0", op, plen, -1);
4200 else if (reg_unused_after (insn, op[0])
4201 && compare_eq_p (insn))
4203 /* Faster than sbiw if we can clobber the operand. */
4204 avr_asm_len ("or %A0,%B0", op, plen, -1);
4206 else
4208 avr_out_compare (insn, op, plen);
4211 return "";
4215 /* Output test instruction for PSImode. */
4217 const char*
4218 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4220 if (compare_sign_p (insn))
4222 avr_asm_len ("tst %C0", op, plen, -1);
4224 else if (reg_unused_after (insn, op[0])
4225 && compare_eq_p (insn))
4227 /* Faster than sbiw if we can clobber the operand. */
4228 avr_asm_len ("or %A0,%B0" CR_TAB
4229 "or %A0,%C0", op, plen, -2);
4231 else
4233 avr_out_compare (insn, op, plen);
4236 return "";
4240 /* Output test instruction for SImode. */
4242 const char*
4243 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4245 if (compare_sign_p (insn))
4247 avr_asm_len ("tst %D0", op, plen, -1);
4249 else if (reg_unused_after (insn, op[0])
4250 && compare_eq_p (insn))
4252 /* Faster than sbiw if we can clobber the operand. */
4253 avr_asm_len ("or %A0,%B0" CR_TAB
4254 "or %A0,%C0" CR_TAB
4255 "or %A0,%D0", op, plen, -3);
4257 else
4259 avr_out_compare (insn, op, plen);
4262 return "";
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.  */

void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
                    int *plen, int t_len)
{
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A real scratch is only available when the insn is a PARALLEL
         with a REG (not SCRATCH) in operand 3.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */
          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      /* Emit a counted loop; set up the loop counter in op[3].  */

      if (scratch)
        {
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* The counter is known non-zero, so the zero-test at label 2
         can be skipped.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Shift count comes from memory: load it into __tmp_reg__.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count when it survives the insn or overlaps the
         shifted operand, to avoid clobbering it.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  /* Loop: test-at-top when the count may be zero (second_label), else
     a simple decrement-and-branch at the bottom.  */

  if (second_label)
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
/* 8bit shift left ((char)x << i)

   Hand-optimized sequences for constant counts; anything else falls
   through to out_shift_with_cnt.  *LEN receives the length (number of
   instructions) of the returned template; if LEN is NULL a dummy is
   used.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Count >= 8 (or negative): result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* SWAP exchanges nibbles; with ANDI (LD regs only) that gives
             a shorter shift by 4.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* Rotate bit 0 through carry into bit 7 of a cleared byte.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
/* 16bit shift left ((short)x << i)

   Hand-optimized sequences for constant counts; other counts fall
   through to out_shift_with_cnt.  *LEN receives the length of the
   returned template; a dummy is used when LEN is NULL.  A `break'
   inside the switch falls back to the generic loop (the trailing
   comments give the expected loop length).  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* NOTE(review): `scratch' here only checks for a PARALLEL pattern,
         not REG_P (operands[3]) as out_shift_with_cnt does -- presumably
         the %3 templates below are only reached when a real scratch
         register was allocated; confirm against the insn patterns.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Count >= 16: result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift left 6 = shift right 2 into the next-higher byte.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "ror %A0");

        case 8:
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "andi %B0,0xe0");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 0x20 to get the shift, result in r0.  */
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x20 multiplier in r1 (__zero_reg__) via SET/BLD,
                 then restore r1 to zero.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,5" CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop using %A0 itself as counter.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0" CR_TAB
                      "dec %A0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          /* Only bit 0 survives, rotated into bit 15.  */
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
/* 24-bit shift left

   OP[0] is the destination, OP[1] the source, OP[2] the shift count.
   PLEN == NULL: output instructions; PLEN != NULL: only count them
   in *PLEN.  Returns "".  */

const char*
avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Count >= 24: result is zero.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* Move order depends on whether dest and src overlap.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* NOTE(review): this copies %A0, not %A1 -- correct only when
               dest and source are the same register; the analogous case in
               avr_out_ashrpsi3 uses "mov %A0,%C1".  Verify against the
               ashlpsi3 insn constraints.  */
            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 survives, rotated into bit 23.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
/* 32bit shift left ((long)x << i)

   Hand-optimized sequences for constant counts; other counts fall
   through to out_shift_with_cnt.  *LEN receives the length of the
   returned template; a dummy is used when LEN is NULL.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Count >= 32: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Move order depends on dest/src overlap direction.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* Upper word of dest already aligns with lower word of src.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 survives, rotated into bit 31.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
/* 8bit arithmetic shift right ((signed char)x >> i)

   Hand-optimized sequences for constant counts; other counts fall
   through to out_shift_with_cnt.  *LEN receives the length of the
   returned template; a dummy is used when LEN is NULL.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* Save bit 6, smear the sign via SBC, restore bit 6 as bit 0.  */
          *len = 4;
          return ("bst %0,6" CR_TAB
                  "lsl %0" CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* Shift sign into carry, then SBC yields 0x00 or 0xff.  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
/* 16bit arithmetic shift right ((signed short)x >> i)

   Hand-optimized sequences for constant counts; other counts fall
   through to out_shift_with_cnt.  *LEN receives the length of the
   returned template; a dummy is used when LEN is NULL.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift left by 2 into the next-lower byte position.  */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "rol %B0");

        case 7:
          *len = 4;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* SBC (after shifting the sign into carry) gives the sign
               extension 0x00/0xff in one instruction.  */
            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0" CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "sbrc %A0,7" CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Signed multiply by 2^5 == arithmetic shift right by 11;
                 high part of the product lands in r1.  */
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 14:
          *len = 5;
          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Result is all sign bits.  */
          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
/* 24-bit arithmetic shift right

   OP[0] is the destination, OP[1] the source, OP[2] the shift count.
   PLEN == NULL: output instructions; PLEN != NULL: only count them
   in *PLEN.  Returns "".  */

const char*
avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Move order depends on dest/src overlap; sign-extend the
             top byte with SBRC/DEC.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0" CR_TAB
                                "sbrc %C1,7" CR_TAB
                                "dec %C0" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          /* COM gives 0xff when the sign bit is set, 0x00 otherwise.  */
          return avr_asm_len ("clr %B0" CR_TAB
                              "sbrc %A0,7" CR_TAB
                              "com %B0" CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Result is all sign bits.  */
          return avr_asm_len ("lsl %C0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
/* 32bit arithmetic shift right ((signed long)x >> i)

   Hand-optimized sequences for constant counts; other counts fall
   through to out_shift_with_cnt.  *LEN receives the length of the
   returned template; a dummy is used when LEN is NULL.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 6;
            /* Move order depends on dest/src overlap direction.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0" CR_TAB
                      "sbrc %C0,7" CR_TAB
                      "dec %D0");
            else
              return ("clr %D0" CR_TAB
                      "sbrc %D1,7" CR_TAB
                      "dec %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Low word of dest already aligns with high word of src.  */
            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            if (AVR_HAVE_MOVW)
              return *len = 5, ("movw %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            else
              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
          }

        case 24:
          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "clr %D0" CR_TAB
                            "sbrc %A0,7" CR_TAB
                            "com %D0" CR_TAB
                            "mov %B0,%D0" CR_TAB
                            "mov %C0,%D0");

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* Result is all sign bits.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "movw %C0,%A0");
          else
            return *len = 5, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0" CR_TAB
                              "mov %D0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
/* 8bit logic shift right ((unsigned char)x >> i)

   Hand-optimized sequences for constant counts; other counts fall
   through to out_shift_with_cnt.  *LEN receives the length of the
   returned template; a dummy is used when LEN is NULL.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Count >= 8 (or negative): result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsr %0";

        case 2:
          *len = 2;
          return ("lsr %0" CR_TAB
                  "lsr %0");
        case 3:
          *len = 3;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 4:
          /* SWAP + ANDI (LD regs only) is shorter than four LSRs.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0x0f");
            }
          *len = 4;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x7");
            }
          *len = 5;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x3");
            }
          *len = 6;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 7:
          /* Rotate bit 7 through carry into bit 0 of a cleared byte.  */
          *len = 3;
          return ("rol %0" CR_TAB
                  "clr %0" CR_TAB
                  "rol %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
5381 /* 16bit logic shift right ((unsigned short)x >> i) */
5383 const char *
5384 lshrhi3_out (rtx insn, rtx operands[], int *len)
5386 if (GET_CODE (operands[2]) == CONST_INT)
5388 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5389 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5390 int k;
5391 int *t = len;
5393 if (!len)
5394 len = &k;
5396 switch (INTVAL (operands[2]))
5398 default:
5399 if (INTVAL (operands[2]) < 16)
5400 break;
5402 *len = 2;
5403 return ("clr %B0" CR_TAB
5404 "clr %A0");
5406 case 4:
5407 if (optimize_size && scratch)
5408 break; /* 5 */
5409 if (ldi_ok)
5411 *len = 6;
5412 return ("swap %B0" CR_TAB
5413 "swap %A0" CR_TAB
5414 "andi %A0,0x0f" CR_TAB
5415 "eor %A0,%B0" CR_TAB
5416 "andi %B0,0x0f" CR_TAB
5417 "eor %A0,%B0");
5419 if (scratch)
5421 *len = 7;
5422 return ("swap %B0" CR_TAB
5423 "swap %A0" CR_TAB
5424 "ldi %3,0x0f" CR_TAB
5425 "and %A0,%3" CR_TAB
5426 "eor %A0,%B0" CR_TAB
5427 "and %B0,%3" CR_TAB
5428 "eor %A0,%B0");
5430 break; /* optimize_size ? 6 : 8 */
5432 case 5:
5433 if (optimize_size)
5434 break; /* scratch ? 5 : 6 */
5435 if (ldi_ok)
5437 *len = 8;
5438 return ("lsr %B0" CR_TAB
5439 "ror %A0" CR_TAB
5440 "swap %B0" CR_TAB
5441 "swap %A0" CR_TAB
5442 "andi %A0,0x0f" CR_TAB
5443 "eor %A0,%B0" CR_TAB
5444 "andi %B0,0x0f" CR_TAB
5445 "eor %A0,%B0");
5447 if (scratch)
5449 *len = 9;
5450 return ("lsr %B0" CR_TAB
5451 "ror %A0" CR_TAB
5452 "swap %B0" CR_TAB
5453 "swap %A0" CR_TAB
5454 "ldi %3,0x0f" CR_TAB
5455 "and %A0,%3" CR_TAB
5456 "eor %A0,%B0" CR_TAB
5457 "and %B0,%3" CR_TAB
5458 "eor %A0,%B0");
5460 break; /* 10 */
5462 case 6:
5463 if (optimize_size)
5464 break; /* scratch ? 5 : 6 */
5465 *len = 9;
5466 return ("clr __tmp_reg__" CR_TAB
5467 "lsl %A0" CR_TAB
5468 "rol %B0" CR_TAB
5469 "rol __tmp_reg__" CR_TAB
5470 "lsl %A0" CR_TAB
5471 "rol %B0" CR_TAB
5472 "rol __tmp_reg__" CR_TAB
5473 "mov %A0,%B0" CR_TAB
5474 "mov %B0,__tmp_reg__");
5476 case 7:
5477 *len = 5;
5478 return ("lsl %A0" CR_TAB
5479 "mov %A0,%B0" CR_TAB
5480 "rol %A0" CR_TAB
5481 "sbc %B0,%B0" CR_TAB
5482 "neg %B0");
5484 case 8:
5485 return *len = 2, ("mov %A0,%B1" CR_TAB
5486 "clr %B0");
5488 case 9:
5489 *len = 3;
5490 return ("mov %A0,%B0" CR_TAB
5491 "clr %B0" CR_TAB
5492 "lsr %A0");
5494 case 10:
5495 *len = 4;
5496 return ("mov %A0,%B0" CR_TAB
5497 "clr %B0" CR_TAB
5498 "lsr %A0" CR_TAB
5499 "lsr %A0");
5501 case 11:
5502 *len = 5;
5503 return ("mov %A0,%B0" CR_TAB
5504 "clr %B0" CR_TAB
5505 "lsr %A0" CR_TAB
5506 "lsr %A0" CR_TAB
5507 "lsr %A0");
5509 case 12:
5510 if (ldi_ok)
5512 *len = 4;
5513 return ("mov %A0,%B0" CR_TAB
5514 "clr %B0" CR_TAB
5515 "swap %A0" CR_TAB
5516 "andi %A0,0x0f");
5518 if (scratch)
5520 *len = 5;
5521 return ("mov %A0,%B0" CR_TAB
5522 "clr %B0" CR_TAB
5523 "swap %A0" CR_TAB
5524 "ldi %3,0x0f" CR_TAB
5525 "and %A0,%3");
5527 *len = 6;
5528 return ("mov %A0,%B0" CR_TAB
5529 "clr %B0" CR_TAB
5530 "lsr %A0" CR_TAB
5531 "lsr %A0" CR_TAB
5532 "lsr %A0" CR_TAB
5533 "lsr %A0");
5535 case 13:
5536 if (ldi_ok)
5538 *len = 5;
5539 return ("mov %A0,%B0" CR_TAB
5540 "clr %B0" CR_TAB
5541 "swap %A0" CR_TAB
5542 "lsr %A0" CR_TAB
5543 "andi %A0,0x07");
5545 if (AVR_HAVE_MUL && scratch)
5547 *len = 5;
5548 return ("ldi %3,0x08" CR_TAB
5549 "mul %B0,%3" CR_TAB
5550 "mov %A0,r1" CR_TAB
5551 "clr %B0" CR_TAB
5552 "clr __zero_reg__");
5554 if (optimize_size && scratch)
5555 break; /* 5 */
5556 if (scratch)
5558 *len = 6;
5559 return ("mov %A0,%B0" CR_TAB
5560 "clr %B0" CR_TAB
5561 "swap %A0" CR_TAB
5562 "lsr %A0" CR_TAB
5563 "ldi %3,0x07" CR_TAB
5564 "and %A0,%3");
5566 if (AVR_HAVE_MUL)
5568 *len = 6;
5569 return ("set" CR_TAB
5570 "bld r1,3" CR_TAB
5571 "mul %B0,r1" CR_TAB
5572 "mov %A0,r1" CR_TAB
5573 "clr %B0" CR_TAB
5574 "clr __zero_reg__");
5576 *len = 7;
5577 return ("mov %A0,%B0" CR_TAB
5578 "clr %B0" CR_TAB
5579 "lsr %A0" CR_TAB
5580 "lsr %A0" CR_TAB
5581 "lsr %A0" CR_TAB
5582 "lsr %A0" CR_TAB
5583 "lsr %A0");
5585 case 14:
5586 if (AVR_HAVE_MUL && ldi_ok)
5588 *len = 5;
5589 return ("ldi %A0,0x04" CR_TAB
5590 "mul %B0,%A0" CR_TAB
5591 "mov %A0,r1" CR_TAB
5592 "clr %B0" CR_TAB
5593 "clr __zero_reg__");
5595 if (AVR_HAVE_MUL && scratch)
5597 *len = 5;
5598 return ("ldi %3,0x04" CR_TAB
5599 "mul %B0,%3" CR_TAB
5600 "mov %A0,r1" CR_TAB
5601 "clr %B0" CR_TAB
5602 "clr __zero_reg__");
5604 if (optimize_size && ldi_ok)
5606 *len = 5;
5607 return ("mov %A0,%B0" CR_TAB
5608 "ldi %B0,6" "\n1:\t"
5609 "lsr %A0" CR_TAB
5610 "dec %B0" CR_TAB
5611 "brne 1b");
5613 if (optimize_size && scratch)
5614 break; /* 5 */
5615 *len = 6;
5616 return ("clr %A0" CR_TAB
5617 "lsl %B0" CR_TAB
5618 "rol %A0" CR_TAB
5619 "lsl %B0" CR_TAB
5620 "rol %A0" CR_TAB
5621 "clr %B0");
5623 case 15:
5624 *len = 4;
5625 return ("clr %A0" CR_TAB
5626 "lsl %B0" CR_TAB
5627 "rol %A0" CR_TAB
5628 "clr %B0");
5630 len = t;
5632 out_shift_with_cnt ("lsr %B0" CR_TAB
5633 "ror %A0", insn, operands, len, 2);
5634 return "";
5638 /* 24-bit logic shift right */
5640 const char*
5641 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5643 int dest = REGNO (op[0]);
5644 int src = REGNO (op[1]);
5646 if (CONST_INT_P (op[2]))
5648 if (plen)
5649 *plen = 0;
5651 switch (INTVAL (op[2]))
5653 case 8:
5654 if (dest <= src)
5655 return avr_asm_len ("mov %A0,%B1" CR_TAB
5656 "mov %B0,%C1" CR_TAB
5657 "clr %C0", op, plen, 3);
5658 else
5659 return avr_asm_len ("clr %C0" CR_TAB
5660 "mov %B0,%C1" CR_TAB
5661 "mov %A0,%B1", op, plen, 3);
5663 case 16:
5664 if (dest != src + 2)
5665 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5667 return avr_asm_len ("clr %B0" CR_TAB
5668 "clr %C0", op, plen, 2);
5670 default:
5671 if (INTVAL (op[2]) < 24)
5672 break;
5674 /* fall through */
5676 case 23:
5677 return avr_asm_len ("clr %A0" CR_TAB
5678 "sbrc %C0,7" CR_TAB
5679 "inc %A0" CR_TAB
5680 "clr %B0" CR_TAB
5681 "clr %C0", op, plen, 5);
5682 } /* switch */
5685 out_shift_with_cnt ("lsr %C0" CR_TAB
5686 "ror %B0" CR_TAB
5687 "ror %A0", insn, op, plen, 3);
5688 return "";
5692 /* 32bit logic shift right ((unsigned int)x >> i) */
5694 const char *
5695 lshrsi3_out (rtx insn, rtx operands[], int *len)
5697 if (GET_CODE (operands[2]) == CONST_INT)
5699 int k;
5700 int *t = len;
5702 if (!len)
5703 len = &k;
5705 switch (INTVAL (operands[2]))
5707 default:
5708 if (INTVAL (operands[2]) < 32)
5709 break;
5711 if (AVR_HAVE_MOVW)
5712 return *len = 3, ("clr %D0" CR_TAB
5713 "clr %C0" CR_TAB
5714 "movw %A0,%C0");
5715 *len = 4;
5716 return ("clr %D0" CR_TAB
5717 "clr %C0" CR_TAB
5718 "clr %B0" CR_TAB
5719 "clr %A0");
5721 case 8:
5723 int reg0 = true_regnum (operands[0]);
5724 int reg1 = true_regnum (operands[1]);
5725 *len = 4;
5726 if (reg0 <= reg1)
5727 return ("mov %A0,%B1" CR_TAB
5728 "mov %B0,%C1" CR_TAB
5729 "mov %C0,%D1" CR_TAB
5730 "clr %D0");
5731 else
5732 return ("clr %D0" CR_TAB
5733 "mov %C0,%D1" CR_TAB
5734 "mov %B0,%C1" CR_TAB
5735 "mov %A0,%B1");
5738 case 16:
5740 int reg0 = true_regnum (operands[0]);
5741 int reg1 = true_regnum (operands[1]);
5743 if (reg0 == reg1 + 2)
5744 return *len = 2, ("clr %C0" CR_TAB
5745 "clr %D0");
5746 if (AVR_HAVE_MOVW)
5747 return *len = 3, ("movw %A0,%C1" CR_TAB
5748 "clr %C0" CR_TAB
5749 "clr %D0");
5750 else
5751 return *len = 4, ("mov %B0,%D1" CR_TAB
5752 "mov %A0,%C1" CR_TAB
5753 "clr %C0" CR_TAB
5754 "clr %D0");
5757 case 24:
5758 return *len = 4, ("mov %A0,%D1" CR_TAB
5759 "clr %B0" CR_TAB
5760 "clr %C0" CR_TAB
5761 "clr %D0");
5763 case 31:
5764 *len = 6;
5765 return ("clr %A0" CR_TAB
5766 "sbrc %D0,7" CR_TAB
5767 "inc %A0" CR_TAB
5768 "clr %B0" CR_TAB
5769 "clr %C0" CR_TAB
5770 "clr %D0");
5772 len = t;
5774 out_shift_with_cnt ("lsr %D0" CR_TAB
5775 "ror %C0" CR_TAB
5776 "ror %B0" CR_TAB
5777 "ror %A0", insn, operands, len, 4);
5778 return "";
5782 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5784 XOP[0] = XOP[0] + XOP[2]
5786 and return "". If PLEN == NULL, print assembler instructions to perform the
5787 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5788 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5789 CODE == PLUS: perform addition by using ADD instructions.
5790 CODE == MINUS: perform addition by using SUB instructions.
5791 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5793 static void
5794 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5796 /* MODE of the operation. */
5797 enum machine_mode mode = GET_MODE (xop[0]);
5799 /* Number of bytes to operate on. */
5800 int i, n_bytes = GET_MODE_SIZE (mode);
5802 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5803 int clobber_val = -1;
5805 /* op[0]: 8-bit destination register
5806 op[1]: 8-bit const int
5807 op[2]: 8-bit scratch register */
5808 rtx op[3];
5810 /* Started the operation? Before starting the operation we may skip
5811 adding 0. This is no more true after the operation started because
5812 carry must be taken into account. */
5813 bool started = false;
5815 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5816 rtx xval = xop[2];
5818 /* Except in the case of ADIW with 16-bit register (see below)
5819 addition does not set cc0 in a usable way. */
5821 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5823 if (MINUS == code)
5824 xval = simplify_unary_operation (NEG, mode, xval, mode);
5826 op[2] = xop[3];
5828 if (plen)
5829 *plen = 0;
5831 for (i = 0; i < n_bytes; i++)
5833 /* We operate byte-wise on the destination. */
5834 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5835 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5837 /* 8-bit value to operate with this byte. */
5838 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5840 /* Registers R16..R31 can operate with immediate. */
5841 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5843 op[0] = reg8;
5844 op[1] = gen_int_mode (val8, QImode);
5846 /* To get usable cc0 no low-bytes must have been skipped. */
5848 if (i && !started)
5849 *pcc = CC_CLOBBER;
5851 if (!started
5852 && i % 2 == 0
5853 && i + 2 <= n_bytes
5854 && test_hard_reg_class (ADDW_REGS, reg8))
5856 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5857 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5859 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5860 i.e. operate word-wise. */
5862 if (val16 < 64)
5864 if (val16 != 0)
5866 started = true;
5867 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5868 op, plen, 1);
5870 if (n_bytes == 2 && PLUS == code)
5871 *pcc = CC_SET_ZN;
5874 i++;
5875 continue;
5879 if (val8 == 0)
5881 if (started)
5882 avr_asm_len (code == PLUS
5883 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5884 op, plen, 1);
5885 continue;
5887 else if ((val8 == 1 || val8 == 0xff)
5888 && !started
5889 && i == n_bytes - 1)
5891 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5892 op, plen, 1);
5893 break;
5896 switch (code)
5898 case PLUS:
5900 gcc_assert (plen != NULL || REG_P (op[2]));
5902 if (clobber_val != (int) val8)
5903 avr_asm_len ("ldi %2,%1", op, plen, 1);
5904 clobber_val = (int) val8;
5906 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5908 break; /* PLUS */
5910 case MINUS:
5912 if (ld_reg_p)
5913 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5914 else
5916 gcc_assert (plen != NULL || REG_P (op[2]));
5918 if (clobber_val != (int) val8)
5919 avr_asm_len ("ldi %2,%1", op, plen, 1);
5920 clobber_val = (int) val8;
5922 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5925 break; /* MINUS */
5927 default:
5928 /* Unknown code */
5929 gcc_unreachable();
5932 started = true;
5934 } /* for all sub-bytes */
5936 /* No output doesn't change cc0. */
5938 if (plen && *plen == 0)
5939 *pcc = CC_NONE;
5943 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5945 XOP[0] = XOP[0] + XOP[2]
5947 and return "". If PLEN == NULL, print assembler instructions to perform the
5948 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5949 words) printed with PLEN == NULL.
5950 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5951 condition code (with respect to XOP[0]). */
5953 const char*
5954 avr_out_plus (rtx *xop, int *plen, int *pcc)
5956 int len_plus, len_minus;
5957 int cc_plus, cc_minus, cc_dummy;
5959 if (!pcc)
5960 pcc = &cc_dummy;
5962 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5964 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
5965 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
5967 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5969 if (plen)
5971 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
5972 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
5974 else if (len_minus <= len_plus)
5975 avr_out_plus_1 (xop, NULL, MINUS, pcc);
5976 else
5977 avr_out_plus_1 (xop, NULL, PLUS, pcc);
5979 return "";
5983 /* Same as above but XOP has just 3 entries.
5984 Supply a dummy 4th operand. */
5986 const char*
5987 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
5989 rtx op[4];
5991 op[0] = xop[0];
5992 op[1] = xop[1];
5993 op[2] = xop[2];
5994 op[3] = NULL_RTX;
5996 return avr_out_plus (op, plen, pcc);
6000 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6002 const char*
6003 avr_out_plus64 (rtx addend, int *plen)
6005 int cc_dummy;
6006 rtx op[4];
6008 op[0] = gen_rtx_REG (DImode, 18);
6009 op[1] = op[0];
6010 op[2] = addend;
6011 op[3] = NULL_RTX;
6013 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6015 return "";
6018 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6019 time constant XOP[2]:
6021 XOP[0] = XOP[0] <op> XOP[2]
6023 and return "". If PLEN == NULL, print assembler instructions to perform the
6024 operation; otherwise, set *PLEN to the length of the instruction sequence
6025 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6026 register or SCRATCH if no clobber register is needed for the operation. */
6028 const char*
6029 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6031 /* CODE and MODE of the operation. */
6032 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6033 enum machine_mode mode = GET_MODE (xop[0]);
6035 /* Number of bytes to operate on. */
6036 int i, n_bytes = GET_MODE_SIZE (mode);
6038 /* Value of T-flag (0 or 1) or -1 if unknow. */
6039 int set_t = -1;
6041 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6042 int clobber_val = -1;
6044 /* op[0]: 8-bit destination register
6045 op[1]: 8-bit const int
6046 op[2]: 8-bit clobber register or SCRATCH
6047 op[3]: 8-bit register containing 0xff or NULL_RTX */
6048 rtx op[4];
6050 op[2] = xop[3];
6051 op[3] = NULL_RTX;
6053 if (plen)
6054 *plen = 0;
6056 for (i = 0; i < n_bytes; i++)
6058 /* We operate byte-wise on the destination. */
6059 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6060 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6062 /* 8-bit value to operate with this byte. */
6063 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6065 /* Number of bits set in the current byte of the constant. */
6066 int pop8 = avr_popcount (val8);
6068 /* Registers R16..R31 can operate with immediate. */
6069 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6071 op[0] = reg8;
6072 op[1] = GEN_INT (val8);
6074 switch (code)
6076 case IOR:
6078 if (0 == pop8)
6079 continue;
6080 else if (ld_reg_p)
6081 avr_asm_len ("ori %0,%1", op, plen, 1);
6082 else if (1 == pop8)
6084 if (set_t != 1)
6085 avr_asm_len ("set", op, plen, 1);
6086 set_t = 1;
6088 op[1] = GEN_INT (exact_log2 (val8));
6089 avr_asm_len ("bld %0,%1", op, plen, 1);
6091 else if (8 == pop8)
6093 if (op[3] != NULL_RTX)
6094 avr_asm_len ("mov %0,%3", op, plen, 1);
6095 else
6096 avr_asm_len ("clr %0" CR_TAB
6097 "dec %0", op, plen, 2);
6099 op[3] = op[0];
6101 else
6103 if (clobber_val != (int) val8)
6104 avr_asm_len ("ldi %2,%1", op, plen, 1);
6105 clobber_val = (int) val8;
6107 avr_asm_len ("or %0,%2", op, plen, 1);
6110 continue; /* IOR */
6112 case AND:
6114 if (8 == pop8)
6115 continue;
6116 else if (0 == pop8)
6117 avr_asm_len ("clr %0", op, plen, 1);
6118 else if (ld_reg_p)
6119 avr_asm_len ("andi %0,%1", op, plen, 1);
6120 else if (7 == pop8)
6122 if (set_t != 0)
6123 avr_asm_len ("clt", op, plen, 1);
6124 set_t = 0;
6126 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6127 avr_asm_len ("bld %0,%1", op, plen, 1);
6129 else
6131 if (clobber_val != (int) val8)
6132 avr_asm_len ("ldi %2,%1", op, plen, 1);
6133 clobber_val = (int) val8;
6135 avr_asm_len ("and %0,%2", op, plen, 1);
6138 continue; /* AND */
6140 case XOR:
6142 if (0 == pop8)
6143 continue;
6144 else if (8 == pop8)
6145 avr_asm_len ("com %0", op, plen, 1);
6146 else if (ld_reg_p && val8 == (1 << 7))
6147 avr_asm_len ("subi %0,%1", op, plen, 1);
6148 else
6150 if (clobber_val != (int) val8)
6151 avr_asm_len ("ldi %2,%1", op, plen, 1);
6152 clobber_val = (int) val8;
6154 avr_asm_len ("eor %0,%2", op, plen, 1);
6157 continue; /* XOR */
6159 default:
6160 /* Unknown rtx_code */
6161 gcc_unreachable();
6163 } /* for all sub-bytes */
6165 return "";
6169 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6170 PLEN != NULL: Set *PLEN to the length of that sequence.
6171 Return "". */
6173 const char*
6174 avr_out_addto_sp (rtx *op, int *plen)
6176 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6177 int addend = INTVAL (op[0]);
6179 if (plen)
6180 *plen = 0;
6182 if (addend < 0)
6184 if (flag_verbose_asm || flag_print_asm_name)
6185 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6187 while (addend <= -pc_len)
6189 addend += pc_len;
6190 avr_asm_len ("rcall .", op, plen, 1);
6193 while (addend++ < 0)
6194 avr_asm_len ("push __zero_reg__", op, plen, 1);
6196 else if (addend > 0)
6198 if (flag_verbose_asm || flag_print_asm_name)
6199 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6201 while (addend-- > 0)
6202 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6205 return "";
6209 /* Create RTL split patterns for byte sized rotate expressions. This
6210 produces a series of move instructions and considers overlap situations.
6211 Overlapping non-HImode operands need a scratch register. */
6213 bool
6214 avr_rotate_bytes (rtx operands[])
6216 int i, j;
6217 enum machine_mode mode = GET_MODE (operands[0]);
6218 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6219 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6220 int num = INTVAL (operands[2]);
6221 rtx scratch = operands[3];
6222 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6223 Word move if no scratch is needed, otherwise use size of scratch. */
6224 enum machine_mode move_mode = QImode;
6225 int move_size, offset, size;
6227 if (num & 0xf)
6228 move_mode = QImode;
6229 else if ((mode == SImode && !same_reg) || !overlapped)
6230 move_mode = HImode;
6231 else
6232 move_mode = GET_MODE (scratch);
6234 /* Force DI rotate to use QI moves since other DI moves are currently split
6235 into QI moves so forward propagation works better. */
6236 if (mode == DImode)
6237 move_mode = QImode;
6238 /* Make scratch smaller if needed. */
6239 if (SCRATCH != GET_CODE (scratch)
6240 && HImode == GET_MODE (scratch)
6241 && QImode == move_mode)
6242 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6244 move_size = GET_MODE_SIZE (move_mode);
6245 /* Number of bytes/words to rotate. */
6246 offset = (num >> 3) / move_size;
6247 /* Number of moves needed. */
6248 size = GET_MODE_SIZE (mode) / move_size;
6249 /* Himode byte swap is special case to avoid a scratch register. */
6250 if (mode == HImode && same_reg)
6252 /* HImode byte swap, using xor. This is as quick as using scratch. */
6253 rtx src, dst;
6254 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6255 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6256 if (!rtx_equal_p (dst, src))
6258 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6259 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6260 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6263 else
6265 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6266 /* Create linked list of moves to determine move order. */
6267 struct {
6268 rtx src, dst;
6269 int links;
6270 } move[MAX_SIZE + 8];
6271 int blocked, moves;
6273 gcc_assert (size <= MAX_SIZE);
6274 /* Generate list of subreg moves. */
6275 for (i = 0; i < size; i++)
6277 int from = i;
6278 int to = (from + offset) % size;
6279 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6280 mode, from * move_size);
6281 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6282 mode, to * move_size);
6283 move[i].links = -1;
6285 /* Mark dependence where a dst of one move is the src of another move.
6286 The first move is a conflict as it must wait until second is
6287 performed. We ignore moves to self - we catch this later. */
6288 if (overlapped)
6289 for (i = 0; i < size; i++)
6290 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6291 for (j = 0; j < size; j++)
6292 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6294 /* The dst of move i is the src of move j. */
6295 move[i].links = j;
6296 break;
6299 blocked = -1;
6300 moves = 0;
6301 /* Go through move list and perform non-conflicting moves. As each
6302 non-overlapping move is made, it may remove other conflicts
6303 so the process is repeated until no conflicts remain. */
6306 blocked = -1;
6307 moves = 0;
6308 /* Emit move where dst is not also a src or we have used that
6309 src already. */
6310 for (i = 0; i < size; i++)
6311 if (move[i].src != NULL_RTX)
6313 if (move[i].links == -1
6314 || move[move[i].links].src == NULL_RTX)
6316 moves++;
6317 /* Ignore NOP moves to self. */
6318 if (!rtx_equal_p (move[i].dst, move[i].src))
6319 emit_move_insn (move[i].dst, move[i].src);
6321 /* Remove conflict from list. */
6322 move[i].src = NULL_RTX;
6324 else
6325 blocked = i;
6328 /* Check for deadlock. This is when no moves occurred and we have
6329 at least one blocked move. */
6330 if (moves == 0 && blocked != -1)
6332 /* Need to use scratch register to break deadlock.
6333 Add move to put dst of blocked move into scratch.
6334 When this move occurs, it will break chain deadlock.
6335 The scratch register is substituted for real move. */
6337 gcc_assert (SCRATCH != GET_CODE (scratch));
6339 move[size].src = move[blocked].dst;
6340 move[size].dst = scratch;
6341 /* Scratch move is never blocked. */
6342 move[size].links = -1;
6343 /* Make sure we have valid link. */
6344 gcc_assert (move[blocked].links != -1);
6345 /* Replace src of blocking move with scratch reg. */
6346 move[move[blocked].links].src = scratch;
6347 /* Make dependent on scratch move occuring. */
6348 move[blocked].links = size;
6349 size=size+1;
6352 while (blocked != -1);
6354 return true;
6357 /* Modifies the length assigned to instruction INSN
6358 LEN is the initially computed length of the insn. */
6361 adjust_insn_length (rtx insn, int len)
6363 rtx *op = recog_data.operand;
6364 enum attr_adjust_len adjust_len;
6366 /* Some complex insns don't need length adjustment and therefore
6367 the length need not/must not be adjusted for these insns.
6368 It is easier to state this in an insn attribute "adjust_len" than
6369 to clutter up code here... */
6371 if (-1 == recog_memoized (insn))
6373 return len;
6376 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6378 adjust_len = get_attr_adjust_len (insn);
6380 if (adjust_len == ADJUST_LEN_NO)
6382 /* Nothing to adjust: The length from attribute "length" is fine.
6383 This is the default. */
6385 return len;
6388 /* Extract insn's operands. */
6390 extract_constrain_insn_cached (insn);
6392 /* Dispatch to right function. */
6394 switch (adjust_len)
6396 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6397 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6398 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6400 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6402 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6403 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6404 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6405 avr_out_plus_noclobber (op, &len, NULL); break;
6407 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6409 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6410 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6411 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6412 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6413 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6414 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6416 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6417 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6418 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6419 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6420 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6422 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6423 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6424 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6426 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6427 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6428 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6430 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6431 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6432 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6434 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6435 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6436 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6438 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6440 case ADJUST_LEN_MAP_BITS: avr_out_map_bits (insn, op, &len); break;
6442 default:
6443 gcc_unreachable();
6446 return len;
6449 /* Return nonzero if register REG dead after INSN. */
6452 reg_unused_after (rtx insn, rtx reg)
6454 return (dead_or_set_p (insn, reg)
6455 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6458 /* Return nonzero if REG is not used after INSN.
6459 We assume REG is a reload reg, and therefore does
6460 not live past labels. It may live past calls or jumps though. */
6463 _reg_unused_after (rtx insn, rtx reg)
6465 enum rtx_code code;
6466 rtx set;
6468 /* If the reg is set by this instruction, then it is safe for our
6469 case. Disregard the case where this is a store to memory, since
6470 we are checking a register used in the store address. */
6471 set = single_set (insn);
6472 if (set && GET_CODE (SET_DEST (set)) != MEM
6473 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6474 return 1;
6476 while ((insn = NEXT_INSN (insn)))
6478 rtx set;
6479 code = GET_CODE (insn);
6481 #if 0
6482 /* If this is a label that existed before reload, then the register
6483 if dead here. However, if this is a label added by reorg, then
6484 the register may still be live here. We can't tell the difference,
6485 so we just ignore labels completely. */
6486 if (code == CODE_LABEL)
6487 return 1;
6488 /* else */
6489 #endif
6491 if (!INSN_P (insn))
6492 continue;
6494 if (code == JUMP_INSN)
6495 return 0;
6497 /* If this is a sequence, we must handle them all at once.
6498 We could have for instance a call that sets the target register,
6499 and an insn in a delay slot that uses the register. In this case,
6500 we must return 0. */
6501 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6503 int i;
6504 int retval = 0;
6506 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6508 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6509 rtx set = single_set (this_insn);
6511 if (GET_CODE (this_insn) == CALL_INSN)
6512 code = CALL_INSN;
6513 else if (GET_CODE (this_insn) == JUMP_INSN)
6515 if (INSN_ANNULLED_BRANCH_P (this_insn))
6516 return 0;
6517 code = JUMP_INSN;
6520 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6521 return 0;
6522 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6524 if (GET_CODE (SET_DEST (set)) != MEM)
6525 retval = 1;
6526 else
6527 return 0;
6529 if (set == 0
6530 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6531 return 0;
6533 if (retval == 1)
6534 return 1;
6535 else if (code == JUMP_INSN)
6536 return 0;
6539 if (code == CALL_INSN)
6541 rtx tem;
6542 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6543 if (GET_CODE (XEXP (tem, 0)) == USE
6544 && REG_P (XEXP (XEXP (tem, 0), 0))
6545 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6546 return 0;
6547 if (call_used_regs[REGNO (reg)])
6548 return 1;
6551 set = single_set (insn);
6553 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6554 return 0;
6555 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6556 return GET_CODE (SET_DEST (set)) != MEM;
6557 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6558 return 0;
6560 return 1;
6564 /* Return RTX that represents the lower 16 bits of a constant address.
6565 Unfortunately, simplify_gen_subreg does not handle this case. */
6567 static rtx
6568 avr_const_address_lo16 (rtx x)
6570 rtx lo16;
6572 switch (GET_CODE (x))
6574 default:
6575 break;
6577 case CONST:
6578 if (PLUS == GET_CODE (XEXP (x, 0))
6579 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6580 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6582 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6583 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6585 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6586 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6588 return lo16;
6591 break;
6593 case SYMBOL_REF:
6595 const char *name = XSTR (x, 0);
6597 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6601 avr_edump ("\n%?: %r\n", x);
6602 gcc_unreachable();
6606 /* Target hook for assembling integer objects. The AVR version needs
6607 special handling for references to certain labels. */
6609 static bool
6610 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6612 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6613 && text_segment_operand (x, VOIDmode) )
6615 fputs ("\t.word\tgs(", asm_out_file);
6616 output_addr_const (asm_out_file, x);
6617 fputs (")\n", asm_out_file);
6619 return true;
6621 else if (GET_MODE (x) == PSImode)
6623 default_assemble_integer (avr_const_address_lo16 (x),
6624 GET_MODE_SIZE (HImode), aligned_p);
6626 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6627 " extension for hh8(", asm_out_file);
6628 output_addr_const (asm_out_file, x);
6629 fputs (")\"\n", asm_out_file);
6631 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6632 output_addr_const (asm_out_file, x);
6633 fputs (")\n", asm_out_file);
6635 return true;
6638 return default_assemble_integer (x, size, aligned_p);
6642 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6644 void
6645 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6648 /* If the function has the 'signal' or 'interrupt' attribute, test to
6649 make sure that the name of the function is "__vector_NN" so as to
6650 catch when the user misspells the interrupt vector name. */
6652 if (cfun->machine->is_interrupt)
6654 if (!STR_PREFIX_P (name, "__vector"))
6656 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6657 "%qs appears to be a misspelled interrupt handler",
6658 name);
6661 else if (cfun->machine->is_signal)
6663 if (!STR_PREFIX_P (name, "__vector"))
6665 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6666 "%qs appears to be a misspelled signal handler",
6667 name);
6671 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6672 ASM_OUTPUT_LABEL (file, name);
6676 /* Return value is nonzero if pseudos that have been
6677 assigned to registers of class CLASS would likely be spilled
6678 because registers of CLASS are needed for spill registers. */
6680 static bool
6681 avr_class_likely_spilled_p (reg_class_t c)
6683 return (c != ALL_REGS && c != ADDW_REGS);
6686 /* Valid attributes:
6687 progmem - put data to program memory;
6688 signal - make a function to be hardware interrupt. After function
6689 prologue interrupts are disabled;
6690 interrupt - make a function to be hardware interrupt. After function
6691 prologue interrupts are enabled;
6692 naked - don't generate function prologue/epilogue and `ret' command.
6694 Only `progmem' attribute valid for type. */
6696 /* Handle a "progmem" attribute; arguments as in
6697 struct attribute_spec.handler. */
6698 static tree
6699 avr_handle_progmem_attribute (tree *node, tree name,
6700 tree args ATTRIBUTE_UNUSED,
6701 int flags ATTRIBUTE_UNUSED,
6702 bool *no_add_attrs)
6704 if (DECL_P (*node))
6706 if (TREE_CODE (*node) == TYPE_DECL)
6708 /* This is really a decl attribute, not a type attribute,
6709 but try to handle it for GCC 3.0 backwards compatibility. */
6711 tree type = TREE_TYPE (*node);
6712 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6713 tree newtype = build_type_attribute_variant (type, attr);
6715 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6716 TREE_TYPE (*node) = newtype;
6717 *no_add_attrs = true;
6719 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6721 *no_add_attrs = false;
6723 else
6725 warning (OPT_Wattributes, "%qE attribute ignored",
6726 name);
6727 *no_add_attrs = true;
6731 return NULL_TREE;
6734 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6735 struct attribute_spec.handler. */
6737 static tree
6738 avr_handle_fndecl_attribute (tree *node, tree name,
6739 tree args ATTRIBUTE_UNUSED,
6740 int flags ATTRIBUTE_UNUSED,
6741 bool *no_add_attrs)
6743 if (TREE_CODE (*node) != FUNCTION_DECL)
6745 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6746 name);
6747 *no_add_attrs = true;
6750 return NULL_TREE;
6753 static tree
6754 avr_handle_fntype_attribute (tree *node, tree name,
6755 tree args ATTRIBUTE_UNUSED,
6756 int flags ATTRIBUTE_UNUSED,
6757 bool *no_add_attrs)
6759 if (TREE_CODE (*node) != FUNCTION_TYPE)
6761 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6762 name);
6763 *no_add_attrs = true;
6766 return NULL_TREE;
6770 /* AVR attributes. */
6771 static const struct attribute_spec
6772 avr_attribute_table[] =
6774 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6775 affects_type_identity } */
6776 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6777 false },
6778 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6779 false },
6780 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6781 false },
6782 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6783 false },
6784 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6785 false },
6786 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6787 false },
6788 { NULL, 0, 0, false, false, false, NULL, false }
6792 /* Look if DECL shall be placed in program memory space by
6793 means of attribute `progmem' or some address-space qualifier.
6794 Return non-zero if DECL is data that must end up in Flash and
6795 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6797 Return 2 if DECL is located in 24-bit flash address-space
6798 Return 1 if DECL is located in 16-bit flash address-space
6799 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6800 Return 0 otherwise */
6803 avr_progmem_p (tree decl, tree attributes)
6805 tree a;
6807 if (TREE_CODE (decl) != VAR_DECL)
6808 return 0;
6810 if (avr_decl_memx_p (decl))
6811 return 2;
6813 if (avr_decl_flash_p (decl))
6814 return 1;
6816 if (NULL_TREE
6817 != lookup_attribute ("progmem", attributes))
6818 return -1;
6820 a = decl;
6823 a = TREE_TYPE(a);
6824 while (TREE_CODE (a) == ARRAY_TYPE);
6826 if (a == error_mark_node)
6827 return 0;
6829 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6830 return -1;
6832 return 0;
6836 /* Scan type TYP for pointer references to address space ASn.
6837 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6838 the AS are also declared to be CONST.
6839 Otherwise, return the respective addres space, i.e. a value != 0. */
6841 static addr_space_t
6842 avr_nonconst_pointer_addrspace (tree typ)
6844 while (ARRAY_TYPE == TREE_CODE (typ))
6845 typ = TREE_TYPE (typ);
6847 if (POINTER_TYPE_P (typ))
6849 tree target = TREE_TYPE (typ);
6851 /* Pointer to function: Test the function's return type. */
6853 if (FUNCTION_TYPE == TREE_CODE (target))
6854 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6856 /* "Ordinary" pointers... */
6858 while (TREE_CODE (target) == ARRAY_TYPE)
6859 target = TREE_TYPE (target);
6861 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6862 && !TYPE_READONLY (target))
6864 /* Pointers to non-generic address space must be const. */
6866 return TYPE_ADDR_SPACE (target);
6869 /* Scan pointer's target type. */
6871 return avr_nonconst_pointer_addrspace (target);
6874 return ADDR_SPACE_GENERIC;
6878 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
6879 go along with CONST qualifier. Writing to these address spaces should
6880 be detected and complained about as early as possible. */
6882 static bool
6883 avr_pgm_check_var_decl (tree node)
6885 const char *reason = NULL;
6887 addr_space_t as = ADDR_SPACE_GENERIC;
6889 gcc_assert (as == 0);
6891 if (avr_log.progmem)
6892 avr_edump ("%?: %t\n", node);
6894 switch (TREE_CODE (node))
6896 default:
6897 break;
6899 case VAR_DECL:
6900 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6901 reason = "variable";
6902 break;
6904 case PARM_DECL:
6905 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6906 reason = "function parameter";
6907 break;
6909 case FIELD_DECL:
6910 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6911 reason = "structure field";
6912 break;
6914 case FUNCTION_DECL:
6915 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6917 reason = "return type of function";
6918 break;
6920 case POINTER_TYPE:
6921 if (as = avr_nonconst_pointer_addrspace (node), as)
6922 reason = "pointer";
6923 break;
6926 if (reason)
6928 if (TYPE_P (node))
6929 error ("pointer targeting address space %qs must be const in %qT",
6930 avr_addrspace[as].name, node);
6931 else
6932 error ("pointer targeting address space %qs must be const in %s %q+D",
6933 avr_addrspace[as].name, reason, node);
6936 return reason == NULL;
6940 /* Add the section attribute if the variable is in progmem. */
6942 static void
6943 avr_insert_attributes (tree node, tree *attributes)
6945 avr_pgm_check_var_decl (node);
6947 if (TREE_CODE (node) == VAR_DECL
6948 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6949 && avr_progmem_p (node, *attributes))
6951 tree node0 = node;
6953 /* For C++, we have to peel arrays in order to get correct
6954 determination of readonlyness. */
6957 node0 = TREE_TYPE (node0);
6958 while (TREE_CODE (node0) == ARRAY_TYPE);
6960 if (error_mark_node == node0)
6961 return;
6963 if (!TYPE_READONLY (node0)
6964 && !TREE_READONLY (node))
6966 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
6967 const char *reason = "__attribute__((progmem))";
6969 if (!ADDR_SPACE_GENERIC_P (as))
6970 reason = avr_addrspace[as].name;
6972 if (avr_log.progmem)
6973 avr_edump ("\n%?: %t\n%t\n", node, node0);
6975 error ("variable %q+D must be const in order to be put into"
6976 " read-only section by means of %qs", node, reason);
6982 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
6983 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
6984 /* Track need of __do_clear_bss. */
6986 void
6987 avr_asm_output_aligned_decl_common (FILE * stream,
6988 const_tree decl ATTRIBUTE_UNUSED,
6989 const char *name,
6990 unsigned HOST_WIDE_INT size,
6991 unsigned int align, bool local_p)
6993 avr_need_clear_bss_p = true;
6995 if (local_p)
6996 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
6997 else
6998 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7002 /* Unnamed section callback for data_section
7003 to track need of __do_copy_data. */
7005 static void
7006 avr_output_data_section_asm_op (const void *data)
7008 avr_need_copy_data_p = true;
7010 /* Dispatch to default. */
7011 output_section_asm_op (data);
7015 /* Unnamed section callback for bss_section
7016 to track need of __do_clear_bss. */
7018 static void
7019 avr_output_bss_section_asm_op (const void *data)
7021 avr_need_clear_bss_p = true;
7023 /* Dispatch to default. */
7024 output_section_asm_op (data);
7028 /* Unnamed section callback for progmem*.data sections. */
7030 static void
7031 avr_output_progmem_section_asm_op (const void *data)
7033 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7034 (const char*) data);
7038 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7040 static void
7041 avr_asm_init_sections (void)
7043 unsigned int n;
7045 /* Set up a section for jump tables. Alignment is handled by
7046 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7048 if (AVR_HAVE_JMP_CALL)
7050 progmem_swtable_section
7051 = get_unnamed_section (0, output_section_asm_op,
7052 "\t.section\t.progmem.gcc_sw_table"
7053 ",\"a\",@progbits");
7055 else
7057 progmem_swtable_section
7058 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7059 "\t.section\t.progmem.gcc_sw_table"
7060 ",\"ax\",@progbits");
7063 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7065 progmem_section[n]
7066 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7067 progmem_section_prefix[n]);
7070 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7071 resp. `avr_need_copy_data_p'. */
7073 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7074 data_section->unnamed.callback = avr_output_data_section_asm_op;
7075 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7079 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7081 static section*
7082 avr_asm_function_rodata_section (tree decl)
7084 /* If a function is unused and optimized out by -ffunction-sections
7085 and --gc-sections, ensure that the same will happen for its jump
7086 tables by putting them into individual sections. */
7088 unsigned int flags;
7089 section * frodata;
7091 /* Get the frodata section from the default function in varasm.c
7092 but treat function-associated data-like jump tables as code
7093 rather than as user defined data. AVR has no constant pools. */
7095 int fdata = flag_data_sections;
7097 flag_data_sections = flag_function_sections;
7098 frodata = default_function_rodata_section (decl);
7099 flag_data_sections = fdata;
7100 flags = frodata->common.flags;
7103 if (frodata != readonly_data_section
7104 && flags & SECTION_NAMED)
7106 /* Adjust section flags and replace section name prefix. */
7108 unsigned int i;
7110 static const char* const prefix[] =
7112 ".rodata", ".progmem.gcc_sw_table",
7113 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7116 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7118 const char * old_prefix = prefix[i];
7119 const char * new_prefix = prefix[i+1];
7120 const char * name = frodata->named.name;
7122 if (STR_PREFIX_P (name, old_prefix))
7124 const char *rname = ACONCAT ((new_prefix,
7125 name + strlen (old_prefix), NULL));
7126 flags &= ~SECTION_CODE;
7127 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7129 return get_section (rname, flags, frodata->named.decl);
7134 return progmem_swtable_section;
7138 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7139 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7141 static void
7142 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7144 if (flags & AVR_SECTION_PROGMEM)
7146 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7147 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7148 const char *old_prefix = ".rodata";
7149 const char *new_prefix = progmem_section_prefix[segment];
7151 if (STR_PREFIX_P (name, old_prefix))
7153 const char *sname = ACONCAT ((new_prefix,
7154 name + strlen (old_prefix), NULL));
7155 default_elf_asm_named_section (sname, flags, decl);
7156 return;
7159 default_elf_asm_named_section (new_prefix, flags, decl);
7160 return;
7163 if (!avr_need_copy_data_p)
7164 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7165 || STR_PREFIX_P (name, ".rodata")
7166 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7168 if (!avr_need_clear_bss_p)
7169 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7171 default_elf_asm_named_section (name, flags, decl);
7174 static unsigned int
7175 avr_section_type_flags (tree decl, const char *name, int reloc)
7177 unsigned int flags = default_section_type_flags (decl, name, reloc);
7179 if (STR_PREFIX_P (name, ".noinit"))
7181 if (decl && TREE_CODE (decl) == VAR_DECL
7182 && DECL_INITIAL (decl) == NULL_TREE)
7183 flags |= SECTION_BSS; /* @nobits */
7184 else
7185 warning (0, "only uninitialized variables can be placed in the "
7186 ".noinit section");
7189 if (decl && DECL_P (decl)
7190 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7192 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7194 /* Attribute progmem puts data in generic address space.
7195 Set section flags as if it was in __flash to get the right
7196 section prefix in the remainder. */
7198 if (ADDR_SPACE_GENERIC_P (as))
7199 as = ADDR_SPACE_FLASH;
7201 flags |= as * SECTION_MACH_DEP;
7202 flags &= ~SECTION_WRITE;
7203 flags &= ~SECTION_BSS;
7206 return flags;
7210 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7212 static void
7213 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7215 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7216 readily available, see PR34734. So we postpone the warning
7217 about uninitialized data in program memory section until here. */
7219 if (new_decl_p
7220 && decl && DECL_P (decl)
7221 && NULL_TREE == DECL_INITIAL (decl)
7222 && !DECL_EXTERNAL (decl)
7223 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7225 warning (OPT_Wuninitialized,
7226 "uninitialized variable %q+D put into "
7227 "program memory area", decl);
7230 default_encode_section_info (decl, rtl, new_decl_p);
7232 if (decl && DECL_P (decl)
7233 && TREE_CODE (decl) != FUNCTION_DECL
7234 && MEM_P (rtl)
7235 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7237 rtx sym = XEXP (rtl, 0);
7238 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7240 /* PSTR strings are in generic space but located in flash:
7241 patch address space. */
7243 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7244 as = ADDR_SPACE_FLASH;
7246 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7251 /* Implement `TARGET_ASM_SELECT_SECTION' */
7253 static section *
7254 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7256 section * sect = default_elf_select_section (decl, reloc, align);
7258 if (decl && DECL_P (decl)
7259 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7261 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7262 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7264 if (sect->common.flags & SECTION_NAMED)
7266 const char * name = sect->named.name;
7267 const char * old_prefix = ".rodata";
7268 const char * new_prefix = progmem_section_prefix[segment];
7270 if (STR_PREFIX_P (name, old_prefix))
7272 const char *sname = ACONCAT ((new_prefix,
7273 name + strlen (old_prefix), NULL));
7274 return get_section (sname, sect->common.flags, sect->named.decl);
7278 return progmem_section[segment];
7281 return sect;
7284 /* Implement `TARGET_ASM_FILE_START'. */
7285 /* Outputs some text at the start of each assembler file. */
7287 static void
7288 avr_file_start (void)
7290 int sfr_offset = avr_current_arch->sfr_offset;
7292 if (avr_current_arch->asm_only)
7293 error ("MCU %qs supported for assembler only", avr_current_device->name);
7295 default_file_start ();
7297 if (!AVR_HAVE_8BIT_SP)
7298 fprintf (asm_out_file,
7299 "__SP_H__ = 0x%02x\n",
7300 -sfr_offset + SP_ADDR + 1);
7302 fprintf (asm_out_file,
7303 "__SP_L__ = 0x%02x\n"
7304 "__SREG__ = 0x%02x\n"
7305 "__RAMPZ__ = 0x%02x\n"
7306 "__tmp_reg__ = %d\n"
7307 "__zero_reg__ = %d\n",
7308 -sfr_offset + SP_ADDR,
7309 -sfr_offset + SREG_ADDR,
7310 -sfr_offset + RAMPZ_ADDR,
7311 TMP_REGNO,
7312 ZERO_REGNO);
7316 /* Implement `TARGET_ASM_FILE_END'. */
7317 /* Outputs to the stdio stream FILE some
7318 appropriate text to go at the end of an assembler file. */
7320 static void
7321 avr_file_end (void)
7323 /* Output these only if there is anything in the
7324 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7325 input section(s) - some code size can be saved by not
7326 linking in the initialization code from libgcc if resp.
7327 sections are empty. */
7329 if (avr_need_copy_data_p)
7330 fputs (".global __do_copy_data\n", asm_out_file);
7332 if (avr_need_clear_bss_p)
7333 fputs (".global __do_clear_bss\n", asm_out_file);
7336 /* Choose the order in which to allocate hard registers for
7337 pseudo-registers local to a basic block.
7339 Store the desired register order in the array `reg_alloc_order'.
7340 Element 0 should be the register to allocate first; element 1, the
7341 next register; and so on. */
7343 void
7344 order_regs_for_local_alloc (void)
7346 unsigned int i;
7347 static const int order_0[] = {
7348 24,25,
7349 18,19,
7350 20,21,
7351 22,23,
7352 30,31,
7353 26,27,
7354 28,29,
7355 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7356 0,1,
7357 32,33,34,35
7359 static const int order_1[] = {
7360 18,19,
7361 20,21,
7362 22,23,
7363 24,25,
7364 30,31,
7365 26,27,
7366 28,29,
7367 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7368 0,1,
7369 32,33,34,35
7371 static const int order_2[] = {
7372 25,24,
7373 23,22,
7374 21,20,
7375 19,18,
7376 30,31,
7377 26,27,
7378 28,29,
7379 17,16,
7380 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7381 1,0,
7382 32,33,34,35
7385 const int *order = (TARGET_ORDER_1 ? order_1 :
7386 TARGET_ORDER_2 ? order_2 :
7387 order_0);
7388 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7389 reg_alloc_order[i] = order[i];
7393 /* Implement `TARGET_REGISTER_MOVE_COST' */
7395 static int
7396 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7397 reg_class_t from, reg_class_t to)
7399 return (from == STACK_REG ? 6
7400 : to == STACK_REG ? 12
7401 : 2);
7405 /* Implement `TARGET_MEMORY_MOVE_COST' */
7407 static int
7408 avr_memory_move_cost (enum machine_mode mode,
7409 reg_class_t rclass ATTRIBUTE_UNUSED,
7410 bool in ATTRIBUTE_UNUSED)
7412 return (mode == QImode ? 2
7413 : mode == HImode ? 4
7414 : mode == SImode ? 8
7415 : mode == SFmode ? 8
7416 : 16);
7420 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7421 cost of an RTX operand given its context. X is the rtx of the
7422 operand, MODE is its mode, and OUTER is the rtx_code of this
7423 operand's parent operator. */
7425 static int
7426 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7427 int opno, bool speed)
7429 enum rtx_code code = GET_CODE (x);
7430 int total;
7432 switch (code)
7434 case REG:
7435 case SUBREG:
7436 return 0;
7438 case CONST_INT:
7439 case CONST_DOUBLE:
7440 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7442 default:
7443 break;
7446 total = 0;
7447 avr_rtx_costs (x, code, outer, opno, &total, speed);
7448 return total;
7451 /* Worker function for AVR backend's rtx_cost function.
7452 X is rtx expression whose cost is to be calculated.
7453 Return true if the complete cost has been computed.
7454 Return false if subexpressions should be scanned.
7455 In either case, *TOTAL contains the cost result. */
7457 static bool
7458 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7459 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7461 enum rtx_code code = (enum rtx_code) codearg;
7462 enum machine_mode mode = GET_MODE (x);
7463 HOST_WIDE_INT val;
7465 switch (code)
7467 case CONST_INT:
7468 case CONST_DOUBLE:
7469 case SYMBOL_REF:
7470 case CONST:
7471 case LABEL_REF:
7472 /* Immediate constants are as cheap as registers. */
7473 *total = 0;
7474 return true;
7476 case MEM:
7477 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7478 return true;
7480 case NEG:
7481 switch (mode)
7483 case QImode:
7484 case SFmode:
7485 *total = COSTS_N_INSNS (1);
7486 break;
7488 case HImode:
7489 case PSImode:
7490 case SImode:
7491 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7492 break;
7494 default:
7495 return false;
7497 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7498 return true;
7500 case ABS:
7501 switch (mode)
7503 case QImode:
7504 case SFmode:
7505 *total = COSTS_N_INSNS (1);
7506 break;
7508 default:
7509 return false;
7511 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7512 return true;
7514 case NOT:
7515 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7516 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7517 return true;
7519 case ZERO_EXTEND:
7520 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7521 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7522 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7523 return true;
7525 case SIGN_EXTEND:
7526 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7527 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7528 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7529 return true;
7531 case PLUS:
7532 switch (mode)
7534 case QImode:
7535 if (AVR_HAVE_MUL
7536 && MULT == GET_CODE (XEXP (x, 0))
7537 && register_operand (XEXP (x, 1), QImode))
7539 /* multiply-add */
7540 *total = COSTS_N_INSNS (speed ? 4 : 3);
7541 /* multiply-add with constant: will be split and load constant. */
7542 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7543 *total = COSTS_N_INSNS (1) + *total;
7544 return true;
7546 *total = COSTS_N_INSNS (1);
7547 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7548 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7549 break;
7551 case HImode:
7552 if (AVR_HAVE_MUL
7553 && (MULT == GET_CODE (XEXP (x, 0))
7554 || ASHIFT == GET_CODE (XEXP (x, 0)))
7555 && register_operand (XEXP (x, 1), HImode)
7556 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7557 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7559 /* multiply-add */
7560 *total = COSTS_N_INSNS (speed ? 5 : 4);
7561 /* multiply-add with constant: will be split and load constant. */
7562 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7563 *total = COSTS_N_INSNS (1) + *total;
7564 return true;
7566 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7568 *total = COSTS_N_INSNS (2);
7569 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7570 speed);
7572 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7573 *total = COSTS_N_INSNS (1);
7574 else
7575 *total = COSTS_N_INSNS (2);
7576 break;
7578 case PSImode:
7579 if (!CONST_INT_P (XEXP (x, 1)))
7581 *total = COSTS_N_INSNS (3);
7582 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7583 speed);
7585 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7586 *total = COSTS_N_INSNS (2);
7587 else
7588 *total = COSTS_N_INSNS (3);
7589 break;
7591 case SImode:
7592 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7594 *total = COSTS_N_INSNS (4);
7595 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7596 speed);
7598 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7599 *total = COSTS_N_INSNS (1);
7600 else
7601 *total = COSTS_N_INSNS (4);
7602 break;
7604 default:
7605 return false;
7607 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7608 return true;
7610 case MINUS:
7611 if (AVR_HAVE_MUL
7612 && QImode == mode
7613 && register_operand (XEXP (x, 0), QImode)
7614 && MULT == GET_CODE (XEXP (x, 1)))
7616 /* multiply-sub */
7617 *total = COSTS_N_INSNS (speed ? 4 : 3);
7618 /* multiply-sub with constant: will be split and load constant. */
7619 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7620 *total = COSTS_N_INSNS (1) + *total;
7621 return true;
7623 if (AVR_HAVE_MUL
7624 && HImode == mode
7625 && register_operand (XEXP (x, 0), HImode)
7626 && (MULT == GET_CODE (XEXP (x, 1))
7627 || ASHIFT == GET_CODE (XEXP (x, 1)))
7628 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7629 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7631 /* multiply-sub */
7632 *total = COSTS_N_INSNS (speed ? 5 : 4);
7633 /* multiply-sub with constant: will be split and load constant. */
7634 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7635 *total = COSTS_N_INSNS (1) + *total;
7636 return true;
7638 /* FALLTHRU */
7639 case AND:
7640 case IOR:
7641 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7642 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7643 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7644 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7645 return true;
7647 case XOR:
7648 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7649 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7650 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7651 return true;
7653 case MULT:
7654 switch (mode)
7656 case QImode:
7657 if (AVR_HAVE_MUL)
7658 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7659 else if (!speed)
7660 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7661 else
7662 return false;
7663 break;
7665 case HImode:
7666 if (AVR_HAVE_MUL)
7668 rtx op0 = XEXP (x, 0);
7669 rtx op1 = XEXP (x, 1);
7670 enum rtx_code code0 = GET_CODE (op0);
7671 enum rtx_code code1 = GET_CODE (op1);
7672 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7673 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7675 if (ex0
7676 && (u8_operand (op1, HImode)
7677 || s8_operand (op1, HImode)))
7679 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7680 return true;
7682 if (ex0
7683 && register_operand (op1, HImode))
7685 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7686 return true;
7688 else if (ex0 || ex1)
7690 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7691 return true;
7693 else if (register_operand (op0, HImode)
7694 && (u8_operand (op1, HImode)
7695 || s8_operand (op1, HImode)))
7697 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7698 return true;
7700 else
7701 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7703 else if (!speed)
7704 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7705 else
7706 return false;
7707 break;
7709 case PSImode:
7710 if (!speed)
7711 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7712 else
7713 *total = 10;
7714 break;
7716 case SImode:
7717 if (AVR_HAVE_MUL)
7719 if (!speed)
7721 /* Add some additional costs besides CALL like moves etc. */
7723 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7725 else
7727 /* Just a rough estimate. Even with -O2 we don't want bulky
7728 code expanded inline. */
7730 *total = COSTS_N_INSNS (25);
7733 else
7735 if (speed)
7736 *total = COSTS_N_INSNS (300);
7737 else
7738 /* Add some additional costs besides CALL like moves etc. */
7739 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7742 return true;
7744 default:
7745 return false;
7747 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7748 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7749 return true;
7751 case DIV:
7752 case MOD:
7753 case UDIV:
7754 case UMOD:
7755 if (!speed)
7756 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7757 else
7758 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7759 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7760 /* For div/mod with const-int divisor we have at least the cost of
7761 loading the divisor. */
7762 if (CONST_INT_P (XEXP (x, 1)))
7763 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7764 /* Add some overall penaly for clobbering and moving around registers */
7765 *total += COSTS_N_INSNS (2);
7766 return true;
7768 case ROTATE:
7769 switch (mode)
7771 case QImode:
7772 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7773 *total = COSTS_N_INSNS (1);
7775 break;
7777 case HImode:
7778 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7779 *total = COSTS_N_INSNS (3);
7781 break;
7783 case SImode:
7784 if (CONST_INT_P (XEXP (x, 1)))
7785 switch (INTVAL (XEXP (x, 1)))
7787 case 8:
7788 case 24:
7789 *total = COSTS_N_INSNS (5);
7790 break;
7791 case 16:
7792 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7793 break;
7795 break;
7797 default:
7798 return false;
7800 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7801 return true;
7803 case ASHIFT:
7804 switch (mode)
7806 case QImode:
7807 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7809 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7810 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7811 speed);
7813 else
7815 val = INTVAL (XEXP (x, 1));
7816 if (val == 7)
7817 *total = COSTS_N_INSNS (3);
7818 else if (val >= 0 && val <= 7)
7819 *total = COSTS_N_INSNS (val);
7820 else
7821 *total = COSTS_N_INSNS (1);
7823 break;
7825 case HImode:
7826 if (AVR_HAVE_MUL)
7828 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7829 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7830 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7832 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7833 return true;
7837 if (const1_rtx == (XEXP (x, 1))
7838 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7840 *total = COSTS_N_INSNS (2);
7841 return true;
7844 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7846 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7847 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7848 speed);
7850 else
7851 switch (INTVAL (XEXP (x, 1)))
7853 case 0:
7854 *total = 0;
7855 break;
7856 case 1:
7857 case 8:
7858 *total = COSTS_N_INSNS (2);
7859 break;
7860 case 9:
7861 *total = COSTS_N_INSNS (3);
7862 break;
7863 case 2:
7864 case 3:
7865 case 10:
7866 case 15:
7867 *total = COSTS_N_INSNS (4);
7868 break;
7869 case 7:
7870 case 11:
7871 case 12:
7872 *total = COSTS_N_INSNS (5);
7873 break;
7874 case 4:
7875 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7876 break;
7877 case 6:
7878 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7879 break;
7880 case 5:
7881 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7882 break;
7883 default:
7884 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7885 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7886 speed);
7888 break;
7890 case PSImode:
7891 if (!CONST_INT_P (XEXP (x, 1)))
7893 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7895 else
7896 switch (INTVAL (XEXP (x, 1)))
7898 case 0:
7899 *total = 0;
7900 break;
7901 case 1:
7902 case 8:
7903 case 16:
7904 *total = COSTS_N_INSNS (3);
7905 break;
7906 case 23:
7907 *total = COSTS_N_INSNS (5);
7908 break;
7909 default:
7910 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7911 break;
7913 break;
7915 case SImode:
7916 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7918 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7919 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7920 speed);
7922 else
7923 switch (INTVAL (XEXP (x, 1)))
7925 case 0:
7926 *total = 0;
7927 break;
7928 case 24:
7929 *total = COSTS_N_INSNS (3);
7930 break;
7931 case 1:
7932 case 8:
7933 case 16:
7934 *total = COSTS_N_INSNS (4);
7935 break;
7936 case 31:
7937 *total = COSTS_N_INSNS (6);
7938 break;
7939 case 2:
7940 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7941 break;
7942 default:
7943 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7944 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7945 speed);
7947 break;
7949 default:
7950 return false;
7952 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7953 return true;
7955 case ASHIFTRT:
7956 switch (mode)
7958 case QImode:
7959 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7961 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7962 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7963 speed);
7965 else
7967 val = INTVAL (XEXP (x, 1));
7968 if (val == 6)
7969 *total = COSTS_N_INSNS (4);
7970 else if (val == 7)
7971 *total = COSTS_N_INSNS (2);
7972 else if (val >= 0 && val <= 7)
7973 *total = COSTS_N_INSNS (val);
7974 else
7975 *total = COSTS_N_INSNS (1);
7977 break;
7979 case HImode:
7980 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7982 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7983 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7984 speed);
7986 else
7987 switch (INTVAL (XEXP (x, 1)))
7989 case 0:
7990 *total = 0;
7991 break;
7992 case 1:
7993 *total = COSTS_N_INSNS (2);
7994 break;
7995 case 15:
7996 *total = COSTS_N_INSNS (3);
7997 break;
7998 case 2:
7999 case 7:
8000 case 8:
8001 case 9:
8002 *total = COSTS_N_INSNS (4);
8003 break;
8004 case 10:
8005 case 14:
8006 *total = COSTS_N_INSNS (5);
8007 break;
8008 case 11:
8009 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8010 break;
8011 case 12:
8012 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8013 break;
8014 case 6:
8015 case 13:
8016 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8017 break;
8018 default:
8019 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8020 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8021 speed);
8023 break;
8025 case PSImode:
8026 if (!CONST_INT_P (XEXP (x, 1)))
8028 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8030 else
8031 switch (INTVAL (XEXP (x, 1)))
8033 case 0:
8034 *total = 0;
8035 break;
8036 case 1:
8037 *total = COSTS_N_INSNS (3);
8038 break;
8039 case 16:
8040 case 8:
8041 *total = COSTS_N_INSNS (5);
8042 break;
8043 case 23:
8044 *total = COSTS_N_INSNS (4);
8045 break;
8046 default:
8047 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8048 break;
8050 break;
8052 case SImode:
8053 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8055 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8056 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8057 speed);
8059 else
8060 switch (INTVAL (XEXP (x, 1)))
8062 case 0:
8063 *total = 0;
8064 break;
8065 case 1:
8066 *total = COSTS_N_INSNS (4);
8067 break;
8068 case 8:
8069 case 16:
8070 case 24:
8071 *total = COSTS_N_INSNS (6);
8072 break;
8073 case 2:
8074 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8075 break;
8076 case 31:
8077 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8078 break;
8079 default:
8080 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8081 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8082 speed);
8084 break;
8086 default:
8087 return false;
8089 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8090 return true;
8092 case LSHIFTRT:
8093 switch (mode)
8095 case QImode:
8096 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8098 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8099 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8100 speed);
8102 else
8104 val = INTVAL (XEXP (x, 1));
8105 if (val == 7)
8106 *total = COSTS_N_INSNS (3);
8107 else if (val >= 0 && val <= 7)
8108 *total = COSTS_N_INSNS (val);
8109 else
8110 *total = COSTS_N_INSNS (1);
8112 break;
8114 case HImode:
8115 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8117 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8118 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8119 speed);
8121 else
8122 switch (INTVAL (XEXP (x, 1)))
8124 case 0:
8125 *total = 0;
8126 break;
8127 case 1:
8128 case 8:
8129 *total = COSTS_N_INSNS (2);
8130 break;
8131 case 9:
8132 *total = COSTS_N_INSNS (3);
8133 break;
8134 case 2:
8135 case 10:
8136 case 15:
8137 *total = COSTS_N_INSNS (4);
8138 break;
8139 case 7:
8140 case 11:
8141 *total = COSTS_N_INSNS (5);
8142 break;
8143 case 3:
8144 case 12:
8145 case 13:
8146 case 14:
8147 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8148 break;
8149 case 4:
8150 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8151 break;
8152 case 5:
8153 case 6:
8154 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8155 break;
8156 default:
8157 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8158 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8159 speed);
8161 break;
8163 case PSImode:
8164 if (!CONST_INT_P (XEXP (x, 1)))
8166 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8168 else
8169 switch (INTVAL (XEXP (x, 1)))
8171 case 0:
8172 *total = 0;
8173 break;
8174 case 1:
8175 case 8:
8176 case 16:
8177 *total = COSTS_N_INSNS (3);
8178 break;
8179 case 23:
8180 *total = COSTS_N_INSNS (5);
8181 break;
8182 default:
8183 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8184 break;
8186 break;
8188 case SImode:
8189 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8191 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8192 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8193 speed);
8195 else
8196 switch (INTVAL (XEXP (x, 1)))
8198 case 0:
8199 *total = 0;
8200 break;
8201 case 1:
8202 *total = COSTS_N_INSNS (4);
8203 break;
8204 case 2:
8205 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8206 break;
8207 case 8:
8208 case 16:
8209 case 24:
8210 *total = COSTS_N_INSNS (4);
8211 break;
8212 case 31:
8213 *total = COSTS_N_INSNS (6);
8214 break;
8215 default:
8216 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8217 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8218 speed);
8220 break;
8222 default:
8223 return false;
8225 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8226 return true;
8228 case COMPARE:
8229 switch (GET_MODE (XEXP (x, 0)))
8231 case QImode:
8232 *total = COSTS_N_INSNS (1);
8233 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8234 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8235 break;
8237 case HImode:
8238 *total = COSTS_N_INSNS (2);
8239 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8240 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8241 else if (INTVAL (XEXP (x, 1)) != 0)
8242 *total += COSTS_N_INSNS (1);
8243 break;
8245 case PSImode:
8246 *total = COSTS_N_INSNS (3);
8247 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8248 *total += COSTS_N_INSNS (2);
8249 break;
8251 case SImode:
8252 *total = COSTS_N_INSNS (4);
8253 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8254 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8255 else if (INTVAL (XEXP (x, 1)) != 0)
8256 *total += COSTS_N_INSNS (3);
8257 break;
8259 default:
8260 return false;
8262 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8263 return true;
8265 case TRUNCATE:
8266 if (AVR_HAVE_MUL
8267 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8268 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8269 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8271 if (QImode == mode || HImode == mode)
8273 *total = COSTS_N_INSNS (2);
8274 return true;
8277 break;
8279 default:
8280 break;
8282 return false;
8286 /* Implement `TARGET_RTX_COSTS'. */
8288 static bool
8289 avr_rtx_costs (rtx x, int codearg, int outer_code,
8290 int opno, int *total, bool speed)
8292 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8293 opno, total, speed);
8295 if (avr_log.rtx_costs)
8297 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8298 done, speed ? "speed" : "size", *total, outer_code, x);
8301 return done;
8305 /* Implement `TARGET_ADDRESS_COST'. */
8307 static int
8308 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8310 int cost = 4;
8312 if (GET_CODE (x) == PLUS
8313 && CONST_INT_P (XEXP (x, 1))
8314 && (REG_P (XEXP (x, 0))
8315 || GET_CODE (XEXP (x, 0)) == SUBREG))
8317 if (INTVAL (XEXP (x, 1)) >= 61)
8318 cost = 18;
8320 else if (CONSTANT_ADDRESS_P (x))
8322 if (optimize > 0
8323 && io_address_operand (x, QImode))
8324 cost = 2;
8327 if (avr_log.address_cost)
8328 avr_edump ("\n%?: %d = %r\n", cost, x);
8330 return cost;
8333 /* Test for extra memory constraint 'Q'.
8334 It's a memory address based on Y or Z pointer with valid displacement. */
8337 extra_constraint_Q (rtx x)
8339 int ok = 0;
8341 if (GET_CODE (XEXP (x,0)) == PLUS
8342 && REG_P (XEXP (XEXP (x,0), 0))
8343 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8344 && (INTVAL (XEXP (XEXP (x,0), 1))
8345 <= MAX_LD_OFFSET (GET_MODE (x))))
8347 rtx xx = XEXP (XEXP (x,0), 0);
8348 int regno = REGNO (xx);
8350 ok = (/* allocate pseudos */
8351 regno >= FIRST_PSEUDO_REGISTER
8352 /* strictly check */
8353 || regno == REG_Z || regno == REG_Y
8354 /* XXX frame & arg pointer checks */
8355 || xx == frame_pointer_rtx
8356 || xx == arg_pointer_rtx);
8358 if (avr_log.constraints)
8359 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8360 ok, reload_completed, reload_in_progress, x);
8363 return ok;
8366 /* Convert condition code CONDITION to the valid AVR condition code. */
8368 RTX_CODE
8369 avr_normalize_condition (RTX_CODE condition)
8371 switch (condition)
8373 case GT:
8374 return GE;
8375 case GTU:
8376 return GEU;
8377 case LE:
8378 return LT;
8379 case LEU:
8380 return LTU;
8381 default:
8382 gcc_unreachable ();
8386 /* Helper function for `avr_reorg'. */
8388 static rtx
8389 avr_compare_pattern (rtx insn)
8391 rtx pattern = single_set (insn);
8393 if (pattern
8394 && NONJUMP_INSN_P (insn)
8395 && SET_DEST (pattern) == cc0_rtx
8396 && GET_CODE (SET_SRC (pattern)) == COMPARE
8397 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8398 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8400 return pattern;
8403 return NULL_RTX;
8406 /* Helper function for `avr_reorg'. */
8408 /* Expansion of switch/case decision trees leads to code like
8410 cc0 = compare (Reg, Num)
8411 if (cc0 == 0)
8412 goto L1
8414 cc0 = compare (Reg, Num)
8415 if (cc0 > 0)
8416 goto L2
8418 The second comparison is superfluous and can be deleted.
8419 The second jump condition can be transformed from a
8420 "difficult" one to a "simple" one because "cc0 > 0" and
8421 "cc0 >= 0" will have the same effect here.
8423 This function relies on the way switch/case is being expaned
8424 as binary decision tree. For example code see PR 49903.
8426 Return TRUE if optimization performed.
8427 Return FALSE if nothing changed.
8429 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8431 We don't want to do this in text peephole because it is
8432 tedious to work out jump offsets there and the second comparison
8433 might have been transormed by `avr_reorg'.
8435 RTL peephole won't do because peephole2 does not scan across
8436 basic blocks. */
8438 static bool
8439 avr_reorg_remove_redundant_compare (rtx insn1)
8441 rtx comp1, ifelse1, xcond1, branch1;
8442 rtx comp2, ifelse2, xcond2, branch2, insn2;
8443 enum rtx_code code;
8444 rtx jump, target, cond;
8446 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8448 branch1 = next_nonnote_nondebug_insn (insn1);
8449 if (!branch1 || !JUMP_P (branch1))
8450 return false;
8452 insn2 = next_nonnote_nondebug_insn (branch1);
8453 if (!insn2 || !avr_compare_pattern (insn2))
8454 return false;
8456 branch2 = next_nonnote_nondebug_insn (insn2);
8457 if (!branch2 || !JUMP_P (branch2))
8458 return false;
8460 comp1 = avr_compare_pattern (insn1);
8461 comp2 = avr_compare_pattern (insn2);
8462 xcond1 = single_set (branch1);
8463 xcond2 = single_set (branch2);
8465 if (!comp1 || !comp2
8466 || !rtx_equal_p (comp1, comp2)
8467 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8468 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8469 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8470 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8472 return false;
8475 comp1 = SET_SRC (comp1);
8476 ifelse1 = SET_SRC (xcond1);
8477 ifelse2 = SET_SRC (xcond2);
8479 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8481 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8482 || !REG_P (XEXP (comp1, 0))
8483 || !CONST_INT_P (XEXP (comp1, 1))
8484 || XEXP (ifelse1, 2) != pc_rtx
8485 || XEXP (ifelse2, 2) != pc_rtx
8486 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8487 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8488 || !COMPARISON_P (XEXP (ifelse2, 0))
8489 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8490 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8491 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8492 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8494 return false;
8497 /* We filtered the insn sequence to look like
8499 (set (cc0)
8500 (compare (reg:M N)
8501 (const_int VAL)))
8502 (set (pc)
8503 (if_then_else (eq (cc0)
8504 (const_int 0))
8505 (label_ref L1)
8506 (pc)))
8508 (set (cc0)
8509 (compare (reg:M N)
8510 (const_int VAL)))
8511 (set (pc)
8512 (if_then_else (CODE (cc0)
8513 (const_int 0))
8514 (label_ref L2)
8515 (pc)))
8518 code = GET_CODE (XEXP (ifelse2, 0));
8520 /* Map GT/GTU to GE/GEU which is easier for AVR.
8521 The first two instructions compare/branch on EQ
8522 so we may replace the difficult
8524 if (x == VAL) goto L1;
8525 if (x > VAL) goto L2;
8527 with easy
8529 if (x == VAL) goto L1;
8530 if (x >= VAL) goto L2;
8532 Similarly, replace LE/LEU by LT/LTU. */
8534 switch (code)
8536 case EQ:
8537 case LT: case LTU:
8538 case GE: case GEU:
8539 break;
8541 case LE: case LEU:
8542 case GT: case GTU:
8543 code = avr_normalize_condition (code);
8544 break;
8546 default:
8547 return false;
8550 /* Wrap the branches into UNSPECs so they won't be changed or
8551 optimized in the remainder. */
8553 target = XEXP (XEXP (ifelse1, 1), 0);
8554 cond = XEXP (ifelse1, 0);
8555 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8557 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8559 target = XEXP (XEXP (ifelse2, 1), 0);
8560 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8561 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8563 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8565 /* The comparisons in insn1 and insn2 are exactly the same;
8566 insn2 is superfluous so delete it. */
8568 delete_insn (insn2);
8569 delete_insn (branch1);
8570 delete_insn (branch2);
8572 return true;
8576 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8577 /* Optimize conditional jumps. */
8579 static void
8580 avr_reorg (void)
8582 rtx insn = get_insns();
8584 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8586 rtx pattern = avr_compare_pattern (insn);
8588 if (!pattern)
8589 continue;
8591 if (optimize
8592 && avr_reorg_remove_redundant_compare (insn))
8594 continue;
8597 if (compare_diff_p (insn))
8599 /* Now we work under compare insn with difficult branch. */
8601 rtx next = next_real_insn (insn);
8602 rtx pat = PATTERN (next);
8604 pattern = SET_SRC (pattern);
8606 if (true_regnum (XEXP (pattern, 0)) >= 0
8607 && true_regnum (XEXP (pattern, 1)) >= 0)
8609 rtx x = XEXP (pattern, 0);
8610 rtx src = SET_SRC (pat);
8611 rtx t = XEXP (src,0);
8612 PUT_CODE (t, swap_condition (GET_CODE (t)));
8613 XEXP (pattern, 0) = XEXP (pattern, 1);
8614 XEXP (pattern, 1) = x;
8615 INSN_CODE (next) = -1;
8617 else if (true_regnum (XEXP (pattern, 0)) >= 0
8618 && XEXP (pattern, 1) == const0_rtx)
8620 /* This is a tst insn, we can reverse it. */
8621 rtx src = SET_SRC (pat);
8622 rtx t = XEXP (src,0);
8624 PUT_CODE (t, swap_condition (GET_CODE (t)));
8625 XEXP (pattern, 1) = XEXP (pattern, 0);
8626 XEXP (pattern, 0) = const0_rtx;
8627 INSN_CODE (next) = -1;
8628 INSN_CODE (insn) = -1;
8630 else if (true_regnum (XEXP (pattern, 0)) >= 0
8631 && CONST_INT_P (XEXP (pattern, 1)))
8633 rtx x = XEXP (pattern, 1);
8634 rtx src = SET_SRC (pat);
8635 rtx t = XEXP (src,0);
8636 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8638 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8640 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8641 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8642 INSN_CODE (next) = -1;
8643 INSN_CODE (insn) = -1;
/* Returns register number for function return value.
   On AVR a value is returned ending in R25, i.e. R24 for a 2-byte value.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
8658 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8660 static bool
8661 avr_function_value_regno_p (const unsigned int regno)
8663 return (regno == avr_ret_register ());
8666 /* Create an RTX representing the place where a
8667 library function returns a value of mode MODE. */
8669 static rtx
8670 avr_libcall_value (enum machine_mode mode,
8671 const_rtx func ATTRIBUTE_UNUSED)
8673 int offs = GET_MODE_SIZE (mode);
8675 if (offs <= 4)
8676 offs = (offs + 1) & ~1;
8678 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8681 /* Create an RTX representing the place where a
8682 function returns a value of data type VALTYPE. */
8684 static rtx
8685 avr_function_value (const_tree type,
8686 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8687 bool outgoing ATTRIBUTE_UNUSED)
8689 unsigned int offs;
8691 if (TYPE_MODE (type) != BLKmode)
8692 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8694 offs = int_size_in_bytes (type);
8695 if (offs < 2)
8696 offs = 2;
8697 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8698 offs = GET_MODE_SIZE (SImode);
8699 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8700 offs = GET_MODE_SIZE (DImode);
8702 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8706 test_hard_reg_class (enum reg_class rclass, rtx x)
8708 int regno = true_regnum (x);
8709 if (regno < 0)
8710 return 0;
8712 if (TEST_HARD_REG_CLASS (rclass, regno))
8713 return 1;
8715 return 0;
8719 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8720 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8722 static bool
8723 avr_2word_insn_p (rtx insn)
8725 if (avr_current_device->errata_skip
8726 || !insn
8727 || 2 != get_attr_length (insn))
8729 return false;
8732 switch (INSN_CODE (insn))
8734 default:
8735 return false;
8737 case CODE_FOR_movqi_insn:
8739 rtx set = single_set (insn);
8740 rtx src = SET_SRC (set);
8741 rtx dest = SET_DEST (set);
8743 /* Factor out LDS and STS from movqi_insn. */
8745 if (MEM_P (dest)
8746 && (REG_P (src) || src == const0_rtx))
8748 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8750 else if (REG_P (dest)
8751 && MEM_P (src))
8753 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8756 return false;
8759 case CODE_FOR_call_insn:
8760 case CODE_FOR_call_value_insn:
8761 return true;
8767 jump_over_one_insn_p (rtx insn, rtx dest)
8769 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8770 ? XEXP (dest, 0)
8771 : dest);
8772 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8773 int dest_addr = INSN_ADDRESSES (uid);
8774 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8776 return (jump_offset == 1
8777 || (jump_offset == 2
8778 && avr_2word_insn_p (next_active_insn (insn))));
8781 /* Returns 1 if a value of mode MODE can be stored starting with hard
8782 register number REGNO. On the enhanced core, anything larger than
8783 1 byte must start in even numbered register for "movw" to work
8784 (this way we don't have to check for odd registers everywhere). */
8787 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8789 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8790 Disallowing QI et al. in these regs might lead to code like
8791 (set (subreg:QI (reg:HI 28) n) ...)
8792 which will result in wrong code because reload does not
8793 handle SUBREGs of hard regsisters like this.
8794 This could be fixed in reload. However, it appears
8795 that fixing reload is not wanted by reload people. */
8797 /* Any GENERAL_REGS register can hold 8-bit values. */
8799 if (GET_MODE_SIZE (mode) == 1)
8800 return 1;
8802 /* FIXME: Ideally, the following test is not needed.
8803 However, it turned out that it can reduce the number
8804 of spill fails. AVR and it's poor endowment with
8805 address registers is extreme stress test for reload. */
8807 if (GET_MODE_SIZE (mode) >= 4
8808 && regno >= REG_X)
8809 return 0;
8811 /* All modes larger than 8 bits should start in an even register. */
8813 return !(regno & 1);
8817 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8819 reg_class_t
8820 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8821 addr_space_t as, RTX_CODE outer_code,
8822 RTX_CODE index_code ATTRIBUTE_UNUSED)
8824 if (!ADDR_SPACE_GENERIC_P (as))
8826 return POINTER_Z_REGS;
8829 if (!avr_strict_X)
8830 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8832 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8836 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8838 bool
8839 avr_regno_mode_code_ok_for_base_p (int regno,
8840 enum machine_mode mode ATTRIBUTE_UNUSED,
8841 addr_space_t as ATTRIBUTE_UNUSED,
8842 RTX_CODE outer_code,
8843 RTX_CODE index_code ATTRIBUTE_UNUSED)
8845 bool ok = false;
8847 if (!ADDR_SPACE_GENERIC_P (as))
8849 if (regno < FIRST_PSEUDO_REGISTER
8850 && regno == REG_Z)
8852 return true;
8855 if (reg_renumber)
8857 regno = reg_renumber[regno];
8859 if (regno == REG_Z)
8861 return true;
8865 return false;
8868 if (regno < FIRST_PSEUDO_REGISTER
8869 && (regno == REG_X
8870 || regno == REG_Y
8871 || regno == REG_Z
8872 || regno == ARG_POINTER_REGNUM))
8874 ok = true;
8876 else if (reg_renumber)
8878 regno = reg_renumber[regno];
8880 if (regno == REG_X
8881 || regno == REG_Y
8882 || regno == REG_Z
8883 || regno == ARG_POINTER_REGNUM)
8885 ok = true;
8889 if (avr_strict_X
8890 && PLUS == outer_code
8891 && regno == REG_X)
8893 ok = false;
8896 return ok;
8900 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8901 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8902 CLOBBER_REG is a QI clobber register or NULL_RTX.
8903 LEN == NULL: output instructions.
8904 LEN != NULL: set *LEN to the length of the instruction sequence
8905 (in words) printed with LEN = NULL.
8906 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8907 If CLEAR_P is false, nothing is known about OP[0].
8909 The effect on cc0 is as follows:
8911 Load 0 to any register except ZERO_REG : NONE
8912 Load ld register with any value : NONE
8913 Anything else: : CLOBBER */
8915 static void
8916 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8918 rtx src = op[1];
8919 rtx dest = op[0];
8920 rtx xval, xdest[4];
8921 int ival[4];
8922 int clobber_val = 1234;
8923 bool cooked_clobber_p = false;
8924 bool set_p = false;
8925 enum machine_mode mode = GET_MODE (dest);
8926 int n, n_bytes = GET_MODE_SIZE (mode);
8928 gcc_assert (REG_P (dest)
8929 && CONSTANT_P (src));
8931 if (len)
8932 *len = 0;
8934 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8935 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8937 if (REGNO (dest) < 16
8938 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8940 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8943 /* We might need a clobber reg but don't have one. Look at the value to
8944 be loaded more closely. A clobber is only needed if it is a symbol
8945 or contains a byte that is neither 0, -1 or a power of 2. */
8947 if (NULL_RTX == clobber_reg
8948 && !test_hard_reg_class (LD_REGS, dest)
8949 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
8950 || !avr_popcount_each_byte (src, n_bytes,
8951 (1 << 0) | (1 << 1) | (1 << 8))))
8953 /* We have no clobber register but need one. Cook one up.
8954 That's cheaper than loading from constant pool. */
8956 cooked_clobber_p = true;
8957 clobber_reg = all_regs_rtx[REG_Z + 1];
8958 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
8961 /* Now start filling DEST from LSB to MSB. */
8963 for (n = 0; n < n_bytes; n++)
8965 int ldreg_p;
8966 bool done_byte = false;
8967 int j;
8968 rtx xop[3];
8970 /* Crop the n-th destination byte. */
8972 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
8973 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
8975 if (!CONST_INT_P (src)
8976 && !CONST_DOUBLE_P (src))
8978 static const char* const asm_code[][2] =
8980 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
8981 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
8982 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
8983 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
8986 xop[0] = xdest[n];
8987 xop[1] = src;
8988 xop[2] = clobber_reg;
8990 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
8992 continue;
8995 /* Crop the n-th source byte. */
8997 xval = simplify_gen_subreg (QImode, src, mode, n);
8998 ival[n] = INTVAL (xval);
9000 /* Look if we can reuse the low word by means of MOVW. */
9002 if (n == 2
9003 && n_bytes >= 4
9004 && AVR_HAVE_MOVW)
9006 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9007 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9009 if (INTVAL (lo16) == INTVAL (hi16))
9011 if (0 != INTVAL (lo16)
9012 || !clear_p)
9014 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9017 break;
9021 /* Don't use CLR so that cc0 is set as expected. */
9023 if (ival[n] == 0)
9025 if (!clear_p)
9026 avr_asm_len (ldreg_p ? "ldi %0,0"
9027 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9028 : "mov %0,__zero_reg__",
9029 &xdest[n], len, 1);
9030 continue;
9033 if (clobber_val == ival[n]
9034 && REGNO (clobber_reg) == REGNO (xdest[n]))
9036 continue;
9039 /* LD_REGS can use LDI to move a constant value */
9041 if (ldreg_p)
9043 xop[0] = xdest[n];
9044 xop[1] = xval;
9045 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9046 continue;
9049 /* Try to reuse value already loaded in some lower byte. */
9051 for (j = 0; j < n; j++)
9052 if (ival[j] == ival[n])
9054 xop[0] = xdest[n];
9055 xop[1] = xdest[j];
9057 avr_asm_len ("mov %0,%1", xop, len, 1);
9058 done_byte = true;
9059 break;
9062 if (done_byte)
9063 continue;
9065 /* Need no clobber reg for -1: Use CLR/DEC */
9067 if (-1 == ival[n])
9069 if (!clear_p)
9070 avr_asm_len ("clr %0", &xdest[n], len, 1);
9072 avr_asm_len ("dec %0", &xdest[n], len, 1);
9073 continue;
9075 else if (1 == ival[n])
9077 if (!clear_p)
9078 avr_asm_len ("clr %0", &xdest[n], len, 1);
9080 avr_asm_len ("inc %0", &xdest[n], len, 1);
9081 continue;
9084 /* Use T flag or INC to manage powers of 2 if we have
9085 no clobber reg. */
9087 if (NULL_RTX == clobber_reg
9088 && single_one_operand (xval, QImode))
9090 xop[0] = xdest[n];
9091 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9093 gcc_assert (constm1_rtx != xop[1]);
9095 if (!set_p)
9097 set_p = true;
9098 avr_asm_len ("set", xop, len, 1);
9101 if (!clear_p)
9102 avr_asm_len ("clr %0", xop, len, 1);
9104 avr_asm_len ("bld %0,%1", xop, len, 1);
9105 continue;
9108 /* We actually need the LD_REGS clobber reg. */
9110 gcc_assert (NULL_RTX != clobber_reg);
9112 xop[0] = xdest[n];
9113 xop[1] = xval;
9114 xop[2] = clobber_reg;
9115 clobber_val = ival[n];
9117 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9118 "mov %0,%2", xop, len, 2);
9121 /* If we cooked up a clobber reg above, restore it. */
9123 if (cooked_clobber_p)
9125 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9130 /* Reload the constant OP[1] into the HI register OP[0].
9131 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9132 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9133 need a clobber reg or have to cook one up.
9135 PLEN == NULL: Output instructions.
9136 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9137 by the insns printed.
9139 Return "". */
9141 const char*
9142 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9144 output_reload_in_const (op, clobber_reg, plen, false);
9145 return "";
9149 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9150 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9151 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9152 need a clobber reg or have to cook one up.
9154 LEN == NULL: Output instructions.
9156 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9157 by the insns printed.
9159 Return "". */
9161 const char *
9162 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9164 if (AVR_HAVE_MOVW
9165 && !test_hard_reg_class (LD_REGS, op[0])
9166 && (CONST_INT_P (op[1])
9167 || CONST_DOUBLE_P (op[1])))
9169 int len_clr, len_noclr;
9171 /* In some cases it is better to clear the destination beforehand, e.g.
9173 CLR R2 CLR R3 MOVW R4,R2 INC R2
9175 is shorther than
9177 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9179 We find it too tedious to work that out in the print function.
9180 Instead, we call the print function twice to get the lengths of
9181 both methods and use the shortest one. */
9183 output_reload_in_const (op, clobber_reg, &len_clr, true);
9184 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9186 if (len_noclr - len_clr == 4)
9188 /* Default needs 4 CLR instructions: clear register beforehand. */
9190 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9191 "mov %B0,__zero_reg__" CR_TAB
9192 "movw %C0,%A0", &op[0], len, 3);
9194 output_reload_in_const (op, clobber_reg, len, true);
9196 if (len)
9197 *len += 3;
9199 return "";
9203 /* Default: destination not pre-cleared. */
9205 output_reload_in_const (op, clobber_reg, len, false);
9206 return "";
9209 const char *
9210 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9212 output_reload_in_const (op, clobber_reg, len, false);
9213 return "";
9216 void
9217 avr_output_bld (rtx operands[], int bit_nr)
9219 static char s[] = "bld %A0,0";
9221 s[5] = 'A' + (bit_nr >> 3);
9222 s[8] = '0' + (bit_nr & 7);
9223 output_asm_insn (s, operands);
9226 void
9227 avr_output_addr_vec_elt (FILE *stream, int value)
9229 if (AVR_HAVE_JMP_CALL)
9230 fprintf (stream, "\t.word gs(.L%d)\n", value);
9231 else
9232 fprintf (stream, "\trjmp .L%d\n", value);
9235 /* Returns true if SCRATCH are safe to be allocated as a scratch
9236 registers (for a define_peephole2) in the current function. */
9238 static bool
9239 avr_hard_regno_scratch_ok (unsigned int regno)
9241 /* Interrupt functions can only use registers that have already been saved
9242 by the prologue, even if they would normally be call-clobbered. */
9244 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9245 && !df_regs_ever_live_p (regno))
9246 return false;
9248 /* Don't allow hard registers that might be part of the frame pointer.
9249 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9250 and don't care for a frame pointer that spans more than one register. */
9252 if ((!reload_completed || frame_pointer_needed)
9253 && (regno == REG_Y || regno == REG_Y + 1))
9255 return false;
9258 return true;
9261 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9264 avr_hard_regno_rename_ok (unsigned int old_reg,
9265 unsigned int new_reg)
9267 /* Interrupt functions can only use registers that have already been
9268 saved by the prologue, even if they would normally be
9269 call-clobbered. */
9271 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9272 && !df_regs_ever_live_p (new_reg))
9273 return 0;
9275 /* Don't allow hard registers that might be part of the frame pointer.
9276 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9277 and don't care for a frame pointer that spans more than one register. */
9279 if ((!reload_completed || frame_pointer_needed)
9280 && (old_reg == REG_Y || old_reg == REG_Y + 1
9281 || new_reg == REG_Y || new_reg == REG_Y + 1))
9283 return 0;
9286 return 1;
9289 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9290 or memory location in the I/O space (QImode only).
9292 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9293 Operand 1: register operand to test, or CONST_INT memory address.
9294 Operand 2: bit number.
9295 Operand 3: label to jump to if the test is true. */
9297 const char *
9298 avr_out_sbxx_branch (rtx insn, rtx operands[])
9300 enum rtx_code comp = GET_CODE (operands[0]);
9301 bool long_jump = get_attr_length (insn) >= 4;
9302 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9304 if (comp == GE)
9305 comp = EQ;
9306 else if (comp == LT)
9307 comp = NE;
9309 if (reverse)
9310 comp = reverse_condition (comp);
9312 switch (GET_CODE (operands[1]))
9314 default:
9315 gcc_unreachable();
9317 case CONST_INT:
9319 if (low_io_address_operand (operands[1], QImode))
9321 if (comp == EQ)
9322 output_asm_insn ("sbis %i1,%2", operands);
9323 else
9324 output_asm_insn ("sbic %i1,%2", operands);
9326 else
9328 output_asm_insn ("in __tmp_reg__,%i1", operands);
9329 if (comp == EQ)
9330 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9331 else
9332 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9335 break; /* CONST_INT */
9337 case REG:
9339 if (GET_MODE (operands[1]) == QImode)
9341 if (comp == EQ)
9342 output_asm_insn ("sbrs %1,%2", operands);
9343 else
9344 output_asm_insn ("sbrc %1,%2", operands);
9346 else /* HImode, PSImode or SImode */
9348 static char buf[] = "sbrc %A1,0";
9349 unsigned int bit_nr = UINTVAL (operands[2]);
9351 buf[3] = (comp == EQ) ? 's' : 'c';
9352 buf[6] = 'A' + (bit_nr / 8);
9353 buf[9] = '0' + (bit_nr % 8);
9354 output_asm_insn (buf, operands);
9357 break; /* REG */
9358 } /* switch */
9360 if (long_jump)
9361 return ("rjmp .+4" CR_TAB
9362 "jmp %x3");
9364 if (!reverse)
9365 return "rjmp %x3";
9367 return "";
9370 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9372 static void
9373 avr_asm_out_ctor (rtx symbol, int priority)
9375 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9376 default_ctor_section_asm_out_constructor (symbol, priority);
9379 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9381 static void
9382 avr_asm_out_dtor (rtx symbol, int priority)
9384 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9385 default_dtor_section_asm_out_destructor (symbol, priority);
9388 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9390 static bool
9391 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9393 if (TYPE_MODE (type) == BLKmode)
9395 HOST_WIDE_INT size = int_size_in_bytes (type);
9396 return (size == -1 || size > 8);
9398 else
9399 return false;
9402 /* Worker function for CASE_VALUES_THRESHOLD. */
9404 static unsigned int
9405 avr_case_values_threshold (void)
9407 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9411 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9413 static enum machine_mode
9414 avr_addr_space_address_mode (addr_space_t as)
9416 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9420 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9422 static enum machine_mode
9423 avr_addr_space_pointer_mode (addr_space_t as)
9425 return avr_addr_space_address_mode (as);
9429 /* Helper for following function. */
9431 static bool
9432 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9434 gcc_assert (REG_P (reg));
9436 if (strict)
9438 return REGNO (reg) == REG_Z;
9441 /* Avoid combine to propagate hard regs. */
9443 if (can_create_pseudo_p()
9444 && REGNO (reg) < REG_Z)
9446 return false;
9449 return true;
9453 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9455 static bool
9456 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9457 bool strict, addr_space_t as)
9459 bool ok = false;
9461 switch (as)
9463 default:
9464 gcc_unreachable();
9466 case ADDR_SPACE_GENERIC:
9467 return avr_legitimate_address_p (mode, x, strict);
9469 case ADDR_SPACE_FLASH:
9470 case ADDR_SPACE_FLASH1:
9471 case ADDR_SPACE_FLASH2:
9472 case ADDR_SPACE_FLASH3:
9473 case ADDR_SPACE_FLASH4:
9474 case ADDR_SPACE_FLASH5:
9476 switch (GET_CODE (x))
9478 case REG:
9479 ok = avr_reg_ok_for_pgm_addr (x, strict);
9480 break;
9482 case POST_INC:
9483 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9484 break;
9486 default:
9487 break;
9490 break; /* FLASH */
9492 case ADDR_SPACE_MEMX:
9493 if (REG_P (x))
9494 ok = (!strict
9495 && can_create_pseudo_p());
9497 if (LO_SUM == GET_CODE (x))
9499 rtx hi = XEXP (x, 0);
9500 rtx lo = XEXP (x, 1);
9502 ok = (REG_P (hi)
9503 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9504 && REG_P (lo)
9505 && REGNO (lo) == REG_Z);
9508 break; /* MEMX */
9511 if (avr_log.legitimate_address_p)
9513 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9514 "reload_completed=%d reload_in_progress=%d %s:",
9515 ok, mode, strict, reload_completed, reload_in_progress,
9516 reg_renumber ? "(reg_renumber)" : "");
9518 if (GET_CODE (x) == PLUS
9519 && REG_P (XEXP (x, 0))
9520 && CONST_INT_P (XEXP (x, 1))
9521 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9522 && reg_renumber)
9524 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9525 true_regnum (XEXP (x, 0)));
9528 avr_edump ("\n%r\n", x);
9531 return ok;
9535 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9537 static rtx
9538 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9539 enum machine_mode mode, addr_space_t as)
9541 if (ADDR_SPACE_GENERIC_P (as))
9542 return avr_legitimize_address (x, old_x, mode);
9544 if (avr_log.legitimize_address)
9546 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9549 return old_x;
9553 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
/* Emit RTL converting pointer SRC between address spaces and return the
   result.  Only conversions to/from the 24-bit MEMX space need code;
   all other conversions return SRC unchanged.  */
9555 static rtx
9556 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9558 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9559 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9561 if (avr_log.progmem)
9562 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9563 src, type_from, type_to);
9565 /* Up-casting from 16-bit to 24-bit pointer. */
9567 if (as_from != ADDR_SPACE_MEMX
9568 && as_to == ADDR_SPACE_MEMX)
9570 int msb;
9571 rtx sym = src;
9572 rtx reg = gen_reg_rtx (PSImode);
/* Strip CONST/PLUS wrappers to get at a possible SYMBOL_REF.  */
9574 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9575 sym = XEXP (sym, 0);
9577 /* Look at symbol flags: avr_encode_section_info set the flags
9578 also if attribute progmem was seen so that we get the right
9579 promotion for, e.g. PSTR-like strings that reside in generic space
9580 but are located in flash. In that case we patch the incoming
9581 address space. */
9583 if (SYMBOL_REF == GET_CODE (sym)
9584 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9586 as_from = ADDR_SPACE_FLASH;
9589 /* Linearize memory: RAM has bit 23 set. */
/* NOTE(review): the modulo presumably wraps segment numbers that exceed
   the device's flash segment count -- confirm against avr_addrspace[].  */
9591 msb = ADDR_SPACE_GENERIC_P (as_from)
9592 ? 0x80
9593 : avr_addrspace[as_from].segment % avr_current_arch->n_segments;
9595 src = force_reg (Pmode, src);
/* Zero-extend when the high byte is 0, otherwise extend with MSB.  */
9597 emit_insn (msb == 0
9598 ? gen_zero_extendhipsi2 (reg, src)
9599 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9601 return reg;
9604 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9606 if (as_from == ADDR_SPACE_MEMX
9607 && as_to != ADDR_SPACE_MEMX)
9609 rtx new_src = gen_reg_rtx (Pmode);
9611 src = force_reg (PSImode, src);
9613 emit_move_insn (new_src,
9614 simplify_gen_subreg (Pmode, src, PSImode, 0));
9615 return new_src;
9618 return src;
9622 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9624 static bool
9625 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9626 addr_space_t superset ATTRIBUTE_UNUSED)
9628 /* Allow any kind of pointer mess. */
9630 return true;
9634 /* Worker function for movmemhi expander.
9635 XOP[0] Destination as MEM:BLK
9636 XOP[1] Source " "
9637 XOP[2] # Bytes to copy
9639 Return TRUE if the expansion is accomplished.
9640 Return FALSE if the operand combination is not supported. */
9642 bool
9643 avr_emit_movmemhi (rtx *xop)
9645 HOST_WIDE_INT count;
9646 enum machine_mode loop_mode;
9647 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9648 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9649 rtx a_hi8 = NULL_RTX;
/* Writing to flash is not supported.  */
9651 if (avr_mem_flash_p (xop[0]))
9652 return false;
/* Only expand copies with a compile-time positive byte count.  */
9654 if (!CONST_INT_P (xop[2]))
9655 return false;
9657 count = INTVAL (xop[2]);
9658 if (count <= 0)
9659 return false;
9661 a_src = XEXP (xop[1], 0);
9662 a_dest = XEXP (xop[0], 0);
/* 24-bit source address: MEMX space, split into HI lo-part and
   QI hh8 segment byte.  */
9664 if (PSImode == GET_MODE (a_src))
9666 gcc_assert (as == ADDR_SPACE_MEMX);
9668 loop_mode = (count < 0x100) ? QImode : HImode;
9669 loop_reg = gen_rtx_REG (loop_mode, 24);
9670 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9672 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9673 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9675 else
/* 16-bit source address: known space; set up RAMPZ when reading
   from an upper flash segment.  */
9677 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
9679 if (segment
9680 && avr_current_arch->n_segments > 1)
9682 a_hi8 = GEN_INT (segment);
9683 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9685 else if (!ADDR_SPACE_GENERIC_P (as))
9687 as = ADDR_SPACE_FLASH;
9690 addr1 = a_src;
9692 loop_mode = (count <= 0x100) ? QImode : HImode;
9693 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9696 xas = GEN_INT (as);
9698 /* FIXME: Register allocator might come up with spill fails if it is left
9699 on its own. Thus, we allocate the pointer registers by hand:
9700 Z = source address
9701 X = destination address */
9703 emit_move_insn (lpm_addr_reg_rtx, addr1);
9704 addr1 = lpm_addr_reg_rtx;
9706 reg_x = gen_rtx_REG (HImode, REG_X);
9707 emit_move_insn (reg_x, a_dest);
9708 addr0 = reg_x;
9710 /* FIXME: Register allocator does a bad job and might spill address
9711 register(s) inside the loop leading to additional move instruction
9712 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9713 load and store as separate insns. Instead, we perform the copy
9714 by means of one monolithic insn. */
9716 gcc_assert (TMP_REGNO == LPM_REGNO);
9718 if (as != ADDR_SPACE_MEMX)
9720 /* Load instruction ([E]LPM or LD) is known at compile time:
9721 Do the copy-loop inline. */
9723 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9724 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9726 insn = fun (addr0, addr1, xas, loop_reg,
9727 addr0, addr1, tmp_reg_rtx, loop_reg);
9729 else
/* MEMX: the load primitive is picked at run time depending on the
   hh8 segment byte in R23; RAMPZ is managed by the insn itself.  */
9731 rtx loop_reg16 = gen_rtx_REG (HImode, 24);
9732 rtx r23 = gen_rtx_REG (QImode, 23);
9733 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9734 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
9736 emit_move_insn (r23, a_hi8);
9738 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9739 lpm_reg_rtx, loop_reg16, r23, r23, GEN_INT (RAMPZ_ADDR));
9742 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9743 emit_insn (insn);
9745 return true;
9749 /* Print assembler for movmem_qi, movmem_hi insns...
9750 $0, $4 : & dest
9751 $1, $5 : & src
9752 $2 : Address Space
9753 $3, $7 : Loop register
9754 $6 : Scratch register
9756 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9758 $8, $9 : hh8 (& src)
9759 $10 : RAMPZ_ADDR
9762 const char*
9763 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9765 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
9766 enum machine_mode loop_mode = GET_MODE (xop[3]);
9768 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
9770 gcc_assert (REG_X == REGNO (xop[0])
9771 && REG_Z == REGNO (xop[1]));
9773 if (plen)
9774 *plen = 0;
9776 /* Loop label */
9778 avr_asm_len ("0:", xop, plen, 0);
9780 /* Load with post-increment */
9782 switch (as)
9784 default:
9785 gcc_unreachable();
9787 case ADDR_SPACE_GENERIC:
9789 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
9790 break;
9792 case ADDR_SPACE_FLASH:
9794 if (AVR_HAVE_LPMX)
9795 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
9796 else
9797 avr_asm_len ("lpm" CR_TAB
9798 "adiw %1,1", xop, plen, 2);
9799 break;
9801 case ADDR_SPACE_FLASH1:
9802 case ADDR_SPACE_FLASH2:
9803 case ADDR_SPACE_FLASH3:
9804 case ADDR_SPACE_FLASH4:
9805 case ADDR_SPACE_FLASH5:
9807 if (AVR_HAVE_ELPMX)
9808 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
9809 else
9810 avr_asm_len ("elpm" CR_TAB
9811 "adiw %1,1", xop, plen, 2);
9812 break;
9815 /* Store with post-increment */
9817 avr_asm_len ("st %a0+,%6", xop, plen, 1);
9819 /* Decrement loop-counter and set Z-flag */
9821 if (QImode == loop_mode)
9823 avr_asm_len ("dec %3", xop, plen, 1);
9825 else if (sbiw_p)
9827 avr_asm_len ("sbiw %3,1", xop, plen, 1);
9829 else
9831 avr_asm_len ("subi %A3,1" CR_TAB
9832 "sbci %B3,0", xop, plen, 2);
9835 /* Loop until zero */
9837 return avr_asm_len ("brne 0b", xop, plen, 1);
9842 /* Helper for __builtin_avr_delay_cycles */
9844 static void
9845 avr_expand_delay_cycles (rtx operands0)
9847 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9848 unsigned HOST_WIDE_INT cycles_used;
9849 unsigned HOST_WIDE_INT loop_count;
9851 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9853 loop_count = ((cycles - 9) / 6) + 1;
9854 cycles_used = ((loop_count - 1) * 6) + 9;
9855 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9856 cycles -= cycles_used;
9859 if (IN_RANGE (cycles, 262145, 83886081))
9861 loop_count = ((cycles - 7) / 5) + 1;
9862 if (loop_count > 0xFFFFFF)
9863 loop_count = 0xFFFFFF;
9864 cycles_used = ((loop_count - 1) * 5) + 7;
9865 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9866 cycles -= cycles_used;
9869 if (IN_RANGE (cycles, 768, 262144))
9871 loop_count = ((cycles - 5) / 4) + 1;
9872 if (loop_count > 0xFFFF)
9873 loop_count = 0xFFFF;
9874 cycles_used = ((loop_count - 1) * 4) + 5;
9875 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
9876 cycles -= cycles_used;
9879 if (IN_RANGE (cycles, 6, 767))
9881 loop_count = cycles / 3;
9882 if (loop_count > 255)
9883 loop_count = 255;
9884 cycles_used = loop_count * 3;
9885 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
9886 cycles -= cycles_used;
9889 while (cycles >= 2)
9891 emit_insn (gen_nopv (GEN_INT(2)));
9892 cycles -= 2;
9895 if (cycles == 1)
9897 emit_insn (gen_nopv (GEN_INT(1)));
9898 cycles--;
9903 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9905 static double_int
9906 avr_double_int_push_digit (double_int val, int base,
9907 unsigned HOST_WIDE_INT digit)
9909 val = 0 == base
9910 ? double_int_lshift (val, 32, 64, false)
9911 : double_int_mul (val, uhwi_to_double_int (base));
9913 return double_int_add (val, uhwi_to_double_int (digit));
9917 /* Compute the image of x under f, i.e. perform x --> f(x) */
9919 static int
9920 avr_map (double_int f, int x)
9922 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
9926 /* Return the map R that reverses the bits of byte B.
9928 R(0) = (0 7) o (1 6) o (2 5) o (3 4)
9929 R(1) = (8 15) o (9 14) o (10 13) o (11 12)
9931 Notice that R o R = id. */
9933 static double_int
9934 avr_revert_map (int b)
9936 int i;
9937 double_int r = double_int_zero;
9939 for (i = 16-1; i >= 0; i--)
9940 r = avr_double_int_push_digit (r, 16, i >> 3 == b ? i ^ 7 : i);
9942 return r;
9946 /* Return the map R that swaps bit-chunks of size SIZE in byte B.
9948 R(1,0) = (0 1) o (2 3) o (4 5) o (6 7)
9949 R(1,1) = (8 9) o (10 11) o (12 13) o (14 15)
9951 R(4,0) = (0 4) o (1 5) o (2 6) o (3 7)
9952 R(4,1) = (8 12) o (9 13) o (10 14) o (11 15)
9954 Notice that R o R = id. */
9956 static double_int
9957 avr_swap_map (int size, int b)
9959 int i;
9960 double_int r = double_int_zero;
9962 for (i = 16-1; i >= 0; i--)
9963 r = avr_double_int_push_digit (r, 16, i ^ (i >> 3 == b ? size : 0));
9965 return r;
9969 /* Return Identity. */
9971 static double_int
9972 avr_id_map (void)
9974 int i;
9975 double_int r = double_int_zero;
9977 for (i = 16-1; i >= 0; i--)
9978 r = avr_double_int_push_digit (r, 16, i);
9980 return r;
/* Signatures of the basic bit maps recognized by avr_sig_map().
   NOTE(review): SIG_ROL = 0xf occupies the low nibble -- presumably a
   rotate-amount field rather than a flag; confirm against the users
   of these signatures.  */
9984 enum
9986 SIG_ID = 0,
9987 /* for QI and HI */
9988 SIG_ROL = 0xf,
9989 SIG_REVERT_0 = 1 << 4,
9990 SIG_SWAP1_0 = 1 << 5,
9991 /* HI only */
9992 SIG_REVERT_1 = 1 << 6,
9993 SIG_SWAP1_1 = 1 << 7,
9994 SIG_SWAP4_0 = 1 << 8,
9995 SIG_SWAP4_1 = 1 << 9
9999 /* Return basic map with signature SIG. */
10001 static double_int
10002 avr_sig_map (int n ATTRIBUTE_UNUSED, int sig)
10004 if (sig == SIG_ID) return avr_id_map ();
10005 else if (sig == SIG_REVERT_0) return avr_revert_map (0);
10006 else if (sig == SIG_REVERT_1) return avr_revert_map (1);
10007 else if (sig == SIG_SWAP1_0) return avr_swap_map (1, 0);
10008 else if (sig == SIG_SWAP1_1) return avr_swap_map (1, 1);
10009 else if (sig == SIG_SWAP4_0) return avr_swap_map (4, 0);
10010 else if (sig == SIG_SWAP4_1) return avr_swap_map (4, 1);
10011 else
10012 gcc_unreachable();
10016 /* Return the Hamming distance between the B-th byte of A and C. */
10018 static bool
10019 avr_map_hamming_byte (int n, int b, double_int a, double_int c, bool strict)
10021 int i, hamming = 0;
10023 for (i = 8*b; i < n && i < 8*b + 8; i++)
10025 int ai = avr_map (a, i);
10026 int ci = avr_map (c, i);
10028 hamming += ai != ci && (strict || (ai < n && ci < n));
10031 return hamming;
10035 /* Return the non-strict Hamming distance between A and B. */
10037 #define avr_map_hamming_nonstrict(N,A,B) \
10038 (+ avr_map_hamming_byte (N, 0, A, B, false) \
10039 + avr_map_hamming_byte (N, 1, A, B, false))
10042 /* Return TRUE iff A and B represent the same mapping. */
10044 #define avr_map_equal_p(N,A,B) (0 == avr_map_hamming_nonstrict (N, A, B))
10047 /* Return TRUE iff A is a map of signature S. Notice that there is no
10048 1:1 correspondence between maps and signatures and thus this is
10049 only supported for basic signatures recognized by avr_sig_map(). */
10051 #define avr_map_sig_p(N,A,S) avr_map_equal_p (N, A, avr_sig_map (N, S))
10054 /* Swap odd/even bits of ld-reg %0: %0 = bit-swap (%0) */
10056 static const char*
10057 avr_out_swap_bits (rtx *xop, int *plen)
10059 xop[1] = tmp_reg_rtx;
10061 return avr_asm_len ("mov %1,%0" CR_TAB
10062 "andi %0,0xaa" CR_TAB
10063 "eor %1,%0" CR_TAB
10064 "lsr %0" CR_TAB
10065 "lsl %1" CR_TAB
10066 "or %0,%1", xop, plen, 6);
10069 /* Revert bit order: %0 = Revert (%1) with %0 != %1 and clobber %1 */
10071 static const char*
10072 avr_out_revert_bits (rtx *xop, int *plen)
10074 return avr_asm_len ("inc __zero_reg__" "\n"
10075 "0:\tror %1" CR_TAB
10076 "rol %0" CR_TAB
10077 "lsl __zero_reg__" CR_TAB
10078 "brne 0b", xop, plen, 5);
10082 /* If OUT_P = true: Output BST/BLD instruction according to MAP.
10083 If OUT_P = false: Just dry-run and fix XOP[1] to resolve
10084 early-clobber conflicts if XOP[0] = XOP[1]. */
10086 static void
10087 avr_move_bits (rtx *xop, double_int map, int n_bits, bool out_p, int *plen)
10089 int bit_dest, b, clobber = 0;
10091 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10092 int t_bit_src = -1;
/* Without optimization, skip the dry-run analysis and always copy
   the input to the scratch register.  */
10094 if (!optimize && !out_p)
10096 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10097 xop[1] = tmp_reg_rtx;
10098 return;
10101 /* We order the operations according to the requested source bit b. */
10103 for (b = 0; b < n_bits; b++)
10104 for (bit_dest = 0; bit_dest < n_bits; bit_dest++)
10106 int bit_src = avr_map (map, bit_dest);
10108 if (b != bit_src
10109 /* Same position: No need to copy as the caller did MOV. */
10110 || bit_dest == bit_src
10111 /* Accessing bits 8..f for 8-bit version is void. */
10112 || bit_src >= n_bits)
10113 continue;
10115 if (t_bit_src != bit_src)
10117 /* Source bit is not yet in T: Store it to T. */
10119 t_bit_src = bit_src;
10121 if (out_p)
10123 xop[2] = GEN_INT (bit_src);
10124 avr_asm_len ("bst %T1%T2", xop, plen, 1);
10126 else if (clobber & (1 << bit_src))
10128 /* Bit to be read was written already: Backup input
10129 to resolve early-clobber conflict. */
10131 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10132 xop[1] = tmp_reg_rtx;
10133 return;
10137 /* Load destination bit with T. */
10139 if (out_p)
10141 xop[2] = GEN_INT (bit_dest);
10142 avr_asm_len ("bld %T0%T2", xop, plen, 1);
/* Remember which destination bits have been overwritten so the
   dry-run can detect read-after-write on XOP[1].  */
10145 clobber |= 1 << bit_dest;
10150 /* Print assembler code for `map_bitsqi' and `map_bitshi'. */
/* OPERANDS[0] = destination, OPERANDS[1] = the bit map as CONST,
   OPERANDS[2] = source.  PLEN, when non-NULL, receives the insn
   length instead of printing assembler.  */
10152 const char*
10153 avr_out_map_bits (rtx insn, rtx *operands, int *plen)
10155 bool copy_0, copy_1;
10156 int n_bits = GET_MODE_BITSIZE (GET_MODE (operands[0]));
10157 double_int map = rtx_to_double_int (operands[1]);
10158 rtx xop[3];
10160 xop[0] = operands[0];
10161 xop[1] = operands[2];
10163 if (plen)
10164 *plen = 0;
10165 else if (flag_print_asm_name)
10166 avr_fdump (asm_out_file, ASM_COMMENT_START "%X\n", map);
/* Recognize the cheap special cases first: bit-swap and bit-revert
   have dedicated sequences for the 8-bit version.  */
10168 switch (n_bits)
10170 default:
10171 gcc_unreachable();
10173 case 8:
10174 if (avr_map_sig_p (n_bits, map, SIG_SWAP1_0))
10176 return avr_out_swap_bits (xop, plen);
10178 else if (avr_map_sig_p (n_bits, map, SIG_REVERT_0))
10180 if (REGNO (xop[0]) == REGNO (xop[1])
10181 || !reg_unused_after (insn, xop[1]))
10183 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10184 xop[1] = tmp_reg_rtx;
10187 return avr_out_revert_bits (xop, plen);
10190 break; /* 8 */
10192 case 16:
10194 break; /* 16 */
10197 /* Copy whole byte is cheaper than moving bits that stay at the same
10198 position. Some bits in a byte stay at the same position iff the
10199 strict Hamming distance to Identity is not 8. */
10201 copy_0 = 8 != avr_map_hamming_byte (n_bits, 0, map, avr_id_map(), true);
10202 copy_1 = 8 != avr_map_hamming_byte (n_bits, 1, map, avr_id_map(), true);
10204 /* Perform the move(s) just worked out. */
10206 if (n_bits == 8)
10208 if (REGNO (xop[0]) == REGNO (xop[1]))
10210 /* Fix early-clobber clashes.
10211 Notice XOP[0] has no early-clobber in its constraint. */
10213 avr_move_bits (xop, map, n_bits, false, plen);
10215 else if (copy_0)
10217 avr_asm_len ("mov %0,%1", xop, plen, 1);
10220 else if (AVR_HAVE_MOVW && copy_0 && copy_1)
10222 avr_asm_len ("movw %A0,%A1", xop, plen, 1);
10224 else
10226 if (copy_0)
10227 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
10229 if (copy_1)
10230 avr_asm_len ("mov %B0,%B1", xop, plen, 1);
10233 /* Move individual bits. */
10235 avr_move_bits (xop, map, n_bits, true, plen);
10237 return "";
10241 /* IDs for all the AVR builtins. */
/* Function codes passed to DEF_BUILTIN below and dispatched on in
   avr_expand_builtin().  */
10243 enum avr_builtin_id
10245 AVR_BUILTIN_NOP,
10246 AVR_BUILTIN_SEI,
10247 AVR_BUILTIN_CLI,
10248 AVR_BUILTIN_WDR,
10249 AVR_BUILTIN_SLEEP,
10250 AVR_BUILTIN_SWAP,
10251 AVR_BUILTIN_MAP8,
10252 AVR_BUILTIN_MAP16,
10253 AVR_BUILTIN_FMUL,
10254 AVR_BUILTIN_FMULS,
10255 AVR_BUILTIN_FMULSU,
10256 AVR_BUILTIN_DELAY_CYCLES
10259 static void
10260 avr_init_builtin_int24 (void)
10262 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10263 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10265 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10266 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
/* Register one machine-specific builtin NAME with function type TYPE
   and function code CODE (an avr_builtin_id).  */
10269 #define DEF_BUILTIN(NAME, TYPE, CODE) \
10270 do \
10272 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10273 NULL, NULL_TREE); \
10274 } while (0)
10277 /* Implement `TARGET_INIT_BUILTINS' */
10278 /* Set up all builtin functions for this target. */
10280 static void
10281 avr_init_builtins (void)
/* Function-type trees, named return_ftype_args.  */
10283 tree void_ftype_void
10284 = build_function_type_list (void_type_node, NULL_TREE);
10285 tree uchar_ftype_uchar
10286 = build_function_type_list (unsigned_char_type_node,
10287 unsigned_char_type_node,
10288 NULL_TREE);
10289 tree uint_ftype_uchar_uchar
10290 = build_function_type_list (unsigned_type_node,
10291 unsigned_char_type_node,
10292 unsigned_char_type_node,
10293 NULL_TREE);
10294 tree int_ftype_char_char
10295 = build_function_type_list (integer_type_node,
10296 char_type_node,
10297 char_type_node,
10298 NULL_TREE);
10299 tree int_ftype_char_uchar
10300 = build_function_type_list (integer_type_node,
10301 char_type_node,
10302 unsigned_char_type_node,
10303 NULL_TREE);
10304 tree void_ftype_ulong
10305 = build_function_type_list (void_type_node,
10306 long_unsigned_type_node,
10307 NULL_TREE);
10309 tree uchar_ftype_ulong_uchar
10310 = build_function_type_list (unsigned_char_type_node,
10311 long_unsigned_type_node,
10312 unsigned_char_type_node,
10313 NULL_TREE);
10315 tree uint_ftype_ullong_uint
10316 = build_function_type_list (unsigned_type_node,
10317 long_long_unsigned_type_node,
10318 unsigned_type_node,
10319 NULL_TREE);
/* Simple no-argument builtins mapping 1:1 to machine instructions.  */
10321 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
10322 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
10323 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
10324 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
10325 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
10326 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
10327 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
10328 AVR_BUILTIN_DELAY_CYCLES);
/* Fractional multiply builtins (FMUL family).  */
10330 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
10331 AVR_BUILTIN_FMUL);
10332 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
10333 AVR_BUILTIN_FMULS);
10334 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
10335 AVR_BUILTIN_FMULSU);
/* Bit-map builtins; the first argument is a compile-time map constant.  */
10337 DEF_BUILTIN ("__builtin_avr_map8", uchar_ftype_ulong_uchar,
10338 AVR_BUILTIN_MAP8);
10339 DEF_BUILTIN ("__builtin_avr_map16", uint_ftype_ullong_uint,
10340 AVR_BUILTIN_MAP16);
10342 avr_init_builtin_int24 ();
10345 #undef DEF_BUILTIN
/* Descriptor tying a builtin function code to the insn that expands it.  */
10347 struct avr_builtin_description
10349 const enum insn_code icode;
10350 const char *const name;
10351 const enum avr_builtin_id id;
/* Builtins taking one argument.  */
10354 static const struct avr_builtin_description
10355 bdesc_1arg[] =
10357 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Builtins taking two arguments.  */
10360 static const struct avr_builtin_description
10361 bdesc_2arg[] =
10363 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
10364 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
10365 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU },
10366 { CODE_FOR_map_bitsqi, "__builtin_avr_map8", AVR_BUILTIN_MAP8 },
10367 { CODE_FOR_map_bitshi, "__builtin_avr_map16", AVR_BUILTIN_MAP16 }
10370 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10372 static rtx
10373 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10374 rtx target)
10376 rtx pat;
10377 tree arg0 = CALL_EXPR_ARG (exp, 0);
10378 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10379 enum machine_mode op0mode = GET_MODE (op0);
10380 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10381 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10383 if (! target
10384 || GET_MODE (target) != tmode
10385 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10387 target = gen_reg_rtx (tmode);
10390 if (op0mode == SImode && mode0 == HImode)
10392 op0mode = HImode;
10393 op0 = gen_lowpart (HImode, op0);
10396 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10398 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10399 op0 = copy_to_mode_reg (mode0, op0);
10401 pat = GEN_FCN (icode) (target, op0);
10402 if (! pat)
10403 return 0;
10405 emit_insn (pat);
10407 return target;
10411 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10413 static rtx
10414 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10416 rtx pat;
10417 tree arg0 = CALL_EXPR_ARG (exp, 0);
10418 tree arg1 = CALL_EXPR_ARG (exp, 1);
10419 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10420 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10421 enum machine_mode op0mode = GET_MODE (op0);
10422 enum machine_mode op1mode = GET_MODE (op1);
10423 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10424 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10425 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10427 if (! target
10428 || GET_MODE (target) != tmode
10429 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10431 target = gen_reg_rtx (tmode);
10434 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10436 op0mode = HImode;
10437 op0 = gen_lowpart (HImode, op0);
10440 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10442 op1mode = HImode;
10443 op1 = gen_lowpart (HImode, op1);
10446 /* In case the insn wants input operands in modes different from
10447 the result, abort. */
10449 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10450 && (op1mode == mode1 || op1mode == VOIDmode));
10452 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10453 op0 = copy_to_mode_reg (mode0, op0);
10455 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10456 op1 = copy_to_mode_reg (mode1, op1);
10458 pat = GEN_FCN (icode) (target, op0, op1);
10460 if (! pat)
10461 return 0;
10463 emit_insn (pat);
10464 return target;
10468 /* Expand an expression EXP that calls a built-in function,
10469 with result going to TARGET if that's convenient
10470 (and in mode MODE if that's convenient).
10471 SUBTARGET may be used as the target for computing one of EXP's operands.
10472 IGNORE is nonzero if the value is to be ignored. */
10474 static rtx
10475 avr_expand_builtin (tree exp, rtx target,
10476 rtx subtarget ATTRIBUTE_UNUSED,
10477 enum machine_mode mode ATTRIBUTE_UNUSED,
10478 int ignore ATTRIBUTE_UNUSED)
10480 size_t i;
10481 const struct avr_builtin_description *d;
10482 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10483 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10484 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10485 tree arg0;
10486 rtx op0;
/* Handle the special-cased builtins directly; everything else is
   dispatched through the bdesc_1arg/bdesc_2arg tables below.  */
10488 switch (id)
10490 case AVR_BUILTIN_NOP:
10491 emit_insn (gen_nopv (GEN_INT(1)));
10492 return 0;
10494 case AVR_BUILTIN_SEI:
10495 emit_insn (gen_enable_interrupt ());
10496 return 0;
10498 case AVR_BUILTIN_CLI:
10499 emit_insn (gen_disable_interrupt ());
10500 return 0;
10502 case AVR_BUILTIN_WDR:
10503 emit_insn (gen_wdr ());
10504 return 0;
10506 case AVR_BUILTIN_SLEEP:
10507 emit_insn (gen_sleep ());
10508 return 0;
10510 case AVR_BUILTIN_DELAY_CYCLES:
10512 arg0 = CALL_EXPR_ARG (exp, 0);
10513 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10515 if (! CONST_INT_P (op0))
10516 error ("%s expects a compile time integer constant", bname);
10518 avr_expand_delay_cycles (op0);
10519 return 0;
/* MAP8/MAP16 only validate that the map argument is a compile-time
   constant here; the actual expansion happens via bdesc_2arg below.
   NOTE(review): no `break' is visible between these cases in this
   (whitespace-mangled) extract -- presumably valid MAP8 calls fall
   through to the table dispatch; confirm against the pristine source.  */
10522 case AVR_BUILTIN_MAP8:
10524 arg0 = CALL_EXPR_ARG (exp, 0);
10525 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10527 if (!CONST_INT_P (op0))
10529 error ("%s expects a compile time long integer constant"
10530 " as first argument", bname);
10531 return target;
10535 case AVR_BUILTIN_MAP16:
10537 arg0 = CALL_EXPR_ARG (exp, 0);
10538 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10540 if (!const_double_operand (op0, VOIDmode))
10542 error ("%s expects a compile time long long integer constant"
10543 " as first argument", bname);
10544 return target;
/* Table-driven expansion for the remaining builtins.  */
10549 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10550 if (d->id == id)
10551 return avr_expand_unop_builtin (d->icode, exp, target);
10553 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10554 if (d->id == id)
10555 return avr_expand_binop_builtin (d->icode, exp, target);
10557 gcc_unreachable ();
/* The single instance of the target hook vector -- presumably filled in
   by TARGET_* macro overrides earlier in this file (not visible in this
   chunk) collected by TARGET_INITIALIZER.  */
10560 struct gcc_target targetm = TARGET_INITIALIZER;
/* Generated garbage-collector root tables for this file.  */
10562 #include "gt-avr.h"