* config/avr/avr-protos.h (avr_output_bld): Remove unused prototype.
[official-gcc.git] / gcc / config / avr / avr.c
blobb287d7c7e7a92f1a7bfbcf5b8fa65dbde49df65a
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
40 #include "obstack.h"
41 #include "function.h"
42 #include "recog.h"
43 #include "optabs.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "params.h"
50 #include "df.h"
/* Maximal allowed offset for an address in the LD command.
   LD/LDD displacements reach at most 63; subtract the access size so the
   highest byte of a multi-byte access still fits.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.
   Note: evaluates PREFIX twice; only use with side-effect-free args.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in Flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fixed to expand the macro parameter SYM instead of hard-coding the
   identifier `sym', which made the macro work only when the caller's
   argument happened to be named `sym'.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).
   Columns: id, memory class, pointer size in bytes, user-visible name,
   64-KiB segment number — confirm field names against avr_addrspace_t
   in avr.h.  */
const avr_addrspace_t avr_addrspace[] =
{
  { ADDR_SPACE_RAM,    0, 2, ""        , 0 },
  { ADDR_SPACE_FLASH,  1, 2, "__flash" , 0 },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
  /* __memx uses 3-byte pointers and can address all of Flash and RAM.  */
  { ADDR_SPACE_MEMX,   1, 3, "__memx"  , 0 },
  /* Sentinel terminating the table.  */
  { 0, 0, 0, NULL, 0 }
};
/* Map 64-k Flash segment to section prefix.  Index N yields the section
   prefix used for data living in flash segment N.  */
static const char* const progmem_section_prefix[6] =
  {
    ".progmem.data",
    ".progmem1.data",
    ".progmem2.data",
    ".progmem3.data",
    ".progmem4.data",
    ".progmem5.data"
  };
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.
   The values are filled in by avr_option_override from the current
   architecture's sfr_offset.  */

typedef struct
{
  /* SREG: The processor status.  */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA.  */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM.  */
  int rampz;

  /* SP: The stack pointer and its low and high byte.  */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* The SFR addresses for the device currently being compiled for.  */
static avr_addr_t avr_addr;
/* Prototypes for local helper functions.  */

/* Assembler output helpers for QI/HI/SI register<->memory moves.  The
   int* out-parameter receives the instruction length (presumably counted
   in words — confirm against the definitions below).  */
static const char* out_movqi_r_mr (rtx, rtx[], int*);
static const char* out_movhi_r_mr (rtx, rtx[], int*);
static const char* out_movsi_r_mr (rtx, rtx[], int*);
static const char* out_movqi_mr_r (rtx, rtx[], int*);
static const char* out_movhi_mr_r (rtx, rtx[], int*);
static const char* out_movsi_mr_r (rtx, rtx[], int*);

/* Predicates for the function attributes the AVR port understands.  */
static int avr_naked_function_p (tree);
static int interrupt_function_p (tree);
static int signal_function_p (tree);
static int avr_OS_task_function_p (tree);
static int avr_OS_main_function_p (tree);

static int avr_regs_to_save (HARD_REG_SET *);
static int get_sequence_length (rtx insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (enum machine_mode, const_tree);
static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
                                 int, bool);
static void output_reload_in_const (rtx*, rtx, int*, bool);
static struct machine_function * avr_init_machine_status (void);


/* Prototypes for hook implementors if needed before their implementation.  */

static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
/* Allocate registers from r25 to r8 for parameters for function calls.
   FIRST_CUM_REG is one past the highest argument register; allocation
   counts downwards from here.  */
#define FIRST_CUM_REG 26

/* The GTY(()) extern/definition pairs below register the RTX globals
   with the garbage collector.  They are initialized lazily in
   avr_init_expanders.  */

/* Implicit target register of LPM instruction (R0).  */
extern GTY(()) rtx lpm_reg_rtx;
rtx lpm_reg_rtx;

/* (Implicit) address register of LPM instruction (R31:R30 = Z).  */
extern GTY(()) rtx lpm_addr_reg_rtx;
rtx lpm_addr_reg_rtx;

/* Temporary register RTX (reg:QI TMP_REGNO).  */
extern GTY(()) rtx tmp_reg_rtx;
rtx tmp_reg_rtx;

/* Zeroed register RTX (reg:QI ZERO_REGNO).  */
extern GTY(()) rtx zero_reg_rtx;
rtx zero_reg_rtx;

/* RTXs for all general purpose registers as QImode.  */
extern GTY(()) rtx all_regs_rtx[32];
rtx all_regs_rtx[32];

/* SREG, the processor status.  */
extern GTY(()) rtx sreg_rtx;
rtx sreg_rtx;

/* RAMP* special function registers.  */
extern GTY(()) rtx rampd_rtx;
extern GTY(()) rtx rampx_rtx;
extern GTY(()) rtx rampy_rtx;
extern GTY(()) rtx rampz_rtx;
rtx rampd_rtx;
rtx rampx_rtx;
rtx rampy_rtx;
rtx rampz_rtx;

/* RTX containing the strings "" and "e", respectively.  */
static GTY(()) rtx xstring_empty;
static GTY(()) rtx xstring_e;

/* Preprocessor macros to define depending on MCU type.  */
const char *avr_extra_arch_macro;

/* Current architecture.  */
const struct base_arch_s *avr_current_arch;

/* Current device.  */
const struct mcu_type_s *avr_current_device;

/* Section to put switch tables in.  */
static GTY(()) section *progmem_swtable_section;

/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
   or to address space __flash*.  One section per 64-KiB flash segment.  */
static GTY(()) section *progmem_section[6];

/* Condition for insns/expanders from avr-dimode.md.  */
bool avr_have_dimode = true;

/* To track if code will use .bss and/or .data.  */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;
/* Initialize the GCC target structure.  Each #undef/#define pair installs
   an AVR implementation for the corresponding hook in targetm.  */

/* Assembler output directives.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function values and calling convention.  */
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Attributes and sections.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Costs.  */
#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

/* Named address spaces (__flash*, __memx).  */
#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

/* Operand printing.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
/* Custom function to count number of set bits in VAL.  */

static inline int
avr_popcount (unsigned int val)
{
  int n_ones;

  /* Kernighan's method: each iteration clears the lowest set bit,
     so the loop runs once per 1-bit.  */

  for (n_ones = 0; val != 0; n_ones++)
    val &= val - 1;

  return n_ones;
}
377 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
378 Return true if the least significant N_BYTES bytes of XVAL all have a
379 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
380 of integers which contains an integer N iff bit N of POP_MASK is set. */
382 bool
383 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
385 int i;
387 enum machine_mode mode = GET_MODE (xval);
389 if (VOIDmode == mode)
390 mode = SImode;
392 for (i = 0; i < n_bytes; i++)
394 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
395 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
397 if (0 == (pop_mask & (1 << avr_popcount (val8))))
398 return false;
401 return true;
/* Implement `TARGET_OPTION_OVERRIDE'.  Adjust command-line flags for the
   AVR target, select the current device/architecture, and compute the
   RAM addresses of the SFRs the compiler needs.  */

static void
avr_option_override (void)
{
  /* On AVR, address 0 (RAM) is a valid object address, so null pointer
     checks must not be deleted.  */
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];
  avr_extra_arch_macro = avr_current_device->macro;

  /* RAM addresses of some SFRs common to all Devices in respective Arch.  */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
  avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
/* Function to set up the backend function structure.  Allocates a
   zero-initialized, GC-managed machine_function; installed as
   init_machine_status in avr_option_override.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
466 /* Implement `INIT_EXPANDERS'. */
467 /* The function works like a singleton. */
469 void
470 avr_init_expanders (void)
472 int regno;
474 for (regno = 0; regno < 32; regno ++)
475 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
477 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
478 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
479 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
481 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
483 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
484 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
485 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
486 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
487 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
489 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
490 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
/* Return register class for register R.  Registers 0..33 (r0-r31 plus
   SPL/SPH) are looked up in a table; anything else maps to ALL_REGS.  */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, 27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}
529 static bool
530 avr_scalar_mode_supported_p (enum machine_mode mode)
532 if (PSImode == mode)
533 return true;
535 return default_scalar_mode_supported_p (mode);
539 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
541 static bool
542 avr_decl_flash_p (tree decl)
544 if (TREE_CODE (decl) != VAR_DECL
545 || TREE_TYPE (decl) == error_mark_node)
547 return false;
550 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
554 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
555 address space and FALSE, otherwise. */
557 static bool
558 avr_decl_memx_p (tree decl)
560 if (TREE_CODE (decl) != VAR_DECL
561 || TREE_TYPE (decl) == error_mark_node)
563 return false;
566 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
570 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
572 bool
573 avr_mem_flash_p (rtx x)
575 return (MEM_P (x)
576 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
580 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
581 address space and FALSE, otherwise. */
583 bool
584 avr_mem_memx_p (rtx x)
586 return (MEM_P (x)
587 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
591 /* A helper for the subsequent function attribute used to dig for
592 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
594 static inline int
595 avr_lookup_function_attribute1 (const_tree func, const char *name)
597 if (FUNCTION_DECL == TREE_CODE (func))
599 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
601 return true;
604 func = TREE_TYPE (func);
607 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
608 || TREE_CODE (func) == METHOD_TYPE);
610 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
/* Return nonzero if FUNC is a naked function (attribute "naked":
   no prologue/epilogue is emitted for it).  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}
/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}
/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}
/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}
/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
/* Implement `ACCUMULATE_OUTGOING_ARGS'.  */
/* NOTE(review): return type reconstructed as int — confirm against
   avr-protos.h.  */

int
avr_accumulate_outgoing_args (void)
{
  if (!cfun)
    return TARGET_ACCUMULATE_OUTGOING_ARGS;

  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
        what offset is correct.  In some cases it is relative to
        virtual_outgoing_args_rtx and in others it is relative to
        virtual_stack_vars_rtx.  For example code see
            gcc.c-torture/execute/built-in-setjmp.c
            gcc.c-torture/execute/builtins/sprintf-chk.c   */

  return (TARGET_ACCUMULATE_OUTGOING_ARGS
          && !(cfun->calls_setjmp
               || cfun->has_nonlocal_label));
}
/* Report contribution of accumulated outgoing arguments to stack size.
   Zero when outgoing arguments are pushed instead of accumulated.  */

static inline int
avr_outgoing_args_size (void)
{
  return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
}
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.
   NOTE(review): return type reconstructed as int — confirm against
   avr-protos.h.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET
   (SET may be NULL if only the count is wanted).  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      /* An interrupt/signal handler in a non-leaf function must save
         call-used registers too, since the interrupted code may be
         using them.  */

      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
741 /* Return true if register FROM can be eliminated via register TO. */
743 static bool
744 avr_can_eliminate (const int from, const int to)
746 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
747 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
748 || ((from == FRAME_POINTER_REGNUM
749 || from == FRAME_POINTER_REGNUM + 1)
750 && !frame_pointer_needed));
/* Compute offset between arg_pointer and frame_pointer.
   NOTE(review): return type reconstructed as int — confirm against
   avr-protos.h.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      /* 2 bytes for the saved frame pointer pair, if any.  */
      int offset = frame_pointer_needed ? 2 : 0;
      /* Size of the return address pushed by CALL: 3 bytes on devices
         with EIJMP/EICALL, else 2.  */
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return (get_frame_size () + avr_outgoing_args_size()
              + avr_pc_size + 1 + offset);
    }
}
/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
}
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
   This is return address of function.  TEM is the frame pointer base rtx
   supplied by the caller.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address.  Others not supported.  */

  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      /* Only the low 2 of the 3 PC bytes can be recovered.  */
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "'builtin_return_address' contains only 2 bytes of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* The return address is pushed big-endian; swap the two bytes.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
/* Return 1 if the function epilogue is just a single "ret".
   NOTE(review): return type reconstructed as int — confirm against
   avr-protos.h.  */

int
avr_simple_epilogue (void)
{
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_outgoing_args_size() == 0
          && avr_regs_to_save (NULL) == 0
          && ! interrupt_function_p (current_function_decl)
          && ! signal_function_p (current_function_decl)
          && ! avr_naked_function_p (current_function_decl)
          && ! TREE_THIS_VOLATILE (current_function_decl));
}
/* This function checks sequence of live registers.  Used to decide whether
   the __prologue_saves__ / __epilogue_restores__ library sequences apply:
   they require the live call-saved registers to form one contiguous run
   ending at the frame pointer pair.  Returns the length of that run, or 0
   if the live registers are not contiguous.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq=0;   /* Total number of live call-saved regs (+FP pair).  */
  int cur_seq=0;    /* Length of the current contiguous run.  */

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* The frame pointer pair (r28/r29) is always saved/restored.  */
      cur_seq += 2;
      live_seq += 2;
    }

  /* cur_seq == live_seq iff all live registers form one run that reaches
     the end of the scan.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
/* Obtain the length sequence of insns: the summed length attribute of
   all insns in the list starting at INSNS.  */

static int
get_sequence_length (rtx insns)
{
  rtx insn;
  int length;

  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);

  return length;
}
/* Implement INCOMING_RETURN_ADDR_RTX.
   NOTE(review): return type reconstructed as rtx — confirm against
   avr-protos.h.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
}
/* Helper for expand_prologue.  Emit a push of a byte register REGNO.
   If FRAME_RELATED_P, mark the insn for dwarf2 CFI.  Also bumps the
   per-function stack usage counter.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg, insn;

  /* AVR PUSH is a post-decrement store through SP.  */
  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}
/* Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
   SFR is a MEM representing the memory location of the SFR.
   If CLR_P then clear the SFR after the push using zero_reg.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
/* Helper for expand_prologue (presumably its only caller — the caller is
   outside this view).  Save the registers in SET and establish a frame of
   SIZE bytes, either via the __prologue_saves__ library sequence
   (-mcall-prologues) or via explicit pushes plus a SP/FP adjustment.
   The order of emitted insns and of the REG_CFA_* notes is significant
   for dwarf2 CFI generation — do not reorder.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  /* The library prologue is only usable for ordinary functions with a
     contiguous run of live registers, see sequent_regs_live.  */

  bool minimize = (TARGET_CALL_PROLOGUES
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* __prologue_saves__ takes the frame size in X (r27:r26).  */

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = 18 - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      /* Push every register that avr_regs_to_save collected in SET.  */

      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /*  Creating a frame can be done by direct manipulation of the
              stack or via the frame pointer.  These two methods are:
                  fp = sp
                  fp -= size
                  sp = fp
              or
                  sp -= size
                  fp = sp    (*)
              the optimum method depends on function type, stack and
              frame size.  To avoid a complex logic, both methods are
              tested and shortest is selected.

              There is also the case where SIZE != 0 and no frame pointer is
              needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
              In that case, insn (*) is not needed.
              We use the X register as scratch.  This is safe because X
              is call-clobbered.
                 In an interrupt routine, the case of SIZE != 0 together with
              !frame_pointer_needed can only occur if the function is not a
              leaf function and thus X has already been saved.  */

          int irq_state = -1;
          rtx fp_plus_insns, fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !current_function_is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp,
                                         plus_constant (fp, -size)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (stack_pointer_rtx, -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1188 /* Output function prologue.  Despite the name this emits RTL, not text:
     it caches the function's attributes in cfun->machine, emits the
     interrupt/signal entry sequence, and delegates saving of registers
     and frame setup to avr_prologue_setup_frame. */
1190 void
1191 expand_prologue (void)
1193 HARD_REG_SET set;
1194 HOST_WIDE_INT size;
     /* Total frame: local frame plus space reserved for outgoing args
        (the latter is non-zero only with ACCUMULATE_OUTGOING_ARGS). */
1196 size = get_frame_size() + avr_outgoing_args_size();
1198 /* Init cfun->machine. */
1199 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1200 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1201 cfun->machine->is_signal = signal_function_p (current_function_decl);
1202 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1203 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
     /* Running total of bytes pushed/reserved; read later by
        avr_asm_function_end_prologue and -fstack-usage. */
1204 cfun->machine->stack_usage = 0;
1206 /* Prologue: naked. */
1207 if (cfun->machine->is_naked)
1209 return;
1212 avr_regs_to_save (&set);
1214 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1216 /* Enable interrupts. */
1217 if (cfun->machine->is_interrupt)
1218 emit_insn (gen_enable_interrupt ());
1220 /* Push zero reg. */
1221 emit_push_byte (ZERO_REGNO, true);
1223 /* Push tmp reg. */
1224 emit_push_byte (TMP_REGNO, true);
1226 /* Push SREG. */
1227 /* ??? There's no dwarf2 column reserved for SREG. */
1228 emit_push_sfr (sreg_rtx, false, false /* clr */);
1230 /* Clear zero reg. */
1231 emit_move_insn (zero_reg_rtx, const0_rtx);
1233 /* Prevent any attempt to delete the setting of ZERO_REG! */
1234 emit_use (zero_reg_rtx);
1236 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1237 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1239 if (AVR_HAVE_RAMPD)
1240 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1242 if (AVR_HAVE_RAMPX
1243 && TEST_HARD_REG_BIT (set, REG_X)
1244 && TEST_HARD_REG_BIT (set, REG_X + 1))
1246 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
     /* RAMPY must also be saved when Y serves as frame pointer, even if
        Y itself is not in SET. */
1249 if (AVR_HAVE_RAMPY
1250 && (frame_pointer_needed
1251 || (TEST_HARD_REG_BIT (set, REG_Y)
1252 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1254 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1257 if (AVR_HAVE_RAMPZ
1258 && TEST_HARD_REG_BIT (set, REG_Z)
1259 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1261 emit_push_sfr (rampz_rtx, false /* frame-related */, true /* clr */);
1263 } /* is_interrupt is_signal */
1265 avr_prologue_setup_frame (size, set);
1267 if (flag_stack_usage_info)
1268 current_function_static_stack_size = cfun->machine->stack_usage;
1271 /* Output summary at end of function prologue. */
1273 static void
1274 avr_asm_function_end_prologue (FILE *file)
1276 if (cfun->machine->is_naked)
1278 fputs ("/* prologue: naked */\n", file);
1280 else
1282 if (cfun->machine->is_interrupt)
1284 fputs ("/* prologue: Interrupt */\n", file);
1286 else if (cfun->machine->is_signal)
1288 fputs ("/* prologue: Signal */\n", file);
1290 else
1291 fputs ("/* prologue: function */\n", file);
1294 if (ACCUMULATE_OUTGOING_ARGS)
1295 fprintf (file, "/* outgoing args size = %d */\n",
1296 avr_outgoing_args_size());
1298 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1299 get_frame_size());
1300 fprintf (file, "/* stack size = %d */\n",
1301 cfun->machine->stack_usage);
1302 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1303 usage for offset so that SP + .L__stack_offset = return address. */
1304 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1308 /* Implement EPILOGUE_USES. */
1311 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1313 if (reload_completed
1314 && cfun->machine
1315 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1316 return 1;
1317 return 0;
1320 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1322 static void
1323 emit_pop_byte (unsigned regno)
1325 rtx mem, reg;
1327 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1328 mem = gen_frame_mem (QImode, mem);
1329 reg = gen_rtx_REG (QImode, regno);
1331 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1334 /* Output RTL epilogue.  Mirror of expand_prologue: tears down the
     frame (choosing the shorter of two stack-adjust sequences), pops the
     saved registers in reverse order of the prologue pushes, and emits
     the return unless SIBCALL_P. */
1336 void
1337 expand_epilogue (bool sibcall_p)
1339 int reg;
1340 int live_seq;
1341 HARD_REG_SET set;
1342 int minimize;
1343 HOST_WIDE_INT size;
1344 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1346 size = get_frame_size() + avr_outgoing_args_size();
1348 /* epilogue: naked */
1349 if (cfun->machine->is_naked)
1351 gcc_assert (!sibcall_p);
1353 emit_jump_insn (gen_return ());
1354 return;
1357 avr_regs_to_save (&set);
1358 live_seq = sequent_regs_live ();
     /* -mcall-prologues: restore via the library epilogue_restores
        sequence instead of individual pops. */
1360 minimize = (TARGET_CALL_PROLOGUES
1361 && live_seq
1362 && !isr_p
1363 && !cfun->machine->is_OS_task
1364 && !cfun->machine->is_OS_main);
1366 if (minimize
1367 && (live_seq > 4
1368 || frame_pointer_needed
1369 || size))
1371 /* Get rid of frame. */
1373 if (!frame_pointer_needed)
1375 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1378 if (size)
1380 emit_move_insn (frame_pointer_rtx,
1381 plus_constant (frame_pointer_rtx, size));
1384 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1385 return;
1388 if (size)
1390 /* Try two methods to adjust stack and select shortest. */
1392 int irq_state = -1;
1393 rtx fp, my_fp;
1394 rtx fp_plus_insns;
     /* Using X as scratch below is only valid if X was saved, i.e. the
        function is not an ISR leaf; see the matching prologue code. */
1396 gcc_assert (frame_pointer_needed
1397 || !isr_p
1398 || !current_function_is_leaf);
1400 fp = my_fp = (frame_pointer_needed
1401 ? frame_pointer_rtx
1402 : gen_rtx_REG (Pmode, REG_X));
1404 if (AVR_HAVE_8BIT_SP)
1406 /* The high byte (r29) does not change:
1407 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1409 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1412 /********** Method 1: Adjust fp register **********/
1414 start_sequence ();
1416 if (!frame_pointer_needed)
1417 emit_move_insn (fp, stack_pointer_rtx);
1419 emit_move_insn (my_fp, plus_constant (my_fp, size));
1421 /* Copy to stack pointer. */
     /* irq_state selects the interrupt-safety variant of movhi_sp_r;
        see that insn's comment for the encoding. */
1423 if (TARGET_NO_INTERRUPTS)
1424 irq_state = 0;
1426 if (AVR_HAVE_8BIT_SP)
1427 irq_state = 2;
1429 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1430 GEN_INT (irq_state)));
1432 fp_plus_insns = get_insns ();
1433 end_sequence ();
1435 /********** Method 2: Adjust Stack pointer **********/
     /* Only possible if the offset fits the ADIW/sequence-of-pops forms
        accepted by avr_sp_immediate_operand. */
1437 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1439 rtx sp_plus_insns;
1441 start_sequence ();
1443 emit_move_insn (stack_pointer_rtx,
1444 plus_constant (stack_pointer_rtx, size));
1446 sp_plus_insns = get_insns ();
1447 end_sequence ();
1449 /************ Use shortest method ************/
1451 emit_insn (get_sequence_length (sp_plus_insns)
1452 < get_sequence_length (fp_plus_insns)
1453 ? sp_plus_insns
1454 : fp_plus_insns);
1456 else
1457 emit_insn (fp_plus_insns);
1458 } /* size != 0 */
1460 if (frame_pointer_needed
1461 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1463 /* Restore previous frame_pointer. See expand_prologue for
1464 rationale for not using pophi. */
1466 emit_pop_byte (REG_Y + 1);
1467 emit_pop_byte (REG_Y);
1470 /* Restore used registers. */
1472 for (reg = 31; reg >= 0; --reg)
1473 if (TEST_HARD_REG_BIT (set, reg))
1474 emit_pop_byte (reg);
1476 if (isr_p)
1478 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1479 The conditions to restore them must be the same as in prologue. */
1481 if (AVR_HAVE_RAMPX
1482 && TEST_HARD_REG_BIT (set, REG_X)
1483 && TEST_HARD_REG_BIT (set, REG_X + 1))
1485 emit_pop_byte (TMP_REGNO)
1486 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1489 if (AVR_HAVE_RAMPY
1490 && (frame_pointer_needed
1491 || (TEST_HARD_REG_BIT (set, REG_Y)
1492 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1494 emit_pop_byte (TMP_REGNO);
1495 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1498 if (AVR_HAVE_RAMPZ
1499 && TEST_HARD_REG_BIT (set, REG_Z)
1500 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1502 emit_pop_byte (TMP_REGNO);
1503 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1506 if (AVR_HAVE_RAMPD)
1508 emit_pop_byte (TMP_REGNO);
1509 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1512 /* Restore SREG using tmp_reg as scratch. */
1514 emit_pop_byte (TMP_REGNO);
1515 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1517 /* Restore tmp REG. */
1518 emit_pop_byte (TMP_REGNO);
1520 /* Restore zero REG. */
1521 emit_pop_byte (ZERO_REGNO);
1524 if (!sibcall_p)
1525 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue:
   a single marker comment in the assembler output. */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
1537 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1539 static bool
1540 avr_cannot_modify_jumps_p (void)
1543 /* Naked Functions must not have any instructions after
1544 their epilogue, see PR42240 */
1546 if (reload_completed
1547 && cfun->machine
1548 && cfun->machine->is_naked)
1550 return true;
1553 return false;
1557 /* Helper function for `avr_legitimate_address_p'. */
1559 static inline bool
1560 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1561 RTX_CODE outer_code, bool strict)
1563 return (REG_P (reg)
1564 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1565 as, outer_code, UNKNOWN)
1566 || (!strict
1567 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1571 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1572 machine for a memory operand of mode MODE. */
1574 static bool
1575 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
     /* Symbolic / constant addresses are legitimate by default;
        the switch below refines the register-based forms. */
1577 bool ok = CONSTANT_ADDRESS_P (x);
1579 switch (GET_CODE (x))
1581 case REG:
1582 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1583 MEM, strict);
     /* X (r26/27) cannot address a DImode value: not enough
        displacement room. */
1585 if (strict
1586 && DImode == mode
1587 && REG_X == REGNO (x))
1589 ok = false;
1591 break;
1593 case POST_INC:
1594 case PRE_DEC:
1595 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1596 GET_CODE (x), strict);
1597 break;
1599 case PLUS:
1601 rtx reg = XEXP (x, 0);
1602 rtx op1 = XEXP (x, 1);
     /* Only base + non-negative constant displacement is valid. */
1604 if (REG_P (reg)
1605 && CONST_INT_P (op1)
1606 && INTVAL (op1) >= 0)
     /* Displacement must fit the LDD/STD range for MODE. */
1608 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1610 if (fit)
1612 ok = (! strict
1613 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1614 PLUS, strict));
     /* Frame/arg pointer offsets are always accepted; they get
        fixed up by elimination/reload later. */
1616 if (reg == frame_pointer_rtx
1617 || reg == arg_pointer_rtx)
1619 ok = true;
1622 else if (frame_pointer_needed
1623 && reg == frame_pointer_rtx)
1625 ok = true;
1629 break;
1631 default:
1632 break;
     /* Optional tracing controlled by -mlog=legitimate_address_p. */
1635 if (avr_log.legitimate_address_p)
1637 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1638 "reload_completed=%d reload_in_progress=%d %s:",
1639 ok, mode, strict, reload_completed, reload_in_progress,
1640 reg_renumber ? "(reg_renumber)" : "");
1642 if (GET_CODE (x) == PLUS
1643 && REG_P (XEXP (x, 0))
1644 && CONST_INT_P (XEXP (x, 1))
1645 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1646 && reg_renumber)
1648 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1649 true_regnum (XEXP (x, 0)));
1652 avr_edump ("\n%r\n", x);
1655 return ok;
1659 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1660 now only a helper for avr_addr_space_legitimize_address. */
1661 /* Attempts to replace X with a valid
1662 memory address for an operand of mode MODE */
1664 static rtx
1665 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1667 bool big_offset_p = false;
1669 x = oldx;
1671 if (GET_CODE (oldx) == PLUS
1672 && REG_P (XEXP (oldx, 0)))
     /* reg + reg: no such addressing mode on AVR, force into a reg. */
1674 if (REG_P (XEXP (oldx, 1)))
1675 x = force_reg (GET_MODE (oldx), oldx);
1676 else if (CONST_INT_P (XEXP (oldx, 1)))
     /* reg + const: only reload can fix up over-long displacements off
        the frame pointer; everything else is forced into a reg when the
        offset exceeds the LDD/STD range. */
1678 int offs = INTVAL (XEXP (oldx, 1));
1679 if (frame_pointer_rtx != XEXP (oldx, 0)
1680 && offs > MAX_LD_OFFSET (mode))
1682 big_offset_p = true;
1683 x = force_reg (GET_MODE (oldx), oldx);
     /* Optional tracing controlled by -mlog=legitimize_address. */
1688 if (avr_log.legitimize_address)
1690 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1692 if (x != oldx)
1693 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1696 return x;
1700 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1701 /* This will allow register R26/27 to be used where it is no worse than normal
1702 base pointers R28/29 or R30/31. For example, if base offset is greater
1703 than 63 bytes or for R++ or --R addressing. */
1706 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1707 int opnum, int type, int addr_type,
1708 int ind_levels ATTRIBUTE_UNUSED,
1709 rtx (*mk_memloc)(rtx,int))
1711 rtx x = *px;
1713 if (avr_log.legitimize_reload_address)
1714 avr_edump ("\n%?:%m %r\n", mode, x);
     /* NOTE(review): the "1 &&" is a vestigial always-true guard. */
     /* R++ / --R: reload the inner register into a pointer register. */
1716 if (1 && (GET_CODE (x) == POST_INC
1717 || GET_CODE (x) == PRE_DEC))
1719 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1720 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1721 opnum, RELOAD_OTHER);
1723 if (avr_log.legitimize_reload_address)
1724 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1725 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1727 return x;
     /* base + positive constant displacement. */
1730 if (GET_CODE (x) == PLUS
1731 && REG_P (XEXP (x, 0))
1732 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1733 && CONST_INT_P (XEXP (x, 1))
1734 && INTVAL (XEXP (x, 1)) >= 1)
     /* Displacement within LDD/STD range? */
1736 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1738 if (fit)
     /* Base register lives in memory: reload its address into a
        pointer reg, then the loaded base into a base-pointer reg. */
1740 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1742 int regno = REGNO (XEXP (x, 0));
1743 rtx mem = mk_memloc (x, regno);
1745 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1746 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1747 1, addr_type);
1749 if (avr_log.legitimize_reload_address)
1750 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1751 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1753 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1754 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1755 opnum, type);
1757 if (avr_log.legitimize_reload_address)
1758 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1759 BASE_POINTER_REGS, mem, NULL_RTX);
1761 return x;
     /* Too-large displacement: reload the whole address (X may then end
        up in R26/27), except off the hard frame pointer. */
1764 else if (! (frame_pointer_needed
1765 && XEXP (x, 0) == frame_pointer_rtx))
1767 push_reload (x, NULL_RTX, px, NULL,
1768 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1769 opnum, type);
1771 if (avr_log.legitimize_reload_address)
1772 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1773 POINTER_REGS, x, NULL_RTX);
1775 return x;
     /* NULL means: let the generic reload machinery handle it. */
1779 return NULL_RTX;
1783 /* Helper function to print assembler resp. track instruction
1784 sequence lengths. Always return "".
1786 If PLEN == NULL:
1787 Output assembler code from template TPL with operands supplied
1788 by OPERANDS. This is just forwarding to output_asm_insn.
1790 If PLEN != NULL:
1791 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1792 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1793 Don't output anything.
1796 static const char*
1797 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1799 if (NULL == plen)
1801 output_asm_insn (tpl, operands);
1803 else
1805 if (n_words < 0)
1806 *plen = -n_words;
1807 else
1808 *plen += n_words;
1811 return "";
1815 /* Return a pointer register name as a string. */
1817 static const char *
1818 ptrreg_to_str (int regno)
1820 switch (regno)
1822 case REG_X: return "X";
1823 case REG_Y: return "Y";
1824 case REG_Z: return "Z";
1825 default:
1826 output_operand_lossage ("address operand requires constraint for"
1827 " X, Y, or Z register");
1829 return NULL;
1832 /* Return the condition name as a string.
1833 Used in conditional jump constructing */
1835 static const char *
1836 cond_string (enum rtx_code code)
1838 switch (code)
1840 case NE:
1841 return "ne";
1842 case EQ:
1843 return "eq";
1844 case GE:
1845 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1846 return "pl";
1847 else
1848 return "ge";
1849 case LT:
1850 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1851 return "mi";
1852 else
1853 return "lt";
1854 case GEU:
1855 return "sh";
1856 case LTU:
1857 return "lo";
1858 default:
1859 gcc_unreachable ();
1862 return "";
1866 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1867 /* Output ADDR to FILE as address. */
1869 static void
1870 avr_print_operand_address (FILE *file, rtx addr)
1872 switch (GET_CODE (addr))
1874 case REG:
1875 fprintf (file, ptrreg_to_str (REGNO (addr)));
1876 break;
1878 case PRE_DEC:
1879 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1880 break;
1882 case POST_INC:
1883 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1884 break;
1886 default:
1887 if (CONSTANT_ADDRESS_P (addr)
1888 && text_segment_operand (addr, VOIDmode))
1890 rtx x = addr;
1891 if (GET_CODE (x) == CONST)
1892 x = XEXP (x, 0);
1893 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1895 /* Assembler gs() will implant word address. Make offset
1896 a byte offset inside gs() for assembler. This is
1897 needed because the more logical (constant+gs(sym)) is not
1898 accepted by gas. For 128K and lower devices this is ok.
1899 For large devices it will create a Trampoline to offset
1900 from symbol which may not be what the user really wanted. */
1901 fprintf (file, "gs(");
1902 output_addr_const (file, XEXP (x,0));
1903 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1904 2 * INTVAL (XEXP (x, 1)));
1905 if (AVR_3_BYTE_PC)
1906 if (warning (0, "pointer offset from symbol maybe incorrect"))
1908 output_addr_const (stderr, addr);
1909 fprintf(stderr,"\n");
1912 else
1914 fprintf (file, "gs(");
1915 output_addr_const (file, addr);
1916 fprintf (file, ")");
1919 else
1920 output_addr_const (file, addr);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.
   The only punctuation codes recognized by avr_print_operand are
   '~' (rjmp/rcall adjustment) and '!' (eijmp/eicall adjustment). */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  return ('~' == code
          || '!' == code);
}
1934 /* Implement `TARGET_PRINT_OPERAND'. */
1935 /* Output X as assembler operand to file FILE.
1936 For a description of supported %-codes, see top of avr.md. */
1938 static void
1939 avr_print_operand (FILE *file, rtx x, int code)
     /* %A..%D select byte 0..3 of a multi-byte operand. */
1941 int abcd = 0;
1943 if (code >= 'A' && code <= 'D')
1944 abcd = code - 'A';
1946 if (code == '~')
     /* Devices without JMP/CALL use the relative forms RJMP/RCALL. */
1948 if (!AVR_HAVE_JMP_CALL)
1949 fputc ('r', file);
1951 else if (code == '!')
1953 if (AVR_HAVE_EIJMP_EICALL)
1954 fputc ('e', file);
1956 else if (code == 't'
1957 || code == 'T')
     /* %T/%t print a bit-test operand pair: first call records the
        register (%T REG), second call prints register+bit position for
        a CONST_INT.  State is kept across the two calls. */
1959 static int t_regno = -1;
1960 static int t_nbits = -1;
1962 if (REG_P (x) && t_regno < 0 && code == 'T')
1964 t_regno = REGNO (x);
1965 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1967 else if (CONST_INT_P (x) && t_regno >= 0
1968 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1970 int bpos = INTVAL (x);
1972 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1973 if (code == 'T')
1974 fprintf (file, ",%d", bpos % 8);
1976 t_regno = -1;
1978 else
1979 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1981 else if (REG_P (x))
1983 if (x == zero_reg_rtx)
1984 fprintf (file, "__zero_reg__");
1985 else
     /* NOTE(review): non-literal format string; benign only as long as
        reg_names entries contain no '%' -- fputs would be safer. */
1986 fprintf (file, reg_names[true_regnum (x) + abcd]);
1988 else if (CONST_INT_P (x))
1990 HOST_WIDE_INT ival = INTVAL (x);
1992 if ('i' != code)
1993 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
     /* %i: print an I/O address symbolically where a well-known SFR
        name exists, otherwise as hex with the SFR offset removed. */
1994 else if (low_io_address_operand (x, VOIDmode)
1995 || high_io_address_operand (x, VOIDmode))
1997 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
1998 fprintf (file, "__RAMPZ__");
1999 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2000 fprintf (file, "__RAMPY__");
2001 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2002 fprintf (file, "__RAMPX__");
2003 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2004 fprintf (file, "__RAMPD__");
2005 else if (AVR_XMEGA && ival == avr_addr.ccp)
2006 fprintf (file, "__CCP__");
2007 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2008 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2009 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2010 else
2012 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2013 ival - avr_current_arch->sfr_offset);
2016 else
2017 fatal_insn ("bad address, not an I/O address:", x);
2019 else if (MEM_P (x))
2021 rtx addr = XEXP (x, 0);
2023 if (code == 'm')
2025 if (!CONSTANT_P (addr))
2026 fatal_insn ("bad address, not a constant:", addr);
2027 /* Assembler template with m-code is data - not progmem section */
2028 if (text_segment_operand (addr, VOIDmode))
2029 if (warning (0, "accessing data memory with"
2030 " program memory address"))
2032 output_addr_const (stderr, addr);
2033 fprintf(stderr,"\n");
2035 output_addr_const (file, addr);
2037 else if (code == 'i')
2039 avr_print_operand (file, addr, 'i');
2041 else if (code == 'o')
     /* %o: print just the displacement of a (reg+disp) address. */
2043 if (GET_CODE (addr) != PLUS)
2044 fatal_insn ("bad address, not (reg+disp):", addr);
2046 avr_print_operand (file, XEXP (addr, 1), 0);
2048 else if (code == 'p' || code == 'r')
     /* %p: pointer register name; %r: its plain register name. */
2050 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2051 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2053 if (code == 'p')
2054 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2055 else
2056 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2058 else if (GET_CODE (addr) == PLUS)
2060 avr_print_operand_address (file, XEXP (addr,0));
2061 if (REGNO (XEXP (addr, 0)) == REG_X)
2062 fatal_insn ("internal compiler error. Bad address:"
2063 ,addr);
2064 fputc ('+', file);
2065 avr_print_operand (file, XEXP (addr,1), code);
2067 else
2068 avr_print_operand_address (file, addr);
2070 else if (code == 'i')
2072 fatal_insn ("bad address, not an I/O address:", x);
2074 else if (code == 'x')
2076 /* Constant progmem address - like used in jmp or call */
2077 if (0 == text_segment_operand (x, VOIDmode))
2078 if (warning (0, "accessing program memory"
2079 " with data memory address"))
2081 output_addr_const (stderr, x);
2082 fprintf(stderr,"\n");
2084 /* Use normal symbol for direct address no linker trampoline needed */
2085 output_addr_const (file, x);
2087 else if (GET_CODE (x) == CONST_DOUBLE)
     /* Float constant: emit the IEEE single bit pattern as hex. */
2089 long val;
2090 REAL_VALUE_TYPE rv;
2091 if (GET_MODE (x) != SFmode)
2092 fatal_insn ("internal compiler error. Unknown mode:", x);
2093 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2094 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2095 fprintf (file, "0x%lx", val);
2097 else if (GET_CODE (x) == CONST_STRING)
2098 fputs (XSTR (x, 0), file);
2099 else if (code == 'j')
2100 fputs (cond_string (GET_CODE (x)), file);
2101 else if (code == 'k')
2102 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2103 else
2104 avr_print_operand_address (file, x);
2107 /* Update the condition code in the INSN.  Maps the insn's CC attribute
     (possibly a "special" value that depends on the operands) onto the
     cc_status tracking used by final. */
2109 void
2110 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2112 rtx set;
2113 enum attr_cc cc = get_attr_cc (insn);
2115 switch (cc)
2117 default:
2118 break;
     /* These attribute values cannot be resolved statically; re-run the
        output functions in "compute only" mode to find the real CC effect. */
2120 case CC_OUT_PLUS:
2121 case CC_OUT_PLUS_NOCLOBBER:
2122 case CC_LDI:
2124 rtx *op = recog_data.operand;
2125 int len_dummy, icc;
2127 /* Extract insn's operands. */
2128 extract_constrain_insn_cached (insn);
2130 switch (cc)
2132 default:
2133 gcc_unreachable();
2135 case CC_OUT_PLUS:
2136 avr_out_plus (op, &len_dummy, &icc);
2137 cc = (enum attr_cc) icc;
2138 break;
2140 case CC_OUT_PLUS_NOCLOBBER:
2141 avr_out_plus_noclobber (op, &len_dummy, &icc);
2142 cc = (enum attr_cc) icc;
2143 break;
2145 case CC_LDI:
2147 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2148 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2149 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2150 ? CC_CLOBBER
2151 /* Any other "r,rL" combination does not alter cc0. */
2152 : CC_NONE;
2154 break;
2155 } /* inner switch */
2157 break;
2159 } /* outer switch */
     /* Now CC holds a plain value; update cc_status accordingly. */
2161 switch (cc)
2163 default:
2164 /* Special values like CC_OUT_PLUS from above have been
2165 mapped to "standard" CC_* values so we never come here. */
2167 gcc_unreachable();
2168 break;
2170 case CC_NONE:
2171 /* Insn does not affect CC at all. */
2172 break;
2174 case CC_SET_N:
2175 CC_STATUS_INIT;
2176 break;
2178 case CC_SET_ZN:
2179 set = single_set (insn);
2180 CC_STATUS_INIT;
2181 if (set)
2183 cc_status.flags |= CC_NO_OVERFLOW;
2184 cc_status.value1 = SET_DEST (set);
2186 break;
2188 case CC_SET_CZN:
2189 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2190 The V flag may or may not be known but that's ok because
2191 alter_cond will change tests to use EQ/NE. */
2192 set = single_set (insn);
2193 CC_STATUS_INIT;
2194 if (set)
2196 cc_status.value1 = SET_DEST (set);
2197 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2199 break;
2201 case CC_COMPARE:
2202 set = single_set (insn);
2203 CC_STATUS_INIT;
2204 if (set)
2205 cc_status.value1 = SET_SRC (set);
2206 break;
2208 case CC_CLOBBER:
2209 /* Insn doesn't leave CC in a usable state. */
2210 CC_STATUS_INIT;
2211 break;
2215 /* Choose mode for jump insn:
2216 1 - relative jump in range -63 <= x <= 62 ;
2217 2 - relative jump in range -2046 <= x <= 2045 ;
2218 3 - absolute jump (only for ATmega[16]03). */
2221 avr_jump_mode (rtx x, rtx insn)
2223 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2224 ? XEXP (x, 0) : x));
2225 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2226 int jump_distance = cur_addr - dest_addr;
2228 if (-63 <= jump_distance && jump_distance <= 62)
2229 return 1;
2230 else if (-2046 <= jump_distance && jump_distance <= 2045)
2231 return 2;
2232 else if (AVR_HAVE_JMP_CALL)
2233 return 3;
2235 return 2;
2238 /* return an AVR condition jump commands.
2239 X is a comparison RTX.
2240 LEN is a number returned by avr_jump_mode function.
2241 if REVERSE nonzero then condition code in X must be reversed.
     LEN == 1 emits a direct conditional branch; LEN == 2 a reversed
     branch around an RJMP; otherwise a reversed branch around a JMP.
     GT/GTU/LE/LEU have no single branch insn on AVR and are composed
     from BREQ plus a signed/unsigned branch. */
2243 const char *
2244 ret_cond_branch (rtx x, int len, int reverse)
2246 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2248 switch (cond)
2250 case GT:
     /* V unusable: fall back to sign tests (BRPL/BRMI) like cond_string. */
2251 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2252 return (len == 1 ? ("breq .+2" CR_TAB
2253 "brpl %0") :
2254 len == 2 ? ("breq .+4" CR_TAB
2255 "brmi .+2" CR_TAB
2256 "rjmp %0") :
2257 ("breq .+6" CR_TAB
2258 "brmi .+4" CR_TAB
2259 "jmp %0"));
2261 else
2262 return (len == 1 ? ("breq .+2" CR_TAB
2263 "brge %0") :
2264 len == 2 ? ("breq .+4" CR_TAB
2265 "brlt .+2" CR_TAB
2266 "rjmp %0") :
2267 ("breq .+6" CR_TAB
2268 "brlt .+4" CR_TAB
2269 "jmp %0"));
2270 case GTU:
2271 return (len == 1 ? ("breq .+2" CR_TAB
2272 "brsh %0") :
2273 len == 2 ? ("breq .+4" CR_TAB
2274 "brlo .+2" CR_TAB
2275 "rjmp %0") :
2276 ("breq .+6" CR_TAB
2277 "brlo .+4" CR_TAB
2278 "jmp %0"));
2279 case LE:
2280 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2281 return (len == 1 ? ("breq %0" CR_TAB
2282 "brmi %0") :
2283 len == 2 ? ("breq .+2" CR_TAB
2284 "brpl .+2" CR_TAB
2285 "rjmp %0") :
2286 ("breq .+2" CR_TAB
2287 "brpl .+4" CR_TAB
2288 "jmp %0"));
2289 else
2290 return (len == 1 ? ("breq %0" CR_TAB
2291 "brlt %0") :
2292 len == 2 ? ("breq .+2" CR_TAB
2293 "brge .+2" CR_TAB
2294 "rjmp %0") :
2295 ("breq .+2" CR_TAB
2296 "brge .+4" CR_TAB
2297 "jmp %0"));
2298 case LEU:
2299 return (len == 1 ? ("breq %0" CR_TAB
2300 "brlo %0") :
2301 len == 2 ? ("breq .+2" CR_TAB
2302 "brsh .+2" CR_TAB
2303 "rjmp %0") :
2304 ("breq .+2" CR_TAB
2305 "brsh .+4" CR_TAB
2306 "jmp %0"));
2307 default:
     /* Simple conditions: %j1 prints the condition, %k1 its reverse. */
2308 if (reverse)
2310 switch (len)
2312 case 1:
2313 return "br%k1 %0";
2314 case 2:
2315 return ("br%j1 .+2" CR_TAB
2316 "rjmp %0");
2317 default:
2318 return ("br%j1 .+4" CR_TAB
2319 "jmp %0");
2322 else
2324 switch (len)
2326 case 1:
2327 return "br%j1 %0";
2328 case 2:
2329 return ("br%k1 .+2" CR_TAB
2330 "rjmp %0");
2331 default:
2332 return ("br%k1 .+4" CR_TAB
2333 "jmp %0");
2337 return "";
2340 /* Output insn cost for next insn.  Debug aid: when -mlog=rtx_costs is
     active, emit the insn's computed RTX cost as an assembler comment
     just before the insn itself. */
2342 void
2343 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2344 int num_operands ATTRIBUTE_UNUSED)
2346 if (avr_log.rtx_costs)
2348 rtx set = single_set (insn);
2350 if (set)
2351 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2352 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2353 else
     /* No single SET: fall back to costing the whole pattern. */
2354 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2355 rtx_cost (PATTERN (insn), INSN, 0,
2356 optimize_insn_for_speed_p()));
2360 /* Return 0 if undefined, 1 if always true or always false.
     Used to detect comparisons of a MODE-sized value against a constant
     that can never change the outcome (e.g. unsigned x > 0xffff in
     HImode). */
2363 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
     /* MAX is the largest representable unsigned value in MODE;
        0 for modes we do not handle. */
2365 unsigned int max = (mode == QImode ? 0xff :
2366 mode == HImode ? 0xffff :
2367 mode == PSImode ? 0xffffff :
2368 mode == SImode ? 0xffffffff : 0);
2369 if (max && op && GET_CODE (x) == CONST_INT)
     /* Signed comparison: only half the range is reachable. */
2371 if (unsigned_condition (op) != op)
2372 max >>= 1;
     /* NOTE(review): the extra "INTVAL (x) != 0xff" exemption looks
        mode-independent on purpose or by accident -- confirm intent
        before touching; changing it alters branch generation. */
2374 if (max != (INTVAL (x) & max)
2375 && INTVAL (x) != 0xff)
2376 return 1;
2378 return 0;
/* Returns nonzero if R is the number of a hard register in which
   function arguments are sometimes passed: the AVR ABI passes
   arguments in r8 ... r25. */

int
function_arg_regno_p (int r)
{
  return 8 <= r && r <= 25;
}
2391 /* Initializing the variable cum for the state at the beginning
2392 of the argument list.  Arguments are allocated downwards starting
     at FIRST_CUM_REG; NREGS is the number of bytes still available. */
2394 void
2395 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2396 tree fndecl ATTRIBUTE_UNUSED)
2398 cum->nregs = 18;
2399 cum->regno = FIRST_CUM_REG;
     /* Variadic functions pass everything on the stack.
        NOTE(review): assumes FNTYPE is non-NULL whenever LIBNAME is
        NULL -- confirm against callers before relying on it. */
2400 if (!libname && stdarg_p (fntype))
2401 cum->nregs = 0;
2403 /* Assume the callee may be tail-called. */
2405 cfun->machine->sibcall_fails = 0;
2408 /* Returns the number of registers to allocate for a function argument. */
2410 static int
2411 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2413 int size;
2415 if (mode == BLKmode)
2416 size = int_size_in_bytes (type);
2417 else
2418 size = GET_MODE_SIZE (mode);
2420 /* Align all function arguments to start in even-numbered registers.
2421 Odd-sized arguments leave holes above them. */
2423 return (size + 1) & ~1;
2426 /* Controls whether a function argument is passed
2427 in a register, and which register. */
2429 static rtx
2430 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2431 const_tree type, bool named ATTRIBUTE_UNUSED)
2433 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2434 int bytes = avr_num_arg_regs (mode, type);
2436 if (cum->nregs && bytes <= cum->nregs)
2437 return gen_rtx_REG (mode, cum->regno - bytes);
2439 return NULL_RTX;
2442 /* Update the summarizer variable CUM to advance past an argument
2443 in the argument list.  Also records conditions that rule out
     tail-calling and warns about fixed registers used for passing. */
2445 static void
2446 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2447 const_tree type, bool named ATTRIBUTE_UNUSED)
2449 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2450 int bytes = avr_num_arg_regs (mode, type);
     /* Registers are allocated downwards from FIRST_CUM_REG. */
2452 cum->nregs -= bytes;
2453 cum->regno -= bytes;
2455 /* A parameter is being passed in a call-saved register. As the original
2456 contents of these regs has to be restored before leaving the function,
2457 a function must not pass arguments in call-saved regs in order to get
2458 tail-called. */
2460 if (cum->regno >= 8
2461 && cum->nregs >= 0
2462 && !call_used_regs[cum->regno])
2464 /* FIXME: We ship info on failing tail-call in struct machine_function.
2465 This uses internals of calls.c:expand_call() and the way args_so_far
2466 is used. targetm.function_ok_for_sibcall() needs to be extended to
2467 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2468 dependent so that such an extension is not wanted. */
2470 cfun->machine->sibcall_fails = 1;
2473 /* Test if all registers needed by the ABI are actually available. If the
2474 user has fixed a GPR needed to pass an argument, an (implicit) function
2475 call will clobber that fixed register. See PR45099 for an example. */
2477 if (cum->regno >= 8
2478 && cum->nregs >= 0)
2480 int regno;
2482 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2483 if (fixed_regs[regno])
2484 warning (0, "fixed register %s used to pass parameter to function",
2485 reg_names[regno]);
     /* Out of registers: all further arguments go on the stack. */
2488 if (cum->nregs <= 0)
2490 cum->nregs = 0;
2491 cum->regno = FIRST_CUM_REG;
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  Returns false whenever caller and
   callee might need incompatible prologue/epilogue handling.  */

static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* Indirect call: strip pointer/array levels until we reach the
         function (or method) type itself.  */

      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (interrupt_function_p (current_function_decl)
      || signal_function_p (current_function_decl)
      || avr_naked_function_p (decl_callee)
      || avr_naked_function_p (current_function_decl)
      /* FIXME: For OS_task and OS_main, we are over-conservative.
         This is due to missing documentation of these attributes
         and what they actually should do and should not do. */
      || (avr_OS_task_function_p (decl_callee)
          != avr_OS_task_function_p (current_function_decl))
      || (avr_OS_main_function_p (decl_callee)
          != avr_OS_main_function_p (current_function_decl)))
    {
      return false;
    }

  return true;
}
2554 /***********************************************************************
2555 Functions for outputting various mov's for a various modes
2556 ************************************************************************/
2558 /* Return true if a value of mode MODE is read from flash by
2559 __load_* function from libgcc. */
2561 bool
2562 avr_load_libgcc_p (rtx op)
2564 enum machine_mode mode = GET_MODE (op);
2565 int n_bytes = GET_MODE_SIZE (mode);
2567 return (n_bytes > 2
2568 && !AVR_HAVE_LPMX
2569 && avr_mem_flash_p (op));
2572 /* Return true if a value of mode MODE is read by __xload_* function. */
2574 bool
2575 avr_xload_libgcc_p (enum machine_mode mode)
2577 int n_bytes = GET_MODE_SIZE (mode);
2579 return (n_bytes > 1
2580 || avr_current_device->n_flash > 1);
/* Find an unused d-register to be used as scratch in INSN.
   EXCLUDE is either NULL_RTX or some register.  In the case where EXCLUDE
   is a register, skip all possible return values that overlap EXCLUDE.
   The policy for the returned register is similar to that of
   `reg_unused_after', i.e. the returned register may overlap the SET_DEST
   of INSN.

   Return a QImode d-register or NULL_RTX if nothing found.  */

static rtx
avr_find_unused_d_reg (rtx insn, rtx exclude)
{
  int regno;
  bool isr_p = (interrupt_function_p (current_function_decl)
                || signal_function_p (current_function_decl));

  /* d-registers are r16...r31.  */

  for (regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      /* Skip registers that overlap EXCLUDE and registers the user
         fixed on the command line.  */

      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  return NULL_RTX;
}
/* Helper function for the next function in the case where only restricted
   version of LPM instruction is available.  Plain LPM always loads into
   r0 (the implicit LPM target), so values are bounced through r0 and the
   Z pointer is stepped with explicit ADIW.

   XOP[] is laid out by the caller (avr_out_lpm):
     %0 = destination reg, %1 = address, %2 = Z, %4 = ""/"e" prefix.
   %3 is set here to the implicit LPM target register.  */

static const char*
avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          avr_asm_len ("%4lpm", xop, plen, 1);

          /* No move needed if the destination already is r0.  */

          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          if (REGNO (dest) == REG_Z)
            /* Destination overlaps Z: save the low byte on the stack
               until Z is no longer needed as the address.  */
            return avr_asm_len ("%4lpm"      CR_TAB
                                "push %3"    CR_TAB
                                "adiw %2,1"  CR_TAB
                                "%4lpm"      CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm"      CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1"  CR_TAB
                       "%4lpm"      CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Restore Z if it is still needed afterwards.  */

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm"      CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
/* If PLEN == NULL: Ouput instructions to load a value from a memory location
   OP[1] in AS1 to register OP[0].
   If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
   Return "".

   xop[] layout handed to the templates (and to avr_out_lpm_no_lpmx):
     %0 = dest, %1 = address, %2 = Z, %4 = ""/"e" LPM prefix, %5 = r0.  */

static const char*
avr_out_lpm (rtx insn, rtx *op, int *plen)
{
  rtx xop[6];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* Stores into flash address spaces are not supported; warn and
     emit nothing rather than ICE.  */

  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;

  regno_dest = REGNO (dest);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);

      /* Preferred: load the segment number via a free d-register.  */

      if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
          xop[3])
        {
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out __RAMPZ__,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* Segment 1 can be synthesized without a d-register.  */

          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out __RAMPZ__,%5", xop, plen, 3);
        }
      else
        {
          /* No free d-register: temporarily borrow ZH via r0.  */

          avr_asm_len ("mov %5,%2" CR_TAB
                       "ldi %2,%4" CR_TAB
                       "out __RAMPZ__,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      /* From here on the templates expand "%4lpm" to "elpm".  */

      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          if (REGNO (dest) == REG_Z)
            /* Destination overlaps Z: bounce the low byte through r0.  */
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          if (REGNO (dest) == REG_Z - 2)
            /* Bytes C/D land in r30/r31 == Z: bounce byte C through r0.  */
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
/* Worker function for xload_8 insn.  Reads one byte from a 24-bit
   address: bit 7 of the high address byte %1 selects between RAM
   ("ld") and flash ("lpm").  With LPMX the byte can go straight to
   the destination; otherwise it arrives in r0 and is moved.  */

const char*
avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  if (plen)
    *plen = 0;

  /* Load from RAM, then skip the flash load if %1 flags a RAM address.  */

  avr_asm_len ("ld %3,%a2" CR_TAB
               "sbrs %1,7", xop, plen, 2);

  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);

  /* Without LPMX the byte is in r0 and may need a final move.  */

  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
/* Output an 8-bit move for INSN with OPERANDS[0] = dest, OPERANDS[1] = src.
   If L is non-NULL, store the length of the sequence in words there and
   emit nothing; otherwise emit the assembler text.  Returns either a
   template string for final_scan_insn or "".  */

const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  /* Reads from flash go through the [E]LPM machinery.  */

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  *l = 1;

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
        {
          /* Moves involving SPL are IN/OUT on the I/O register.  */

          if (test_hard_reg_class (STACK_REG, dest))
            return "out %0,%1";
          else if (test_hard_reg_class (STACK_REG, src))
            return "in %0,%1";

          return "mov %0,%1";
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, real_l, false);
          return "";
        }
      else if (GET_CODE (src) == MEM)
        return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      rtx xop[2];

      /* Storing zero uses the zero register instead of a constant.  */

      xop[0] = dest;
      xop[1] = src == const0_rtx ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, real_l);
    }
  return "";
}
/* Output a 16-bit move for INSN with XOP[0] = dest, XOP[1] = src.
   PLEN: if non-NULL receives the length in words, else the asm is
   emitted.  Writes to the stack pointer get special, interrupt-safe
   sequences.  */

const char *
output_movhi (rtx insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              /* Writing SP: the safe sequence depends on the device.  */

              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              /* XMEGA updates SPL/SPH atomically in hardware.  */

              if (AVR_XMEGA)
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are used.  */

              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)

                /* Otherwise disable interrupts around the SPH write.  */

                : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
                               "cli"                     CR_TAB
                               "out __SP_H__,%B1"        CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              return AVR_HAVE_8BIT_SP
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)

                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)

            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        } /* REG_P (src) */
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Storing zero uses the zero register instead of a constant.  */

      xop[0] = dest;
      xop[1] = src == const0_rtx ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
/* Output an 8-bit load register-from-memory for INSN with
   OP[0] = dest reg, OP[1] = source MEM.  PLEN: if non-NULL receives the
   length in words, else the asm is emitted (negative lengths passed to
   avr_asm_len start a fresh count).  */

static const char*
out_movqi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Prefer single-word IN over two-word LDS for I/O addresses.  */

      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          /* Displacement exceeds LDD range; only Y may be adjusted.  */

          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63"     CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y"            CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          /* Restore X unless it is dead or the destination clobbers it.  */

          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
              && !reg_unused_after (insn, XEXP (x,0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
/* Output a 16-bit load register-from-memory for INSN with
   OP[0] = dest reg, OP[1] = source MEM.  PLEN: if non-NULL receives the
   length in words, else the asm is emitted.  */

static const char*
out_movhi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
        /* Destination overlaps the pointer: bounce low byte through r0.  */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1"          CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no LDD displacement form: post-increment, then restore.  */

      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD range; only Y may be adjusted.  */

          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62"    CR_TAB
                           "ldd %B0,Y+63"    CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y"           CR_TAB
                           "ldd %B0,Y+1"        CR_TAB
                           "subi r28,lo8(%o1)"  CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1"        CR_TAB
                         "ld __tmp_reg__,X+"   CR_TAB
                         "ld %B0,X"            CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld %A0,X+"    CR_TAB
                         "ld %B0,X"     CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1"         CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      /* Volatile: must read the low byte first — rewind, read upwards,
         then leave the pointer at the pre-decremented position.  */

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2"  CR_TAB
                       "ld %A0,X+"   CR_TAB
                       "ld %B0,X"    CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2"  CR_TAB
                       "ld %A0,%p1"  CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1"  CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* Prefer IN over LDS for I/O addresses (shorter).  */

      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -4);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Output a 32-bit load register-from-memory for INSN with
   OP[0] = dest reg, OP[1] = source MEM.  If L is non-NULL, store the
   length in words there; always returns the template string.
   The REG_X cases are intricate because X has no displacement
   addressing and "ld r26,-X" / "ld r27,-X" are undefined.  */

static const char*
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            /* Dest r26..r29 overlaps X: read high-to-low, keep r27's
               value in r0 until X is no longer needed.  */
            return *l=7, ("adiw r26,3"        CR_TAB
                          "ld r29,X"          CR_TAB
                          "ld r28,-X"         CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1"        CR_TAB
                          "ld r26,X"          CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* Dest r24..r27: byte C would clobber r26, bounce it via r0.  */
            return *l=5, ("ld %A0,X+"          CR_TAB
                          "ld %B0,X+"          CR_TAB
                          "ld __tmp_reg__,X+"  CR_TAB
                          "ld %D0,X"           CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X"  CR_TAB
                          "sbiw r26,3");
        }
      else
        {
          /* Y or Z base: LDD handles the offsets; overlap with the
             pointer register pair is resolved via r0.  */

          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3"          CR_TAB
                          "ldd %C0,%1+2"          CR_TAB
                          "ldd __tmp_reg__,%1+1"  CR_TAB
                          "ld %A0,%1"             CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1"             CR_TAB
                          "ldd %B0,%1+1"          CR_TAB
                          "ldd __tmp_reg__,%1+2"  CR_TAB
                          "ldd %D0,%1+3"          CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1"    CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60"    CR_TAB
                            "ldd %B0,Y+61"    CR_TAB
                            "ldd %C0,Y+62"    CR_TAB
                            "ldd %D0,Y+63"    CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y"           CR_TAB
                          "ldd %B0,Y+1"        CR_TAB
                          "ldd %C0,Y+2"        CR_TAB
                          "ldd %D0,Y+3"        CR_TAB
                          "subi r28,lo8(%o1)"  CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3"    CR_TAB
                      "ld r29,X"          CR_TAB
                      "ld r28,-X"         CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1"        CR_TAB
                      "ld r26,X"          CR_TAB
                      "mov r27,__tmp_reg__");
            }

          *l = 6;
          if (reg_dest == REG_X - 2)
            /* Dest r24..r27: byte C would clobber r26; no restore of X
               possible (it is part of the destination).  */
            return ("adiw r26,%o1"       CR_TAB
                    "ld r24,X+"          CR_TAB
                    "ld r25,X+"          CR_TAB
                    "ld __tmp_reg__,X+"  CR_TAB
                    "ld r27,X"           CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+"    CR_TAB
                  "ld %B0,X+"    CR_TAB
                  "ld %C0,X+"    CR_TAB
                  "ld %D0,X"     CR_TAB
                  "sbiw r26,%o1+3");
        }

      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1"          CR_TAB
                      "ldd %C0,%C1"          CR_TAB
                      "ldd __tmp_reg__,%B1"  CR_TAB
                      "ldd %A0,%A1"          CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1"          CR_TAB
                      "ldd %B0,%B1"          CR_TAB
                      "ldd __tmp_reg__,%C1"  CR_TAB
                      "ldd %D0,%D1"          CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("lds %A0,%m1"   CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Output a 32-bit store memory-from-register for INSN with
   OP[0] = dest MEM, OP[1] = source reg.  If L is non-NULL, store the
   length in words there; always returns the template string.
   The REG_X cases are intricate: "st X+,r26"/"st X+,r27" are undefined,
   so overlapping source bytes are staged in r0/r1 (r1 is cleared again
   afterwards since it must stay the zero register).  */

static const char*
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    return *l=8,("sts %m0,%A1"   CR_TAB
                 "sts %m0+1,%B1" CR_TAB
                 "sts %m0+2,%C1" CR_TAB
                 "sts %m0+3,%D1");
  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26"            CR_TAB
                              "adiw r26,1"          CR_TAB
                              "st X+,__tmp_reg__"   CR_TAB
                              "st X+,r28"           CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26"            CR_TAB
                              "adiw r26,1"          CR_TAB
                              "st X+,__tmp_reg__"   CR_TAB
                              "st X+,r28"           CR_TAB
                              "st X,r29"            CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* Source high bytes overlap X (r26/r27): stage them in
                 r1/r0 before the pointer is advanced past them.  */

              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__"     CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1"  CR_TAB
                        "sbiw r26,3");
        }
      else
        return *l=4, ("st %0,%A1"    CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD range; only Y may be adjusted.  */

          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1"    CR_TAB
                            "std Y+61,%B1"    CR_TAB
                            "std Y+62,%C1"    CR_TAB
                            "std Y+63,%D1"    CR_TAB
                            "sbiw r28,%o0-60");

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1"           CR_TAB
                          "std Y+1,%B1"        CR_TAB
                          "std Y+2,%C1"        CR_TAB
                          "std Y+3,%D1"        CR_TAB
                          "subi r28,lo8(%o0)"  CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              /* Source r26..r29 overlaps X: save r26/r27 in r0/r1
                 before adjusting the pointer.  */

              *l = 9;
              return ("mov __tmp_reg__,r26"  CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0"         CR_TAB
                      "st X+,__tmp_reg__"    CR_TAB
                      "st X+,__zero_reg__"   CR_TAB
                      "st X+,r28"            CR_TAB
                      "st X,r29"             CR_TAB
                      "clr __zero_reg__"     CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              /* Source r24..r27: its high bytes overlap X likewise.  */

              *l = 9;
              return ("mov __tmp_reg__,r26"  CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0"         CR_TAB
                      "st X+,r24"            CR_TAB
                      "st X+,r25"            CR_TAB
                      "st X+,__tmp_reg__"    CR_TAB
                      "st X,__zero_reg__"    CR_TAB
                      "clr __zero_reg__"     CR_TAB
                      "sbiw r26,%o0+3");
            }
          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1"    CR_TAB
                  "st X+,%B1"    CR_TAB
                  "st X+,%C1"    CR_TAB
                  "st X,%D1"     CR_TAB
                  "sbiw r26,%o0+3");
        }
      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Output a 32-bit (SImode/SFmode) move for INSN with
   OPERANDS[0] = dest, OPERANDS[1] = src.  If L is non-NULL, store the
   length in words there and emit nothing; otherwise emit the asm.  */

const char *
output_movsisf (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy in an order that never overwrites a source byte
             before it was read, depending on which regnum is higher.  */

          if (true_regnum (dest) > true_regnum (src))
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %C0,%C1" CR_TAB
                          "movw %A0,%A1");
                }
              *l = 4;
              return ("mov %D0,%D1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %A0,%A1");
            }
          else
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %A0,%A1" CR_TAB
                          "movw %C0,%C1");
                }
              *l = 4;
              return ("mov %A0,%A1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %D0,%D1");
            }
        }
      else if (CONSTANT_P (src))
        {
          return output_reload_insisf (operands, NULL_RTX, real_l);
        }
      else if (GET_CODE (src) == MEM)
        return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      /* Store of zero goes through the zero register; operand 1 is
         restored below so the insn stream stays unchanged.  */

      if (src == CONST0_RTX (GET_MODE (dest)))
        operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
/* Handle loads of 24-bit types from memory to register.
   OP[0] = dest reg, OP[1] = source MEM; PLEN as usual (NULL -> emit,
   else receive length in words).  */

static const char*
avr_out_load_psi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            /* Dest r26..r28 overlaps X: read high-to-low via r0.  */
            return avr_asm_len ("adiw r26,2"        CR_TAB
                                "ld r28,X"          CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1"        CR_TAB
                                "ld r26,X"          CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* If dest is r24..r26, byte C clobbered X anyway; else
                 restore X when it is still live.  */

              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            return avr_asm_len ("ldd %C0,%1+2"          CR_TAB
                                "ldd __tmp_reg__,%1+1"  CR_TAB
                                "ld %A0,%1"             CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld %A0,%1"    CR_TAB
                                "ldd %B0,%1+1" CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61"    CR_TAB
                                "ldd %B0,Y+62"    CR_TAB
                                "ldd %C0,Y+63"    CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %A0,Y"           CR_TAB
                              "ldd %B0,Y+1"        CR_TAB
                              "ldd %C0,Y+2"        CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2"    CR_TAB
                                  "ld r28,X"          CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1"        CR_TAB
                                  "ld r26,X"          CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          /* NOTE(review): the hard-coded r24/r25/r26 below only match a
             destination of r24..r26 (REG_X - 2), yet the guard afterwards
             allows other destinations and then does "sbiw r26" on a
             just-overwritten r26 — looks like %A0/%B0/%C0 were intended;
             confirm against later avr.c revisions before relying on the
             non-r24 path.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld r24,X+"    CR_TAB
                       "ld r25,X+"    CR_TAB
                       "ld r26,X", op, plen, -4);

          if (reg_dest != REG_X - 2)
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1"          CR_TAB
                            "ldd __tmp_reg__,%B1"  CR_TAB
                            "ldd %A0,%A1"          CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);
  else if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("lds %A0,%m1"   CR_TAB
                        "lds %B0,%m1+1" CR_TAB
                        "lds %C0,%m1+2", op, plen , -6);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3761 /* Handle store of 24-bit type from register or zero to memory. */
3763 static const char*
3764 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3766 rtx dest = op[0];
3767 rtx src = op[1];
3768 rtx base = XEXP (dest, 0);
3769 int reg_base = true_regnum (base);
3771 if (CONSTANT_ADDRESS_P (base))
3772 return avr_asm_len ("sts %m0,%A1" CR_TAB
3773 "sts %m0+1,%B1" CR_TAB
3774 "sts %m0+2,%C1", op, plen, -6);
3776 if (reg_base > 0) /* (r) */
3778 if (reg_base == REG_X) /* (R26) */
3780 gcc_assert (!reg_overlap_mentioned_p (base, src));
3782 avr_asm_len ("st %0+,%A1" CR_TAB
3783 "st %0+,%B1" CR_TAB
3784 "st %0,%C1", op, plen, -3);
3786 if (!reg_unused_after (insn, base))
3787 avr_asm_len ("sbiw r26,2", op, plen, 1);
3789 return "";
3791 else
3792 return avr_asm_len ("st %0,%A1" CR_TAB
3793 "std %0+1,%B1" CR_TAB
3794 "std %0+2,%C1", op, plen, -3);
3796 else if (GET_CODE (base) == PLUS) /* (R + i) */
3798 int disp = INTVAL (XEXP (base, 1));
3799 reg_base = REGNO (XEXP (base, 0));
3801 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3803 if (reg_base != REG_Y)
3804 fatal_insn ("incorrect insn:",insn);
3806 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3807 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3808 "std Y+61,%A1" CR_TAB
3809 "std Y+62,%B1" CR_TAB
3810 "std Y+63,%C1" CR_TAB
3811 "sbiw r28,%o0-60", op, plen, -5);
3813 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3814 "sbci r29,hi8(-%o0)" CR_TAB
3815 "st Y,%A1" CR_TAB
3816 "std Y+1,%B1" CR_TAB
3817 "std Y+2,%C1" CR_TAB
3818 "subi r28,lo8(%o0)" CR_TAB
3819 "sbci r29,hi8(%o0)", op, plen, -7);
3821 if (reg_base == REG_X)
3823 /* (X + d) = R */
3824 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3826 avr_asm_len ("adiw r26,%o0" CR_TAB
3827 "st X+,%A1" CR_TAB
3828 "st X+,%B1" CR_TAB
3829 "st X,%C1", op, plen, -4);
3831 if (!reg_unused_after (insn, XEXP (base, 0)))
3832 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3834 return "";
3837 return avr_asm_len ("std %A0,%A1" CR_TAB
3838 "std %B0,%B1" CR_TAB
3839 "std %C0,%C1", op, plen, -3);
3841 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3842 return avr_asm_len ("st %0,%C1" CR_TAB
3843 "st %0,%B1" CR_TAB
3844 "st %0,%A1", op, plen, -3);
3845 else if (GET_CODE (base) == POST_INC) /* (R++) */
3846 return avr_asm_len ("st %0,%A1" CR_TAB
3847 "st %0,%B1" CR_TAB
3848 "st %0,%C1", op, plen, -3);
3850 fatal_insn ("unknown move insn:",insn);
3851 return "";
3855 /* Move around 24-bit stuff. */
3857 const char *
3858 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3860 rtx dest = op[0];
3861 rtx src = op[1];
3863 if (avr_mem_flash_p (src)
3864 || avr_mem_flash_p (dest))
3866 return avr_out_lpm (insn, op, plen);
3869 if (register_operand (dest, VOIDmode))
3871 if (register_operand (src, VOIDmode)) /* mov r,r */
3873 if (true_regnum (dest) > true_regnum (src))
3875 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3877 if (AVR_HAVE_MOVW)
3878 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3879 else
3880 return avr_asm_len ("mov %B0,%B1" CR_TAB
3881 "mov %A0,%A1", op, plen, 2);
3883 else
3885 if (AVR_HAVE_MOVW)
3886 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3887 else
3888 avr_asm_len ("mov %A0,%A1" CR_TAB
3889 "mov %B0,%B1", op, plen, -2);
3891 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3894 else if (CONSTANT_P (src))
3896 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3898 else if (MEM_P (src))
3899 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3901 else if (MEM_P (dest))
3903 rtx xop[2];
3905 xop[0] = dest;
3906 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3908 return avr_out_store_psi (insn, xop, plen);
3911 fatal_insn ("invalid insn:", insn);
3912 return "";
3916 static const char*
3917 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3919 rtx dest = op[0];
3920 rtx src = op[1];
3921 rtx x = XEXP (dest, 0);
3923 if (CONSTANT_ADDRESS_P (x))
3925 return optimize > 0 && io_address_operand (x, QImode)
3926 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3927 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3929 else if (GET_CODE (x) == PLUS
3930 && REG_P (XEXP (x, 0))
3931 && CONST_INT_P (XEXP (x, 1)))
3933 /* memory access by reg+disp */
3935 int disp = INTVAL (XEXP (x, 1));
3937 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3939 if (REGNO (XEXP (x, 0)) != REG_Y)
3940 fatal_insn ("incorrect insn:",insn);
3942 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3943 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3944 "std Y+63,%1" CR_TAB
3945 "sbiw r28,%o0-63", op, plen, -3);
3947 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3948 "sbci r29,hi8(-%o0)" CR_TAB
3949 "st Y,%1" CR_TAB
3950 "subi r28,lo8(%o0)" CR_TAB
3951 "sbci r29,hi8(%o0)", op, plen, -5);
3953 else if (REGNO (XEXP (x,0)) == REG_X)
3955 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3957 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3958 "adiw r26,%o0" CR_TAB
3959 "st X,__tmp_reg__", op, plen, -3);
3961 else
3963 avr_asm_len ("adiw r26,%o0" CR_TAB
3964 "st X,%1", op, plen, -2);
3967 if (!reg_unused_after (insn, XEXP (x,0)))
3968 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3970 return "";
3973 return avr_asm_len ("std %0,%1", op, plen, -1);
3976 return avr_asm_len ("st %0,%1", op, plen, -1);
3980 /* Helper for the next function for XMEGA. It does the same
3981 but with low byte first. */
3983 static const char*
3984 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
3986 rtx dest = op[0];
3987 rtx src = op[1];
3988 rtx base = XEXP (dest, 0);
3989 int reg_base = true_regnum (base);
3990 int reg_src = true_regnum (src);
3992 /* "volatile" forces writing low byte first, even if less efficient,
3993 for correct operation with 16-bit I/O registers like SP. */
3994 int mem_volatile_p = MEM_VOLATILE_P (dest);
3996 if (CONSTANT_ADDRESS_P (base))
3997 return optimize > 0 && io_address_operand (base, HImode)
3998 ? avr_asm_len ("out %i0,%A1" CR_TAB
3999 "out %i0+1,%B1", op, plen, -2)
4001 : avr_asm_len ("sts %m0,%A1" CR_TAB
4002 "sts %m0+1,%B1", op, plen, -4);
4004 if (reg_base > 0)
4006 if (reg_base != REG_X)
4007 return avr_asm_len ("st %0,%A1" CR_TAB
4008 "std %0+1,%B1", op, plen, -2);
4010 if (reg_src == REG_X)
4011 /* "st X+,r26" and "st -X,r26" are undefined. */
4012 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4013 "st X,r26" CR_TAB
4014 "adiw r26,1" CR_TAB
4015 "st X,__tmp_reg__", op, plen, -4);
4016 else
4017 avr_asm_len ("st X+,%A1" CR_TAB
4018 "st X,%B1", op, plen, -2);
4020 return reg_unused_after (insn, base)
4021 ? ""
4022 : avr_asm_len ("sbiw r26,1", op, plen, 1);
4024 else if (GET_CODE (base) == PLUS)
4026 int disp = INTVAL (XEXP (base, 1));
4027 reg_base = REGNO (XEXP (base, 0));
4028 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4030 if (reg_base != REG_Y)
4031 fatal_insn ("incorrect insn:",insn);
4033 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4034 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4035 "std Y+62,%A1" CR_TAB
4036 "std Y+63,%B1" CR_TAB
4037 "sbiw r28,%o0-62", op, plen, -4)
4039 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4040 "sbci r29,hi8(-%o0)" CR_TAB
4041 "st Y,%A1" CR_TAB
4042 "std Y+1,%B1" CR_TAB
4043 "subi r28,lo8(%o0)" CR_TAB
4044 "sbci r29,hi8(%o0)", op, plen, -6);
4047 if (reg_base != REG_X)
4048 return avr_asm_len ("std %A0,%A1" CR_TAB
4049 "std %B0,%B1", op, plen, -2);
4050 /* (X + d) = R */
4051 return reg_src == REG_X
4052 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4053 "mov __zero_reg__,r27" CR_TAB
4054 "adiw r26,%o0" CR_TAB
4055 "st X+,__tmp_reg__" CR_TAB
4056 "st X,__zero_reg__" CR_TAB
4057 "clr __zero_reg__" CR_TAB
4058 "sbiw r26,%o0+1", op, plen, -7)
4060 : avr_asm_len ("adiw r26,%o0" CR_TAB
4061 "st X+,%A1" CR_TAB
4062 "st X,%B1" CR_TAB
4063 "sbiw r26,%o0+1", op, plen, -4);
4065 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4067 if (!mem_volatile_p)
4068 return avr_asm_len ("st %0,%B1" CR_TAB
4069 "st %0,%A1", op, plen, -2);
4071 return REGNO (XEXP (base, 0)) == REG_X
4072 ? avr_asm_len ("sbiw r26,2" CR_TAB
4073 "st X+,%A1" CR_TAB
4074 "st X,%B1" CR_TAB
4075 "sbiw r26,1", op, plen, -4)
4077 : avr_asm_len ("sbiw %r0,2" CR_TAB
4078 "st %p0,%A1" CR_TAB
4079 "std %p0+1,%B1", op, plen, -3);
4081 else if (GET_CODE (base) == POST_INC) /* (R++) */
4083 return avr_asm_len ("st %0,%A1" CR_TAB
4084 "st %0,%B1", op, plen, -2);
4087 fatal_insn ("unknown move insn:",insn);
4088 return "";
4092 static const char*
4093 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
4095 rtx dest = op[0];
4096 rtx src = op[1];
4097 rtx base = XEXP (dest, 0);
4098 int reg_base = true_regnum (base);
4099 int reg_src = true_regnum (src);
4100 int mem_volatile_p;
4102 /* "volatile" forces writing high-byte first (no-xmega) resp.
4103 low-byte first (xmega) even if less efficient, for correct
4104 operation with 16-bit I/O registers like. */
4106 if (AVR_XMEGA)
4107 return avr_out_movhi_mr_r_xmega (insn, op, plen);
4109 mem_volatile_p = MEM_VOLATILE_P (dest);
4111 if (CONSTANT_ADDRESS_P (base))
4112 return optimize > 0 && io_address_operand (base, HImode)
4113 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4114 "out %i0,%A1", op, plen, -2)
4116 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4117 "sts %m0,%A1", op, plen, -4);
4119 if (reg_base > 0)
4121 if (reg_base != REG_X)
4122 return avr_asm_len ("std %0+1,%B1" CR_TAB
4123 "st %0,%A1", op, plen, -2);
4125 if (reg_src == REG_X)
4126 /* "st X+,r26" and "st -X,r26" are undefined. */
4127 return !mem_volatile_p && reg_unused_after (insn, src)
4128 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4129 "st X,r26" CR_TAB
4130 "adiw r26,1" CR_TAB
4131 "st X,__tmp_reg__", op, plen, -4)
4133 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4134 "adiw r26,1" CR_TAB
4135 "st X,__tmp_reg__" CR_TAB
4136 "sbiw r26,1" CR_TAB
4137 "st X,r26", op, plen, -5);
4139 return !mem_volatile_p && reg_unused_after (insn, base)
4140 ? avr_asm_len ("st X+,%A1" CR_TAB
4141 "st X,%B1", op, plen, -2)
4142 : avr_asm_len ("adiw r26,1" CR_TAB
4143 "st X,%B1" CR_TAB
4144 "st -X,%A1", op, plen, -3);
4146 else if (GET_CODE (base) == PLUS)
4148 int disp = INTVAL (XEXP (base, 1));
4149 reg_base = REGNO (XEXP (base, 0));
4150 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4152 if (reg_base != REG_Y)
4153 fatal_insn ("incorrect insn:",insn);
4155 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4156 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4157 "std Y+63,%B1" CR_TAB
4158 "std Y+62,%A1" CR_TAB
4159 "sbiw r28,%o0-62", op, plen, -4)
4161 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4162 "sbci r29,hi8(-%o0)" CR_TAB
4163 "std Y+1,%B1" CR_TAB
4164 "st Y,%A1" CR_TAB
4165 "subi r28,lo8(%o0)" CR_TAB
4166 "sbci r29,hi8(%o0)", op, plen, -6);
4169 if (reg_base != REG_X)
4170 return avr_asm_len ("std %B0,%B1" CR_TAB
4171 "std %A0,%A1", op, plen, -2);
4172 /* (X + d) = R */
4173 return reg_src == REG_X
4174 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4175 "mov __zero_reg__,r27" CR_TAB
4176 "adiw r26,%o0+1" CR_TAB
4177 "st X,__zero_reg__" CR_TAB
4178 "st -X,__tmp_reg__" CR_TAB
4179 "clr __zero_reg__" CR_TAB
4180 "sbiw r26,%o0", op, plen, -7)
4182 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4183 "st X,%B1" CR_TAB
4184 "st -X,%A1" CR_TAB
4185 "sbiw r26,%o0", op, plen, -4);
4187 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4189 return avr_asm_len ("st %0,%B1" CR_TAB
4190 "st %0,%A1", op, plen, -2);
4192 else if (GET_CODE (base) == POST_INC) /* (R++) */
4194 if (!mem_volatile_p)
4195 return avr_asm_len ("st %0,%A1" CR_TAB
4196 "st %0,%B1", op, plen, -2);
4198 return REGNO (XEXP (base, 0)) == REG_X
4199 ? avr_asm_len ("adiw r26,1" CR_TAB
4200 "st X,%B1" CR_TAB
4201 "st -X,%A1" CR_TAB
4202 "adiw r26,2", op, plen, -4)
4204 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4205 "st %p0,%A1" CR_TAB
4206 "adiw %r0,2", op, plen, -3);
4208 fatal_insn ("unknown move insn:",insn);
4209 return "";
4212 /* Return 1 if frame pointer for current function required. */
4214 static bool
4215 avr_frame_pointer_required_p (void)
4217 return (cfun->calls_alloca
4218 || cfun->calls_setjmp
4219 || cfun->has_nonlocal_label
4220 || crtl->args.info.nregs == 0
4221 || get_frame_size () > 0);
4224 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4226 static RTX_CODE
4227 compare_condition (rtx insn)
4229 rtx next = next_real_insn (insn);
4231 if (next && JUMP_P (next))
4233 rtx pat = PATTERN (next);
4234 rtx src = SET_SRC (pat);
4236 if (IF_THEN_ELSE == GET_CODE (src))
4237 return GET_CODE (XEXP (src, 0));
4240 return UNKNOWN;
4244 /* Returns true iff INSN is a tst insn that only tests the sign. */
4246 static bool
4247 compare_sign_p (rtx insn)
4249 RTX_CODE cond = compare_condition (insn);
4250 return (cond == GE || cond == LT);
4254 /* Returns true iff the next insn is a JUMP_INSN with a condition
4255 that needs to be swapped (GT, GTU, LE, LEU). */
4257 static bool
4258 compare_diff_p (rtx insn)
4260 RTX_CODE cond = compare_condition (insn);
4261 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4264 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4266 static bool
4267 compare_eq_p (rtx insn)
4269 RTX_CODE cond = compare_condition (insn);
4270 return (cond == EQ || cond == NE);
4274 /* Output compare instruction
4276 compare (XOP[0], XOP[1])
4278 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4279 XOP[2] is an 8-bit scratch register as needed.
4281 PLEN == NULL: Output instructions.
4282 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4283 Don't output anything. */
4285 const char*
4286 avr_out_compare (rtx insn, rtx *xop, int *plen)
4288 /* Register to compare and value to compare against. */
4289 rtx xreg = xop[0];
4290 rtx xval = xop[1];
4292 /* MODE of the comparison. */
4293 enum machine_mode mode = GET_MODE (xreg);
4295 /* Number of bytes to operate on. */
4296 int i, n_bytes = GET_MODE_SIZE (mode);
4298 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4299 int clobber_val = -1;
4301 gcc_assert (REG_P (xreg));
4302 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4303 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4305 if (plen)
4306 *plen = 0;
4308 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4309 against 0 by ORing the bytes. This is one instruction shorter.
4310 Notice that DImode comparisons are always against reg:DI 18
4311 and therefore don't use this. */
4313 if (!test_hard_reg_class (LD_REGS, xreg)
4314 && compare_eq_p (insn)
4315 && reg_unused_after (insn, xreg))
4317 if (xval == const1_rtx)
4319 avr_asm_len ("dec %A0" CR_TAB
4320 "or %A0,%B0", xop, plen, 2);
4322 if (n_bytes >= 3)
4323 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4325 if (n_bytes >= 4)
4326 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4328 return "";
4330 else if (xval == constm1_rtx)
4332 if (n_bytes >= 4)
4333 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4335 if (n_bytes >= 3)
4336 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4338 return avr_asm_len ("and %A0,%B0" CR_TAB
4339 "com %A0", xop, plen, 2);
4343 for (i = 0; i < n_bytes; i++)
4345 /* We compare byte-wise. */
4346 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4347 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4349 /* 8-bit value to compare with this byte. */
4350 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4352 /* Registers R16..R31 can operate with immediate. */
4353 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4355 xop[0] = reg8;
4356 xop[1] = gen_int_mode (val8, QImode);
4358 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4360 if (i == 0
4361 && test_hard_reg_class (ADDW_REGS, reg8))
4363 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4365 if (IN_RANGE (val16, 0, 63)
4366 && (val8 == 0
4367 || reg_unused_after (insn, xreg)))
4369 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4370 i++;
4371 continue;
4374 if (n_bytes == 2
4375 && IN_RANGE (val16, -63, -1)
4376 && compare_eq_p (insn)
4377 && reg_unused_after (insn, xreg))
4379 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4383 /* Comparing against 0 is easy. */
4385 if (val8 == 0)
4387 avr_asm_len (i == 0
4388 ? "cp %0,__zero_reg__"
4389 : "cpc %0,__zero_reg__", xop, plen, 1);
4390 continue;
4393 /* Upper registers can compare and subtract-with-carry immediates.
4394 Notice that compare instructions do the same as respective subtract
4395 instruction; the only difference is that comparisons don't write
4396 the result back to the target register. */
4398 if (ld_reg_p)
4400 if (i == 0)
4402 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4403 continue;
4405 else if (reg_unused_after (insn, xreg))
4407 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4408 continue;
4412 /* Must load the value into the scratch register. */
4414 gcc_assert (REG_P (xop[2]));
4416 if (clobber_val != (int) val8)
4417 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4418 clobber_val = (int) val8;
4420 avr_asm_len (i == 0
4421 ? "cp %0,%2"
4422 : "cpc %0,%2", xop, plen, 1);
4425 return "";
4429 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4431 const char*
4432 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4434 rtx xop[3];
4436 xop[0] = gen_rtx_REG (DImode, 18);
4437 xop[1] = op[0];
4438 xop[2] = op[1];
4440 return avr_out_compare (insn, xop, plen);
4443 /* Output test instruction for HImode. */
4445 const char*
4446 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4448 if (compare_sign_p (insn))
4450 avr_asm_len ("tst %B0", op, plen, -1);
4452 else if (reg_unused_after (insn, op[0])
4453 && compare_eq_p (insn))
4455 /* Faster than sbiw if we can clobber the operand. */
4456 avr_asm_len ("or %A0,%B0", op, plen, -1);
4458 else
4460 avr_out_compare (insn, op, plen);
4463 return "";
4467 /* Output test instruction for PSImode. */
4469 const char*
4470 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4472 if (compare_sign_p (insn))
4474 avr_asm_len ("tst %C0", op, plen, -1);
4476 else if (reg_unused_after (insn, op[0])
4477 && compare_eq_p (insn))
4479 /* Faster than sbiw if we can clobber the operand. */
4480 avr_asm_len ("or %A0,%B0" CR_TAB
4481 "or %A0,%C0", op, plen, -2);
4483 else
4485 avr_out_compare (insn, op, plen);
4488 return "";
4492 /* Output test instruction for SImode. */
4494 const char*
4495 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4497 if (compare_sign_p (insn))
4499 avr_asm_len ("tst %D0", op, plen, -1);
4501 else if (reg_unused_after (insn, op[0])
4502 && compare_eq_p (insn))
4504 /* Faster than sbiw if we can clobber the operand. */
4505 avr_asm_len ("or %A0,%B0" CR_TAB
4506 "or %A0,%C0" CR_TAB
4507 "or %A0,%D0", op, plen, -3);
4509 else
4511 avr_out_compare (insn, op, plen);
4514 return "";
4518 /* Generate asm equivalent for various shifts. This only handles cases
4519 that are not already carefully hand-optimized in ?sh??i3_out.
4521 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4522 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4523 OPERANDS[3] is a QImode scratch register from LD regs if
4524 available and SCRATCH, otherwise (no scratch available)
4526 TEMPL is an assembler template that shifts by one position.
4527 T_LEN is the length of this template. */
4529 void
4530 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4531 int *plen, int t_len)
4533 bool second_label = true;
4534 bool saved_in_tmp = false;
4535 bool use_zero_reg = false;
4536 rtx op[5];
4538 op[0] = operands[0];
4539 op[1] = operands[1];
4540 op[2] = operands[2];
4541 op[3] = operands[3];
4543 if (plen)
4544 *plen = 0;
4546 if (CONST_INT_P (operands[2]))
4548 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4549 && REG_P (operands[3]));
4550 int count = INTVAL (operands[2]);
4551 int max_len = 10; /* If larger than this, always use a loop. */
4553 if (count <= 0)
4554 return;
4556 if (count < 8 && !scratch)
4557 use_zero_reg = true;
4559 if (optimize_size)
4560 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4562 if (t_len * count <= max_len)
4564 /* Output shifts inline with no loop - faster. */
4566 while (count-- > 0)
4567 avr_asm_len (templ, op, plen, t_len);
4569 return;
4572 if (scratch)
4574 avr_asm_len ("ldi %3,%2", op, plen, 1);
4576 else if (use_zero_reg)
4578 /* Hack to save one word: use __zero_reg__ as loop counter.
4579 Set one bit, then shift in a loop until it is 0 again. */
4581 op[3] = zero_reg_rtx;
4583 avr_asm_len ("set" CR_TAB
4584 "bld %3,%2-1", op, plen, 2);
4586 else
4588 /* No scratch register available, use one from LD_REGS (saved in
4589 __tmp_reg__) that doesn't overlap with registers to shift. */
4591 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4592 op[4] = tmp_reg_rtx;
4593 saved_in_tmp = true;
4595 avr_asm_len ("mov %4,%3" CR_TAB
4596 "ldi %3,%2", op, plen, 2);
4599 second_label = false;
4601 else if (MEM_P (op[2]))
4603 rtx op_mov[2];
4605 op_mov[0] = op[3] = tmp_reg_rtx;
4606 op_mov[1] = op[2];
4608 out_movqi_r_mr (insn, op_mov, plen);
4610 else if (register_operand (op[2], QImode))
4612 op[3] = op[2];
4614 if (!reg_unused_after (insn, op[2])
4615 || reg_overlap_mentioned_p (op[0], op[2]))
4617 op[3] = tmp_reg_rtx;
4618 avr_asm_len ("mov %3,%2", op, plen, 1);
4621 else
4622 fatal_insn ("bad shift insn:", insn);
4624 if (second_label)
4625 avr_asm_len ("rjmp 2f", op, plen, 1);
4627 avr_asm_len ("1:", op, plen, 0);
4628 avr_asm_len (templ, op, plen, t_len);
4630 if (second_label)
4631 avr_asm_len ("2:", op, plen, 0);
4633 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4634 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4636 if (saved_in_tmp)
4637 avr_asm_len ("mov %3,%4", op, plen, 1);
4641 /* 8bit shift left ((char)x << i) */
4643 const char *
4644 ashlqi3_out (rtx insn, rtx operands[], int *len)
4646 if (GET_CODE (operands[2]) == CONST_INT)
4648 int k;
4650 if (!len)
4651 len = &k;
4653 switch (INTVAL (operands[2]))
4655 default:
4656 if (INTVAL (operands[2]) < 8)
4657 break;
4659 *len = 1;
4660 return "clr %0";
4662 case 1:
4663 *len = 1;
4664 return "lsl %0";
4666 case 2:
4667 *len = 2;
4668 return ("lsl %0" CR_TAB
4669 "lsl %0");
4671 case 3:
4672 *len = 3;
4673 return ("lsl %0" CR_TAB
4674 "lsl %0" CR_TAB
4675 "lsl %0");
4677 case 4:
4678 if (test_hard_reg_class (LD_REGS, operands[0]))
4680 *len = 2;
4681 return ("swap %0" CR_TAB
4682 "andi %0,0xf0");
4684 *len = 4;
4685 return ("lsl %0" CR_TAB
4686 "lsl %0" CR_TAB
4687 "lsl %0" CR_TAB
4688 "lsl %0");
4690 case 5:
4691 if (test_hard_reg_class (LD_REGS, operands[0]))
4693 *len = 3;
4694 return ("swap %0" CR_TAB
4695 "lsl %0" CR_TAB
4696 "andi %0,0xe0");
4698 *len = 5;
4699 return ("lsl %0" CR_TAB
4700 "lsl %0" CR_TAB
4701 "lsl %0" CR_TAB
4702 "lsl %0" CR_TAB
4703 "lsl %0");
4705 case 6:
4706 if (test_hard_reg_class (LD_REGS, operands[0]))
4708 *len = 4;
4709 return ("swap %0" CR_TAB
4710 "lsl %0" CR_TAB
4711 "lsl %0" CR_TAB
4712 "andi %0,0xc0");
4714 *len = 6;
4715 return ("lsl %0" CR_TAB
4716 "lsl %0" CR_TAB
4717 "lsl %0" CR_TAB
4718 "lsl %0" CR_TAB
4719 "lsl %0" CR_TAB
4720 "lsl %0");
4722 case 7:
4723 *len = 3;
4724 return ("ror %0" CR_TAB
4725 "clr %0" CR_TAB
4726 "ror %0");
4729 else if (CONSTANT_P (operands[2]))
4730 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4732 out_shift_with_cnt ("lsl %0",
4733 insn, operands, len, 1);
4734 return "";
4738 /* 16bit shift left ((short)x << i) */
4740 const char *
4741 ashlhi3_out (rtx insn, rtx operands[], int *len)
4743 if (GET_CODE (operands[2]) == CONST_INT)
4745 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4746 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4747 int k;
4748 int *t = len;
4750 if (!len)
4751 len = &k;
4753 switch (INTVAL (operands[2]))
4755 default:
4756 if (INTVAL (operands[2]) < 16)
4757 break;
4759 *len = 2;
4760 return ("clr %B0" CR_TAB
4761 "clr %A0");
4763 case 4:
4764 if (optimize_size && scratch)
4765 break; /* 5 */
4766 if (ldi_ok)
4768 *len = 6;
4769 return ("swap %A0" CR_TAB
4770 "swap %B0" CR_TAB
4771 "andi %B0,0xf0" CR_TAB
4772 "eor %B0,%A0" CR_TAB
4773 "andi %A0,0xf0" CR_TAB
4774 "eor %B0,%A0");
4776 if (scratch)
4778 *len = 7;
4779 return ("swap %A0" CR_TAB
4780 "swap %B0" CR_TAB
4781 "ldi %3,0xf0" CR_TAB
4782 "and %B0,%3" CR_TAB
4783 "eor %B0,%A0" CR_TAB
4784 "and %A0,%3" CR_TAB
4785 "eor %B0,%A0");
4787 break; /* optimize_size ? 6 : 8 */
4789 case 5:
4790 if (optimize_size)
4791 break; /* scratch ? 5 : 6 */
4792 if (ldi_ok)
4794 *len = 8;
4795 return ("lsl %A0" CR_TAB
4796 "rol %B0" CR_TAB
4797 "swap %A0" CR_TAB
4798 "swap %B0" CR_TAB
4799 "andi %B0,0xf0" CR_TAB
4800 "eor %B0,%A0" CR_TAB
4801 "andi %A0,0xf0" CR_TAB
4802 "eor %B0,%A0");
4804 if (scratch)
4806 *len = 9;
4807 return ("lsl %A0" CR_TAB
4808 "rol %B0" CR_TAB
4809 "swap %A0" CR_TAB
4810 "swap %B0" CR_TAB
4811 "ldi %3,0xf0" CR_TAB
4812 "and %B0,%3" CR_TAB
4813 "eor %B0,%A0" CR_TAB
4814 "and %A0,%3" CR_TAB
4815 "eor %B0,%A0");
4817 break; /* 10 */
4819 case 6:
4820 if (optimize_size)
4821 break; /* scratch ? 5 : 6 */
4822 *len = 9;
4823 return ("clr __tmp_reg__" CR_TAB
4824 "lsr %B0" CR_TAB
4825 "ror %A0" CR_TAB
4826 "ror __tmp_reg__" CR_TAB
4827 "lsr %B0" CR_TAB
4828 "ror %A0" CR_TAB
4829 "ror __tmp_reg__" CR_TAB
4830 "mov %B0,%A0" CR_TAB
4831 "mov %A0,__tmp_reg__");
4833 case 7:
4834 *len = 5;
4835 return ("lsr %B0" CR_TAB
4836 "mov %B0,%A0" CR_TAB
4837 "clr %A0" CR_TAB
4838 "ror %B0" CR_TAB
4839 "ror %A0");
4841 case 8:
4842 return *len = 2, ("mov %B0,%A1" CR_TAB
4843 "clr %A0");
4845 case 9:
4846 *len = 3;
4847 return ("mov %B0,%A0" CR_TAB
4848 "clr %A0" CR_TAB
4849 "lsl %B0");
4851 case 10:
4852 *len = 4;
4853 return ("mov %B0,%A0" CR_TAB
4854 "clr %A0" CR_TAB
4855 "lsl %B0" CR_TAB
4856 "lsl %B0");
4858 case 11:
4859 *len = 5;
4860 return ("mov %B0,%A0" CR_TAB
4861 "clr %A0" CR_TAB
4862 "lsl %B0" CR_TAB
4863 "lsl %B0" CR_TAB
4864 "lsl %B0");
4866 case 12:
4867 if (ldi_ok)
4869 *len = 4;
4870 return ("mov %B0,%A0" CR_TAB
4871 "clr %A0" CR_TAB
4872 "swap %B0" CR_TAB
4873 "andi %B0,0xf0");
4875 if (scratch)
4877 *len = 5;
4878 return ("mov %B0,%A0" CR_TAB
4879 "clr %A0" CR_TAB
4880 "swap %B0" CR_TAB
4881 "ldi %3,0xf0" CR_TAB
4882 "and %B0,%3");
4884 *len = 6;
4885 return ("mov %B0,%A0" CR_TAB
4886 "clr %A0" CR_TAB
4887 "lsl %B0" CR_TAB
4888 "lsl %B0" CR_TAB
4889 "lsl %B0" CR_TAB
4890 "lsl %B0");
4892 case 13:
4893 if (ldi_ok)
4895 *len = 5;
4896 return ("mov %B0,%A0" CR_TAB
4897 "clr %A0" CR_TAB
4898 "swap %B0" CR_TAB
4899 "lsl %B0" CR_TAB
4900 "andi %B0,0xe0");
4902 if (AVR_HAVE_MUL && scratch)
4904 *len = 5;
4905 return ("ldi %3,0x20" CR_TAB
4906 "mul %A0,%3" CR_TAB
4907 "mov %B0,r0" CR_TAB
4908 "clr %A0" CR_TAB
4909 "clr __zero_reg__");
4911 if (optimize_size && scratch)
4912 break; /* 5 */
4913 if (scratch)
4915 *len = 6;
4916 return ("mov %B0,%A0" CR_TAB
4917 "clr %A0" CR_TAB
4918 "swap %B0" CR_TAB
4919 "lsl %B0" CR_TAB
4920 "ldi %3,0xe0" CR_TAB
4921 "and %B0,%3");
4923 if (AVR_HAVE_MUL)
4925 *len = 6;
4926 return ("set" CR_TAB
4927 "bld r1,5" CR_TAB
4928 "mul %A0,r1" CR_TAB
4929 "mov %B0,r0" CR_TAB
4930 "clr %A0" CR_TAB
4931 "clr __zero_reg__");
4933 *len = 7;
4934 return ("mov %B0,%A0" CR_TAB
4935 "clr %A0" CR_TAB
4936 "lsl %B0" CR_TAB
4937 "lsl %B0" CR_TAB
4938 "lsl %B0" CR_TAB
4939 "lsl %B0" CR_TAB
4940 "lsl %B0");
4942 case 14:
4943 if (AVR_HAVE_MUL && ldi_ok)
4945 *len = 5;
4946 return ("ldi %B0,0x40" CR_TAB
4947 "mul %A0,%B0" CR_TAB
4948 "mov %B0,r0" CR_TAB
4949 "clr %A0" CR_TAB
4950 "clr __zero_reg__");
4952 if (AVR_HAVE_MUL && scratch)
4954 *len = 5;
4955 return ("ldi %3,0x40" CR_TAB
4956 "mul %A0,%3" CR_TAB
4957 "mov %B0,r0" CR_TAB
4958 "clr %A0" CR_TAB
4959 "clr __zero_reg__");
4961 if (optimize_size && ldi_ok)
4963 *len = 5;
4964 return ("mov %B0,%A0" CR_TAB
4965 "ldi %A0,6" "\n1:\t"
4966 "lsl %B0" CR_TAB
4967 "dec %A0" CR_TAB
4968 "brne 1b");
4970 if (optimize_size && scratch)
4971 break; /* 5 */
4972 *len = 6;
4973 return ("clr %B0" CR_TAB
4974 "lsr %A0" CR_TAB
4975 "ror %B0" CR_TAB
4976 "lsr %A0" CR_TAB
4977 "ror %B0" CR_TAB
4978 "clr %A0");
4980 case 15:
4981 *len = 4;
4982 return ("clr %B0" CR_TAB
4983 "lsr %A0" CR_TAB
4984 "ror %B0" CR_TAB
4985 "clr %A0");
4987 len = t;
4989 out_shift_with_cnt ("lsl %A0" CR_TAB
4990 "rol %B0", insn, operands, len, 2);
4991 return "";
4995 /* 24-bit shift left */
4997 const char*
4998 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
5000 if (plen)
5001 *plen = 0;
5003 if (CONST_INT_P (op[2]))
5005 switch (INTVAL (op[2]))
5007 default:
5008 if (INTVAL (op[2]) < 24)
5009 break;
5011 return avr_asm_len ("clr %A0" CR_TAB
5012 "clr %B0" CR_TAB
5013 "clr %C0", op, plen, 3);
5015 case 8:
5017 int reg0 = REGNO (op[0]);
5018 int reg1 = REGNO (op[1]);
5020 if (reg0 >= reg1)
5021 return avr_asm_len ("mov %C0,%B1" CR_TAB
5022 "mov %B0,%A1" CR_TAB
5023 "clr %A0", op, plen, 3);
5024 else
5025 return avr_asm_len ("clr %A0" CR_TAB
5026 "mov %B0,%A1" CR_TAB
5027 "mov %C0,%B1", op, plen, 3);
5030 case 16:
5032 int reg0 = REGNO (op[0]);
5033 int reg1 = REGNO (op[1]);
5035 if (reg0 + 2 != reg1)
5036 avr_asm_len ("mov %C0,%A0", op, plen, 1);
5038 return avr_asm_len ("clr %B0" CR_TAB
5039 "clr %A0", op, plen, 2);
5042 case 23:
5043 return avr_asm_len ("clr %C0" CR_TAB
5044 "lsr %A0" CR_TAB
5045 "ror %C0" CR_TAB
5046 "clr %B0" CR_TAB
5047 "clr %A0", op, plen, 5);
5051 out_shift_with_cnt ("lsl %A0" CR_TAB
5052 "rol %B0" CR_TAB
5053 "rol %C0", insn, op, plen, 3);
5054 return "";
5058 /* 32bit shift left ((long)x << i) */
5060 const char *
5061 ashlsi3_out (rtx insn, rtx operands[], int *len)
5063 if (GET_CODE (operands[2]) == CONST_INT)
5065 int k;
5066 int *t = len;
5068 if (!len)
5069 len = &k;
5071 switch (INTVAL (operands[2]))
5073 default:
5074 if (INTVAL (operands[2]) < 32)
5075 break;
5077 if (AVR_HAVE_MOVW)
5078 return *len = 3, ("clr %D0" CR_TAB
5079 "clr %C0" CR_TAB
5080 "movw %A0,%C0");
5081 *len = 4;
5082 return ("clr %D0" CR_TAB
5083 "clr %C0" CR_TAB
5084 "clr %B0" CR_TAB
5085 "clr %A0");
5087 case 8:
5089 int reg0 = true_regnum (operands[0]);
5090 int reg1 = true_regnum (operands[1]);
5091 *len = 4;
5092 if (reg0 >= reg1)
5093 return ("mov %D0,%C1" CR_TAB
5094 "mov %C0,%B1" CR_TAB
5095 "mov %B0,%A1" CR_TAB
5096 "clr %A0");
5097 else
5098 return ("clr %A0" CR_TAB
5099 "mov %B0,%A1" CR_TAB
5100 "mov %C0,%B1" CR_TAB
5101 "mov %D0,%C1");
5104 case 16:
5106 int reg0 = true_regnum (operands[0]);
5107 int reg1 = true_regnum (operands[1]);
5108 if (reg0 + 2 == reg1)
5109 return *len = 2, ("clr %B0" CR_TAB
5110 "clr %A0");
5111 if (AVR_HAVE_MOVW)
5112 return *len = 3, ("movw %C0,%A1" CR_TAB
5113 "clr %B0" CR_TAB
5114 "clr %A0");
5115 else
5116 return *len = 4, ("mov %C0,%A1" CR_TAB
5117 "mov %D0,%B1" CR_TAB
5118 "clr %B0" CR_TAB
5119 "clr %A0");
5122 case 24:
5123 *len = 4;
5124 return ("mov %D0,%A1" CR_TAB
5125 "clr %C0" CR_TAB
5126 "clr %B0" CR_TAB
5127 "clr %A0");
5129 case 31:
5130 *len = 6;
5131 return ("clr %D0" CR_TAB
5132 "lsr %A0" CR_TAB
5133 "ror %D0" CR_TAB
5134 "clr %C0" CR_TAB
5135 "clr %B0" CR_TAB
5136 "clr %A0");
5138 len = t;
5140 out_shift_with_cnt ("lsl %A0" CR_TAB
5141 "rol %B0" CR_TAB
5142 "rol %C0" CR_TAB
5143 "rol %D0", insn, operands, len, 4);
5144 return "";
5147 /* 8bit arithmetic shift right ((signed char)x >> i) */
5149 const char *
5150 ashrqi3_out (rtx insn, rtx operands[], int *len)
5152 if (GET_CODE (operands[2]) == CONST_INT)
5154 int k;
5156 if (!len)
5157 len = &k;
5159 switch (INTVAL (operands[2]))
5161 case 1:
5162 *len = 1;
5163 return "asr %0";
5165 case 2:
5166 *len = 2;
5167 return ("asr %0" CR_TAB
5168 "asr %0");
5170 case 3:
5171 *len = 3;
5172 return ("asr %0" CR_TAB
5173 "asr %0" CR_TAB
5174 "asr %0");
5176 case 4:
5177 *len = 4;
5178 return ("asr %0" CR_TAB
5179 "asr %0" CR_TAB
5180 "asr %0" CR_TAB
5181 "asr %0");
5183 case 5:
5184 *len = 5;
5185 return ("asr %0" CR_TAB
5186 "asr %0" CR_TAB
5187 "asr %0" CR_TAB
5188 "asr %0" CR_TAB
5189 "asr %0");
5191 case 6:
5192 *len = 4;
5193 return ("bst %0,6" CR_TAB
5194 "lsl %0" CR_TAB
5195 "sbc %0,%0" CR_TAB
5196 "bld %0,0");
5198 default:
5199 if (INTVAL (operands[2]) < 8)
5200 break;
5202 /* fall through */
5204 case 7:
5205 *len = 2;
5206 return ("lsl %0" CR_TAB
5207 "sbc %0,%0");
5210 else if (CONSTANT_P (operands[2]))
5211 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5213 out_shift_with_cnt ("asr %0",
5214 insn, operands, len, 1);
5215 return "";
5219 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Output assembler for a 16-bit arithmetic shift right of operands[0]
   by operands[2].  If LEN is non-NULL only the instruction count is
   stored into *LEN; otherwise the asm template string is returned /
   emitted.  Non-constant or large shift counts fall back to
   out_shift_with_cnt which emits a loop.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern indicates a scratch register %3 is available.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "rol %B0");

        case 7:
          *len = 4;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0" CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "sbrc %A0,7" CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 14:
          *len = 5;
          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
5382 /* 24-bit arithmetic shift right */
/* Output assembler for a 24-bit arithmetic shift right.  OP[0] is the
   destination, OP[1] the source, OP[2] the shift count.  If PLEN is
   non-NULL only count instructions into *PLEN; otherwise emit them.  */

const char*
avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Byte move, then sign-extend into the top byte.  The move
             order depends on whether DEST overlaps SRC from below or
             above.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0"     CR_TAB
                                "sbrc %C1,7"  CR_TAB
                                "dec %C0"     CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0"     CR_TAB
                              "sbrc %A0,7"  CR_TAB
                              "com %B0"     CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Replicate the sign bit into all 24 bits.  */
          return avr_asm_len ("lsl %C0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
5441 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Output assembler for a 32-bit arithmetic shift right ((signed long)x >> i).
   If LEN is non-NULL only compute the instruction count into *LEN.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            /* Move order depends on whether DEST overlaps SRC from
               below or above.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0" CR_TAB
                      "sbrc %C0,7" CR_TAB
                      "dec %D0");
            else
              return ("clr %D0" CR_TAB
                      "sbrc %D1,7" CR_TAB
                      "dec %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            if (AVR_HAVE_MOVW)
              return *len = 5, ("movw %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            else
              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
          }

        case 24:
          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "clr %D0" CR_TAB
                            "sbrc %A0,7" CR_TAB
                            "com %D0" CR_TAB
                            "mov %B0,%D0" CR_TAB
                            "mov %C0,%D0");

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* Replicate the sign bit into all 32 bits.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "movw %C0,%A0");
          else
            return *len = 5, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0" CR_TAB
                              "mov %D0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
5538 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Output assembler for an 8-bit logical shift right
   ((unsigned char)x >> i).  If LEN is non-NULL only count instructions
   into *LEN.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift count >= 8: result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsr %0";

        case 2:
          *len = 2;
          return ("lsr %0" CR_TAB
                  "lsr %0");
        case 3:
          *len = 3;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 4:
          /* With an upper (LD) register SWAP+ANDI beats four LSRs.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len=2;
              return ("swap %0" CR_TAB
                      "andi %0,0x0f");
            }
          *len = 4;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x7");
            }
          *len = 5;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x3");
            }
          *len = 6;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 7:
          *len = 3;
          return ("rol %0" CR_TAB
                  "clr %0" CR_TAB
                  "rol %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
5633 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Output assembler for a 16-bit logical shift right
   ((unsigned short)x >> i).  If LEN is non-NULL only count instructions
   into *LEN.  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern indicates a scratch register %3 is available.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift count >= 16: result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsr %B0" CR_TAB
                      "ror %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsr %B0" CR_TAB
                      "ror %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsl %A0" CR_TAB
                  "rol %B0" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "lsl %A0" CR_TAB
                  "rol %B0" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "mov %B0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "neg %B0");

        case 8:
          return *len = 2, ("mov %A0,%B1" CR_TAB
                            "clr %B0");

        case 9:
          *len = 3;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0");

        case 10:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 11:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "lsr %A0" CR_TAB
                      "andi %A0,0x07");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x08" CR_TAB
                      "mul %B0,%3" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "lsr %A0" CR_TAB
                      "ldi %3,0x07" CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the multiplier 8 in r1 via T-flag to avoid needing
                 an LD register or scratch.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,3" CR_TAB
                      "mul %B0,r1" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x04" CR_TAB
                      "mul %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x04" CR_TAB
                      "mul %B0,%3" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "ldi %B0,6" "\n1:\t"
                      "lsr %A0" CR_TAB
                      "dec %B0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");

        case 15:
          *len = 4;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
5890 /* 24-bit logic shift right */
/* Output assembler for a 24-bit logical shift right.  OP[0] is the
   destination, OP[1] the source, OP[2] the shift count.  If PLEN is
   non-NULL only count instructions into *PLEN.  */

const char*
avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Move order depends on whether DEST overlaps SRC from below
             or above.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0", op, plen, 3);
          else
            return avr_asm_len ("clr %C0"     CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 3);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0"  CR_TAB
                              "clr %C0", op, plen, 2);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Only the sign bit survives: result is 0 or 1.  */
          return avr_asm_len ("clr %A0"    CR_TAB
                              "sbrc %C0,7" CR_TAB
                              "inc %A0"    CR_TAB
                              "clr %B0"    CR_TAB
                              "clr %C0", op, plen, 5);
        } /* switch */
    }

  out_shift_with_cnt ("lsr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
5944 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Output assembler for a 32-bit logical shift right
   ((unsigned int)x >> i).  If LEN is non-NULL only count instructions
   into *LEN.  */

const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift count >= 32: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Move order depends on whether DEST overlaps SRC from
               below or above.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0");
            else
              return ("clr %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 2, ("clr %C0" CR_TAB
                                "clr %D0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %A0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "clr %D0");
            else
              return *len = 4, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "clr %D0");
          }

        case 24:
          return *len = 4, ("mov %A0,%D1" CR_TAB
                            "clr %B0" CR_TAB
                            "clr %C0" CR_TAB
                            "clr %D0");

        case 31:
          /* Only the sign bit survives: result is 0 or 1.  */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "sbrc %D0,7" CR_TAB
                  "inc %A0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %D0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
6034 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6036 XOP[0] = XOP[0] + XOP[2]
6038 and return "". If PLEN == NULL, print assembler instructions to perform the
6039 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6040 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
6041 CODE == PLUS: perform addition by using ADD instructions.
6042 CODE == MINUS: perform addition by using SUB instructions.
6043 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
/* Worker for avr_out_plus/avr_out_plus64: emit (or measure, if PLEN is
   non-NULL) the byte-wise addition XOP[0] += XOP[2] using either ADD
   (CODE == PLUS) or SUB (CODE == MINUS) instructions; the constant is
   negated first in the MINUS case.  XOP[3] is an 8-bit scratch register.
   *PCC receives the effect on cc0.  */

static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
{
  /* MODE of the operation.  */
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (MINUS == code)
    xval = simplify_unary_operation (NEG, mode, xval, mode);

  op[2] = xop[3];

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_ZN;
                }

              /* Consumed two bytes at once; skip the odd byte too.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Adding 0 needs no insn unless a carry must be propagated.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && !started
               && i == n_bytes - 1)
        {
          /* Last byte with +/-1 and no carry pending: one INC/DEC does.  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || REG_P (op[2]));

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

  /* No output doesn't change cc0.  */

  if (plen && *plen == 0)
    *pcc = CC_NONE;
}
6195 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6197 XOP[0] = XOP[0] + XOP[2]
6199 and return "". If PLEN == NULL, print assembler instructions to perform the
6200 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6201 words) printed with PLEN == NULL.
6202 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
6203 condition code (with respect to XOP[0]). */
6205 const char*
6206 avr_out_plus (rtx *xop, int *plen, int *pcc)
6208 int len_plus, len_minus;
6209 int cc_plus, cc_minus, cc_dummy;
6211 if (!pcc)
6212 pcc = &cc_dummy;
6214 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6216 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
6217 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
6219 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6221 if (plen)
6223 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6224 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6226 else if (len_minus <= len_plus)
6227 avr_out_plus_1 (xop, NULL, MINUS, pcc);
6228 else
6229 avr_out_plus_1 (xop, NULL, PLUS, pcc);
6231 return "";
6235 /* Same as above but XOP has just 3 entries.
6236 Supply a dummy 4th operand. */
6238 const char*
6239 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6241 rtx op[4];
6243 op[0] = xop[0];
6244 op[1] = xop[1];
6245 op[2] = xop[2];
6246 op[3] = NULL_RTX;
6248 return avr_out_plus (op, plen, pcc);
6252 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6254 const char*
6255 avr_out_plus64 (rtx addend, int *plen)
6257 int cc_dummy;
6258 rtx op[4];
6260 op[0] = gen_rtx_REG (DImode, 18);
6261 op[1] = op[0];
6262 op[2] = addend;
6263 op[3] = NULL_RTX;
6265 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6267 return "";
6270 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6271 time constant XOP[2]:
6273 XOP[0] = XOP[0] <op> XOP[2]
6275 and return "". If PLEN == NULL, print assembler instructions to perform the
6276 operation; otherwise, set *PLEN to the length of the instruction sequence
6277 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6278 register or SCRATCH if no clobber register is needed for the operation. */
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
   time constant XOP[2]:  XOP[0] = XOP[0] <op> XOP[2].  Return "".
   If PLEN == NULL emit the instructions, else store the sequence length
   (in words) into *PLEN.  XOP[3] is an 8-bit clobber register or SCRATCH.
   The operation code is read back from the insn's SET_SRC.  */

const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknow.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX */
  rtx op[4];

  op[2] = xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* Single bit: set it through the T-flag.  Reuse an already
                 set T-flag from a previous byte if possible.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* All bits: load 0xff, reusing a register that already
                 holds 0xff when one exists.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* Single zero bit: clear it through the T-flag.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
6421 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6422 PLEN != NULL: Set *PLEN to the length of that sequence.
6423 Return "". */
/* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
   PLEN != NULL: Set *PLEN to the length of that sequence.
   Return "".  */

const char*
avr_out_addto_sp (rtx *op, int *plen)
{
  /* Size (in bytes) of a return address on this device.  */
  int pc_len = AVR_2_BYTE_PC ? 2 : 3;
  int addend = INTVAL (op[0]);

  if (plen)
    *plen = 0;

  if (addend < 0)
    {
      if (flag_verbose_asm || flag_print_asm_name)
        avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);

      /* "rcall ." pushes a return address, lowering SP by pc_len bytes
         per instruction; use it for the bulk of the decrement.  */
      while (addend <= -pc_len)
        {
          addend += pc_len;
          avr_asm_len ("rcall .", op, plen, 1);
        }

      /* Single-byte pushes for the remainder.  */
      while (addend++ < 0)
        avr_asm_len ("push __zero_reg__", op, plen, 1);
    }
  else if (addend > 0)
    {
      if (flag_verbose_asm || flag_print_asm_name)
        avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);

      while (addend-- > 0)
        avr_asm_len ("pop __tmp_reg__", op, plen, 1);
    }

  return "";
}
6461 /* Create RTL split patterns for byte sized rotate expressions. This
6462 produces a series of move instructions and considers overlap situations.
6463 Overlapping non-HImode operands need a scratch register. */
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.  */

bool
avr_rotate_bytes (rtx operands[])
{
  int i, j;
  enum machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  enum machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* Himode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          /* Classic three-XOR in-place swap of the two bytes.  */
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  */
      struct {
        rtx src, dst;
        int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
          move[i].links = -1;
        }
      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
        for (i = 0; i < size; i++)
          if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
            for (j = 0; j < size; j++)
              if (j != i && rtx_equal_p (move[j].src, move[i].dst))
                {
                  /* The dst of move i is the src of move j.  */
                  move[i].links = j;
                  break;
                }

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */
      do
        {
          blocked = -1;
          moves = 0;
          /* Emit move where dst is not also a src or we have used that
             src already.  */
          for (i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    moves++;
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove conflict from list.  */
                    move[i].src = NULL_RTX;
                  }
                else
                  blocked = i;
              }

          /* Check for deadlock.  This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              gcc_assert (SCRATCH != GET_CODE (scratch));

              move[size].src = move[blocked].dst;
              move[size].dst = scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occuring.  */
              move[blocked].links = size;
              size=size+1;
            }
        }
      while (blocked != -1);
    }
  return true;
}
6609 /* Modifies the length assigned to instruction INSN
6610 LEN is the initially computed length of the insn. */
/* Modifies the length assigned to instruction INSN.
   LEN is the initially computed length of the insn; dispatch on the
   insn attribute "adjust_len" to the output function that knows the
   real length, which it stores back into LEN.  */

int
adjust_insn_length (rtx insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (-1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each callee overwrites LEN via &len.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
    case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
    case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
      avr_out_plus_noclobber (op, &len, NULL); break;

    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    default:
      gcc_unreachable();
    }

  return len;
}
6701 /* Return nonzero if register REG dead after INSN. */
6704 reg_unused_after (rtx insn, rtx reg)
6706 return (dead_or_set_p (insn, reg)
6707 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6710 /* Return nonzero if REG is not used after INSN.
6711 We assume REG is a reload reg, and therefore does
6712 not live past labels. It may live past calls or jumps though. */
6715 _reg_unused_after (rtx insn, rtx reg)
6717 enum rtx_code code;
6718 rtx set;
6720 /* If the reg is set by this instruction, then it is safe for our
6721 case. Disregard the case where this is a store to memory, since
6722 we are checking a register used in the store address. */
6723 set = single_set (insn);
6724 if (set && GET_CODE (SET_DEST (set)) != MEM
6725 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6726 return 1;
6728 while ((insn = NEXT_INSN (insn)))
6730 rtx set;
6731 code = GET_CODE (insn);
6733 #if 0
6734 /* If this is a label that existed before reload, then the register
6735 if dead here. However, if this is a label added by reorg, then
6736 the register may still be live here. We can't tell the difference,
6737 so we just ignore labels completely. */
6738 if (code == CODE_LABEL)
6739 return 1;
6740 /* else */
6741 #endif
6743 if (!INSN_P (insn))
6744 continue;
6746 if (code == JUMP_INSN)
6747 return 0;
6749 /* If this is a sequence, we must handle them all at once.
6750 We could have for instance a call that sets the target register,
6751 and an insn in a delay slot that uses the register. In this case,
6752 we must return 0. */
6753 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6755 int i;
6756 int retval = 0;
6758 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6760 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6761 rtx set = single_set (this_insn);
6763 if (GET_CODE (this_insn) == CALL_INSN)
6764 code = CALL_INSN;
6765 else if (GET_CODE (this_insn) == JUMP_INSN)
6767 if (INSN_ANNULLED_BRANCH_P (this_insn))
6768 return 0;
6769 code = JUMP_INSN;
6772 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6773 return 0;
6774 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6776 if (GET_CODE (SET_DEST (set)) != MEM)
6777 retval = 1;
6778 else
6779 return 0;
6781 if (set == 0
6782 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6783 return 0;
6785 if (retval == 1)
6786 return 1;
6787 else if (code == JUMP_INSN)
6788 return 0;
6791 if (code == CALL_INSN)
6793 rtx tem;
6794 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6795 if (GET_CODE (XEXP (tem, 0)) == USE
6796 && REG_P (XEXP (XEXP (tem, 0), 0))
6797 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6798 return 0;
6799 if (call_used_regs[REGNO (reg)])
6800 return 1;
6803 set = single_set (insn);
6805 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6806 return 0;
6807 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6808 return GET_CODE (SET_DEST (set)) != MEM;
6809 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6810 return 0;
6812 return 1;
6816 /* Return RTX that represents the lower 16 bits of a constant address.
6817 Unfortunately, simplify_gen_subreg does not handle this case. */
6819 static rtx
6820 avr_const_address_lo16 (rtx x)
6822 rtx lo16;
6824 switch (GET_CODE (x))
6826 default:
6827 break;
6829 case CONST:
6830 if (PLUS == GET_CODE (XEXP (x, 0))
6831 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6832 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6834 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6835 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6837 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6838 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6840 return lo16;
6843 break;
6845 case SYMBOL_REF:
6847 const char *name = XSTR (x, 0);
6849 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6853 avr_edump ("\n%?: %r\n", x);
6854 gcc_unreachable();
6858 /* Target hook for assembling integer objects. The AVR version needs
6859 special handling for references to certain labels. */
6861 static bool
6862 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6864 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6865 && text_segment_operand (x, VOIDmode) )
6867 fputs ("\t.word\tgs(", asm_out_file);
6868 output_addr_const (asm_out_file, x);
6869 fputs (")\n", asm_out_file);
6871 return true;
6873 else if (GET_MODE (x) == PSImode)
6875 default_assemble_integer (avr_const_address_lo16 (x),
6876 GET_MODE_SIZE (HImode), aligned_p);
6878 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6879 " extension for hh8(", asm_out_file);
6880 output_addr_const (asm_out_file, x);
6881 fputs (")\"\n", asm_out_file);
6883 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6884 output_addr_const (asm_out_file, x);
6885 fputs (")\n", asm_out_file);
6887 return true;
6890 return default_assemble_integer (x, size, aligned_p);
6894 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6896 void
6897 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6900 /* If the function has the 'signal' or 'interrupt' attribute, test to
6901 make sure that the name of the function is "__vector_NN" so as to
6902 catch when the user misspells the interrupt vector name. */
6904 if (cfun->machine->is_interrupt)
6906 if (!STR_PREFIX_P (name, "__vector"))
6908 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6909 "%qs appears to be a misspelled interrupt handler",
6910 name);
6913 else if (cfun->machine->is_signal)
6915 if (!STR_PREFIX_P (name, "__vector"))
6917 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6918 "%qs appears to be a misspelled signal handler",
6919 name);
6923 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6924 ASM_OUTPUT_LABEL (file, name);
6928 /* Return value is nonzero if pseudos that have been
6929 assigned to registers of class CLASS would likely be spilled
6930 because registers of CLASS are needed for spill registers. */
6932 static bool
6933 avr_class_likely_spilled_p (reg_class_t c)
6935 return (c != ALL_REGS && c != ADDW_REGS);
6938 /* Valid attributes:
6939 progmem - put data to program memory;
6940 signal - make a function to be hardware interrupt. After function
6941 prologue interrupts are disabled;
6942 interrupt - make a function to be hardware interrupt. After function
6943 prologue interrupts are enabled;
6944 naked - don't generate function prologue/epilogue and `ret' command.
6946 Only `progmem' attribute valid for type. */
6948 /* Handle a "progmem" attribute; arguments as in
6949 struct attribute_spec.handler. */
6950 static tree
6951 avr_handle_progmem_attribute (tree *node, tree name,
6952 tree args ATTRIBUTE_UNUSED,
6953 int flags ATTRIBUTE_UNUSED,
6954 bool *no_add_attrs)
6956 if (DECL_P (*node))
6958 if (TREE_CODE (*node) == TYPE_DECL)
6960 /* This is really a decl attribute, not a type attribute,
6961 but try to handle it for GCC 3.0 backwards compatibility. */
6963 tree type = TREE_TYPE (*node);
6964 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6965 tree newtype = build_type_attribute_variant (type, attr);
6967 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6968 TREE_TYPE (*node) = newtype;
6969 *no_add_attrs = true;
6971 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6973 *no_add_attrs = false;
6975 else
6977 warning (OPT_Wattributes, "%qE attribute ignored",
6978 name);
6979 *no_add_attrs = true;
6983 return NULL_TREE;
6986 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6987 struct attribute_spec.handler. */
6989 static tree
6990 avr_handle_fndecl_attribute (tree *node, tree name,
6991 tree args ATTRIBUTE_UNUSED,
6992 int flags ATTRIBUTE_UNUSED,
6993 bool *no_add_attrs)
6995 if (TREE_CODE (*node) != FUNCTION_DECL)
6997 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6998 name);
6999 *no_add_attrs = true;
7002 return NULL_TREE;
7005 static tree
7006 avr_handle_fntype_attribute (tree *node, tree name,
7007 tree args ATTRIBUTE_UNUSED,
7008 int flags ATTRIBUTE_UNUSED,
7009 bool *no_add_attrs)
7011 if (TREE_CODE (*node) != FUNCTION_TYPE)
7013 warning (OPT_Wattributes, "%qE attribute only applies to functions",
7014 name);
7015 *no_add_attrs = true;
7018 return NULL_TREE;
7022 /* AVR attributes. */
7023 static const struct attribute_spec
7024 avr_attribute_table[] =
7026 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7027 affects_type_identity } */
7028 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
7029 false },
7030 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
7031 false },
7032 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
7033 false },
7034 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
7035 false },
7036 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
7037 false },
7038 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
7039 false },
7040 { NULL, 0, 0, false, false, false, NULL, false }
7044 /* Look if DECL shall be placed in program memory space by
7045 means of attribute `progmem' or some address-space qualifier.
7046 Return non-zero if DECL is data that must end up in Flash and
7047 zero if the data lives in RAM (.bss, .data, .rodata, ...).
7049 Return 2 if DECL is located in 24-bit flash address-space
7050 Return 1 if DECL is located in 16-bit flash address-space
7051 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
7052 Return 0 otherwise */
7055 avr_progmem_p (tree decl, tree attributes)
7057 tree a;
7059 if (TREE_CODE (decl) != VAR_DECL)
7060 return 0;
7062 if (avr_decl_memx_p (decl))
7063 return 2;
7065 if (avr_decl_flash_p (decl))
7066 return 1;
7068 if (NULL_TREE
7069 != lookup_attribute ("progmem", attributes))
7070 return -1;
7072 a = decl;
7075 a = TREE_TYPE(a);
7076 while (TREE_CODE (a) == ARRAY_TYPE);
7078 if (a == error_mark_node)
7079 return 0;
7081 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
7082 return -1;
7084 return 0;
7088 /* Scan type TYP for pointer references to address space ASn.
7089 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
7090 the AS are also declared to be CONST.
7091 Otherwise, return the respective addres space, i.e. a value != 0. */
7093 static addr_space_t
7094 avr_nonconst_pointer_addrspace (tree typ)
7096 while (ARRAY_TYPE == TREE_CODE (typ))
7097 typ = TREE_TYPE (typ);
7099 if (POINTER_TYPE_P (typ))
7101 addr_space_t as;
7102 tree target = TREE_TYPE (typ);
7104 /* Pointer to function: Test the function's return type. */
7106 if (FUNCTION_TYPE == TREE_CODE (target))
7107 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
7109 /* "Ordinary" pointers... */
7111 while (TREE_CODE (target) == ARRAY_TYPE)
7112 target = TREE_TYPE (target);
7114 /* Pointers to non-generic address space must be const.
7115 Refuse address spaces outside the device's flash. */
7117 as = TYPE_ADDR_SPACE (target);
7119 if (!ADDR_SPACE_GENERIC_P (as)
7120 && (!TYPE_READONLY (target)
7121 || avr_addrspace[as].segment >= avr_current_device->n_flash))
7123 return as;
7126 /* Scan pointer's target type. */
7128 return avr_nonconst_pointer_addrspace (target);
7131 return ADDR_SPACE_GENERIC;
7135 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
7136 go along with CONST qualifier. Writing to these address spaces should
7137 be detected and complained about as early as possible. */
7139 static bool
7140 avr_pgm_check_var_decl (tree node)
7142 const char *reason = NULL;
7144 addr_space_t as = ADDR_SPACE_GENERIC;
7146 gcc_assert (as == 0);
7148 if (avr_log.progmem)
7149 avr_edump ("%?: %t\n", node);
7151 switch (TREE_CODE (node))
7153 default:
7154 break;
7156 case VAR_DECL:
7157 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7158 reason = "variable";
7159 break;
7161 case PARM_DECL:
7162 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7163 reason = "function parameter";
7164 break;
7166 case FIELD_DECL:
7167 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7168 reason = "structure field";
7169 break;
7171 case FUNCTION_DECL:
7172 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
7174 reason = "return type of function";
7175 break;
7177 case POINTER_TYPE:
7178 if (as = avr_nonconst_pointer_addrspace (node), as)
7179 reason = "pointer";
7180 break;
7183 if (reason)
7185 avr_edump ("%?: %s, %d, %d\n",
7186 avr_addrspace[as].name,
7187 avr_addrspace[as].segment, avr_current_device->n_flash);
7188 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7190 if (TYPE_P (node))
7191 error ("%qT uses address space %qs beyond flash of %qs",
7192 node, avr_addrspace[as].name, avr_current_device->name);
7193 else
7194 error ("%s %q+D uses address space %qs beyond flash of %qs",
7195 reason, node, avr_addrspace[as].name,
7196 avr_current_device->name);
7198 else
7200 if (TYPE_P (node))
7201 error ("pointer targeting address space %qs must be const in %qT",
7202 avr_addrspace[as].name, node);
7203 else
7204 error ("pointer targeting address space %qs must be const"
7205 " in %s %q+D",
7206 avr_addrspace[as].name, reason, node);
7210 return reason == NULL;
7214 /* Add the section attribute if the variable is in progmem. */
7216 static void
7217 avr_insert_attributes (tree node, tree *attributes)
7219 avr_pgm_check_var_decl (node);
7221 if (TREE_CODE (node) == VAR_DECL
7222 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
7223 && avr_progmem_p (node, *attributes))
7225 addr_space_t as;
7226 tree node0 = node;
7228 /* For C++, we have to peel arrays in order to get correct
7229 determination of readonlyness. */
7232 node0 = TREE_TYPE (node0);
7233 while (TREE_CODE (node0) == ARRAY_TYPE);
7235 if (error_mark_node == node0)
7236 return;
7238 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
7240 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7242 error ("variable %q+D located in address space %qs"
7243 " beyond flash of %qs",
7244 node, avr_addrspace[as].name, avr_current_device->name);
7247 if (!TYPE_READONLY (node0)
7248 && !TREE_READONLY (node))
7250 const char *reason = "__attribute__((progmem))";
7252 if (!ADDR_SPACE_GENERIC_P (as))
7253 reason = avr_addrspace[as].name;
7255 if (avr_log.progmem)
7256 avr_edump ("\n%?: %t\n%t\n", node, node0);
7258 error ("variable %q+D must be const in order to be put into"
7259 " read-only section by means of %qs", node, reason);
7265 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7266 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7267 /* Track need of __do_clear_bss. */
7269 void
7270 avr_asm_output_aligned_decl_common (FILE * stream,
7271 const_tree decl ATTRIBUTE_UNUSED,
7272 const char *name,
7273 unsigned HOST_WIDE_INT size,
7274 unsigned int align, bool local_p)
7276 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7277 There is no need to trigger __do_clear_bss code for them. */
7279 if (!STR_PREFIX_P (name, "__gnu_lto"))
7280 avr_need_clear_bss_p = true;
7282 if (local_p)
7283 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7284 else
7285 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7289 /* Unnamed section callback for data_section
7290 to track need of __do_copy_data. */
7292 static void
7293 avr_output_data_section_asm_op (const void *data)
7295 avr_need_copy_data_p = true;
7297 /* Dispatch to default. */
7298 output_section_asm_op (data);
7302 /* Unnamed section callback for bss_section
7303 to track need of __do_clear_bss. */
7305 static void
7306 avr_output_bss_section_asm_op (const void *data)
7308 avr_need_clear_bss_p = true;
7310 /* Dispatch to default. */
7311 output_section_asm_op (data);
7315 /* Unnamed section callback for progmem*.data sections. */
7317 static void
7318 avr_output_progmem_section_asm_op (const void *data)
7320 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7321 (const char*) data);
7325 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7327 static void
7328 avr_asm_init_sections (void)
7330 unsigned int n;
7332 /* Set up a section for jump tables. Alignment is handled by
7333 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7335 if (AVR_HAVE_JMP_CALL)
7337 progmem_swtable_section
7338 = get_unnamed_section (0, output_section_asm_op,
7339 "\t.section\t.progmem.gcc_sw_table"
7340 ",\"a\",@progbits");
7342 else
7344 progmem_swtable_section
7345 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7346 "\t.section\t.progmem.gcc_sw_table"
7347 ",\"ax\",@progbits");
7350 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7352 progmem_section[n]
7353 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7354 progmem_section_prefix[n]);
7357 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7358 resp. `avr_need_copy_data_p'. */
7360 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7361 data_section->unnamed.callback = avr_output_data_section_asm_op;
7362 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7366 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7368 static section*
7369 avr_asm_function_rodata_section (tree decl)
7371 /* If a function is unused and optimized out by -ffunction-sections
7372 and --gc-sections, ensure that the same will happen for its jump
7373 tables by putting them into individual sections. */
7375 unsigned int flags;
7376 section * frodata;
7378 /* Get the frodata section from the default function in varasm.c
7379 but treat function-associated data-like jump tables as code
7380 rather than as user defined data. AVR has no constant pools. */
7382 int fdata = flag_data_sections;
7384 flag_data_sections = flag_function_sections;
7385 frodata = default_function_rodata_section (decl);
7386 flag_data_sections = fdata;
7387 flags = frodata->common.flags;
7390 if (frodata != readonly_data_section
7391 && flags & SECTION_NAMED)
7393 /* Adjust section flags and replace section name prefix. */
7395 unsigned int i;
7397 static const char* const prefix[] =
7399 ".rodata", ".progmem.gcc_sw_table",
7400 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7403 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7405 const char * old_prefix = prefix[i];
7406 const char * new_prefix = prefix[i+1];
7407 const char * name = frodata->named.name;
7409 if (STR_PREFIX_P (name, old_prefix))
7411 const char *rname = ACONCAT ((new_prefix,
7412 name + strlen (old_prefix), NULL));
7413 flags &= ~SECTION_CODE;
7414 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7416 return get_section (rname, flags, frodata->named.decl);
7421 return progmem_swtable_section;
7425 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7426 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7428 static void
7429 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7431 if (flags & AVR_SECTION_PROGMEM)
7433 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7434 int segment = avr_addrspace[as].segment;
7435 const char *old_prefix = ".rodata";
7436 const char *new_prefix = progmem_section_prefix[segment];
7438 if (STR_PREFIX_P (name, old_prefix))
7440 const char *sname = ACONCAT ((new_prefix,
7441 name + strlen (old_prefix), NULL));
7442 default_elf_asm_named_section (sname, flags, decl);
7443 return;
7446 default_elf_asm_named_section (new_prefix, flags, decl);
7447 return;
7450 if (!avr_need_copy_data_p)
7451 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7452 || STR_PREFIX_P (name, ".rodata")
7453 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7455 if (!avr_need_clear_bss_p)
7456 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7458 default_elf_asm_named_section (name, flags, decl);
7461 static unsigned int
7462 avr_section_type_flags (tree decl, const char *name, int reloc)
7464 unsigned int flags = default_section_type_flags (decl, name, reloc);
7466 if (STR_PREFIX_P (name, ".noinit"))
7468 if (decl && TREE_CODE (decl) == VAR_DECL
7469 && DECL_INITIAL (decl) == NULL_TREE)
7470 flags |= SECTION_BSS; /* @nobits */
7471 else
7472 warning (0, "only uninitialized variables can be placed in the "
7473 ".noinit section");
7476 if (decl && DECL_P (decl)
7477 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7479 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7481 /* Attribute progmem puts data in generic address space.
7482 Set section flags as if it was in __flash to get the right
7483 section prefix in the remainder. */
7485 if (ADDR_SPACE_GENERIC_P (as))
7486 as = ADDR_SPACE_FLASH;
7488 flags |= as * SECTION_MACH_DEP;
7489 flags &= ~SECTION_WRITE;
7490 flags &= ~SECTION_BSS;
7493 return flags;
7497 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7499 static void
7500 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7502 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7503 readily available, see PR34734. So we postpone the warning
7504 about uninitialized data in program memory section until here. */
7506 if (new_decl_p
7507 && decl && DECL_P (decl)
7508 && NULL_TREE == DECL_INITIAL (decl)
7509 && !DECL_EXTERNAL (decl)
7510 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7512 warning (OPT_Wuninitialized,
7513 "uninitialized variable %q+D put into "
7514 "program memory area", decl);
7517 default_encode_section_info (decl, rtl, new_decl_p);
7519 if (decl && DECL_P (decl)
7520 && TREE_CODE (decl) != FUNCTION_DECL
7521 && MEM_P (rtl)
7522 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7524 rtx sym = XEXP (rtl, 0);
7525 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7527 /* PSTR strings are in generic space but located in flash:
7528 patch address space. */
7530 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7531 as = ADDR_SPACE_FLASH;
7533 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7538 /* Implement `TARGET_ASM_SELECT_SECTION' */
7540 static section *
7541 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7543 section * sect = default_elf_select_section (decl, reloc, align);
7545 if (decl && DECL_P (decl)
7546 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7548 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7549 int segment = avr_addrspace[as].segment;
7551 if (sect->common.flags & SECTION_NAMED)
7553 const char * name = sect->named.name;
7554 const char * old_prefix = ".rodata";
7555 const char * new_prefix = progmem_section_prefix[segment];
7557 if (STR_PREFIX_P (name, old_prefix))
7559 const char *sname = ACONCAT ((new_prefix,
7560 name + strlen (old_prefix), NULL));
7561 return get_section (sname, sect->common.flags, sect->named.decl);
7565 return progmem_section[segment];
7568 return sect;
7571 /* Implement `TARGET_ASM_FILE_START'. */
7572 /* Outputs some text at the start of each assembler file. */
7574 static void
7575 avr_file_start (void)
7577 int sfr_offset = avr_current_arch->sfr_offset;
7579 if (avr_current_arch->asm_only)
7580 error ("MCU %qs supported for assembler only", avr_current_device->name);
7582 default_file_start ();
7584 /* Print I/O addresses of some SFRs used with IN and OUT. */
7586 if (!AVR_HAVE_8BIT_SP)
7587 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
7589 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
7590 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
7591 if (AVR_HAVE_RAMPZ)
7592 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
7593 if (AVR_HAVE_RAMPY)
7594 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
7595 if (AVR_HAVE_RAMPX)
7596 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
7597 if (AVR_HAVE_RAMPD)
7598 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
7599 if (AVR_XMEGA)
7600 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
7601 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
7602 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
7606 /* Implement `TARGET_ASM_FILE_END'. */
7607 /* Outputs to the stdio stream FILE some
7608 appropriate text to go at the end of an assembler file. */
7610 static void
7611 avr_file_end (void)
7613 /* Output these only if there is anything in the
7614 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7615 input section(s) - some code size can be saved by not
7616 linking in the initialization code from libgcc if resp.
7617 sections are empty. */
7619 if (avr_need_copy_data_p)
7620 fputs (".global __do_copy_data\n", asm_out_file);
7622 if (avr_need_clear_bss_p)
7623 fputs (".global __do_clear_bss\n", asm_out_file);
7626 /* Choose the order in which to allocate hard registers for
7627 pseudo-registers local to a basic block.
7629 Store the desired register order in the array `reg_alloc_order'.
7630 Element 0 should be the register to allocate first; element 1, the
7631 next register; and so on. */
7633 void
7634 order_regs_for_local_alloc (void)
7636 unsigned int i;
7637 static const int order_0[] = {
7638 24,25,
7639 18,19,
7640 20,21,
7641 22,23,
7642 30,31,
7643 26,27,
7644 28,29,
7645 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7646 0,1,
7647 32,33,34,35
7649 static const int order_1[] = {
7650 18,19,
7651 20,21,
7652 22,23,
7653 24,25,
7654 30,31,
7655 26,27,
7656 28,29,
7657 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7658 0,1,
7659 32,33,34,35
7661 static const int order_2[] = {
7662 25,24,
7663 23,22,
7664 21,20,
7665 19,18,
7666 30,31,
7667 26,27,
7668 28,29,
7669 17,16,
7670 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7671 1,0,
7672 32,33,34,35
7675 const int *order = (TARGET_ORDER_1 ? order_1 :
7676 TARGET_ORDER_2 ? order_2 :
7677 order_0);
7678 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7679 reg_alloc_order[i] = order[i];
7683 /* Implement `TARGET_REGISTER_MOVE_COST' */
7685 static int
7686 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7687 reg_class_t from, reg_class_t to)
7689 return (from == STACK_REG ? 6
7690 : to == STACK_REG ? 12
7691 : 2);
7695 /* Implement `TARGET_MEMORY_MOVE_COST' */
7697 static int
7698 avr_memory_move_cost (enum machine_mode mode,
7699 reg_class_t rclass ATTRIBUTE_UNUSED,
7700 bool in ATTRIBUTE_UNUSED)
7702 return (mode == QImode ? 2
7703 : mode == HImode ? 4
7704 : mode == SImode ? 8
7705 : mode == SFmode ? 8
7706 : 16);
7710 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7711 cost of an RTX operand given its context. X is the rtx of the
7712 operand, MODE is its mode, and OUTER is the rtx_code of this
7713 operand's parent operator. */
7715 static int
7716 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7717 int opno, bool speed)
7719 enum rtx_code code = GET_CODE (x);
7720 int total;
7722 switch (code)
7724 case REG:
7725 case SUBREG:
7726 return 0;
7728 case CONST_INT:
7729 case CONST_DOUBLE:
7730 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7732 default:
7733 break;
7736 total = 0;
7737 avr_rtx_costs (x, code, outer, opno, &total, speed);
7738 return total;
7741 /* Worker function for AVR backend's rtx_cost function.
7742 X is rtx expression whose cost is to be calculated.
7743 Return true if the complete cost has been computed.
7744 Return false if subexpressions should be scanned.
7745 In either case, *TOTAL contains the cost result. */
7747 static bool
7748 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7749 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7751 enum rtx_code code = (enum rtx_code) codearg;
7752 enum machine_mode mode = GET_MODE (x);
7753 HOST_WIDE_INT val;
7755 switch (code)
7757 case CONST_INT:
7758 case CONST_DOUBLE:
7759 case SYMBOL_REF:
7760 case CONST:
7761 case LABEL_REF:
7762 /* Immediate constants are as cheap as registers. */
7763 *total = 0;
7764 return true;
7766 case MEM:
7767 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7768 return true;
7770 case NEG:
7771 switch (mode)
7773 case QImode:
7774 case SFmode:
7775 *total = COSTS_N_INSNS (1);
7776 break;
7778 case HImode:
7779 case PSImode:
7780 case SImode:
7781 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7782 break;
7784 default:
7785 return false;
7787 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7788 return true;
7790 case ABS:
7791 switch (mode)
7793 case QImode:
7794 case SFmode:
7795 *total = COSTS_N_INSNS (1);
7796 break;
7798 default:
7799 return false;
7801 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7802 return true;
7804 case NOT:
7805 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7806 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7807 return true;
7809 case ZERO_EXTEND:
7810 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7811 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7812 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7813 return true;
7815 case SIGN_EXTEND:
7816 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7817 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7818 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7819 return true;
7821 case PLUS:
7822 switch (mode)
7824 case QImode:
7825 if (AVR_HAVE_MUL
7826 && MULT == GET_CODE (XEXP (x, 0))
7827 && register_operand (XEXP (x, 1), QImode))
7829 /* multiply-add */
7830 *total = COSTS_N_INSNS (speed ? 4 : 3);
7831 /* multiply-add with constant: will be split and load constant. */
7832 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7833 *total = COSTS_N_INSNS (1) + *total;
7834 return true;
7836 *total = COSTS_N_INSNS (1);
7837 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7838 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7839 break;
7841 case HImode:
7842 if (AVR_HAVE_MUL
7843 && (MULT == GET_CODE (XEXP (x, 0))
7844 || ASHIFT == GET_CODE (XEXP (x, 0)))
7845 && register_operand (XEXP (x, 1), HImode)
7846 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7847 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7849 /* multiply-add */
7850 *total = COSTS_N_INSNS (speed ? 5 : 4);
7851 /* multiply-add with constant: will be split and load constant. */
7852 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7853 *total = COSTS_N_INSNS (1) + *total;
7854 return true;
7856 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7858 *total = COSTS_N_INSNS (2);
7859 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7860 speed);
7862 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7863 *total = COSTS_N_INSNS (1);
7864 else
7865 *total = COSTS_N_INSNS (2);
7866 break;
7868 case PSImode:
7869 if (!CONST_INT_P (XEXP (x, 1)))
7871 *total = COSTS_N_INSNS (3);
7872 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7873 speed);
7875 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7876 *total = COSTS_N_INSNS (2);
7877 else
7878 *total = COSTS_N_INSNS (3);
7879 break;
7881 case SImode:
7882 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7884 *total = COSTS_N_INSNS (4);
7885 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7886 speed);
7888 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7889 *total = COSTS_N_INSNS (1);
7890 else
7891 *total = COSTS_N_INSNS (4);
7892 break;
7894 default:
7895 return false;
7897 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7898 return true;
7900 case MINUS:
7901 if (AVR_HAVE_MUL
7902 && QImode == mode
7903 && register_operand (XEXP (x, 0), QImode)
7904 && MULT == GET_CODE (XEXP (x, 1)))
7906 /* multiply-sub */
7907 *total = COSTS_N_INSNS (speed ? 4 : 3);
7908 /* multiply-sub with constant: will be split and load constant. */
7909 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7910 *total = COSTS_N_INSNS (1) + *total;
7911 return true;
7913 if (AVR_HAVE_MUL
7914 && HImode == mode
7915 && register_operand (XEXP (x, 0), HImode)
7916 && (MULT == GET_CODE (XEXP (x, 1))
7917 || ASHIFT == GET_CODE (XEXP (x, 1)))
7918 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7919 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7921 /* multiply-sub */
7922 *total = COSTS_N_INSNS (speed ? 5 : 4);
7923 /* multiply-sub with constant: will be split and load constant. */
7924 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7925 *total = COSTS_N_INSNS (1) + *total;
7926 return true;
7928 /* FALLTHRU */
7929 case AND:
7930 case IOR:
7931 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7932 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7933 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7934 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7935 return true;
7937 case XOR:
7938 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7939 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7940 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7941 return true;
7943 case MULT:
7944 switch (mode)
7946 case QImode:
7947 if (AVR_HAVE_MUL)
7948 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7949 else if (!speed)
7950 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7951 else
7952 return false;
7953 break;
7955 case HImode:
7956 if (AVR_HAVE_MUL)
7958 rtx op0 = XEXP (x, 0);
7959 rtx op1 = XEXP (x, 1);
7960 enum rtx_code code0 = GET_CODE (op0);
7961 enum rtx_code code1 = GET_CODE (op1);
7962 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7963 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7965 if (ex0
7966 && (u8_operand (op1, HImode)
7967 || s8_operand (op1, HImode)))
7969 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7970 return true;
7972 if (ex0
7973 && register_operand (op1, HImode))
7975 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7976 return true;
7978 else if (ex0 || ex1)
7980 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7981 return true;
7983 else if (register_operand (op0, HImode)
7984 && (u8_operand (op1, HImode)
7985 || s8_operand (op1, HImode)))
7987 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7988 return true;
7990 else
7991 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7993 else if (!speed)
7994 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7995 else
7996 return false;
7997 break;
7999 case PSImode:
8000 if (!speed)
8001 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8002 else
8003 *total = 10;
8004 break;
8006 case SImode:
8007 if (AVR_HAVE_MUL)
8009 if (!speed)
8011 /* Add some additional costs besides CALL like moves etc. */
8013 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
8015 else
8017 /* Just a rough estimate. Even with -O2 we don't want bulky
8018 code expanded inline. */
8020 *total = COSTS_N_INSNS (25);
8023 else
8025 if (speed)
8026 *total = COSTS_N_INSNS (300);
8027 else
8028 /* Add some additional costs besides CALL like moves etc. */
8029 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
8032 return true;
8034 default:
8035 return false;
8037 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8038 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8039 return true;
8041 case DIV:
8042 case MOD:
8043 case UDIV:
8044 case UMOD:
8045 if (!speed)
8046 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8047 else
8048 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
8049 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8050 /* For div/mod with const-int divisor we have at least the cost of
8051 loading the divisor. */
8052 if (CONST_INT_P (XEXP (x, 1)))
8053 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
8054 /* Add some overall penaly for clobbering and moving around registers */
8055 *total += COSTS_N_INSNS (2);
8056 return true;
8058 case ROTATE:
8059 switch (mode)
8061 case QImode:
8062 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
8063 *total = COSTS_N_INSNS (1);
8065 break;
8067 case HImode:
8068 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
8069 *total = COSTS_N_INSNS (3);
8071 break;
8073 case SImode:
8074 if (CONST_INT_P (XEXP (x, 1)))
8075 switch (INTVAL (XEXP (x, 1)))
8077 case 8:
8078 case 24:
8079 *total = COSTS_N_INSNS (5);
8080 break;
8081 case 16:
8082 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
8083 break;
8085 break;
8087 default:
8088 return false;
8090 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8091 return true;
8093 case ASHIFT:
8094 switch (mode)
8096 case QImode:
8097 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8099 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8100 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8101 speed);
8103 else
8105 val = INTVAL (XEXP (x, 1));
8106 if (val == 7)
8107 *total = COSTS_N_INSNS (3);
8108 else if (val >= 0 && val <= 7)
8109 *total = COSTS_N_INSNS (val);
8110 else
8111 *total = COSTS_N_INSNS (1);
8113 break;
8115 case HImode:
8116 if (AVR_HAVE_MUL)
8118 if (const_2_to_7_operand (XEXP (x, 1), HImode)
8119 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
8120 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
8122 *total = COSTS_N_INSNS (!speed ? 4 : 6);
8123 return true;
8127 if (const1_rtx == (XEXP (x, 1))
8128 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
8130 *total = COSTS_N_INSNS (2);
8131 return true;
8134 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8136 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8137 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8138 speed);
8140 else
8141 switch (INTVAL (XEXP (x, 1)))
8143 case 0:
8144 *total = 0;
8145 break;
8146 case 1:
8147 case 8:
8148 *total = COSTS_N_INSNS (2);
8149 break;
8150 case 9:
8151 *total = COSTS_N_INSNS (3);
8152 break;
8153 case 2:
8154 case 3:
8155 case 10:
8156 case 15:
8157 *total = COSTS_N_INSNS (4);
8158 break;
8159 case 7:
8160 case 11:
8161 case 12:
8162 *total = COSTS_N_INSNS (5);
8163 break;
8164 case 4:
8165 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8166 break;
8167 case 6:
8168 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8169 break;
8170 case 5:
8171 *total = COSTS_N_INSNS (!speed ? 5 : 10);
8172 break;
8173 default:
8174 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8175 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8176 speed);
8178 break;
8180 case PSImode:
8181 if (!CONST_INT_P (XEXP (x, 1)))
8183 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8185 else
8186 switch (INTVAL (XEXP (x, 1)))
8188 case 0:
8189 *total = 0;
8190 break;
8191 case 1:
8192 case 8:
8193 case 16:
8194 *total = COSTS_N_INSNS (3);
8195 break;
8196 case 23:
8197 *total = COSTS_N_INSNS (5);
8198 break;
8199 default:
8200 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8201 break;
8203 break;
8205 case SImode:
8206 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8208 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8209 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8210 speed);
8212 else
8213 switch (INTVAL (XEXP (x, 1)))
8215 case 0:
8216 *total = 0;
8217 break;
8218 case 24:
8219 *total = COSTS_N_INSNS (3);
8220 break;
8221 case 1:
8222 case 8:
8223 case 16:
8224 *total = COSTS_N_INSNS (4);
8225 break;
8226 case 31:
8227 *total = COSTS_N_INSNS (6);
8228 break;
8229 case 2:
8230 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8231 break;
8232 default:
8233 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8234 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8235 speed);
8237 break;
8239 default:
8240 return false;
8242 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8243 return true;
8245 case ASHIFTRT:
8246 switch (mode)
8248 case QImode:
8249 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8251 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8252 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8253 speed);
8255 else
8257 val = INTVAL (XEXP (x, 1));
8258 if (val == 6)
8259 *total = COSTS_N_INSNS (4);
8260 else if (val == 7)
8261 *total = COSTS_N_INSNS (2);
8262 else if (val >= 0 && val <= 7)
8263 *total = COSTS_N_INSNS (val);
8264 else
8265 *total = COSTS_N_INSNS (1);
8267 break;
8269 case HImode:
8270 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8272 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8273 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8274 speed);
8276 else
8277 switch (INTVAL (XEXP (x, 1)))
8279 case 0:
8280 *total = 0;
8281 break;
8282 case 1:
8283 *total = COSTS_N_INSNS (2);
8284 break;
8285 case 15:
8286 *total = COSTS_N_INSNS (3);
8287 break;
8288 case 2:
8289 case 7:
8290 case 8:
8291 case 9:
8292 *total = COSTS_N_INSNS (4);
8293 break;
8294 case 10:
8295 case 14:
8296 *total = COSTS_N_INSNS (5);
8297 break;
8298 case 11:
8299 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8300 break;
8301 case 12:
8302 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8303 break;
8304 case 6:
8305 case 13:
8306 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8307 break;
8308 default:
8309 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8310 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8311 speed);
8313 break;
8315 case PSImode:
8316 if (!CONST_INT_P (XEXP (x, 1)))
8318 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8320 else
8321 switch (INTVAL (XEXP (x, 1)))
8323 case 0:
8324 *total = 0;
8325 break;
8326 case 1:
8327 *total = COSTS_N_INSNS (3);
8328 break;
8329 case 16:
8330 case 8:
8331 *total = COSTS_N_INSNS (5);
8332 break;
8333 case 23:
8334 *total = COSTS_N_INSNS (4);
8335 break;
8336 default:
8337 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8338 break;
8340 break;
8342 case SImode:
8343 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8345 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8346 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8347 speed);
8349 else
8350 switch (INTVAL (XEXP (x, 1)))
8352 case 0:
8353 *total = 0;
8354 break;
8355 case 1:
8356 *total = COSTS_N_INSNS (4);
8357 break;
8358 case 8:
8359 case 16:
8360 case 24:
8361 *total = COSTS_N_INSNS (6);
8362 break;
8363 case 2:
8364 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8365 break;
8366 case 31:
8367 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8368 break;
8369 default:
8370 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8371 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8372 speed);
8374 break;
8376 default:
8377 return false;
8379 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8380 return true;
8382 case LSHIFTRT:
8383 switch (mode)
8385 case QImode:
8386 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8388 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8389 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8390 speed);
8392 else
8394 val = INTVAL (XEXP (x, 1));
8395 if (val == 7)
8396 *total = COSTS_N_INSNS (3);
8397 else if (val >= 0 && val <= 7)
8398 *total = COSTS_N_INSNS (val);
8399 else
8400 *total = COSTS_N_INSNS (1);
8402 break;
8404 case HImode:
8405 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8407 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8408 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8409 speed);
8411 else
8412 switch (INTVAL (XEXP (x, 1)))
8414 case 0:
8415 *total = 0;
8416 break;
8417 case 1:
8418 case 8:
8419 *total = COSTS_N_INSNS (2);
8420 break;
8421 case 9:
8422 *total = COSTS_N_INSNS (3);
8423 break;
8424 case 2:
8425 case 10:
8426 case 15:
8427 *total = COSTS_N_INSNS (4);
8428 break;
8429 case 7:
8430 case 11:
8431 *total = COSTS_N_INSNS (5);
8432 break;
8433 case 3:
8434 case 12:
8435 case 13:
8436 case 14:
8437 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8438 break;
8439 case 4:
8440 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8441 break;
8442 case 5:
8443 case 6:
8444 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8445 break;
8446 default:
8447 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8448 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8449 speed);
8451 break;
8453 case PSImode:
8454 if (!CONST_INT_P (XEXP (x, 1)))
8456 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8458 else
8459 switch (INTVAL (XEXP (x, 1)))
8461 case 0:
8462 *total = 0;
8463 break;
8464 case 1:
8465 case 8:
8466 case 16:
8467 *total = COSTS_N_INSNS (3);
8468 break;
8469 case 23:
8470 *total = COSTS_N_INSNS (5);
8471 break;
8472 default:
8473 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8474 break;
8476 break;
8478 case SImode:
8479 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8481 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8482 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8483 speed);
8485 else
8486 switch (INTVAL (XEXP (x, 1)))
8488 case 0:
8489 *total = 0;
8490 break;
8491 case 1:
8492 *total = COSTS_N_INSNS (4);
8493 break;
8494 case 2:
8495 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8496 break;
8497 case 8:
8498 case 16:
8499 case 24:
8500 *total = COSTS_N_INSNS (4);
8501 break;
8502 case 31:
8503 *total = COSTS_N_INSNS (6);
8504 break;
8505 default:
8506 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8507 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8508 speed);
8510 break;
8512 default:
8513 return false;
8515 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8516 return true;
8518 case COMPARE:
8519 switch (GET_MODE (XEXP (x, 0)))
8521 case QImode:
8522 *total = COSTS_N_INSNS (1);
8523 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8524 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8525 break;
8527 case HImode:
8528 *total = COSTS_N_INSNS (2);
8529 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8530 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8531 else if (INTVAL (XEXP (x, 1)) != 0)
8532 *total += COSTS_N_INSNS (1);
8533 break;
8535 case PSImode:
8536 *total = COSTS_N_INSNS (3);
8537 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8538 *total += COSTS_N_INSNS (2);
8539 break;
8541 case SImode:
8542 *total = COSTS_N_INSNS (4);
8543 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8544 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8545 else if (INTVAL (XEXP (x, 1)) != 0)
8546 *total += COSTS_N_INSNS (3);
8547 break;
8549 default:
8550 return false;
8552 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8553 return true;
8555 case TRUNCATE:
8556 if (AVR_HAVE_MUL
8557 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8558 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8559 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8561 if (QImode == mode || HImode == mode)
8563 *total = COSTS_N_INSNS (2);
8564 return true;
8567 break;
8569 default:
8570 break;
8572 return false;
8576 /* Implement `TARGET_RTX_COSTS'. */
8578 static bool
8579 avr_rtx_costs (rtx x, int codearg, int outer_code,
8580 int opno, int *total, bool speed)
8582 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8583 opno, total, speed);
8585 if (avr_log.rtx_costs)
8587 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8588 done, speed ? "speed" : "size", *total, outer_code, x);
8591 return done;
8595 /* Implement `TARGET_ADDRESS_COST'. */
8597 static int
8598 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8600 int cost = 4;
8602 if (GET_CODE (x) == PLUS
8603 && CONST_INT_P (XEXP (x, 1))
8604 && (REG_P (XEXP (x, 0))
8605 || GET_CODE (XEXP (x, 0)) == SUBREG))
8607 if (INTVAL (XEXP (x, 1)) >= 61)
8608 cost = 18;
8610 else if (CONSTANT_ADDRESS_P (x))
8612 if (optimize > 0
8613 && io_address_operand (x, QImode))
8614 cost = 2;
8617 if (avr_log.address_cost)
8618 avr_edump ("\n%?: %d = %r\n", cost, x);
8620 return cost;
8623 /* Test for extra memory constraint 'Q'.
8624 It's a memory address based on Y or Z pointer with valid displacement. */
8627 extra_constraint_Q (rtx x)
8629 int ok = 0;
8631 if (GET_CODE (XEXP (x,0)) == PLUS
8632 && REG_P (XEXP (XEXP (x,0), 0))
8633 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8634 && (INTVAL (XEXP (XEXP (x,0), 1))
8635 <= MAX_LD_OFFSET (GET_MODE (x))))
8637 rtx xx = XEXP (XEXP (x,0), 0);
8638 int regno = REGNO (xx);
8640 ok = (/* allocate pseudos */
8641 regno >= FIRST_PSEUDO_REGISTER
8642 /* strictly check */
8643 || regno == REG_Z || regno == REG_Y
8644 /* XXX frame & arg pointer checks */
8645 || xx == frame_pointer_rtx
8646 || xx == arg_pointer_rtx);
8648 if (avr_log.constraints)
8649 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8650 ok, reload_completed, reload_in_progress, x);
8653 return ok;
8656 /* Convert condition code CONDITION to the valid AVR condition code. */
8658 RTX_CODE
8659 avr_normalize_condition (RTX_CODE condition)
8661 switch (condition)
8663 case GT:
8664 return GE;
8665 case GTU:
8666 return GEU;
8667 case LE:
8668 return LT;
8669 case LEU:
8670 return LTU;
8671 default:
8672 gcc_unreachable ();
8676 /* Helper function for `avr_reorg'. */
8678 static rtx
8679 avr_compare_pattern (rtx insn)
8681 rtx pattern = single_set (insn);
8683 if (pattern
8684 && NONJUMP_INSN_P (insn)
8685 && SET_DEST (pattern) == cc0_rtx
8686 && GET_CODE (SET_SRC (pattern)) == COMPARE
8687 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8688 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8690 return pattern;
8693 return NULL_RTX;
/* Helper function for `avr_reorg'.  */

/* Expansion of switch/case decision trees leads to code like

       cc0 = compare (Reg, Num)
       if (cc0 == 0)
         goto L1

       cc0 = compare (Reg, Num)
       if (cc0 > 0)
         goto L2

   The second comparison is superfluous and can be deleted.
   The second jump condition can be transformed from a
   "difficult" one to a "simple" one because "cc0 > 0" and
   "cc0 >= 0" will have the same effect here.

   This function relies on the way switch/case is being expanded
   as binary decision tree.  For example code see PR 49903.

   Return TRUE if optimization performed.
   Return FALSE if nothing changed.

   INSN1 is a comparison, i.e. avr_compare_pattern != 0.

   We don't want to do this in text peephole because it is
   tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.

   RTL peephole won't do because peephole2 does not scan across
   basic blocks.  */

static bool
avr_reorg_remove_redundant_compare (rtx insn1)
{
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for: compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both comparisons must be identical and both branches must be
     plain conditional jumps (SETs of the pc).  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
/* Optimize conditional jumps.  */

static void
avr_reorg (void)
{
  rtx insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
	{
          /* Now we work under compare insn with difficult branch.
             Try to canonicalize the comparison in place so that the
             following branch can use a "simple" condition.  */

          rtx next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              /* Reg-reg compare: swap the operands and the branch
                 condition accordingly.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              /* Reg-const compare: bump the constant by one and
                 normalize the condition (e.g. x > C  ->  x >= C+1)
                 when that is known to be safe.  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
/* Returns register number for function return value.
   R24 holds the LSB of a function's return value; wider values
   end at R25 (see avr_libcall_value).  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
8948 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8950 static bool
8951 avr_function_value_regno_p (const unsigned int regno)
8953 return (regno == avr_ret_register ());
8956 /* Create an RTX representing the place where a
8957 library function returns a value of mode MODE. */
8959 static rtx
8960 avr_libcall_value (enum machine_mode mode,
8961 const_rtx func ATTRIBUTE_UNUSED)
8963 int offs = GET_MODE_SIZE (mode);
8965 if (offs <= 4)
8966 offs = (offs + 1) & ~1;
8968 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8971 /* Create an RTX representing the place where a
8972 function returns a value of data type VALTYPE. */
8974 static rtx
8975 avr_function_value (const_tree type,
8976 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8977 bool outgoing ATTRIBUTE_UNUSED)
8979 unsigned int offs;
8981 if (TYPE_MODE (type) != BLKmode)
8982 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8984 offs = int_size_in_bytes (type);
8985 if (offs < 2)
8986 offs = 2;
8987 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8988 offs = GET_MODE_SIZE (SImode);
8989 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8990 offs = GET_MODE_SIZE (DImode);
8992 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8996 test_hard_reg_class (enum reg_class rclass, rtx x)
8998 int regno = true_regnum (x);
8999 if (regno < 0)
9000 return 0;
9002 if (TEST_HARD_REG_CLASS (rclass, regno))
9003 return 1;
9005 return 0;
9009 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
9010 and thus is suitable to be skipped by CPSE, SBRC, etc. */
9012 static bool
9013 avr_2word_insn_p (rtx insn)
9015 if (avr_current_device->errata_skip
9016 || !insn
9017 || 2 != get_attr_length (insn))
9019 return false;
9022 switch (INSN_CODE (insn))
9024 default:
9025 return false;
9027 case CODE_FOR_movqi_insn:
9029 rtx set = single_set (insn);
9030 rtx src = SET_SRC (set);
9031 rtx dest = SET_DEST (set);
9033 /* Factor out LDS and STS from movqi_insn. */
9035 if (MEM_P (dest)
9036 && (REG_P (src) || src == const0_rtx))
9038 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
9040 else if (REG_P (dest)
9041 && MEM_P (src))
9043 return CONSTANT_ADDRESS_P (XEXP (src, 0));
9046 return false;
9049 case CODE_FOR_call_insn:
9050 case CODE_FOR_call_value_insn:
9051 return true;
9057 jump_over_one_insn_p (rtx insn, rtx dest)
9059 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
9060 ? XEXP (dest, 0)
9061 : dest);
9062 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
9063 int dest_addr = INSN_ADDRESSES (uid);
9064 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
9066 return (jump_offset == 1
9067 || (jump_offset == 2
9068 && avr_2word_insn_p (next_active_insn (insn))));
9071 /* Returns 1 if a value of mode MODE can be stored starting with hard
9072 register number REGNO. On the enhanced core, anything larger than
9073 1 byte must start in even numbered register for "movw" to work
9074 (this way we don't have to check for odd registers everywhere). */
9077 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
9079 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9080 Disallowing QI et al. in these regs might lead to code like
9081 (set (subreg:QI (reg:HI 28) n) ...)
9082 which will result in wrong code because reload does not
9083 handle SUBREGs of hard regsisters like this.
9084 This could be fixed in reload. However, it appears
9085 that fixing reload is not wanted by reload people. */
9087 /* Any GENERAL_REGS register can hold 8-bit values. */
9089 if (GET_MODE_SIZE (mode) == 1)
9090 return 1;
9092 /* FIXME: Ideally, the following test is not needed.
9093 However, it turned out that it can reduce the number
9094 of spill fails. AVR and it's poor endowment with
9095 address registers is extreme stress test for reload. */
9097 if (GET_MODE_SIZE (mode) >= 4
9098 && regno >= REG_X)
9099 return 0;
9101 /* All modes larger than 8 bits should start in an even register. */
9103 return !(regno & 1);
9107 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
9109 reg_class_t
9110 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
9111 addr_space_t as, RTX_CODE outer_code,
9112 RTX_CODE index_code ATTRIBUTE_UNUSED)
9114 if (!ADDR_SPACE_GENERIC_P (as))
9116 return POINTER_Z_REGS;
9119 if (!avr_strict_X)
9120 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
9122 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
9126 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9128 bool
9129 avr_regno_mode_code_ok_for_base_p (int regno,
9130 enum machine_mode mode ATTRIBUTE_UNUSED,
9131 addr_space_t as ATTRIBUTE_UNUSED,
9132 RTX_CODE outer_code,
9133 RTX_CODE index_code ATTRIBUTE_UNUSED)
9135 bool ok = false;
9137 if (!ADDR_SPACE_GENERIC_P (as))
9139 if (regno < FIRST_PSEUDO_REGISTER
9140 && regno == REG_Z)
9142 return true;
9145 if (reg_renumber)
9147 regno = reg_renumber[regno];
9149 if (regno == REG_Z)
9151 return true;
9155 return false;
9158 if (regno < FIRST_PSEUDO_REGISTER
9159 && (regno == REG_X
9160 || regno == REG_Y
9161 || regno == REG_Z
9162 || regno == ARG_POINTER_REGNUM))
9164 ok = true;
9166 else if (reg_renumber)
9168 regno = reg_renumber[regno];
9170 if (regno == REG_X
9171 || regno == REG_Y
9172 || regno == REG_Z
9173 || regno == ARG_POINTER_REGNUM)
9175 ok = true;
9179 if (avr_strict_X
9180 && PLUS == outer_code
9181 && regno == REG_X)
9183 ok = false;
9186 return ok;
9190 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9191 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9192 CLOBBER_REG is a QI clobber register or NULL_RTX.
9193 LEN == NULL: output instructions.
9194 LEN != NULL: set *LEN to the length of the instruction sequence
9195 (in words) printed with LEN = NULL.
9196 If CLEAR_P is true, OP[0] had been cleard to Zero already.
9197 If CLEAR_P is false, nothing is known about OP[0].
9199 The effect on cc0 is as follows:
9201 Load 0 to any register except ZERO_REG : NONE
9202 Load ld register with any value : NONE
9203 Anything else: : CLOBBER */
9205 static void
9206 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
9208 rtx src = op[1];
9209 rtx dest = op[0];
9210 rtx xval, xdest[4];
9211 int ival[4];
/* Byte value currently held in CLOBBER_REG; initialized to an impossible
   byte value so the first reuse test below never matches spuriously. */
9212 int clobber_val = 1234;
9213 bool cooked_clobber_p = false;
9214 bool set_p = false;
9215 enum machine_mode mode = GET_MODE (dest);
9216 int n, n_bytes = GET_MODE_SIZE (mode);
9218 gcc_assert (REG_P (dest)
9219 && CONSTANT_P (src));
9221 if (len)
9222 *len = 0;
9224 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9225 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9227 if (REGNO (dest) < 16
9228 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
9230 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
9233 /* We might need a clobber reg but don't have one. Look at the value to
9234 be loaded more closely. A clobber is only needed if it is a symbol
9235 or contains a byte that is neither 0, -1 or a power of 2. */
9237 if (NULL_RTX == clobber_reg
9238 && !test_hard_reg_class (LD_REGS, dest)
9239 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
9240 || !avr_popcount_each_byte (src, n_bytes,
9241 (1 << 0) | (1 << 1) | (1 << 8))))
9243 /* We have no clobber register but need one. Cook one up.
9244 That's cheaper than loading from constant pool. */
9246 cooked_clobber_p = true;
9247 clobber_reg = all_regs_rtx[REG_Z + 1];
9248 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
9251 /* Now start filling DEST from LSB to MSB. */
9253 for (n = 0; n < n_bytes; n++)
9255 int ldreg_p;
9256 bool done_byte = false;
9257 int j;
9258 rtx xop[3];
9260 /* Crop the n-th destination byte. */
9262 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
9263 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
/* Symbolic constants: emit LDI with lo8/hi8/hlo8/hhi8 relocations,
   routed through the clobber reg when the destination is no LD_REG. */
9265 if (!CONST_INT_P (src)
9266 && !CONST_DOUBLE_P (src))
9268 static const char* const asm_code[][2] =
9270 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
9271 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
9272 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
9273 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
9276 xop[0] = xdest[n];
9277 xop[1] = src;
9278 xop[2] = clobber_reg;
9280 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
9282 continue;
9285 /* Crop the n-th source byte. */
9287 xval = simplify_gen_subreg (QImode, src, mode, n);
9288 ival[n] = INTVAL (xval);
9290 /* Look if we can reuse the low word by means of MOVW. */
9292 if (n == 2
9293 && n_bytes >= 4
9294 && AVR_HAVE_MOVW)
9296 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9297 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9299 if (INTVAL (lo16) == INTVAL (hi16))
9301 if (0 != INTVAL (lo16)
9302 || !clear_p)
9304 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9307 break;
9311 /* Don't use CLR so that cc0 is set as expected. */
9313 if (ival[n] == 0)
9315 if (!clear_p)
9316 avr_asm_len (ldreg_p ? "ldi %0,0"
9317 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9318 : "mov %0,__zero_reg__",
9319 &xdest[n], len, 1);
9320 continue;
/* The clobber reg already holds this byte value: nothing to emit. */
9323 if (clobber_val == ival[n]
9324 && REGNO (clobber_reg) == REGNO (xdest[n]))
9326 continue;
9329 /* LD_REGS can use LDI to move a constant value */
9331 if (ldreg_p)
9333 xop[0] = xdest[n];
9334 xop[1] = xval;
9335 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9336 continue;
9339 /* Try to reuse value already loaded in some lower byte. */
9341 for (j = 0; j < n; j++)
9342 if (ival[j] == ival[n])
9344 xop[0] = xdest[n];
9345 xop[1] = xdest[j];
9347 avr_asm_len ("mov %0,%1", xop, len, 1);
9348 done_byte = true;
9349 break;
9352 if (done_byte)
9353 continue;
9355 /* Need no clobber reg for -1: Use CLR/DEC */
9357 if (-1 == ival[n])
9359 if (!clear_p)
9360 avr_asm_len ("clr %0", &xdest[n], len, 1);
9362 avr_asm_len ("dec %0", &xdest[n], len, 1);
9363 continue;
9365 else if (1 == ival[n])
9367 if (!clear_p)
9368 avr_asm_len ("clr %0", &xdest[n], len, 1);
9370 avr_asm_len ("inc %0", &xdest[n], len, 1);
9371 continue;
9374 /* Use T flag or INC to manage powers of 2 if we have
9375 no clobber reg. */
9377 if (NULL_RTX == clobber_reg
9378 && single_one_operand (xval, QImode))
9380 xop[0] = xdest[n];
9381 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9383 gcc_assert (constm1_rtx != xop[1]);
/* SET is emitted at most once; the T flag stays 1 for all later BLDs. */
9385 if (!set_p)
9387 set_p = true;
9388 avr_asm_len ("set", xop, len, 1);
9391 if (!clear_p)
9392 avr_asm_len ("clr %0", xop, len, 1);
9394 avr_asm_len ("bld %0,%1", xop, len, 1);
9395 continue;
9398 /* We actually need the LD_REGS clobber reg. */
9400 gcc_assert (NULL_RTX != clobber_reg);
9402 xop[0] = xdest[n];
9403 xop[1] = xval;
9404 xop[2] = clobber_reg;
9405 clobber_val = ival[n];
9407 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9408 "mov %0,%2", xop, len, 2);
9411 /* If we cooked up a clobber reg above, restore it. */
9413 if (cooked_clobber_p)
9415 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9420 /* Reload the constant OP[1] into the HI register OP[0].
9421 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9422 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9423 need a clobber reg or have to cook one up.
9425 PLEN == NULL: Output instructions.
9426 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9427 by the insns printed.
9429 Return "". */
9431 const char*
9432 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9434 output_reload_in_const (op, clobber_reg, plen, false);
9435 return "";
9439 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9440 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9441 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9442 need a clobber reg or have to cook one up.
9444 LEN == NULL: Output instructions.
9446 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9447 by the insns printed.
9449 Return "". */
9451 const char *
9452 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
/* Pre-clearing only pays off with MOVW, a NO_LD_REGS destination and a
   known numeric constant; otherwise fall through to the default below. */
9454 if (AVR_HAVE_MOVW
9455 && !test_hard_reg_class (LD_REGS, op[0])
9456 && (CONST_INT_P (op[1])
9457 || CONST_DOUBLE_P (op[1])))
9459 int len_clr, len_noclr;
9461 /* In some cases it is better to clear the destination beforehand, e.g.
9463 CLR R2 CLR R3 MOVW R4,R2 INC R2
9465 is shorter than
9467 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9469 We find it too tedious to work that out in the print function.
9470 Instead, we call the print function twice to get the lengths of
9471 both methods and use the shortest one. */
9473 output_reload_in_const (op, clobber_reg, &len_clr, true);
9474 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9478 /* Default needs 4 CLR instructions: clear register beforehand. */
9476 if (len_noclr - len_clr == 4)
/* Clear all 4 bytes in 3 instructions (2 MOVs + 1 MOVW), then emit
   the pre-cleared variant and account for the 3 extra words. */
9480 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9481 "mov %B0,__zero_reg__" CR_TAB
9482 "movw %C0,%A0", &op[0], len, 3);
9484 output_reload_in_const (op, clobber_reg, len, true);
9486 if (len)
9487 *len += 3;
9489 return "";
9493 /* Default: destination not pre-cleared. */
9495 output_reload_in_const (op, clobber_reg, len, false);
9496 return "";
9499 const char *
9500 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9502 output_reload_in_const (op, clobber_reg, len, false);
9503 return "";
9507 void
9508 avr_output_addr_vec_elt (FILE *stream, int value)
9510 if (AVR_HAVE_JMP_CALL)
9511 fprintf (stream, "\t.word gs(.L%d)\n", value);
9512 else
9513 fprintf (stream, "\trjmp .L%d\n", value);
9516 /* Returns true if SCRATCH are safe to be allocated as a scratch
9517 registers (for a define_peephole2) in the current function. */
9519 static bool
9520 avr_hard_regno_scratch_ok (unsigned int regno)
9522 /* Interrupt functions can only use registers that have already been saved
9523 by the prologue, even if they would normally be call-clobbered. */
9525 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9526 && !df_regs_ever_live_p (regno))
9527 return false;
9529 /* Don't allow hard registers that might be part of the frame pointer.
9530 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9531 and don't care for a frame pointer that spans more than one register. */
9533 if ((!reload_completed || frame_pointer_needed)
9534 && (regno == REG_Y || regno == REG_Y + 1))
9536 return false;
9539 return true;
9542 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9545 avr_hard_regno_rename_ok (unsigned int old_reg,
9546 unsigned int new_reg)
9548 /* Interrupt functions can only use registers that have already been
9549 saved by the prologue, even if they would normally be
9550 call-clobbered. */
9552 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9553 && !df_regs_ever_live_p (new_reg))
9554 return 0;
9556 /* Don't allow hard registers that might be part of the frame pointer.
9557 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9558 and don't care for a frame pointer that spans more than one register. */
9560 if ((!reload_completed || frame_pointer_needed)
9561 && (old_reg == REG_Y || old_reg == REG_Y + 1
9562 || new_reg == REG_Y || new_reg == REG_Y + 1))
9564 return 0;
9567 return 1;
9570 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9571 or memory location in the I/O space (QImode only).
9573 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9574 Operand 1: register operand to test, or CONST_INT memory address.
9575 Operand 2: bit number.
9576 Operand 3: label to jump to if the test is true. */
9578 const char *
9579 avr_out_sbxx_branch (rtx insn, rtx operands[])
9581 enum rtx_code comp = GET_CODE (operands[0]);
9582 bool long_jump = get_attr_length (insn) >= 4;
9583 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
/* GE/LT on a single bit degenerate to a test of that bit against 0. */
9585 if (comp == GE)
9586 comp = EQ;
9587 else if (comp == LT)
9588 comp = NE;
/* For a long or one-insn-skipping branch we skip the jump instead of
   taking it, so the sense of the bit test must be inverted. */
9590 if (reverse)
9591 comp = reverse_condition (comp);
9593 switch (GET_CODE (operands[1]))
9595 default:
9596 gcc_unreachable();
9598 case CONST_INT:
/* Low I/O addresses can be tested directly with SBIS/SBIC; higher
   ones are read into __tmp_reg__ first and tested with SBRS/SBRC. */
9600 if (low_io_address_operand (operands[1], QImode))
9602 if (comp == EQ)
9603 output_asm_insn ("sbis %i1,%2", operands);
9604 else
9605 output_asm_insn ("sbic %i1,%2", operands);
9607 else
9609 output_asm_insn ("in __tmp_reg__,%i1", operands);
9610 if (comp == EQ)
9611 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9612 else
9613 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9616 break; /* CONST_INT */
9618 case REG:
9620 if (comp == EQ)
9621 output_asm_insn ("sbrs %T1%T2", operands);
9622 else
9623 output_asm_insn ("sbrc %T1%T2", operands);
9625 break; /* REG */
9626 } /* switch */
/* Long form: skip over an RJMP that bypasses the following JMP. */
9628 if (long_jump)
9629 return ("rjmp .+4" CR_TAB
9630 "jmp %x3");
9632 if (!reverse)
9633 return "rjmp %x3";
9635 return "";
9638 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9640 static void
9641 avr_asm_out_ctor (rtx symbol, int priority)
9643 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9644 default_ctor_section_asm_out_constructor (symbol, priority);
9647 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9649 static void
9650 avr_asm_out_dtor (rtx symbol, int priority)
9652 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9653 default_dtor_section_asm_out_destructor (symbol, priority);
9656 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9658 static bool
9659 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9661 if (TYPE_MODE (type) == BLKmode)
9663 HOST_WIDE_INT size = int_size_in_bytes (type);
9664 return (size == -1 || size > 8);
9666 else
9667 return false;
9670 /* Worker function for CASE_VALUES_THRESHOLD. */
9672 static unsigned int
9673 avr_case_values_threshold (void)
9675 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9679 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9681 static enum machine_mode
9682 avr_addr_space_address_mode (addr_space_t as)
9684 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9688 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9690 static enum machine_mode
9691 avr_addr_space_pointer_mode (addr_space_t as)
9693 return avr_addr_space_address_mode (as);
9697 /* Helper for following function. */
9699 static bool
9700 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9702 gcc_assert (REG_P (reg));
9704 if (strict)
9706 return REGNO (reg) == REG_Z;
9709 /* Avoid combine to propagate hard regs. */
9711 if (can_create_pseudo_p()
9712 && REGNO (reg) < REG_Z)
9714 return false;
9717 return true;
9721 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9723 static bool
9724 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9725 bool strict, addr_space_t as)
9727 bool ok = false;
9729 switch (as)
9731 default:
9732 gcc_unreachable();
9734 case ADDR_SPACE_GENERIC:
9735 return avr_legitimate_address_p (mode, x, strict);
/* Flash spaces: only a plain Z register or Z post-increment is valid. */
9737 case ADDR_SPACE_FLASH:
9738 case ADDR_SPACE_FLASH1:
9739 case ADDR_SPACE_FLASH2:
9740 case ADDR_SPACE_FLASH3:
9741 case ADDR_SPACE_FLASH4:
9742 case ADDR_SPACE_FLASH5:
9744 switch (GET_CODE (x))
9746 case REG:
9747 ok = avr_reg_ok_for_pgm_addr (x, strict);
9748 break;
9750 case POST_INC:
9751 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9752 break;
9754 default:
9755 break;
9758 break; /* FLASH */
/* 24-bit __memx: a pseudo register, or LO_SUM (high byte, Z). */
9760 case ADDR_SPACE_MEMX:
9761 if (REG_P (x))
9762 ok = (!strict
9763 && can_create_pseudo_p());
9765 if (LO_SUM == GET_CODE (x))
9767 rtx hi = XEXP (x, 0);
9768 rtx lo = XEXP (x, 1);
9770 ok = (REG_P (hi)
9771 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9772 && REG_P (lo)
9773 && REGNO (lo) == REG_Z);
9776 break; /* MEMX */
/* Optional dump of the decision for -mlog= debugging. */
9779 if (avr_log.legitimate_address_p)
9781 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9782 "reload_completed=%d reload_in_progress=%d %s:",
9783 ok, mode, strict, reload_completed, reload_in_progress,
9784 reg_renumber ? "(reg_renumber)" : "");
9786 if (GET_CODE (x) == PLUS
9787 && REG_P (XEXP (x, 0))
9788 && CONST_INT_P (XEXP (x, 1))
9789 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9790 && reg_renumber)
9792 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9793 true_regnum (XEXP (x, 0)));
9796 avr_edump ("\n%r\n", x);
9799 return ok;
9803 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9805 static rtx
9806 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9807 enum machine_mode mode, addr_space_t as)
9809 if (ADDR_SPACE_GENERIC_P (as))
9810 return avr_legitimize_address (x, old_x, mode);
9812 if (avr_log.legitimize_address)
9814 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9817 return old_x;
9821 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9823 static rtx
9824 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9826 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9827 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9829 if (avr_log.progmem)
9830 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9831 src, type_from, type_to);
9833 /* Up-casting from 16-bit to 24-bit pointer. */
9835 if (as_from != ADDR_SPACE_MEMX
9836 && as_to == ADDR_SPACE_MEMX)
9838 int msb;
9839 rtx sym = src;
9840 rtx reg = gen_reg_rtx (PSImode);
/* Strip CONST/PLUS wrappers to reach an underlying SYMBOL_REF. */
9842 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9843 sym = XEXP (sym, 0);
9845 /* Look at symbol flags: avr_encode_section_info set the flags
9846 also if attribute progmem was seen so that we get the right
9847 promotion for, e.g. PSTR-like strings that reside in generic space
9848 but are located in flash. In that case we patch the incoming
9849 address space. */
9851 if (SYMBOL_REF == GET_CODE (sym)
9852 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9854 as_from = ADDR_SPACE_FLASH;
9857 /* Linearize memory: RAM has bit 23 set. */
9859 msb = ADDR_SPACE_GENERIC_P (as_from)
9860 ? 0x80
9861 : avr_addrspace[as_from].segment;
9863 src = force_reg (Pmode, src);
/* Zero-extend for segment 0, otherwise extend with the segment byte. */
9865 emit_insn (msb == 0
9866 ? gen_zero_extendhipsi2 (reg, src)
9867 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9869 return reg;
9872 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9874 if (as_from == ADDR_SPACE_MEMX
9875 && as_to != ADDR_SPACE_MEMX)
9877 rtx new_src = gen_reg_rtx (Pmode);
9879 src = force_reg (PSImode, src);
9881 emit_move_insn (new_src,
9882 simplify_gen_subreg (Pmode, src, PSImode, 0));
9883 return new_src;
/* Same-width conversion: nothing to do. */
9886 return src;
9890 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9892 static bool
9893 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9894 addr_space_t superset ATTRIBUTE_UNUSED)
9896 /* Allow any kind of pointer mess. */
9898 return true;
9902 /* Worker function for movmemhi expander.
9903 XOP[0] Destination as MEM:BLK
9904 XOP[1] Source " "
9905 XOP[2] # Bytes to copy
9907 Return TRUE if the expansion is accomplished.
9908 Return FALSE if the operand combination is not supported. */
9910 bool
9911 avr_emit_movmemhi (rtx *xop)
9913 HOST_WIDE_INT count;
9914 enum machine_mode loop_mode;
9915 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9916 rtx loop_reg, addr1, a_src, a_dest, insn, xas;
9917 rtx a_hi8 = NULL_RTX;
/* Writing to flash is not supported; neither is a non-constant or
   non-positive byte count. */
9919 if (avr_mem_flash_p (xop[0]))
9920 return false;
9922 if (!CONST_INT_P (xop[2]))
9923 return false;
9925 count = INTVAL (xop[2]);
9926 if (count <= 0)
9927 return false;
9929 a_src = XEXP (xop[1], 0);
9930 a_dest = XEXP (xop[0], 0);
/* A PSImode source address means 24-bit __memx: split it into a
   16-bit low part and the high segment byte. */
9932 if (PSImode == GET_MODE (a_src))
9934 gcc_assert (as == ADDR_SPACE_MEMX);
9936 loop_mode = (count < 0x100) ? QImode : HImode;
9937 loop_reg = gen_rtx_REG (loop_mode, 24);
9938 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9940 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9941 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9943 else
9945 int segment = avr_addrspace[as].segment;
9947 if (segment
9948 && avr_current_device->n_flash > 1)
9950 a_hi8 = GEN_INT (segment);
9951 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9953 else if (!ADDR_SPACE_GENERIC_P (as))
9955 as = ADDR_SPACE_FLASH;
9958 addr1 = a_src;
9960 loop_mode = (count <= 0x100) ? QImode : HImode;
9961 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9964 xas = GEN_INT (as);
9966 /* FIXME: Register allocator might come up with spill fails if it is left
9967 on its own. Thus, we allocate the pointer registers by hand:
9968 Z = source address
9969 X = destination address */
9971 emit_move_insn (lpm_addr_reg_rtx, addr1);
9972 emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
9974 /* FIXME: Register allocator does a bad job and might spill address
9975 register(s) inside the loop leading to additional move instruction
9976 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9977 load and store as separate insns. Instead, we perform the copy
9978 by means of one monolithic insn. */
9980 gcc_assert (TMP_REGNO == LPM_REGNO);
9982 if (as != ADDR_SPACE_MEMX)
9984 /* Load instruction ([E]LPM or LD) is known at compile time:
9985 Do the copy-loop inline. */
9987 rtx (*fun) (rtx, rtx, rtx)
9988 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9990 insn = fun (xas, loop_reg, loop_reg);
9992 else
9994 rtx (*fun) (rtx, rtx)
9995 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
/* __memx: pass the segment byte in R23 to the library-style looper. */
9997 emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
9999 insn = fun (xas, GEN_INT (avr_addr.rampz));
10002 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
10003 emit_insn (insn);
10005 return true;
10009 /* Print assembler for movmem_qi, movmem_hi insns...
10010 $0 : Address Space
10011 $1, $2 : Loop register
10012 Z : Source address
10013 X : Destination address
10016 const char*
10017 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
10019 addr_space_t as = (addr_space_t) INTVAL (op[0]);
10020 enum machine_mode loop_mode = GET_MODE (op[1]);
/* A loop counter in ADDW_REGS allows the cheaper SBIW decrement. */
10021 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
10022 rtx xop[3];
10024 if (plen)
10025 *plen = 0;
10027 xop[0] = op[0];
10028 xop[1] = op[1];
10029 xop[2] = tmp_reg_rtx;
10031 /* Loop label */
10033 avr_asm_len ("0:", xop, plen, 0);
10035 /* Load with post-increment */
/* The load mnemonic depends on the source address space:
   LD for RAM, LPM for flash segment 0, ELPM for the other segments. */
10037 switch (as)
10039 default:
10040 gcc_unreachable();
10042 case ADDR_SPACE_GENERIC:
10044 avr_asm_len ("ld %2,Z+", xop, plen, 1);
10045 break;
10047 case ADDR_SPACE_FLASH:
10049 if (AVR_HAVE_LPMX)
10050 avr_asm_len ("lpm %2,%Z+", xop, plen, 1);
10051 else
10052 avr_asm_len ("lpm" CR_TAB
10053 "adiw r30,1", xop, plen, 2);
10054 break;
10056 case ADDR_SPACE_FLASH1:
10057 case ADDR_SPACE_FLASH2:
10058 case ADDR_SPACE_FLASH3:
10059 case ADDR_SPACE_FLASH4:
10060 case ADDR_SPACE_FLASH5:
10062 if (AVR_HAVE_ELPMX)
10063 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
10064 else
10065 avr_asm_len ("elpm" CR_TAB
10066 "adiw r30,1", xop, plen, 2);
10067 break;
10070 /* Store with post-increment */
10072 avr_asm_len ("st X+,%2", xop, plen, 1);
10074 /* Decrement loop-counter and set Z-flag */
10076 if (QImode == loop_mode)
10078 avr_asm_len ("dec %1", xop, plen, 1);
10080 else if (sbiw_p)
10082 avr_asm_len ("sbiw %1,1", xop, plen, 1);
10084 else
10086 avr_asm_len ("subi %A1,1" CR_TAB
10087 "sbci %B1,0", xop, plen, 2);
10090 /* Loop until zero */
10092 return avr_asm_len ("brne 0b", xop, plen, 1);
10097 /* Helper for __builtin_avr_delay_cycles */
10099 static void
10100 avr_expand_delay_cycles (rtx operands0)
10102 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
10103 unsigned HOST_WIDE_INT cycles_used;
10104 unsigned HOST_WIDE_INT loop_count;
/* Burn the requested cycle count with a cascade of delay loops, from
   the widest (SImode counter, 6 cycles/iteration) down to single NOPs.
   Each stage subtracts the cycles it actually consumed. */
10106 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
10108 loop_count = ((cycles - 9) / 6) + 1;
10109 cycles_used = ((loop_count - 1) * 6) + 9;
10110 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
10111 cycles -= cycles_used;
/* 24-bit counter loop: 5 cycles per iteration plus 7 of overhead. */
10114 if (IN_RANGE (cycles, 262145, 83886081))
10116 loop_count = ((cycles - 7) / 5) + 1;
10117 if (loop_count > 0xFFFFFF)
10118 loop_count = 0xFFFFFF;
10119 cycles_used = ((loop_count - 1) * 5) + 7;
10120 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
10121 cycles -= cycles_used;
/* 16-bit counter loop: 4 cycles per iteration plus 5 of overhead. */
10124 if (IN_RANGE (cycles, 768, 262144))
10126 loop_count = ((cycles - 5) / 4) + 1;
10127 if (loop_count > 0xFFFF)
10128 loop_count = 0xFFFF;
10129 cycles_used = ((loop_count - 1) * 4) + 5;
10130 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
10131 cycles -= cycles_used;
/* 8-bit counter loop: 3 cycles per iteration. */
10134 if (IN_RANGE (cycles, 6, 767))
10136 loop_count = cycles / 3;
10137 if (loop_count > 255)
10138 loop_count = 255;
10139 cycles_used = loop_count * 3;
10140 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
10141 cycles -= cycles_used;
/* Remaining cycles are covered by 2-cycle and 1-cycle NOP insns. */
10144 while (cycles >= 2)
10146 emit_insn (gen_nopv (GEN_INT(2)));
10147 cycles -= 2;
10150 if (cycles == 1)
10152 emit_insn (gen_nopv (GEN_INT(1)));
10153 cycles--;
10158 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
10160 static double_int
10161 avr_double_int_push_digit (double_int val, int base,
10162 unsigned HOST_WIDE_INT digit)
10164 val = 0 == base
10165 ? double_int_lshift (val, 32, 64, false)
10166 : double_int_mul (val, uhwi_to_double_int (base));
10168 return double_int_add (val, uhwi_to_double_int (digit));
10172 /* Compute the image of x under f, i.e. perform x --> f(x) */
10174 static int
10175 avr_map (double_int f, int x)
10177 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
10181 /* Return some metrics of map A. */
/* Selector values for the MODE argument of avr_map_metric below. */
10183 enum
10185 /* Number of fixed points in { 0 ... 7 } */
10186 MAP_FIXED_0_7,
10188 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10189 MAP_NONFIXED_0_7,
10191 /* Mask representing the fixed points in { 0 ... 7 } */
10192 MAP_MASK_FIXED_0_7,
10194 /* Size of the preimage of { 0 ... 7 } */
10195 MAP_PREIMAGE_0_7,
10197 /* Mask that represents the preimage of { f } */
10198 MAP_MASK_PREIMAGE_F
10201 static unsigned
10202 avr_map_metric (double_int a, int mode)
10204 unsigned i, metric = 0;
10206 for (i = 0; i < 8; i++)
10208 unsigned ai = avr_map (a, i);
10210 if (mode == MAP_FIXED_0_7)
10211 metric += ai == i;
10212 else if (mode == MAP_NONFIXED_0_7)
10213 metric += ai < 8 && ai != i;
10214 else if (mode == MAP_MASK_FIXED_0_7)
10215 metric |= ((unsigned) (ai == i)) << i;
10216 else if (mode == MAP_PREIMAGE_0_7)
10217 metric += ai < 8;
10218 else if (mode == MAP_MASK_PREIMAGE_F)
10219 metric |= ((unsigned) (ai == 0xf)) << i;
10220 else
10221 gcc_unreachable();
10224 return metric;
10228 /* Return true if IVAL has a 0xf in its hexadecimal representation
10229 and false, otherwise. Only nibbles 0..7 are taken into account.
10230 Used as constraint helper for C0f and Cxf. */
10232 bool
10233 avr_has_nibble_0xf (rtx ival)
10235 return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
10239 /* We have a set of bits that are mapped by a function F.
10240 Try to decompose F by means of a second function G so that
10242 F = F o G^-1 o G
10246 cost (F o G^-1) + cost (G) < cost (F)
10248 Example: Suppose builtin insert_bits supplies us with the map
10249 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10250 nibble of the result, we can just as well rotate the bits before inserting
10251 them and use the map 0x7654ffff which is cheaper than the original map.
10252 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
/* Descriptor of a candidate helper function G used to decompose a
   bit-insertion map F as F = (F o G^-1) o G; see comment above. */
10254 typedef struct
10256 /* tree code of binary function G */
10257 enum tree_code code;
10259 /* The constant second argument of G */
10260 int arg;
10262 /* G^-1, the inverse of G (*, arg) */
10263 unsigned ginv;
10265 /* The cost of applying G (*, arg) */
10266 int cost;
10268 /* The composition F o G^-1 (*, arg) for some function F */
10269 double_int map;
10271 /* For debug purpose only */
10272 const char *str;
10273 } avr_map_op_t;
/* Catalog of cheap candidate operations G: byte rotations and shifts,
   each with its inverse nibble-map and its cost in instructions. */
10275 static const avr_map_op_t avr_map_op[] =
10277 { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
10278 { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10279 { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10280 { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10281 { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10282 { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10283 { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10284 { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10285 { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10286 { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10287 { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10288 { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10289 { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10290 { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10291 { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10295 /* Try to decompose F as F = (F o G^-1) o G as described above.
10296 The result is a struct representing F o G^-1 and G.
10297 If result.cost < 0 then such a decomposition does not exist. */
10299 static avr_map_op_t
10300 avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
10302 int i;
10303 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
10304 avr_map_op_t f_ginv = *g;
10305 double_int ginv = uhwi_to_double_int (g->ginv);
/* cost < 0 marks "no decomposition found" until proven otherwise. */
10307 f_ginv.cost = -1;
10309 /* Step 1: Computing F o G^-1 */
/* Build the composed map nibble by nibble, MSB first, by pushing each
   remapped digit onto f_ginv.map. */
10311 for (i = 7; i >= 0; i--)
10313 int x = avr_map (f, i);
10315 if (x <= 7)
10317 x = avr_map (ginv, x);
10319 /* The bit is no element of the image of G: no avail (cost = -1) */
10321 if (x > 7)
10322 return f_ginv;
10325 f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
10328 /* Step 2: Compute the cost of the operations.
10329 The overall cost of doing an operation prior to the insertion is
10330 the cost of the insertion plus the cost of the operation. */
10332 /* Step 2a: Compute cost of F o G^-1 */
10334 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
10336 /* The mapping consists only of fixed points and can be folded
10337 to AND/OR logic in the remainder. Reasonable cost is 3. */
10339 f_ginv.cost = 2 + (val_used_p && !val_const_p);
10341 else
10343 rtx xop[4];
10345 /* Get the cost of the insn by calling the output worker with some
10346 fake values. Mimic effect of reloading xop[3]: Unused operands
10347 are mapped to 0 and used operands are reloaded to xop[0]. */
10349 xop[0] = all_regs_rtx[24];
10350 xop[1] = gen_int_mode (double_int_to_uhwi (f_ginv.map), SImode);
10351 xop[2] = all_regs_rtx[25];
10352 xop[3] = val_used_p ? xop[0] : const0_rtx;
10354 avr_out_insert_bits (xop, &f_ginv.cost);
10356 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
10359 /* Step 2b: Add cost of G */
10361 f_ginv.cost += g->cost;
10363 if (avr_log.builtin)
10364 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
10366 return f_ginv;
10370 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10371 XOP[0] and XOP[1] don't overlap.
10372 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10373 If FIXP_P = false: Just move the bit if its position in the destination
10374 is different to its source position. */
10376 static void
10377 avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
10379 int bit_dest, b;
10381 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10382 int t_bit_src = -1;
10384 /* We order the operations according to the requested source bit b. */
/* For each source bit B, scan all destination bits mapped from B so
   that one BST can feed several BLDs without reloading the T flag. */
10386 for (b = 0; b < 8; b++)
10387 for (bit_dest = 0; bit_dest < 8; bit_dest++)
10389 int bit_src = avr_map (map, bit_dest);
10391 if (b != bit_src
10392 || bit_src >= 8
10393 /* Same position: No need to copy as requested by FIXP_P. */
10394 || (bit_dest == bit_src && !fixp_p))
10395 continue;
10397 if (t_bit_src != bit_src)
10399 /* Source bit is not yet in T: Store it to T. */
10401 t_bit_src = bit_src;
10403 xop[3] = GEN_INT (bit_src);
10404 avr_asm_len ("bst %T1%T3", xop, plen, 1);
10407 /* Load destination bit with T. */
10409 xop[3] = GEN_INT (bit_dest);
10410 avr_asm_len ("bld %T0%T3", xop, plen, 1);
10415 /* PLEN == 0: Print assembler code for `insert_bits'.
10416 PLEN != 0: Compute code length in bytes.
10418 OP[0]: Result
10419 OP[1]: The mapping composed of nibbles. If nibble no. N is
10420 0: Bit N of result is copied from bit OP[2].0
10421 ... ...
10422 7: Bit N of result is copied from bit OP[2].7
10423 0xf: Bit N of result is copied from bit OP[3].N
10424 OP[2]: Bits to be inserted
10425 OP[3]: Target value */
10427 const char*
10428 avr_out_insert_bits (rtx *op, int *plen)
10430 double_int map = rtx_to_double_int (op[1]);
10431 unsigned mask_fixed;
10432 bool fixp_p = true;
10433 rtx xop[4];
/* Re-pack operands: xop[1] is the bits to insert, xop[2] the target
   value; op[1] (the map) is consumed here, not passed through. */
10435 xop[0] = op[0];
10436 xop[1] = op[2];
10437 xop[2] = op[3];
10439 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
10441 if (plen)
10442 *plen = 0;
10443 else if (flag_print_asm_name)
10444 fprintf (asm_out_file,
10445 ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
10446 double_int_to_uhwi (map) & GET_MODE_MASK (SImode));
10448 /* If MAP has fixed points it might be better to initialize the result
10449 with the bits to be inserted instead of moving all bits by hand. */
10451 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
10453 if (REGNO (xop[0]) == REGNO (xop[1]))
10455 /* Avoid early-clobber conflicts */
10457 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10458 xop[1] = tmp_reg_rtx;
10459 fixp_p = false;
10462 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10464 /* XOP[2] is used and reloaded to XOP[0] already */
10466 int n_fix = 0, n_nofix = 0;
10468 gcc_assert (REG_P (xop[2]));
10470 /* Get the code size of the bit insertions; once with all bits
10471 moved and once with fixed points omitted. */
10473 avr_move_bits (xop, map, true, &n_fix);
10474 avr_move_bits (xop, map, false, &n_nofix);
/* If omitting the fixed points saves more than the 3-insn masked-merge
   sequence costs, merge the fixed bits via EOR/ANDI/EOR first. */
10476 if (fixp_p && n_fix - n_nofix > 3)
10478 xop[3] = gen_int_mode (~mask_fixed, QImode);
10480 avr_asm_len ("eor %0,%1" CR_TAB
10481 "andi %0,%3" CR_TAB
10482 "eor %0,%1", xop, plen, 3);
10483 fixp_p = false;
10486 else
10488 /* XOP[2] is unused */
10490 if (fixp_p && mask_fixed)
10492 avr_asm_len ("mov %0,%1", xop, plen, 1);
10493 fixp_p = false;
10497 /* Move/insert remaining bits. */
10499 avr_move_bits (xop, map, fixp_p, plen);
10501 return "";
10505 /* IDs for all the AVR builtins. */
10507 enum avr_builtin_id
10510 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
10511 #include "builtins.def"
10512 #undef DEF_BUILTIN
10514 AVR_BUILTIN_COUNT
10517 static void
10518 avr_init_builtin_int24 (void)
10520 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10521 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10523 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10524 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10527 /* Implement `TARGET_INIT_BUILTINS' */
10528 /* Set up all builtin functions for this target. */
10530 static void
10531 avr_init_builtins (void)
/* Function-type nodes shared by the DEF_BUILTIN expansions below;
   builtins.def refers to these by name via its TYPE column. */
10533 tree void_ftype_void
10534 = build_function_type_list (void_type_node, NULL_TREE);
10535 tree uchar_ftype_uchar
10536 = build_function_type_list (unsigned_char_type_node,
10537 unsigned_char_type_node,
10538 NULL_TREE);
10539 tree uint_ftype_uchar_uchar
10540 = build_function_type_list (unsigned_type_node,
10541 unsigned_char_type_node,
10542 unsigned_char_type_node,
10543 NULL_TREE);
10544 tree int_ftype_char_char
10545 = build_function_type_list (integer_type_node,
10546 char_type_node,
10547 char_type_node,
10548 NULL_TREE);
10549 tree int_ftype_char_uchar
10550 = build_function_type_list (integer_type_node,
10551 char_type_node,
10552 unsigned_char_type_node,
10553 NULL_TREE);
10554 tree void_ftype_ulong
10555 = build_function_type_list (void_type_node,
10556 long_unsigned_type_node,
10557 NULL_TREE);
10559 tree uchar_ftype_ulong_uchar_uchar
10560 = build_function_type_list (unsigned_char_type_node,
10561 long_unsigned_type_node,
10562 unsigned_char_type_node,
10563 unsigned_char_type_node,
10564 NULL_TREE);
/* Register every builtin listed in builtins.def with the middle end. */
10566 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) \
10567 add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
10568 #include "builtins.def"
10569 #undef DEF_BUILTIN
10571 avr_init_builtin_int24 ();
/* Descriptor used to drive expansion of a builtin; see avr_bdesc below.  */

struct avr_builtin_description
{
  enum insn_code icode;    /* Insn used to expand the builtin.  */
  const char *name;        /* Builtin's name; NULL marks the end of the table.  */
  enum avr_builtin_id id;  /* The builtin's ID from enum avr_builtin_id.  */
  int n_args;              /* Number of call arguments (0..3); -1 in the sentinel.  */
};
/* Table of all AVR builtin descriptors, generated from builtins.def.
   Scans of this table (see avr_expand_builtin) stop at the NULL name
   of the trailing sentinel entry.  */

static const struct avr_builtin_description
avr_bdesc[] =
  {
#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE)      \
  { ICODE, NAME, ID, N_ARGS },
#include "builtins.def"
#undef DEF_BUILTIN

    /* Sentinel.  */
    { CODE_FOR_nothing, NULL, 0, -1 }
  };
10596 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10598 static rtx
10599 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10600 rtx target)
10602 rtx pat;
10603 tree arg0 = CALL_EXPR_ARG (exp, 0);
10604 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10605 enum machine_mode op0mode = GET_MODE (op0);
10606 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10607 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10609 if (! target
10610 || GET_MODE (target) != tmode
10611 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10613 target = gen_reg_rtx (tmode);
10616 if (op0mode == SImode && mode0 == HImode)
10618 op0mode = HImode;
10619 op0 = gen_lowpart (HImode, op0);
10622 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10624 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10625 op0 = copy_to_mode_reg (mode0, op0);
10627 pat = GEN_FCN (icode) (target, op0);
10628 if (! pat)
10629 return 0;
10631 emit_insn (pat);
10633 return target;
10637 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10639 static rtx
10640 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10642 rtx pat;
10643 tree arg0 = CALL_EXPR_ARG (exp, 0);
10644 tree arg1 = CALL_EXPR_ARG (exp, 1);
10645 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10646 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10647 enum machine_mode op0mode = GET_MODE (op0);
10648 enum machine_mode op1mode = GET_MODE (op1);
10649 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10650 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10651 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10653 if (! target
10654 || GET_MODE (target) != tmode
10655 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10657 target = gen_reg_rtx (tmode);
10660 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10662 op0mode = HImode;
10663 op0 = gen_lowpart (HImode, op0);
10666 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10668 op1mode = HImode;
10669 op1 = gen_lowpart (HImode, op1);
10672 /* In case the insn wants input operands in modes different from
10673 the result, abort. */
10675 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10676 && (op1mode == mode1 || op1mode == VOIDmode));
10678 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10679 op0 = copy_to_mode_reg (mode0, op0);
10681 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10682 op1 = copy_to_mode_reg (mode1, op1);
10684 pat = GEN_FCN (icode) (target, op0, op1);
10686 if (! pat)
10687 return 0;
10689 emit_insn (pat);
10690 return target;
10693 /* Subroutine of avr_expand_builtin to take care of 3-operand insns. */
10695 static rtx
10696 avr_expand_triop_builtin (enum insn_code icode, tree exp, rtx target)
10698 rtx pat;
10699 tree arg0 = CALL_EXPR_ARG (exp, 0);
10700 tree arg1 = CALL_EXPR_ARG (exp, 1);
10701 tree arg2 = CALL_EXPR_ARG (exp, 2);
10702 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10703 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10704 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10705 enum machine_mode op0mode = GET_MODE (op0);
10706 enum machine_mode op1mode = GET_MODE (op1);
10707 enum machine_mode op2mode = GET_MODE (op2);
10708 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10709 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10710 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10711 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
10713 if (! target
10714 || GET_MODE (target) != tmode
10715 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10717 target = gen_reg_rtx (tmode);
10720 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10722 op0mode = HImode;
10723 op0 = gen_lowpart (HImode, op0);
10726 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10728 op1mode = HImode;
10729 op1 = gen_lowpart (HImode, op1);
10732 if ((op2mode == SImode || op2mode == VOIDmode) && mode2 == HImode)
10734 op2mode = HImode;
10735 op2 = gen_lowpart (HImode, op2);
10738 /* In case the insn wants input operands in modes different from
10739 the result, abort. */
10741 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10742 && (op1mode == mode1 || op1mode == VOIDmode)
10743 && (op2mode == mode2 || op2mode == VOIDmode));
10745 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10746 op0 = copy_to_mode_reg (mode0, op0);
10748 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10749 op1 = copy_to_mode_reg (mode1, op1);
10751 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
10752 op2 = copy_to_mode_reg (mode2, op2);
10754 pat = GEN_FCN (icode) (target, op0, op1, op2);
10756 if (! pat)
10757 return 0;
10759 emit_insn (pat);
10760 return target;
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  /* Builtin's name, used in diagnostics below.  */
  const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  tree arg0;
  rtx op0;

  /* Builtins that need special handling before the generic table-driven
     expansion below.  */

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        /* The cycle count must be a compile-time constant; otherwise
           issue an error but don't ICE.  */

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return 0;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        /* Only diagnose a non-constant map here; with a constant map,
           fall through to the table-driven expansion below.  */

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }
      }
    }

  /* Table-driven expansion: find the descriptor for ID and dispatch on
     its number of arguments.  The scan stops at the sentinel's NULL name.  */

  for (i = 0; avr_bdesc[i].name; i++)
    {
      const struct avr_builtin_description *d = &avr_bdesc[i];

      if (d->id == id)
        switch (d->n_args)
          {
          case 0:
            emit_insn ((GEN_FCN (d->icode)) (target));
            return 0;

          case 1:
            return avr_expand_unop_builtin (d->icode, exp, target);

          case 2:
            return avr_expand_binop_builtin (d->icode, exp, target);

          case 3:
            return avr_expand_triop_builtin (d->icode, exp, target);

          default:
            gcc_unreachable();
          }
    }

  /* Every builtin ID must have a table entry.  */

  gcc_unreachable ();
}
/* Implement `TARGET_FOLD_BUILTIN'.  */
/* Try to fold a call to the builtin FNDECL with arguments ARG at the
   tree level.  Return the folded tree, or NULL_TREE if no folding
   was done.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  /* The builtin's return type.  */
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is a rotate-left by 4.  */
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        /* Type of the first (map) argument.  */
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        double_int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        map = tree_to_double_int (arg[0]);
        tmap = double_int_to_tree (map_type, map);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    /* Map entry < 8 picks a bit from TBITS: either force
                       bit I to 1 (IOR) or to 0 (AND).  */
                    if (bits & (1 << mi)) mask_ior |= (1 << i);
                    else mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (TBITS ^ TVAL) & TMASK ^ TVAL selects TBITS where the map
               picks bits and TVAL elsewhere.  */

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try decomposing map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %X\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        /* Find the decomposition with the lowest cost; negative cost
           means "not decomposable this way".  */

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = double_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
/* Instantiate the target hook vector; TARGET_INITIALIZER collects the
   TARGET_* macro overrides into the global targetm structure.  */
struct gcc_target targetm = TARGET_INITIALIZER;
11005 #include "gt-avr.h"