1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
/* Maximal allowed offset (in bytes) for an address in the LD command.
   LD/LDD displacements run 0..63, so the last byte of an object of
   MODE must still fit below 64.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed) GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in Flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)

/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   The expansion uses the macro argument SYM, so any lvalue symbol
   rtx may be passed (previously the body referenced a literal `sym'
   and silently depended on the caller's variable naming).  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace
[] =
85 { ADDR_SPACE_RAM
, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix
[6] =
108 /* Prototypes for local helper functions. */
110 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
111 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
112 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
113 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
114 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
115 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
117 static int avr_naked_function_p (tree
);
118 static int interrupt_function_p (tree
);
119 static int signal_function_p (tree
);
120 static int avr_OS_task_function_p (tree
);
121 static int avr_OS_main_function_p (tree
);
122 static int avr_regs_to_save (HARD_REG_SET
*);
123 static int get_sequence_length (rtx insns
);
124 static int sequent_regs_live (void);
125 static const char *ptrreg_to_str (int);
126 static const char *cond_string (enum rtx_code
);
127 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
128 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
130 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
131 static struct machine_function
* avr_init_machine_status (void);
134 /* Prototypes for hook implementors if needed before their implementation. */
136 static bool avr_rtx_costs (rtx
, int, int, int, int *, bool);
139 /* Allocate registers from r25 to r8 for parameters for function calls. */
140 #define FIRST_CUM_REG 26
142 /* Implicit target register of LPM instruction (R0) */
143 extern GTY(()) rtx lpm_reg_rtx
;
146 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
147 extern GTY(()) rtx lpm_addr_reg_rtx
;
148 rtx lpm_addr_reg_rtx
;
150 /* Temporary register RTX (reg:QI TMP_REGNO) */
151 extern GTY(()) rtx tmp_reg_rtx
;
154 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
155 extern GTY(()) rtx zero_reg_rtx
;
158 /* RTXs for all general purpose registers as QImode */
159 extern GTY(()) rtx all_regs_rtx
[32];
160 rtx all_regs_rtx
[32];
162 /* RAMPZ special function register */
163 extern GTY(()) rtx rampz_rtx
;
166 /* RTX containing the strings "" and "e", respectively */
167 static GTY(()) rtx xstring_empty
;
168 static GTY(()) rtx xstring_e
;
170 /* Preprocessor macros to define depending on MCU type. */
171 const char *avr_extra_arch_macro
;
173 /* Current architecture. */
174 const struct base_arch_s
*avr_current_arch
;
176 /* Current device. */
177 const struct mcu_type_s
*avr_current_device
;
179 /* Section to put switch tables in. */
180 static GTY(()) section
*progmem_swtable_section
;
182 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
183 or to address space __flash*. */
184 static GTY(()) section
*progmem_section
[6];
186 /* Condition for insns/expanders from avr-dimode.md. */
187 bool avr_have_dimode
= true;
189 /* To track if code will use .bss and/or .data. */
190 bool avr_need_clear_bss_p
= false;
191 bool avr_need_copy_data_p
= false;
/* Initialize the GCC target structure.  Each hook is first #undef'ed
   (they may carry default definitions from target-def.h) and then
   redefined to the AVR implementation.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
323 /* Custom function to count number of set bits. */
326 avr_popcount (unsigned int val
)
340 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
341 Return true if the least significant N_BYTES bytes of XVAL all have a
342 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
343 of integers which contains an integer N iff bit N of POP_MASK is set. */
346 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
350 enum machine_mode mode
= GET_MODE (xval
);
352 if (VOIDmode
== mode
)
355 for (i
= 0; i
< n_bytes
; i
++)
357 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
358 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
360 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
368 avr_option_override (void)
370 flag_delete_null_pointer_checks
= 0;
372 /* caller-save.c looks for call-clobbered hard registers that are assigned
373 to pseudos that cross calls and tries so save-restore them around calls
374 in order to reduce the number of stack slots needed.
376 This might leads to situations where reload is no more able to cope
377 with the challenge of AVR's very few address registers and fails to
378 perform the requested spills. */
381 flag_caller_saves
= 0;
383 /* Unwind tables currently require a frame pointer for correctness,
384 see toplev.c:process_options(). */
386 if ((flag_unwind_tables
387 || flag_non_call_exceptions
388 || flag_asynchronous_unwind_tables
)
389 && !ACCUMULATE_OUTGOING_ARGS
)
391 flag_omit_frame_pointer
= 0;
394 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
395 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
396 avr_extra_arch_macro
= avr_current_device
->macro
;
398 init_machine_status
= avr_init_machine_status
;
400 avr_log_set_avr_log();
/* Function to set up the backend function structure: allocate a
   zero-initialized, GC-tracked machine_function for the current
   function (installed via init_machine_status).  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
412 /* Implement `INIT_EXPANDERS'. */
413 /* The function works like a singleton. */
416 avr_init_expanders (void)
420 static bool done
= false;
427 for (regno
= 0; regno
< 32; regno
++)
428 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
430 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
431 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
432 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
434 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
436 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
));
438 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
439 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
443 /* Return register class for register R. */
446 avr_regno_reg_class (int r
)
448 static const enum reg_class reg_class_tab
[] =
452 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
453 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
454 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
455 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
457 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
458 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
460 ADDW_REGS
, ADDW_REGS
,
462 POINTER_X_REGS
, POINTER_X_REGS
,
464 POINTER_Y_REGS
, POINTER_Y_REGS
,
466 POINTER_Z_REGS
, POINTER_Z_REGS
,
472 return reg_class_tab
[r
];
479 avr_scalar_mode_supported_p (enum machine_mode mode
)
484 return default_scalar_mode_supported_p (mode
);
488 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
491 avr_decl_flash_p (tree decl
)
493 if (TREE_CODE (decl
) != VAR_DECL
494 || TREE_TYPE (decl
) == error_mark_node
)
499 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
503 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
504 address space and FALSE, otherwise. */
507 avr_decl_memx_p (tree decl
)
509 if (TREE_CODE (decl
) != VAR_DECL
510 || TREE_TYPE (decl
) == error_mark_node
)
515 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
519 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
522 avr_mem_flash_p (rtx x
)
525 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
529 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
530 address space and FALSE, otherwise. */
533 avr_mem_memx_p (rtx x
)
536 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
540 /* A helper for the subsequent function attribute used to dig for
541 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
544 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
546 if (FUNCTION_DECL
== TREE_CODE (func
))
548 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
553 func
= TREE_TYPE (func
);
556 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
557 || TREE_CODE (func
) == METHOD_TYPE
);
559 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
562 /* Return nonzero if FUNC is a naked function. */
565 avr_naked_function_p (tree func
)
567 return avr_lookup_function_attribute1 (func
, "naked");
570 /* Return nonzero if FUNC is an interrupt function as specified
571 by the "interrupt" attribute. */
574 interrupt_function_p (tree func
)
576 return avr_lookup_function_attribute1 (func
, "interrupt");
579 /* Return nonzero if FUNC is a signal function as specified
580 by the "signal" attribute. */
583 signal_function_p (tree func
)
585 return avr_lookup_function_attribute1 (func
, "signal");
588 /* Return nonzero if FUNC is an OS_task function. */
591 avr_OS_task_function_p (tree func
)
593 return avr_lookup_function_attribute1 (func
, "OS_task");
596 /* Return nonzero if FUNC is an OS_main function. */
599 avr_OS_main_function_p (tree func
)
601 return avr_lookup_function_attribute1 (func
, "OS_main");
605 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
607 avr_accumulate_outgoing_args (void)
610 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
612 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
613 what offset is correct. In some cases it is relative to
614 virtual_outgoing_args_rtx and in others it is relative to
615 virtual_stack_vars_rtx. For example code see
616 gcc.c-torture/execute/built-in-setjmp.c
617 gcc.c-torture/execute/builtins/sprintf-chk.c */
619 return (TARGET_ACCUMULATE_OUTGOING_ARGS
620 && !(cfun
->calls_setjmp
621 || cfun
->has_nonlocal_label
));
625 /* Report contribution of accumulated outgoing arguments to stack size. */
628 avr_outgoing_args_size (void)
630 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
634 /* Implement `STARTING_FRAME_OFFSET'. */
635 /* This is the offset from the frame pointer register to the first stack slot
636 that contains a variable living in the frame. */
639 avr_starting_frame_offset (void)
641 return 1 + avr_outgoing_args_size ();
645 /* Return the number of hard registers to push/pop in the prologue/epilogue
646 of the current function, and optionally store these registers in SET. */
649 avr_regs_to_save (HARD_REG_SET
*set
)
652 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
653 || signal_function_p (current_function_decl
));
656 CLEAR_HARD_REG_SET (*set
);
659 /* No need to save any registers if the function never returns or
660 has the "OS_task" or "OS_main" attribute. */
661 if (TREE_THIS_VOLATILE (current_function_decl
)
662 || cfun
->machine
->is_OS_task
663 || cfun
->machine
->is_OS_main
)
666 for (reg
= 0; reg
< 32; reg
++)
668 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
669 any global register variables. */
673 if ((int_or_sig_p
&& !current_function_is_leaf
&& call_used_regs
[reg
])
674 || (df_regs_ever_live_p (reg
)
675 && (int_or_sig_p
|| !call_used_regs
[reg
])
676 /* Don't record frame pointer registers here. They are treated
677 indivitually in prologue. */
678 && !(frame_pointer_needed
679 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
682 SET_HARD_REG_BIT (*set
, reg
);
689 /* Return true if register FROM can be eliminated via register TO. */
692 avr_can_eliminate (const int from
, const int to
)
694 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
695 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
696 || ((from
== FRAME_POINTER_REGNUM
697 || from
== FRAME_POINTER_REGNUM
+ 1)
698 && !frame_pointer_needed
));
701 /* Compute offset between arg_pointer and frame_pointer. */
704 avr_initial_elimination_offset (int from
, int to
)
706 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
710 int offset
= frame_pointer_needed
? 2 : 0;
711 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
713 offset
+= avr_regs_to_save (NULL
);
714 return (get_frame_size () + avr_outgoing_args_size()
715 + avr_pc_size
+ 1 + offset
);
719 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
720 frame pointer by +STARTING_FRAME_OFFSET.
721 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
722 avoids creating add/sub of offset in nonlocal goto and setjmp. */
725 avr_builtin_setjmp_frame_value (void)
727 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
728 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
731 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
732 This is return address of function. */
734 avr_return_addr_rtx (int count
, rtx tem
)
738 /* Can only return this function's return address. Others not supported. */
744 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
745 warning (0, "'builtin_return_address' contains only 2 bytes of address");
748 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
750 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
751 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
752 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
756 /* Return 1 if the function epilogue is just a single "ret". */
759 avr_simple_epilogue (void)
761 return (! frame_pointer_needed
762 && get_frame_size () == 0
763 && avr_outgoing_args_size() == 0
764 && avr_regs_to_save (NULL
) == 0
765 && ! interrupt_function_p (current_function_decl
)
766 && ! signal_function_p (current_function_decl
)
767 && ! avr_naked_function_p (current_function_decl
)
768 && ! TREE_THIS_VOLATILE (current_function_decl
));
771 /* This function checks sequence of live registers. */
774 sequent_regs_live (void)
780 for (reg
= 0; reg
< 18; ++reg
)
784 /* Don't recognize sequences that contain global register
793 if (!call_used_regs
[reg
])
795 if (df_regs_ever_live_p (reg
))
805 if (!frame_pointer_needed
)
807 if (df_regs_ever_live_p (REG_Y
))
815 if (df_regs_ever_live_p (REG_Y
+1))
828 return (cur_seq
== live_seq
) ? live_seq
: 0;
831 /* Obtain the length sequence of insns. */
834 get_sequence_length (rtx insns
)
839 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
840 length
+= get_attr_length (insn
);
845 /* Implement INCOMING_RETURN_ADDR_RTX. */
848 avr_incoming_return_addr_rtx (void)
850 /* The return address is at the top of the stack. Note that the push
851 was via post-decrement, which means the actual address is off by one. */
852 return gen_frame_mem (HImode
, plus_constant (stack_pointer_rtx
, 1));
855 /* Helper for expand_prologue. Emit a push of a byte register. */
858 emit_push_byte (unsigned regno
, bool frame_related_p
)
862 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
863 mem
= gen_frame_mem (QImode
, mem
);
864 reg
= gen_rtx_REG (QImode
, regno
);
866 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
868 RTX_FRAME_RELATED_P (insn
) = 1;
870 cfun
->machine
->stack_usage
++;
874 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
877 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
878 int live_seq
= sequent_regs_live ();
880 bool minimize
= (TARGET_CALL_PROLOGUES
883 && !cfun
->machine
->is_OS_task
884 && !cfun
->machine
->is_OS_main
);
887 && (frame_pointer_needed
888 || avr_outgoing_args_size() > 8
889 || (AVR_2_BYTE_PC
&& live_seq
> 6)
893 int first_reg
, reg
, offset
;
895 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
896 gen_int_mode (size
, HImode
));
898 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
899 gen_int_mode (live_seq
+size
, HImode
));
900 insn
= emit_insn (pattern
);
901 RTX_FRAME_RELATED_P (insn
) = 1;
903 /* Describe the effect of the unspec_volatile call to prologue_saves.
904 Note that this formulation assumes that add_reg_note pushes the
905 notes to the front. Thus we build them in the reverse order of
906 how we want dwarf2out to process them. */
908 /* The function does always set frame_pointer_rtx, but whether that
909 is going to be permanent in the function is frame_pointer_needed. */
911 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
912 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
914 : stack_pointer_rtx
),
915 plus_constant (stack_pointer_rtx
,
916 -(size
+ live_seq
))));
918 /* Note that live_seq always contains r28+r29, but the other
919 registers to be saved are all below 18. */
921 first_reg
= 18 - (live_seq
- 2);
923 for (reg
= 29, offset
= -live_seq
+ 1;
925 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
929 m
= gen_rtx_MEM (QImode
, plus_constant (stack_pointer_rtx
, offset
));
930 r
= gen_rtx_REG (QImode
, reg
);
931 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
934 cfun
->machine
->stack_usage
+= size
+ live_seq
;
940 for (reg
= 0; reg
< 32; ++reg
)
941 if (TEST_HARD_REG_BIT (set
, reg
))
942 emit_push_byte (reg
, true);
944 if (frame_pointer_needed
945 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
947 /* Push frame pointer. Always be consistent about the
948 ordering of pushes -- epilogue_restores expects the
949 register pair to be pushed low byte first. */
951 emit_push_byte (REG_Y
, true);
952 emit_push_byte (REG_Y
+ 1, true);
955 if (frame_pointer_needed
958 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
959 RTX_FRAME_RELATED_P (insn
) = 1;
964 /* Creating a frame can be done by direct manipulation of the
965 stack or via the frame pointer. These two methods are:
972 the optimum method depends on function type, stack and
973 frame size. To avoid a complex logic, both methods are
974 tested and shortest is selected.
976 There is also the case where SIZE != 0 and no frame pointer is
977 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
978 In that case, insn (*) is not needed in that case.
979 We use the X register as scratch. This is save because in X
981 In an interrupt routine, the case of SIZE != 0 together with
982 !frame_pointer_needed can only occur if the function is not a
983 leaf function and thus X has already been saved. */
985 rtx fp_plus_insns
, fp
, my_fp
;
986 rtx sp_minus_size
= plus_constant (stack_pointer_rtx
, -size
);
988 gcc_assert (frame_pointer_needed
990 || !current_function_is_leaf
);
992 fp
= my_fp
= (frame_pointer_needed
994 : gen_rtx_REG (Pmode
, REG_X
));
996 if (AVR_HAVE_8BIT_SP
)
998 /* The high byte (r29) does not change:
999 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size). */
1001 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1004 /************ Method 1: Adjust frame pointer ************/
1008 /* Normally, the dwarf2out frame-related-expr interpreter does
1009 not expect to have the CFA change once the frame pointer is
1010 set up. Thus, we avoid marking the move insn below and
1011 instead indicate that the entire operation is complete after
1012 the frame pointer subtraction is done. */
1014 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1015 if (!frame_pointer_needed
)
1016 RTX_FRAME_RELATED_P (insn
) = 1;
1018 insn
= emit_move_insn (my_fp
, plus_constant (my_fp
, -size
));
1019 RTX_FRAME_RELATED_P (insn
) = 1;
1021 if (frame_pointer_needed
)
1023 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1024 gen_rtx_SET (VOIDmode
, fp
, sp_minus_size
));
1027 /* Copy to stack pointer. Note that since we've already
1028 changed the CFA to the frame pointer this operation
1029 need not be annotated if frame pointer is needed. */
1031 if (AVR_HAVE_8BIT_SP
)
1033 insn
= emit_move_insn (stack_pointer_rtx
, fp
);
1035 else if (TARGET_NO_INTERRUPTS
1037 || cfun
->machine
->is_OS_main
)
1039 rtx irqs_are_on
= GEN_INT (!!cfun
->machine
->is_interrupt
);
1041 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1046 insn
= emit_move_insn (stack_pointer_rtx
, fp
);
1049 if (!frame_pointer_needed
)
1050 RTX_FRAME_RELATED_P (insn
) = 1;
1052 fp_plus_insns
= get_insns ();
1055 /************ Method 2: Adjust Stack pointer ************/
1057 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1058 can only handle specific offsets. */
1060 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1066 insn
= emit_move_insn (stack_pointer_rtx
, sp_minus_size
);
1067 RTX_FRAME_RELATED_P (insn
) = 1;
1069 if (frame_pointer_needed
)
1071 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1072 RTX_FRAME_RELATED_P (insn
) = 1;
1075 sp_plus_insns
= get_insns ();
1078 /************ Use shortest method ************/
1080 emit_insn (get_sequence_length (sp_plus_insns
)
1081 < get_sequence_length (fp_plus_insns
)
1087 emit_insn (fp_plus_insns
);
1090 cfun
->machine
->stack_usage
+= size
;
1091 } /* !minimize && size != 0 */
1096 /* Output function prologue. */
1099 expand_prologue (void)
1104 size
= get_frame_size() + avr_outgoing_args_size();
1106 /* Init cfun->machine. */
1107 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
1108 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
1109 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
1110 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
1111 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
1112 cfun
->machine
->stack_usage
= 0;
1114 /* Prologue: naked. */
1115 if (cfun
->machine
->is_naked
)
1120 avr_regs_to_save (&set
);
1122 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1124 /* Enable interrupts. */
1125 if (cfun
->machine
->is_interrupt
)
1126 emit_insn (gen_enable_interrupt ());
1128 /* Push zero reg. */
1129 emit_push_byte (ZERO_REGNO
, true);
1132 emit_push_byte (TMP_REGNO
, true);
1135 /* ??? There's no dwarf2 column reserved for SREG. */
1136 emit_move_insn (tmp_reg_rtx
, gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
1137 emit_push_byte (TMP_REGNO
, false);
1140 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1142 && TEST_HARD_REG_BIT (set
, REG_Z
)
1143 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1145 emit_move_insn (tmp_reg_rtx
, rampz_rtx
);
1146 emit_push_byte (TMP_REGNO
, false);
1149 /* Clear zero reg. */
1150 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1152 /* Prevent any attempt to delete the setting of ZERO_REG! */
1153 emit_use (zero_reg_rtx
);
1156 avr_prologue_setup_frame (size
, set
);
1158 if (flag_stack_usage_info
)
1159 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1162 /* Output summary at end of function prologue. */
1165 avr_asm_function_end_prologue (FILE *file
)
1167 if (cfun
->machine
->is_naked
)
1169 fputs ("/* prologue: naked */\n", file
);
1173 if (cfun
->machine
->is_interrupt
)
1175 fputs ("/* prologue: Interrupt */\n", file
);
1177 else if (cfun
->machine
->is_signal
)
1179 fputs ("/* prologue: Signal */\n", file
);
1182 fputs ("/* prologue: function */\n", file
);
1185 if (ACCUMULATE_OUTGOING_ARGS
)
1186 fprintf (file
, "/* outgoing args size = %d */\n",
1187 avr_outgoing_args_size());
1189 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1191 fprintf (file
, "/* stack size = %d */\n",
1192 cfun
->machine
->stack_usage
);
1193 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1194 usage for offset so that SP + .L__stack_offset = return address. */
1195 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1199 /* Implement EPILOGUE_USES. */
1202 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1204 if (reload_completed
1206 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1211 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1214 emit_pop_byte (unsigned regno
)
1218 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1219 mem
= gen_frame_mem (QImode
, mem
);
1220 reg
= gen_rtx_REG (QImode
, regno
);
1222 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1225 /* Output RTL epilogue. */
1228 expand_epilogue (bool sibcall_p
)
1235 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1237 size
= get_frame_size() + avr_outgoing_args_size();
1239 /* epilogue: naked */
1240 if (cfun
->machine
->is_naked
)
1242 gcc_assert (!sibcall_p
);
1244 emit_jump_insn (gen_return ());
1248 avr_regs_to_save (&set
);
1249 live_seq
= sequent_regs_live ();
1251 minimize
= (TARGET_CALL_PROLOGUES
1254 && !cfun
->machine
->is_OS_task
1255 && !cfun
->machine
->is_OS_main
);
1259 || frame_pointer_needed
1262 /* Get rid of frame. */
1264 if (!frame_pointer_needed
)
1266 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1271 emit_move_insn (frame_pointer_rtx
,
1272 plus_constant (frame_pointer_rtx
, size
));
1275 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1281 /* Try two methods to adjust stack and select shortest. */
1286 gcc_assert (frame_pointer_needed
1288 || !current_function_is_leaf
);
1290 fp
= my_fp
= (frame_pointer_needed
1292 : gen_rtx_REG (Pmode
, REG_X
));
1294 if (AVR_HAVE_8BIT_SP
)
1296 /* The high byte (r29) does not change:
1297 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1299 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1302 /********** Method 1: Adjust fp register **********/
1306 if (!frame_pointer_needed
)
1307 emit_move_insn (fp
, stack_pointer_rtx
);
1309 emit_move_insn (my_fp
, plus_constant (my_fp
, size
));
1311 /* Copy to stack pointer. */
1313 if (AVR_HAVE_8BIT_SP
)
1315 emit_move_insn (stack_pointer_rtx
, fp
);
1317 else if (TARGET_NO_INTERRUPTS
1319 || cfun
->machine
->is_OS_main
)
1321 rtx irqs_are_on
= GEN_INT (!!cfun
->machine
->is_interrupt
);
1323 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
, irqs_are_on
));
1327 emit_move_insn (stack_pointer_rtx
, fp
);
1330 fp_plus_insns
= get_insns ();
1333 /********** Method 2: Adjust Stack pointer **********/
1335 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1341 emit_move_insn (stack_pointer_rtx
,
1342 plus_constant (stack_pointer_rtx
, size
));
1344 sp_plus_insns
= get_insns ();
1347 /************ Use shortest method ************/
1349 emit_insn (get_sequence_length (sp_plus_insns
)
1350 < get_sequence_length (fp_plus_insns
)
1355 emit_insn (fp_plus_insns
);
1358 if (frame_pointer_needed
1359 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1361 /* Restore previous frame_pointer. See expand_prologue for
1362 rationale for not using pophi. */
1364 emit_pop_byte (REG_Y
+ 1);
1365 emit_pop_byte (REG_Y
);
1368 /* Restore used registers. */
1370 for (reg
= 31; reg
>= 0; --reg
)
1371 if (TEST_HARD_REG_BIT (set
, reg
))
1372 emit_pop_byte (reg
);
1376 /* Restore RAMPZ using tmp reg as scratch. */
1379 && TEST_HARD_REG_BIT (set
, REG_Z
)
1380 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1382 emit_pop_byte (TMP_REGNO
);
1383 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1386 /* Restore SREG using tmp reg as scratch. */
1388 emit_pop_byte (TMP_REGNO
);
1389 emit_move_insn (gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)),
1392 /* Restore tmp REG. */
1393 emit_pop_byte (TMP_REGNO
);
1395 /* Restore zero REG. */
1396 emit_pop_byte (ZERO_REGNO
);
1400 emit_jump_insn (gen_return ());
1403 /* Output summary messages at beginning of function epilogue. */
1406 avr_asm_function_begin_epilogue (FILE *file
)
1408 fprintf (file
, "/* epilogue start */\n");
1412 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1415 avr_cannot_modify_jumps_p (void)
1418 /* Naked Functions must not have any instructions after
1419 their epilogue, see PR42240 */
1421 if (reload_completed
1423 && cfun
->machine
->is_naked
)
1432 /* Helper function for `avr_legitimate_address_p'. */
1435 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1436 RTX_CODE outer_code
, bool strict
)
1439 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1440 as
, outer_code
, UNKNOWN
)
1442 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1446 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1447 machine for a memory operand of mode MODE. */
1450 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1452 bool ok
= CONSTANT_ADDRESS_P (x
);
1454 switch (GET_CODE (x
))
1457 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1462 && REG_X
== REGNO (x
))
1470 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1471 GET_CODE (x
), strict
);
1476 rtx reg
= XEXP (x
, 0);
1477 rtx op1
= XEXP (x
, 1);
1480 && CONST_INT_P (op1
)
1481 && INTVAL (op1
) >= 0)
1483 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1488 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1491 if (reg
== frame_pointer_rtx
1492 || reg
== arg_pointer_rtx
)
1497 else if (frame_pointer_needed
1498 && reg
== frame_pointer_rtx
)
1510 if (avr_log
.legitimate_address_p
)
1512 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1513 "reload_completed=%d reload_in_progress=%d %s:",
1514 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1515 reg_renumber
? "(reg_renumber)" : "");
1517 if (GET_CODE (x
) == PLUS
1518 && REG_P (XEXP (x
, 0))
1519 && CONST_INT_P (XEXP (x
, 1))
1520 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1523 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1524 true_regnum (XEXP (x
, 0)));
1527 avr_edump ("\n%r\n", x
);
1534 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1535 now only a helper for avr_addr_space_legitimize_address. */
1536 /* Attempts to replace X with a valid
1537 memory address for an operand of mode MODE */
1540 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1542 bool big_offset_p
= false;
1546 if (GET_CODE (oldx
) == PLUS
1547 && REG_P (XEXP (oldx
, 0)))
1549 if (REG_P (XEXP (oldx
, 1)))
1550 x
= force_reg (GET_MODE (oldx
), oldx
);
1551 else if (CONST_INT_P (XEXP (oldx
, 1)))
1553 int offs
= INTVAL (XEXP (oldx
, 1));
1554 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1555 && offs
> MAX_LD_OFFSET (mode
))
1557 big_offset_p
= true;
1558 x
= force_reg (GET_MODE (oldx
), oldx
);
1563 if (avr_log
.legitimize_address
)
1565 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1568 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1575 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1576 /* This will allow register R26/27 to be used where it is no worse than normal
1577 base pointers R28/29 or R30/31. For example, if base offset is greater
1578 than 63 bytes or for R++ or --R addressing. */
1581 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1582 int opnum
, int type
, int addr_type
,
1583 int ind_levels ATTRIBUTE_UNUSED
,
1584 rtx (*mk_memloc
)(rtx
,int))
1588 if (avr_log
.legitimize_reload_address
)
1589 avr_edump ("\n%?:%m %r\n", mode
, x
);
1591 if (1 && (GET_CODE (x
) == POST_INC
1592 || GET_CODE (x
) == PRE_DEC
))
1594 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1595 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1596 opnum
, RELOAD_OTHER
);
1598 if (avr_log
.legitimize_reload_address
)
1599 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1600 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1605 if (GET_CODE (x
) == PLUS
1606 && REG_P (XEXP (x
, 0))
1607 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1608 && CONST_INT_P (XEXP (x
, 1))
1609 && INTVAL (XEXP (x
, 1)) >= 1)
1611 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1615 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1617 int regno
= REGNO (XEXP (x
, 0));
1618 rtx mem
= mk_memloc (x
, regno
);
1620 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1621 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1624 if (avr_log
.legitimize_reload_address
)
1625 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1626 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1628 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1629 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1632 if (avr_log
.legitimize_reload_address
)
1633 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1634 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1639 else if (! (frame_pointer_needed
1640 && XEXP (x
, 0) == frame_pointer_rtx
))
1642 push_reload (x
, NULL_RTX
, px
, NULL
,
1643 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1646 if (avr_log
.legitimize_reload_address
)
1647 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1648 POINTER_REGS
, x
, NULL_RTX
);
1658 /* Helper function to print assembler resp. track instruction
1659 sequence lengths. Always return "".
1662 Output assembler code from template TPL with operands supplied
1663 by OPERANDS. This is just forwarding to output_asm_insn.
1666 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1667 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1668 Don't output anything.
1672 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1676 output_asm_insn (tpl
, operands
);
1690 /* Return a pointer register name as a string. */
1693 ptrreg_to_str (int regno
)
1697 case REG_X
: return "X";
1698 case REG_Y
: return "Y";
1699 case REG_Z
: return "Z";
1701 output_operand_lossage ("address operand requires constraint for"
1702 " X, Y, or Z register");
1707 /* Return the condition name as a string.
1708 Used in conditional jump constructing */
1711 cond_string (enum rtx_code code
)
1720 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1725 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1741 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1742 /* Output ADDR to FILE as address. */
1745 avr_print_operand_address (FILE *file
, rtx addr
)
1747 switch (GET_CODE (addr
))
1750 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1754 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1758 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1762 if (CONSTANT_ADDRESS_P (addr
)
1763 && text_segment_operand (addr
, VOIDmode
))
1766 if (GET_CODE (x
) == CONST
)
1768 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1770 /* Assembler gs() will implant word address. Make offset
1771 a byte offset inside gs() for assembler. This is
1772 needed because the more logical (constant+gs(sym)) is not
1773 accepted by gas. For 128K and lower devices this is ok.
1774 For large devices it will create a Trampoline to offset
1775 from symbol which may not be what the user really wanted. */
1776 fprintf (file
, "gs(");
1777 output_addr_const (file
, XEXP (x
,0));
1778 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
1779 2 * INTVAL (XEXP (x
, 1)));
1781 if (warning (0, "pointer offset from symbol maybe incorrect"))
1783 output_addr_const (stderr
, addr
);
1784 fprintf(stderr
,"\n");
1789 fprintf (file
, "gs(");
1790 output_addr_const (file
, addr
);
1791 fprintf (file
, ")");
1795 output_addr_const (file
, addr
);
1800 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1803 avr_print_operand_punct_valid_p (unsigned char code
)
1805 return code
== '~' || code
== '!';
1809 /* Implement `TARGET_PRINT_OPERAND'. */
1810 /* Output X as assembler operand to file FILE.
1811 For a description of supported %-codes, see top of avr.md. */
1814 avr_print_operand (FILE *file
, rtx x
, int code
)
1818 if (code
>= 'A' && code
<= 'D')
1823 if (!AVR_HAVE_JMP_CALL
)
1826 else if (code
== '!')
1828 if (AVR_HAVE_EIJMP_EICALL
)
1831 else if (code
== 't'
1834 static int t_regno
= -1;
1835 static int t_nbits
= -1;
1837 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
1839 t_regno
= REGNO (x
);
1840 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
1842 else if (CONST_INT_P (x
) && t_regno
>= 0
1843 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
1845 int bpos
= INTVAL (x
);
1847 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
1849 fprintf (file
, ",%d", bpos
% 8);
1854 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
1858 if (x
== zero_reg_rtx
)
1859 fprintf (file
, "__zero_reg__");
1861 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1863 else if (CONST_INT_P (x
))
1865 HOST_WIDE_INT ival
= INTVAL (x
);
1868 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
1869 else if (low_io_address_operand (x
, VOIDmode
)
1870 || high_io_address_operand (x
, VOIDmode
))
1874 case RAMPZ_ADDR
: fprintf (file
, "__RAMPZ__"); break;
1875 case SREG_ADDR
: fprintf (file
, "__SREG__"); break;
1876 case SP_ADDR
: fprintf (file
, "__SP_L__"); break;
1877 case SP_ADDR
+1: fprintf (file
, "__SP_H__"); break;
1880 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
1881 ival
- avr_current_arch
->sfr_offset
);
1886 fatal_insn ("bad address, not an I/O address:", x
);
1890 rtx addr
= XEXP (x
, 0);
1894 if (!CONSTANT_P (addr
))
1895 fatal_insn ("bad address, not a constant:", addr
);
1896 /* Assembler template with m-code is data - not progmem section */
1897 if (text_segment_operand (addr
, VOIDmode
))
1898 if (warning (0, "accessing data memory with"
1899 " program memory address"))
1901 output_addr_const (stderr
, addr
);
1902 fprintf(stderr
,"\n");
1904 output_addr_const (file
, addr
);
1906 else if (code
== 'i')
1908 avr_print_operand (file
, addr
, 'i');
1910 else if (code
== 'o')
1912 if (GET_CODE (addr
) != PLUS
)
1913 fatal_insn ("bad address, not (reg+disp):", addr
);
1915 avr_print_operand (file
, XEXP (addr
, 1), 0);
1917 else if (code
== 'p' || code
== 'r')
1919 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1920 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1923 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1925 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1927 else if (GET_CODE (addr
) == PLUS
)
1929 avr_print_operand_address (file
, XEXP (addr
,0));
1930 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1931 fatal_insn ("internal compiler error. Bad address:"
1934 avr_print_operand (file
, XEXP (addr
,1), code
);
1937 avr_print_operand_address (file
, addr
);
1939 else if (code
== 'i')
1941 fatal_insn ("bad address, not an I/O address:", x
);
1943 else if (code
== 'x')
1945 /* Constant progmem address - like used in jmp or call */
1946 if (0 == text_segment_operand (x
, VOIDmode
))
1947 if (warning (0, "accessing program memory"
1948 " with data memory address"))
1950 output_addr_const (stderr
, x
);
1951 fprintf(stderr
,"\n");
1953 /* Use normal symbol for direct address no linker trampoline needed */
1954 output_addr_const (file
, x
);
1956 else if (GET_CODE (x
) == CONST_DOUBLE
)
1960 if (GET_MODE (x
) != SFmode
)
1961 fatal_insn ("internal compiler error. Unknown mode:", x
);
1962 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1963 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1964 fprintf (file
, "0x%lx", val
);
1966 else if (GET_CODE (x
) == CONST_STRING
)
1967 fputs (XSTR (x
, 0), file
);
1968 else if (code
== 'j')
1969 fputs (cond_string (GET_CODE (x
)), file
);
1970 else if (code
== 'k')
1971 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1973 avr_print_operand_address (file
, x
);
1976 /* Update the condition code in the INSN. */
1979 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1982 enum attr_cc cc
= get_attr_cc (insn
);
1990 case CC_OUT_PLUS_NOCLOBBER
:
1993 rtx
*op
= recog_data
.operand
;
1996 /* Extract insn's operands. */
1997 extract_constrain_insn_cached (insn
);
2005 avr_out_plus (op
, &len_dummy
, &icc
);
2006 cc
= (enum attr_cc
) icc
;
2009 case CC_OUT_PLUS_NOCLOBBER
:
2010 avr_out_plus_noclobber (op
, &len_dummy
, &icc
);
2011 cc
= (enum attr_cc
) icc
;
2016 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2017 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2018 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2020 /* Any other "r,rL" combination does not alter cc0. */
2024 } /* inner switch */
2028 } /* outer swicth */
2033 /* Special values like CC_OUT_PLUS from above have been
2034 mapped to "standard" CC_* values so we never come here. */
2040 /* Insn does not affect CC at all. */
2048 set
= single_set (insn
);
2052 cc_status
.flags
|= CC_NO_OVERFLOW
;
2053 cc_status
.value1
= SET_DEST (set
);
2058 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2059 The V flag may or may not be known but that's ok because
2060 alter_cond will change tests to use EQ/NE. */
2061 set
= single_set (insn
);
2065 cc_status
.value1
= SET_DEST (set
);
2066 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2071 set
= single_set (insn
);
2074 cc_status
.value1
= SET_SRC (set
);
2078 /* Insn doesn't leave CC in a usable state. */
2084 /* Choose mode for jump insn:
2085 1 - relative jump in range -63 <= x <= 62 ;
2086 2 - relative jump in range -2046 <= x <= 2045 ;
2087 3 - absolute jump (only for ATmega[16]03). */
2090 avr_jump_mode (rtx x
, rtx insn
)
2092 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2093 ? XEXP (x
, 0) : x
));
2094 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2095 int jump_distance
= cur_addr
- dest_addr
;
2097 if (-63 <= jump_distance
&& jump_distance
<= 62)
2099 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2101 else if (AVR_HAVE_JMP_CALL
)
2107 /* return an AVR condition jump commands.
2108 X is a comparison RTX.
2109 LEN is a number returned by avr_jump_mode function.
2110 if REVERSE nonzero then condition code in X must be reversed. */
2113 ret_cond_branch (rtx x
, int len
, int reverse
)
2115 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2120 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2121 return (len
== 1 ? ("breq .+2" CR_TAB
2123 len
== 2 ? ("breq .+4" CR_TAB
2131 return (len
== 1 ? ("breq .+2" CR_TAB
2133 len
== 2 ? ("breq .+4" CR_TAB
2140 return (len
== 1 ? ("breq .+2" CR_TAB
2142 len
== 2 ? ("breq .+4" CR_TAB
2149 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2150 return (len
== 1 ? ("breq %0" CR_TAB
2152 len
== 2 ? ("breq .+2" CR_TAB
2159 return (len
== 1 ? ("breq %0" CR_TAB
2161 len
== 2 ? ("breq .+2" CR_TAB
2168 return (len
== 1 ? ("breq %0" CR_TAB
2170 len
== 2 ? ("breq .+2" CR_TAB
2184 return ("br%j1 .+2" CR_TAB
2187 return ("br%j1 .+4" CR_TAB
2198 return ("br%k1 .+2" CR_TAB
2201 return ("br%k1 .+4" CR_TAB
2209 /* Output insn cost for next insn. */
2212 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2213 int num_operands ATTRIBUTE_UNUSED
)
2215 if (avr_log
.rtx_costs
)
2217 rtx set
= single_set (insn
);
2220 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2221 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2223 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2224 rtx_cost (PATTERN (insn
), INSN
, 0,
2225 optimize_insn_for_speed_p()));
2229 /* Return 0 if undefined, 1 if always true or always false. */
2232 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2234 unsigned int max
= (mode
== QImode
? 0xff :
2235 mode
== HImode
? 0xffff :
2236 mode
== PSImode
? 0xffffff :
2237 mode
== SImode
? 0xffffffff : 0);
2238 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
2240 if (unsigned_condition (op
) != op
)
2243 if (max
!= (INTVAL (x
) & max
)
2244 && INTVAL (x
) != 0xff)
2251 /* Returns nonzero if REGNO is the number of a hard
2252 register in which function arguments are sometimes passed. */
2255 function_arg_regno_p(int r
)
2257 return (r
>= 8 && r
<= 25);
2260 /* Initializing the variable cum for the state at the beginning
2261 of the argument list. */
2264 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2265 tree fndecl ATTRIBUTE_UNUSED
)
2268 cum
->regno
= FIRST_CUM_REG
;
2269 if (!libname
&& stdarg_p (fntype
))
2272 /* Assume the calle may be tail called */
2274 cfun
->machine
->sibcall_fails
= 0;
2277 /* Returns the number of registers to allocate for a function argument. */
2280 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2284 if (mode
== BLKmode
)
2285 size
= int_size_in_bytes (type
);
2287 size
= GET_MODE_SIZE (mode
);
2289 /* Align all function arguments to start in even-numbered registers.
2290 Odd-sized arguments leave holes above them. */
2292 return (size
+ 1) & ~1;
2295 /* Controls whether a function argument is passed
2296 in a register, and which register. */
2299 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2300 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2302 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2303 int bytes
= avr_num_arg_regs (mode
, type
);
2305 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2306 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2311 /* Update the summarizer variable CUM to advance past an argument
2312 in the argument list. */
2315 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2316 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2318 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2319 int bytes
= avr_num_arg_regs (mode
, type
);
2321 cum
->nregs
-= bytes
;
2322 cum
->regno
-= bytes
;
2324 /* A parameter is being passed in a call-saved register. As the original
2325 contents of these regs has to be restored before leaving the function,
2326 a function must not pass arguments in call-saved regs in order to get
2331 && !call_used_regs
[cum
->regno
])
2333 /* FIXME: We ship info on failing tail-call in struct machine_function.
2334 This uses internals of calls.c:expand_call() and the way args_so_far
2335 is used. targetm.function_ok_for_sibcall() needs to be extended to
2336 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2337 dependent so that such an extension is not wanted. */
2339 cfun
->machine
->sibcall_fails
= 1;
2342 /* Test if all registers needed by the ABI are actually available. If the
2343 user has fixed a GPR needed to pass an argument, an (implicit) function
2344 call will clobber that fixed register. See PR45099 for an example. */
2351 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2352 if (fixed_regs
[regno
])
2353 warning (0, "fixed register %s used to pass parameter to function",
2357 if (cum
->nregs
<= 0)
2360 cum
->regno
= FIRST_CUM_REG
;
2364 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2365 /* Decide whether we can make a sibling call to a function. DECL is the
2366 declaration of the function being targeted by the call and EXP is the
2367 CALL_EXPR representing the call. */
2370 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2374 /* Tail-calling must fail if callee-saved regs are used to pass
2375 function args. We must not tail-call when `epilogue_restores'
2376 is used. Unfortunately, we cannot tell at this point if that
2377 actually will happen or not, and we cannot step back from
2378 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2380 if (cfun
->machine
->sibcall_fails
2381 || TARGET_CALL_PROLOGUES
)
2386 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2390 decl_callee
= TREE_TYPE (decl_callee
);
2394 decl_callee
= fntype_callee
;
2396 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2397 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2399 decl_callee
= TREE_TYPE (decl_callee
);
2403 /* Ensure that caller and callee have compatible epilogues */
2405 if (interrupt_function_p (current_function_decl
)
2406 || signal_function_p (current_function_decl
)
2407 || avr_naked_function_p (decl_callee
)
2408 || avr_naked_function_p (current_function_decl
)
2409 /* FIXME: For OS_task and OS_main, we are over-conservative.
2410 This is due to missing documentation of these attributes
2411 and what they actually should do and should not do. */
2412 || (avr_OS_task_function_p (decl_callee
)
2413 != avr_OS_task_function_p (current_function_decl
))
2414 || (avr_OS_main_function_p (decl_callee
)
2415 != avr_OS_main_function_p (current_function_decl
)))
2423 /***********************************************************************
2424 Functions for outputting various mov's for a various modes
2425 ************************************************************************/
2427 /* Return true if a value of mode MODE is read from flash by
2428 __load_* function from libgcc. */
2431 avr_load_libgcc_p (rtx op
)
2433 enum machine_mode mode
= GET_MODE (op
);
2434 int n_bytes
= GET_MODE_SIZE (mode
);
2438 && avr_mem_flash_p (op
));
2441 /* Return true if a value of mode MODE is read by __xload_* function. */
2444 avr_xload_libgcc_p (enum machine_mode mode
)
2446 int n_bytes
= GET_MODE_SIZE (mode
);
2449 || avr_current_arch
->n_segments
> 1);
2453 /* Find an unused d-register to be used as scratch in INSN.
2454 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2455 is a register, skip all possible return values that overlap EXCLUDE.
2456 The policy for the returned register is similar to that of
2457 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2460 Return a QImode d-register or NULL_RTX if nothing found. */
2463 avr_find_unused_d_reg (rtx insn
, rtx exclude
)
2466 bool isr_p
= (interrupt_function_p (current_function_decl
)
2467 || signal_function_p (current_function_decl
));
2469 for (regno
= 16; regno
< 32; regno
++)
2471 rtx reg
= all_regs_rtx
[regno
];
2474 && reg_overlap_mentioned_p (exclude
, reg
))
2475 || fixed_regs
[regno
])
2480 /* Try non-live register */
2482 if (!df_regs_ever_live_p (regno
)
2483 && (TREE_THIS_VOLATILE (current_function_decl
)
2484 || cfun
->machine
->is_OS_task
2485 || cfun
->machine
->is_OS_main
2486 || (!isr_p
&& call_used_regs
[regno
])))
2491 /* Any live register can be used if it is unused after.
2492 Prologue/epilogue will care for it as needed. */
2494 if (df_regs_ever_live_p (regno
)
2495 && reg_unused_after (insn
, reg
))
2505 /* Helper function for the next function in the case where only restricted
2506 version of LPM instruction is available. */
2509 avr_out_lpm_no_lpmx (rtx insn
, rtx
*xop
, int *plen
)
2513 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2516 regno_dest
= REGNO (dest
);
2518 /* The implicit target register of LPM. */
2519 xop
[3] = lpm_reg_rtx
;
2521 switch (GET_CODE (addr
))
2528 gcc_assert (REG_Z
== REGNO (addr
));
2536 avr_asm_len ("%4lpm", xop
, plen
, 1);
2538 if (regno_dest
!= LPM_REGNO
)
2539 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2544 if (REGNO (dest
) == REG_Z
)
2545 return avr_asm_len ("%4lpm" CR_TAB
2550 "pop %A0", xop
, plen
, 6);
2552 avr_asm_len ("%4lpm" CR_TAB
2556 "mov %B0,%3", xop
, plen
, 5);
2558 if (!reg_unused_after (insn
, addr
))
2559 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2568 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
2571 if (regno_dest
== LPM_REGNO
)
2572 avr_asm_len ("%4lpm" CR_TAB
2573 "adiw %2,1", xop
, plen
, 2);
2575 avr_asm_len ("%4lpm" CR_TAB
2577 "adiw %2,1", xop
, plen
, 3);
2580 avr_asm_len ("%4lpm" CR_TAB
2582 "adiw %2,1", xop
, plen
, 3);
2585 avr_asm_len ("%4lpm" CR_TAB
2587 "adiw %2,1", xop
, plen
, 3);
2590 avr_asm_len ("%4lpm" CR_TAB
2592 "adiw %2,1", xop
, plen
, 3);
2594 break; /* POST_INC */
2596 } /* switch CODE (addr) */
2602 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2603 OP[1] in AS1 to register OP[0].
2604 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2608 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2612 rtx src
= SET_SRC (single_set (insn
));
2614 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2618 addr_space_t as
= MEM_ADDR_SPACE (src
);
2625 warning (0, "writing to address space %qs not supported",
2626 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2631 addr
= XEXP (src
, 0);
2632 code
= GET_CODE (addr
);
2634 gcc_assert (REG_P (dest
));
2635 gcc_assert (REG
== code
|| POST_INC
== code
);
2639 xop
[2] = lpm_addr_reg_rtx
;
2640 xop
[4] = xstring_empty
;
2641 xop
[5] = tmp_reg_rtx
;
2643 regno_dest
= REGNO (dest
);
2645 /* Cut down segment number to a number the device actually supports.
2646 We do this late to preserve the address space's name for diagnostics. */
2648 segment
= avr_addrspace
[as
].segment
% avr_current_arch
->n_segments
;
2650 /* Set RAMPZ as needed. */
2654 xop
[4] = GEN_INT (segment
);
2656 if (xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
),
2659 avr_asm_len ("ldi %3,%4" CR_TAB
2660 "out __RAMPZ__,%3", xop
, plen
, 2);
2662 else if (segment
== 1)
2664 avr_asm_len ("clr %5" CR_TAB
2666 "out __RAMPZ__,%5", xop
, plen
, 3);
2670 avr_asm_len ("mov %5,%2" CR_TAB
2672 "out __RAMPZ__,%2" CR_TAB
2673 "mov %2,%5", xop
, plen
, 4);
2678 if (!AVR_HAVE_ELPMX
)
2679 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2681 else if (!AVR_HAVE_LPMX
)
2683 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2686 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2688 switch (GET_CODE (addr
))
2695 gcc_assert (REG_Z
== REGNO (addr
));
2703 return avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
2706 if (REGNO (dest
) == REG_Z
)
2707 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2708 "%4lpm %B0,%a2" CR_TAB
2709 "mov %A0,%5", xop
, plen
, 3);
2712 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2713 "%4lpm %B0,%a2", xop
, plen
, 2);
2715 if (!reg_unused_after (insn
, addr
))
2716 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2723 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2724 "%4lpm %B0,%a2+" CR_TAB
2725 "%4lpm %C0,%a2", xop
, plen
, 3);
2727 if (!reg_unused_after (insn
, addr
))
2728 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
2734 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2735 "%4lpm %B0,%a2+", xop
, plen
, 2);
2737 if (REGNO (dest
) == REG_Z
- 2)
2738 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2739 "%4lpm %C0,%a2" CR_TAB
2740 "mov %D0,%5", xop
, plen
, 3);
2743 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2744 "%4lpm %D0,%a2", xop
, plen
, 2);
2746 if (!reg_unused_after (insn
, addr
))
2747 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
2757 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
2760 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
2761 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
2762 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
2763 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
2765 break; /* POST_INC */
2767 } /* switch CODE (addr) */
2773 /* Worker function for xload_8 insn. */
2776 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
2782 xop
[2] = lpm_addr_reg_rtx
;
2783 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
2788 avr_asm_len ("ld %3,%a2" CR_TAB
2789 "sbrs %1,7", xop
, plen
, 2);
2791 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, 1);
2793 if (REGNO (xop
[0]) != REGNO (xop
[3]))
2794 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2801 output_movqi (rtx insn
, rtx operands
[], int *l
)
2804 rtx dest
= operands
[0];
2805 rtx src
= operands
[1];
2808 if (avr_mem_flash_p (src
)
2809 || avr_mem_flash_p (dest
))
2811 return avr_out_lpm (insn
, operands
, real_l
);
2819 if (register_operand (dest
, QImode
))
2821 if (register_operand (src
, QImode
)) /* mov r,r */
2823 if (test_hard_reg_class (STACK_REG
, dest
))
2825 else if (test_hard_reg_class (STACK_REG
, src
))
2830 else if (CONSTANT_P (src
))
2832 output_reload_in_const (operands
, NULL_RTX
, real_l
, false);
2835 else if (GET_CODE (src
) == MEM
)
2836 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2838 else if (GET_CODE (dest
) == MEM
)
2843 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
2845 return out_movqi_mr_r (insn
, xop
, real_l
);
2852 output_movhi (rtx insn
, rtx xop
[], int *plen
)
2857 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
2859 if (avr_mem_flash_p (src
)
2860 || avr_mem_flash_p (dest
))
2862 return avr_out_lpm (insn
, xop
, plen
);
2867 if (REG_P (src
)) /* mov r,r */
2869 if (test_hard_reg_class (STACK_REG
, dest
))
2871 if (AVR_HAVE_8BIT_SP
)
2872 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
2874 /* Use simple load of SP if no interrupts are used. */
2876 return TARGET_NO_INTERRUPTS
2877 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2878 "out __SP_L__,%A1", xop
, plen
, -2)
2880 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2882 "out __SP_H__,%B1" CR_TAB
2883 "out __SREG__,__tmp_reg__" CR_TAB
2884 "out __SP_L__,%A1", xop
, plen
, -5);
2886 else if (test_hard_reg_class (STACK_REG
, src
))
2888 return AVR_HAVE_8BIT_SP
2889 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2890 "clr %B0", xop
, plen
, -2)
2892 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2893 "in %B0,__SP_H__", xop
, plen
, -2);
2896 return AVR_HAVE_MOVW
2897 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
2899 : avr_asm_len ("mov %A0,%A1" CR_TAB
2900 "mov %B0,%B1", xop
, plen
, -2);
2902 else if (CONSTANT_P (src
))
2904 return output_reload_inhi (xop
, NULL
, plen
);
2906 else if (MEM_P (src
))
2908 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
2911 else if (MEM_P (dest
))
2916 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
2918 return out_movhi_mr_r (insn
, xop
, plen
);
2921 fatal_insn ("invalid insn:", insn
);
2927 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
2931 rtx x
= XEXP (src
, 0);
2933 if (CONSTANT_ADDRESS_P (x
))
2935 return optimize
> 0 && io_address_operand (x
, QImode
)
2936 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
2937 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
2939 else if (GET_CODE (x
) == PLUS
2940 && REG_P (XEXP (x
, 0))
2941 && CONST_INT_P (XEXP (x
, 1)))
2943 /* memory access by reg+disp */
2945 int disp
= INTVAL (XEXP (x
, 1));
2947 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
2949 if (REGNO (XEXP (x
, 0)) != REG_Y
)
2950 fatal_insn ("incorrect insn:",insn
);
2952 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2953 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2954 "ldd %0,Y+63" CR_TAB
2955 "sbiw r28,%o1-63", op
, plen
, -3);
2957 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2958 "sbci r29,hi8(-%o1)" CR_TAB
2960 "subi r28,lo8(%o1)" CR_TAB
2961 "sbci r29,hi8(%o1)", op
, plen
, -5);
2963 else if (REGNO (XEXP (x
, 0)) == REG_X
)
2965 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2966 it but I have this situation with extremal optimizing options. */
2968 avr_asm_len ("adiw r26,%o1" CR_TAB
2969 "ld %0,X", op
, plen
, -2);
2971 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
2972 && !reg_unused_after (insn
, XEXP (x
,0)))
2974 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
2980 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
2983 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
2987 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
2991 rtx base
= XEXP (src
, 0);
2992 int reg_dest
= true_regnum (dest
);
2993 int reg_base
= true_regnum (base
);
2994 /* "volatile" forces reading low byte first, even if less efficient,
2995 for correct operation with 16-bit I/O registers. */
2996 int mem_volatile_p
= MEM_VOLATILE_P (src
);
3000 if (reg_dest
== reg_base
) /* R = (R) */
3001 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3003 "mov %A0,__tmp_reg__", op
, plen
, -3);
3005 if (reg_base
!= REG_X
)
3006 return avr_asm_len ("ld %A0,%1" CR_TAB
3007 "ldd %B0,%1+1", op
, plen
, -2);
3009 avr_asm_len ("ld %A0,X+" CR_TAB
3010 "ld %B0,X", op
, plen
, -2);
3012 if (!reg_unused_after (insn
, base
))
3013 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3017 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3019 int disp
= INTVAL (XEXP (base
, 1));
3020 int reg_base
= true_regnum (XEXP (base
, 0));
3022 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3024 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3025 fatal_insn ("incorrect insn:",insn
);
3027 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3028 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3029 "ldd %A0,Y+62" CR_TAB
3030 "ldd %B0,Y+63" CR_TAB
3031 "sbiw r28,%o1-62", op
, plen
, -4)
3033 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3034 "sbci r29,hi8(-%o1)" CR_TAB
3036 "ldd %B0,Y+1" CR_TAB
3037 "subi r28,lo8(%o1)" CR_TAB
3038 "sbci r29,hi8(%o1)", op
, plen
, -6);
3041 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3042 it but I have this situation with extremal
3043 optimization options. */
3045 if (reg_base
== REG_X
)
3046 return reg_base
== reg_dest
3047 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3048 "ld __tmp_reg__,X+" CR_TAB
3050 "mov %A0,__tmp_reg__", op
, plen
, -4)
3052 : avr_asm_len ("adiw r26,%o1" CR_TAB
3055 "sbiw r26,%o1+1", op
, plen
, -4);
3057 return reg_base
== reg_dest
3058 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3059 "ldd %B0,%B1" CR_TAB
3060 "mov %A0,__tmp_reg__", op
, plen
, -3)
3062 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3063 "ldd %B0,%B1", op
, plen
, -2);
3065 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3067 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3068 fatal_insn ("incorrect insn:", insn
);
3070 if (!mem_volatile_p
)
3071 return avr_asm_len ("ld %B0,%1" CR_TAB
3072 "ld %A0,%1", op
, plen
, -2);
3074 return REGNO (XEXP (base
, 0)) == REG_X
3075 ? avr_asm_len ("sbiw r26,2" CR_TAB
3078 "sbiw r26,1", op
, plen
, -4)
3080 : avr_asm_len ("sbiw %r1,2" CR_TAB
3082 "ldd %B0,%p1+1", op
, plen
, -3);
3084 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3086 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3087 fatal_insn ("incorrect insn:", insn
);
3089 return avr_asm_len ("ld %A0,%1" CR_TAB
3090 "ld %B0,%1", op
, plen
, -2);
3092 else if (CONSTANT_ADDRESS_P (base
))
3094 return optimize
> 0 && io_address_operand (base
, HImode
)
3095 ? avr_asm_len ("in %A0,%i1" CR_TAB
3096 "in %B0,%i1+1", op
, plen
, -2)
3098 : avr_asm_len ("lds %A0,%m1" CR_TAB
3099 "lds %B0,%m1+1", op
, plen
, -4);
3102 fatal_insn ("unknown move insn:",insn
);
3107 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3111 rtx base
= XEXP (src
, 0);
3112 int reg_dest
= true_regnum (dest
);
3113 int reg_base
= true_regnum (base
);
3121 if (reg_base
== REG_X
) /* (R26) */
3123 if (reg_dest
== REG_X
)
3124 /* "ld r26,-X" is undefined */
3125 return *l
=7, ("adiw r26,3" CR_TAB
3128 "ld __tmp_reg__,-X" CR_TAB
3131 "mov r27,__tmp_reg__");
3132 else if (reg_dest
== REG_X
- 2)
3133 return *l
=5, ("ld %A0,X+" CR_TAB
3135 "ld __tmp_reg__,X+" CR_TAB
3137 "mov %C0,__tmp_reg__");
3138 else if (reg_unused_after (insn
, base
))
3139 return *l
=4, ("ld %A0,X+" CR_TAB
3144 return *l
=5, ("ld %A0,X+" CR_TAB
3152 if (reg_dest
== reg_base
)
3153 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3154 "ldd %C0,%1+2" CR_TAB
3155 "ldd __tmp_reg__,%1+1" CR_TAB
3157 "mov %B0,__tmp_reg__");
3158 else if (reg_base
== reg_dest
+ 2)
3159 return *l
=5, ("ld %A0,%1" CR_TAB
3160 "ldd %B0,%1+1" CR_TAB
3161 "ldd __tmp_reg__,%1+2" CR_TAB
3162 "ldd %D0,%1+3" CR_TAB
3163 "mov %C0,__tmp_reg__");
3165 return *l
=4, ("ld %A0,%1" CR_TAB
3166 "ldd %B0,%1+1" CR_TAB
3167 "ldd %C0,%1+2" CR_TAB
3171 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3173 int disp
= INTVAL (XEXP (base
, 1));
3175 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3177 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3178 fatal_insn ("incorrect insn:",insn
);
3180 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3181 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3182 "ldd %A0,Y+60" CR_TAB
3183 "ldd %B0,Y+61" CR_TAB
3184 "ldd %C0,Y+62" CR_TAB
3185 "ldd %D0,Y+63" CR_TAB
3188 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3189 "sbci r29,hi8(-%o1)" CR_TAB
3191 "ldd %B0,Y+1" CR_TAB
3192 "ldd %C0,Y+2" CR_TAB
3193 "ldd %D0,Y+3" CR_TAB
3194 "subi r28,lo8(%o1)" CR_TAB
3195 "sbci r29,hi8(%o1)");
3198 reg_base
= true_regnum (XEXP (base
, 0));
3199 if (reg_base
== REG_X
)
3202 if (reg_dest
== REG_X
)
3205 /* "ld r26,-X" is undefined */
3206 return ("adiw r26,%o1+3" CR_TAB
3209 "ld __tmp_reg__,-X" CR_TAB
3212 "mov r27,__tmp_reg__");
3215 if (reg_dest
== REG_X
- 2)
3216 return ("adiw r26,%o1" CR_TAB
3219 "ld __tmp_reg__,X+" CR_TAB
3221 "mov r26,__tmp_reg__");
3223 return ("adiw r26,%o1" CR_TAB
3230 if (reg_dest
== reg_base
)
3231 return *l
=5, ("ldd %D0,%D1" CR_TAB
3232 "ldd %C0,%C1" CR_TAB
3233 "ldd __tmp_reg__,%B1" CR_TAB
3234 "ldd %A0,%A1" CR_TAB
3235 "mov %B0,__tmp_reg__");
3236 else if (reg_dest
== reg_base
- 2)
3237 return *l
=5, ("ldd %A0,%A1" CR_TAB
3238 "ldd %B0,%B1" CR_TAB
3239 "ldd __tmp_reg__,%C1" CR_TAB
3240 "ldd %D0,%D1" CR_TAB
3241 "mov %C0,__tmp_reg__");
3242 return *l
=4, ("ldd %A0,%A1" CR_TAB
3243 "ldd %B0,%B1" CR_TAB
3244 "ldd %C0,%C1" CR_TAB
3247 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3248 return *l
=4, ("ld %D0,%1" CR_TAB
3252 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3253 return *l
=4, ("ld %A0,%1" CR_TAB
3257 else if (CONSTANT_ADDRESS_P (base
))
3258 return *l
=8, ("lds %A0,%m1" CR_TAB
3259 "lds %B0,%m1+1" CR_TAB
3260 "lds %C0,%m1+2" CR_TAB
3263 fatal_insn ("unknown move insn:",insn
);
3268 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3272 rtx base
= XEXP (dest
, 0);
3273 int reg_base
= true_regnum (base
);
3274 int reg_src
= true_regnum (src
);
3280 if (CONSTANT_ADDRESS_P (base
))
3281 return *l
=8,("sts %m0,%A1" CR_TAB
3282 "sts %m0+1,%B1" CR_TAB
3283 "sts %m0+2,%C1" CR_TAB
3285 if (reg_base
> 0) /* (r) */
3287 if (reg_base
== REG_X
) /* (R26) */
3289 if (reg_src
== REG_X
)
3291 /* "st X+,r26" is undefined */
3292 if (reg_unused_after (insn
, base
))
3293 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3296 "st X+,__tmp_reg__" CR_TAB
3300 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3303 "st X+,__tmp_reg__" CR_TAB
3308 else if (reg_base
== reg_src
+ 2)
3310 if (reg_unused_after (insn
, base
))
3311 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3312 "mov __tmp_reg__,%D1" CR_TAB
3315 "st %0+,__zero_reg__" CR_TAB
3316 "st %0,__tmp_reg__" CR_TAB
3317 "clr __zero_reg__");
3319 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3320 "mov __tmp_reg__,%D1" CR_TAB
3323 "st %0+,__zero_reg__" CR_TAB
3324 "st %0,__tmp_reg__" CR_TAB
3325 "clr __zero_reg__" CR_TAB
3328 return *l
=5, ("st %0+,%A1" CR_TAB
3335 return *l
=4, ("st %0,%A1" CR_TAB
3336 "std %0+1,%B1" CR_TAB
3337 "std %0+2,%C1" CR_TAB
3340 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3342 int disp
= INTVAL (XEXP (base
, 1));
3343 reg_base
= REGNO (XEXP (base
, 0));
3344 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3346 if (reg_base
!= REG_Y
)
3347 fatal_insn ("incorrect insn:",insn
);
3349 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3350 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3351 "std Y+60,%A1" CR_TAB
3352 "std Y+61,%B1" CR_TAB
3353 "std Y+62,%C1" CR_TAB
3354 "std Y+63,%D1" CR_TAB
3357 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3358 "sbci r29,hi8(-%o0)" CR_TAB
3360 "std Y+1,%B1" CR_TAB
3361 "std Y+2,%C1" CR_TAB
3362 "std Y+3,%D1" CR_TAB
3363 "subi r28,lo8(%o0)" CR_TAB
3364 "sbci r29,hi8(%o0)");
3366 if (reg_base
== REG_X
)
3369 if (reg_src
== REG_X
)
3372 return ("mov __tmp_reg__,r26" CR_TAB
3373 "mov __zero_reg__,r27" CR_TAB
3374 "adiw r26,%o0" CR_TAB
3375 "st X+,__tmp_reg__" CR_TAB
3376 "st X+,__zero_reg__" CR_TAB
3379 "clr __zero_reg__" CR_TAB
3382 else if (reg_src
== REG_X
- 2)
3385 return ("mov __tmp_reg__,r26" CR_TAB
3386 "mov __zero_reg__,r27" CR_TAB
3387 "adiw r26,%o0" CR_TAB
3390 "st X+,__tmp_reg__" CR_TAB
3391 "st X,__zero_reg__" CR_TAB
3392 "clr __zero_reg__" CR_TAB
3396 return ("adiw r26,%o0" CR_TAB
3403 return *l
=4, ("std %A0,%A1" CR_TAB
3404 "std %B0,%B1" CR_TAB
3405 "std %C0,%C1" CR_TAB
3408 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3409 return *l
=4, ("st %0,%D1" CR_TAB
3413 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3414 return *l
=4, ("st %0,%A1" CR_TAB
3418 fatal_insn ("unknown move insn:",insn
);
3423 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3426 rtx dest
= operands
[0];
3427 rtx src
= operands
[1];
3430 if (avr_mem_flash_p (src
)
3431 || avr_mem_flash_p (dest
))
3433 return avr_out_lpm (insn
, operands
, real_l
);
3439 if (register_operand (dest
, VOIDmode
))
3441 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3443 if (true_regnum (dest
) > true_regnum (src
))
3448 return ("movw %C0,%C1" CR_TAB
3452 return ("mov %D0,%D1" CR_TAB
3453 "mov %C0,%C1" CR_TAB
3454 "mov %B0,%B1" CR_TAB
3462 return ("movw %A0,%A1" CR_TAB
3466 return ("mov %A0,%A1" CR_TAB
3467 "mov %B0,%B1" CR_TAB
3468 "mov %C0,%C1" CR_TAB
3472 else if (CONSTANT_P (src
))
3474 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3476 else if (GET_CODE (src
) == MEM
)
3477 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3479 else if (GET_CODE (dest
) == MEM
)
3483 if (src
== CONST0_RTX (GET_MODE (dest
)))
3484 operands
[1] = zero_reg_rtx
;
3486 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3489 output_asm_insn (templ
, operands
);
3494 fatal_insn ("invalid insn:", insn
);
3499 /* Handle loads of 24-bit types from memory to register. */
3502 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3506 rtx base
= XEXP (src
, 0);
3507 int reg_dest
= true_regnum (dest
);
3508 int reg_base
= true_regnum (base
);
3512 if (reg_base
== REG_X
) /* (R26) */
3514 if (reg_dest
== REG_X
)
3515 /* "ld r26,-X" is undefined */
3516 return avr_asm_len ("adiw r26,2" CR_TAB
3518 "ld __tmp_reg__,-X" CR_TAB
3521 "mov r27,__tmp_reg__", op
, plen
, -6);
3524 avr_asm_len ("ld %A0,X+" CR_TAB
3526 "ld %C0,X", op
, plen
, -3);
3528 if (reg_dest
!= REG_X
- 2
3529 && !reg_unused_after (insn
, base
))
3531 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3537 else /* reg_base != REG_X */
3539 if (reg_dest
== reg_base
)
3540 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3541 "ldd __tmp_reg__,%1+1" CR_TAB
3543 "mov %B0,__tmp_reg__", op
, plen
, -4);
3545 return avr_asm_len ("ld %A0,%1" CR_TAB
3546 "ldd %B0,%1+1" CR_TAB
3547 "ldd %C0,%1+2", op
, plen
, -3);
3550 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3552 int disp
= INTVAL (XEXP (base
, 1));
3554 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3556 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3557 fatal_insn ("incorrect insn:",insn
);
3559 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3560 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3561 "ldd %A0,Y+61" CR_TAB
3562 "ldd %B0,Y+62" CR_TAB
3563 "ldd %C0,Y+63" CR_TAB
3564 "sbiw r28,%o1-61", op
, plen
, -5);
3566 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3567 "sbci r29,hi8(-%o1)" CR_TAB
3569 "ldd %B0,Y+1" CR_TAB
3570 "ldd %C0,Y+2" CR_TAB
3571 "subi r28,lo8(%o1)" CR_TAB
3572 "sbci r29,hi8(%o1)", op
, plen
, -7);
3575 reg_base
= true_regnum (XEXP (base
, 0));
3576 if (reg_base
== REG_X
)
3579 if (reg_dest
== REG_X
)
3581 /* "ld r26,-X" is undefined */
3582 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3584 "ld __tmp_reg__,-X" CR_TAB
3587 "mov r27,__tmp_reg__", op
, plen
, -6);
3590 avr_asm_len ("adiw r26,%o1" CR_TAB
3593 "ld r26,X", op
, plen
, -4);
3595 if (reg_dest
!= REG_X
- 2)
3596 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3601 if (reg_dest
== reg_base
)
3602 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3603 "ldd __tmp_reg__,%B1" CR_TAB
3604 "ldd %A0,%A1" CR_TAB
3605 "mov %B0,__tmp_reg__", op
, plen
, -4);
3607 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3608 "ldd %B0,%B1" CR_TAB
3609 "ldd %C0,%C1", op
, plen
, -3);
3611 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3612 return avr_asm_len ("ld %C0,%1" CR_TAB
3614 "ld %A0,%1", op
, plen
, -3);
3615 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3616 return avr_asm_len ("ld %A0,%1" CR_TAB
3618 "ld %C0,%1", op
, plen
, -3);
3620 else if (CONSTANT_ADDRESS_P (base
))
3621 return avr_asm_len ("lds %A0,%m1" CR_TAB
3622 "lds %B0,%m1+1" CR_TAB
3623 "lds %C0,%m1+2", op
, plen
, -6);
3625 fatal_insn ("unknown move insn:",insn
);
3629 /* Handle store of 24-bit type from register or zero to memory. */
3632 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3636 rtx base
= XEXP (dest
, 0);
3637 int reg_base
= true_regnum (base
);
3639 if (CONSTANT_ADDRESS_P (base
))
3640 return avr_asm_len ("sts %m0,%A1" CR_TAB
3641 "sts %m0+1,%B1" CR_TAB
3642 "sts %m0+2,%C1", op
, plen
, -6);
3644 if (reg_base
> 0) /* (r) */
3646 if (reg_base
== REG_X
) /* (R26) */
3648 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3650 avr_asm_len ("st %0+,%A1" CR_TAB
3652 "st %0,%C1", op
, plen
, -3);
3654 if (!reg_unused_after (insn
, base
))
3655 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3660 return avr_asm_len ("st %0,%A1" CR_TAB
3661 "std %0+1,%B1" CR_TAB
3662 "std %0+2,%C1", op
, plen
, -3);
3664 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3666 int disp
= INTVAL (XEXP (base
, 1));
3667 reg_base
= REGNO (XEXP (base
, 0));
3669 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3671 if (reg_base
!= REG_Y
)
3672 fatal_insn ("incorrect insn:",insn
);
3674 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3675 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3676 "std Y+61,%A1" CR_TAB
3677 "std Y+62,%B1" CR_TAB
3678 "std Y+63,%C1" CR_TAB
3679 "sbiw r28,%o0-60", op
, plen
, -5);
3681 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3682 "sbci r29,hi8(-%o0)" CR_TAB
3684 "std Y+1,%B1" CR_TAB
3685 "std Y+2,%C1" CR_TAB
3686 "subi r28,lo8(%o0)" CR_TAB
3687 "sbci r29,hi8(%o0)", op
, plen
, -7);
3689 if (reg_base
== REG_X
)
3692 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
3694 avr_asm_len ("adiw r26,%o0" CR_TAB
3697 "st X,%C1", op
, plen
, -4);
3699 if (!reg_unused_after (insn
, XEXP (base
, 0)))
3700 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
3705 return avr_asm_len ("std %A0,%A1" CR_TAB
3706 "std %B0,%B1" CR_TAB
3707 "std %C0,%C1", op
, plen
, -3);
3709 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3710 return avr_asm_len ("st %0,%C1" CR_TAB
3712 "st %0,%A1", op
, plen
, -3);
3713 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3714 return avr_asm_len ("st %0,%A1" CR_TAB
3716 "st %0,%C1", op
, plen
, -3);
3718 fatal_insn ("unknown move insn:",insn
);
3723 /* Move around 24-bit stuff. */
3726 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
3731 if (avr_mem_flash_p (src
)
3732 || avr_mem_flash_p (dest
))
3734 return avr_out_lpm (insn
, op
, plen
);
3737 if (register_operand (dest
, VOIDmode
))
3739 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3741 if (true_regnum (dest
) > true_regnum (src
))
3743 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
3746 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
3748 return avr_asm_len ("mov %B0,%B1" CR_TAB
3749 "mov %A0,%A1", op
, plen
, 2);
3754 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
3756 avr_asm_len ("mov %A0,%A1" CR_TAB
3757 "mov %B0,%B1", op
, plen
, -2);
3759 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
3762 else if (CONSTANT_P (src
))
3764 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
3766 else if (MEM_P (src
))
3767 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
3769 else if (MEM_P (dest
))
3774 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3776 return avr_out_store_psi (insn
, xop
, plen
);
3779 fatal_insn ("invalid insn:", insn
);
3785 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
3789 rtx x
= XEXP (dest
, 0);
3791 if (CONSTANT_ADDRESS_P (x
))
3793 return optimize
> 0 && io_address_operand (x
, QImode
)
3794 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
3795 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
3797 else if (GET_CODE (x
) == PLUS
3798 && REG_P (XEXP (x
, 0))
3799 && CONST_INT_P (XEXP (x
, 1)))
3801 /* memory access by reg+disp */
3803 int disp
= INTVAL (XEXP (x
, 1));
3805 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
3807 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3808 fatal_insn ("incorrect insn:",insn
);
3810 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3811 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3812 "std Y+63,%1" CR_TAB
3813 "sbiw r28,%o0-63", op
, plen
, -3);
3815 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3816 "sbci r29,hi8(-%o0)" CR_TAB
3818 "subi r28,lo8(%o0)" CR_TAB
3819 "sbci r29,hi8(%o0)", op
, plen
, -5);
3821 else if (REGNO (XEXP (x
,0)) == REG_X
)
3823 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
3825 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3826 "adiw r26,%o0" CR_TAB
3827 "st X,__tmp_reg__", op
, plen
, -3);
3831 avr_asm_len ("adiw r26,%o0" CR_TAB
3832 "st X,%1", op
, plen
, -2);
3835 if (!reg_unused_after (insn
, XEXP (x
,0)))
3836 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
3841 return avr_asm_len ("std %0,%1", op
, plen
, -1);
3844 return avr_asm_len ("st %0,%1", op
, plen
, -1);
3848 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
3852 rtx base
= XEXP (dest
, 0);
3853 int reg_base
= true_regnum (base
);
3854 int reg_src
= true_regnum (src
);
3855 /* "volatile" forces writing high byte first, even if less efficient,
3856 for correct operation with 16-bit I/O registers. */
3857 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
3859 if (CONSTANT_ADDRESS_P (base
))
3860 return optimize
> 0 && io_address_operand (base
, HImode
)
3861 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3862 "out %i0,%A1", op
, plen
, -2)
3864 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3865 "sts %m0,%A1", op
, plen
, -4);
3869 if (reg_base
!= REG_X
)
3870 return avr_asm_len ("std %0+1,%B1" CR_TAB
3871 "st %0,%A1", op
, plen
, -2);
3873 if (reg_src
== REG_X
)
3874 /* "st X+,r26" and "st -X,r26" are undefined. */
3875 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
3876 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3879 "st X,__tmp_reg__", op
, plen
, -4)
3881 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3883 "st X,__tmp_reg__" CR_TAB
3885 "st X,r26", op
, plen
, -5);
3887 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
3888 ? avr_asm_len ("st X+,%A1" CR_TAB
3889 "st X,%B1", op
, plen
, -2)
3890 : avr_asm_len ("adiw r26,1" CR_TAB
3892 "st -X,%A1", op
, plen
, -3);
3894 else if (GET_CODE (base
) == PLUS
)
3896 int disp
= INTVAL (XEXP (base
, 1));
3897 reg_base
= REGNO (XEXP (base
, 0));
3898 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3900 if (reg_base
!= REG_Y
)
3901 fatal_insn ("incorrect insn:",insn
);
3903 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
3904 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3905 "std Y+63,%B1" CR_TAB
3906 "std Y+62,%A1" CR_TAB
3907 "sbiw r28,%o0-62", op
, plen
, -4)
3909 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3910 "sbci r29,hi8(-%o0)" CR_TAB
3911 "std Y+1,%B1" CR_TAB
3913 "subi r28,lo8(%o0)" CR_TAB
3914 "sbci r29,hi8(%o0)", op
, plen
, -6);
3917 if (reg_base
!= REG_X
)
3918 return avr_asm_len ("std %B0,%B1" CR_TAB
3919 "std %A0,%A1", op
, plen
, -2);
3921 return reg_src
== REG_X
3922 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3923 "mov __zero_reg__,r27" CR_TAB
3924 "adiw r26,%o0+1" CR_TAB
3925 "st X,__zero_reg__" CR_TAB
3926 "st -X,__tmp_reg__" CR_TAB
3927 "clr __zero_reg__" CR_TAB
3928 "sbiw r26,%o0", op
, plen
, -7)
3930 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3933 "sbiw r26,%o0", op
, plen
, -4);
3935 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3937 return avr_asm_len ("st %0,%B1" CR_TAB
3938 "st %0,%A1", op
, plen
, -2);
3940 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3942 if (!mem_volatile_p
)
3943 return avr_asm_len ("st %0,%A1" CR_TAB
3944 "st %0,%B1", op
, plen
, -2);
3946 return REGNO (XEXP (base
, 0)) == REG_X
3947 ? avr_asm_len ("adiw r26,1" CR_TAB
3950 "adiw r26,2", op
, plen
, -4)
3952 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3954 "adiw %r0,2", op
, plen
, -3);
3956 fatal_insn ("unknown move insn:",insn
);
3960 /* Return 1 if frame pointer for current function required. */
3963 avr_frame_pointer_required_p (void)
3965 return (cfun
->calls_alloca
3966 || cfun
->calls_setjmp
3967 || cfun
->has_nonlocal_label
3968 || crtl
->args
.info
.nregs
== 0
3969 || get_frame_size () > 0);
3972 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3975 compare_condition (rtx insn
)
3977 rtx next
= next_real_insn (insn
);
3979 if (next
&& JUMP_P (next
))
3981 rtx pat
= PATTERN (next
);
3982 rtx src
= SET_SRC (pat
);
3984 if (IF_THEN_ELSE
== GET_CODE (src
))
3985 return GET_CODE (XEXP (src
, 0));
3992 /* Returns true iff INSN is a tst insn that only tests the sign. */
3995 compare_sign_p (rtx insn
)
3997 RTX_CODE cond
= compare_condition (insn
);
3998 return (cond
== GE
|| cond
== LT
);
4002 /* Returns true iff the next insn is a JUMP_INSN with a condition
4003 that needs to be swapped (GT, GTU, LE, LEU). */
4006 compare_diff_p (rtx insn
)
4008 RTX_CODE cond
= compare_condition (insn
);
4009 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4012 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4015 compare_eq_p (rtx insn
)
4017 RTX_CODE cond
= compare_condition (insn
);
4018 return (cond
== EQ
|| cond
== NE
);
4022 /* Output compare instruction
4024 compare (XOP[0], XOP[1])
4026 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4027 XOP[2] is an 8-bit scratch register as needed.
4029 PLEN == NULL: Output instructions.
4030 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4031 Don't output anything. */
4034 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4036 /* Register to compare and value to compare against. */
4040 /* MODE of the comparison. */
4041 enum machine_mode mode
= GET_MODE (xreg
);
4043 /* Number of bytes to operate on. */
4044 int i
, n_bytes
= GET_MODE_SIZE (mode
);
4046 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4047 int clobber_val
= -1;
4049 gcc_assert (REG_P (xreg
));
4050 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4051 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4056 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4057 against 0 by ORing the bytes. This is one instruction shorter.
4058 Notice that DImode comparisons are always against reg:DI 18
4059 and therefore don't use this. */
4061 if (!test_hard_reg_class (LD_REGS
, xreg
)
4062 && compare_eq_p (insn
)
4063 && reg_unused_after (insn
, xreg
))
4065 if (xval
== const1_rtx
)
4067 avr_asm_len ("dec %A0" CR_TAB
4068 "or %A0,%B0", xop
, plen
, 2);
4071 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4074 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4078 else if (xval
== constm1_rtx
)
4081 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4084 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4086 return avr_asm_len ("and %A0,%B0" CR_TAB
4087 "com %A0", xop
, plen
, 2);
4091 for (i
= 0; i
< n_bytes
; i
++)
4093 /* We compare byte-wise. */
4094 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4095 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4097 /* 8-bit value to compare with this byte. */
4098 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4100 /* Registers R16..R31 can operate with immediate. */
4101 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4104 xop
[1] = gen_int_mode (val8
, QImode
);
4106 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4109 && test_hard_reg_class (ADDW_REGS
, reg8
))
4111 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4113 if (IN_RANGE (val16
, 0, 63)
4115 || reg_unused_after (insn
, xreg
)))
4117 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4123 && IN_RANGE (val16
, -63, -1)
4124 && compare_eq_p (insn
)
4125 && reg_unused_after (insn
, xreg
))
4127 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4131 /* Comparing against 0 is easy. */
4136 ? "cp %0,__zero_reg__"
4137 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4141 /* Upper registers can compare and subtract-with-carry immediates.
4142 Notice that compare instructions do the same as respective subtract
4143 instruction; the only difference is that comparisons don't write
4144 the result back to the target register. */
4150 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4153 else if (reg_unused_after (insn
, xreg
))
4155 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4160 /* Must load the value into the scratch register. */
4162 gcc_assert (REG_P (xop
[2]));
4164 if (clobber_val
!= (int) val8
)
4165 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4166 clobber_val
= (int) val8
;
4170 : "cpc %0,%2", xop
, plen
, 1);
4177 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4180 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4184 xop
[0] = gen_rtx_REG (DImode
, 18);
4188 return avr_out_compare (insn
, xop
, plen
);
4191 /* Output test instruction for HImode. */
4194 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4196 if (compare_sign_p (insn
))
4198 avr_asm_len ("tst %B0", op
, plen
, -1);
4200 else if (reg_unused_after (insn
, op
[0])
4201 && compare_eq_p (insn
))
4203 /* Faster than sbiw if we can clobber the operand. */
4204 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4208 avr_out_compare (insn
, op
, plen
);
4215 /* Output test instruction for PSImode. */
4218 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4220 if (compare_sign_p (insn
))
4222 avr_asm_len ("tst %C0", op
, plen
, -1);
4224 else if (reg_unused_after (insn
, op
[0])
4225 && compare_eq_p (insn
))
4227 /* Faster than sbiw if we can clobber the operand. */
4228 avr_asm_len ("or %A0,%B0" CR_TAB
4229 "or %A0,%C0", op
, plen
, -2);
4233 avr_out_compare (insn
, op
, plen
);
4240 /* Output test instruction for SImode. */
4243 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4245 if (compare_sign_p (insn
))
4247 avr_asm_len ("tst %D0", op
, plen
, -1);
4249 else if (reg_unused_after (insn
, op
[0])
4250 && compare_eq_p (insn
))
4252 /* Faster than sbiw if we can clobber the operand. */
4253 avr_asm_len ("or %A0,%B0" CR_TAB
4255 "or %A0,%D0", op
, plen
, -3);
4259 avr_out_compare (insn
, op
, plen
);
4266 /* Generate asm equivalent for various shifts. This only handles cases
4267 that are not already carefully hand-optimized in ?sh??i3_out.
4269 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4270 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4271 OPERANDS[3] is a QImode scratch register from LD regs if
4272 available and SCRATCH, otherwise (no scratch available)
4274 TEMPL is an assembler template that shifts by one position.
4275 T_LEN is the length of this template. */
4278 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4279 int *plen
, int t_len
)
4281 bool second_label
= true;
4282 bool saved_in_tmp
= false;
4283 bool use_zero_reg
= false;
4286 op
[0] = operands
[0];
4287 op
[1] = operands
[1];
4288 op
[2] = operands
[2];
4289 op
[3] = operands
[3];
4294 if (CONST_INT_P (operands
[2]))
4296 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4297 && REG_P (operands
[3]));
4298 int count
= INTVAL (operands
[2]);
4299 int max_len
= 10; /* If larger than this, always use a loop. */
4304 if (count
< 8 && !scratch
)
4305 use_zero_reg
= true;
4308 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4310 if (t_len
* count
<= max_len
)
4312 /* Output shifts inline with no loop - faster. */
4315 avr_asm_len (templ
, op
, plen
, t_len
);
4322 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4324 else if (use_zero_reg
)
4326 /* Hack to save one word: use __zero_reg__ as loop counter.
4327 Set one bit, then shift in a loop until it is 0 again. */
4329 op
[3] = zero_reg_rtx
;
4331 avr_asm_len ("set" CR_TAB
4332 "bld %3,%2-1", op
, plen
, 2);
4336 /* No scratch register available, use one from LD_REGS (saved in
4337 __tmp_reg__) that doesn't overlap with registers to shift. */
4339 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4340 op
[4] = tmp_reg_rtx
;
4341 saved_in_tmp
= true;
4343 avr_asm_len ("mov %4,%3" CR_TAB
4344 "ldi %3,%2", op
, plen
, 2);
4347 second_label
= false;
4349 else if (MEM_P (op
[2]))
4353 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4356 out_movqi_r_mr (insn
, op_mov
, plen
);
4358 else if (register_operand (op
[2], QImode
))
4362 if (!reg_unused_after (insn
, op
[2])
4363 || reg_overlap_mentioned_p (op
[0], op
[2]))
4365 op
[3] = tmp_reg_rtx
;
4366 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4370 fatal_insn ("bad shift insn:", insn
);
4373 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4375 avr_asm_len ("1:", op
, plen
, 0);
4376 avr_asm_len (templ
, op
, plen
, t_len
);
4379 avr_asm_len ("2:", op
, plen
, 0);
4381 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4382 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4385 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4389 /* 8bit shift left ((char)x << i) */
4392 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4394 if (GET_CODE (operands
[2]) == CONST_INT
)
4401 switch (INTVAL (operands
[2]))
4404 if (INTVAL (operands
[2]) < 8)
4416 return ("lsl %0" CR_TAB
4421 return ("lsl %0" CR_TAB
4426 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4429 return ("swap %0" CR_TAB
4433 return ("lsl %0" CR_TAB
4439 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4442 return ("swap %0" CR_TAB
4447 return ("lsl %0" CR_TAB
4454 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4457 return ("swap %0" CR_TAB
4463 return ("lsl %0" CR_TAB
4472 return ("ror %0" CR_TAB
4477 else if (CONSTANT_P (operands
[2]))
4478 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4480 out_shift_with_cnt ("lsl %0",
4481 insn
, operands
, len
, 1);
4486 /* 16bit shift left ((short)x << i) */
4489 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4491 if (GET_CODE (operands
[2]) == CONST_INT
)
4493 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4494 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4501 switch (INTVAL (operands
[2]))
4504 if (INTVAL (operands
[2]) < 16)
4508 return ("clr %B0" CR_TAB
4512 if (optimize_size
&& scratch
)
4517 return ("swap %A0" CR_TAB
4519 "andi %B0,0xf0" CR_TAB
4520 "eor %B0,%A0" CR_TAB
4521 "andi %A0,0xf0" CR_TAB
4527 return ("swap %A0" CR_TAB
4529 "ldi %3,0xf0" CR_TAB
4531 "eor %B0,%A0" CR_TAB
4535 break; /* optimize_size ? 6 : 8 */
4539 break; /* scratch ? 5 : 6 */
4543 return ("lsl %A0" CR_TAB
4547 "andi %B0,0xf0" CR_TAB
4548 "eor %B0,%A0" CR_TAB
4549 "andi %A0,0xf0" CR_TAB
4555 return ("lsl %A0" CR_TAB
4559 "ldi %3,0xf0" CR_TAB
4561 "eor %B0,%A0" CR_TAB
4569 break; /* scratch ? 5 : 6 */
4571 return ("clr __tmp_reg__" CR_TAB
4574 "ror __tmp_reg__" CR_TAB
4577 "ror __tmp_reg__" CR_TAB
4578 "mov %B0,%A0" CR_TAB
4579 "mov %A0,__tmp_reg__");
4583 return ("lsr %B0" CR_TAB
4584 "mov %B0,%A0" CR_TAB
4590 return *len
= 2, ("mov %B0,%A1" CR_TAB
4595 return ("mov %B0,%A0" CR_TAB
4601 return ("mov %B0,%A0" CR_TAB
4608 return ("mov %B0,%A0" CR_TAB
4618 return ("mov %B0,%A0" CR_TAB
4626 return ("mov %B0,%A0" CR_TAB
4629 "ldi %3,0xf0" CR_TAB
4633 return ("mov %B0,%A0" CR_TAB
4644 return ("mov %B0,%A0" CR_TAB
4650 if (AVR_HAVE_MUL
&& scratch
)
4653 return ("ldi %3,0x20" CR_TAB
4657 "clr __zero_reg__");
4659 if (optimize_size
&& scratch
)
4664 return ("mov %B0,%A0" CR_TAB
4668 "ldi %3,0xe0" CR_TAB
4674 return ("set" CR_TAB
4679 "clr __zero_reg__");
4682 return ("mov %B0,%A0" CR_TAB
4691 if (AVR_HAVE_MUL
&& ldi_ok
)
4694 return ("ldi %B0,0x40" CR_TAB
4695 "mul %A0,%B0" CR_TAB
4698 "clr __zero_reg__");
4700 if (AVR_HAVE_MUL
&& scratch
)
4703 return ("ldi %3,0x40" CR_TAB
4707 "clr __zero_reg__");
4709 if (optimize_size
&& ldi_ok
)
4712 return ("mov %B0,%A0" CR_TAB
4713 "ldi %A0,6" "\n1:\t"
4718 if (optimize_size
&& scratch
)
4721 return ("clr %B0" CR_TAB
4730 return ("clr %B0" CR_TAB
4737 out_shift_with_cnt ("lsl %A0" CR_TAB
4738 "rol %B0", insn
, operands
, len
, 2);
4743 /* 24-bit shift left */
4746 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
4751 if (CONST_INT_P (op
[2]))
4753 switch (INTVAL (op
[2]))
4756 if (INTVAL (op
[2]) < 24)
4759 return avr_asm_len ("clr %A0" CR_TAB
4761 "clr %C0", op
, plen
, 3);
4765 int reg0
= REGNO (op
[0]);
4766 int reg1
= REGNO (op
[1]);
4769 return avr_asm_len ("mov %C0,%B1" CR_TAB
4770 "mov %B0,%A1" CR_TAB
4771 "clr %A0", op
, plen
, 3);
4773 return avr_asm_len ("clr %A0" CR_TAB
4774 "mov %B0,%A1" CR_TAB
4775 "mov %C0,%B1", op
, plen
, 3);
4780 int reg0
= REGNO (op
[0]);
4781 int reg1
= REGNO (op
[1]);
4783 if (reg0
+ 2 != reg1
)
4784 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
4786 return avr_asm_len ("clr %B0" CR_TAB
4787 "clr %A0", op
, plen
, 2);
4791 return avr_asm_len ("clr %C0" CR_TAB
4795 "clr %A0", op
, plen
, 5);
4799 out_shift_with_cnt ("lsl %A0" CR_TAB
4801 "rol %C0", insn
, op
, plen
, 3);
4806 /* 32bit shift left ((long)x << i) */
4809 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
4811 if (GET_CODE (operands
[2]) == CONST_INT
)
4819 switch (INTVAL (operands
[2]))
4822 if (INTVAL (operands
[2]) < 32)
4826 return *len
= 3, ("clr %D0" CR_TAB
4830 return ("clr %D0" CR_TAB
4837 int reg0
= true_regnum (operands
[0]);
4838 int reg1
= true_regnum (operands
[1]);
4841 return ("mov %D0,%C1" CR_TAB
4842 "mov %C0,%B1" CR_TAB
4843 "mov %B0,%A1" CR_TAB
4846 return ("clr %A0" CR_TAB
4847 "mov %B0,%A1" CR_TAB
4848 "mov %C0,%B1" CR_TAB
4854 int reg0
= true_regnum (operands
[0]);
4855 int reg1
= true_regnum (operands
[1]);
4856 if (reg0
+ 2 == reg1
)
4857 return *len
= 2, ("clr %B0" CR_TAB
4860 return *len
= 3, ("movw %C0,%A1" CR_TAB
4864 return *len
= 4, ("mov %C0,%A1" CR_TAB
4865 "mov %D0,%B1" CR_TAB
4872 return ("mov %D0,%A1" CR_TAB
4879 return ("clr %D0" CR_TAB
4888 out_shift_with_cnt ("lsl %A0" CR_TAB
4891 "rol %D0", insn
, operands
, len
, 4);
4895 /* 8bit arithmetic shift right ((signed char)x >> i) */
4898 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
4900 if (GET_CODE (operands
[2]) == CONST_INT
)
4907 switch (INTVAL (operands
[2]))
4915 return ("asr %0" CR_TAB
4920 return ("asr %0" CR_TAB
4926 return ("asr %0" CR_TAB
4933 return ("asr %0" CR_TAB
4941 return ("bst %0,6" CR_TAB
4947 if (INTVAL (operands
[2]) < 8)
4954 return ("lsl %0" CR_TAB
4958 else if (CONSTANT_P (operands
[2]))
4959 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4961 out_shift_with_cnt ("asr %0",
4962 insn
, operands
, len
, 1);
4967 /* 16bit arithmetic shift right ((signed short)x >> i) */
4970 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
4972 if (GET_CODE (operands
[2]) == CONST_INT
)
4974 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4975 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4982 switch (INTVAL (operands
[2]))
4986 /* XXX try to optimize this too? */
4991 break; /* scratch ? 5 : 6 */
4993 return ("mov __tmp_reg__,%A0" CR_TAB
4994 "mov %A0,%B0" CR_TAB
4995 "lsl __tmp_reg__" CR_TAB
4997 "sbc %B0,%B0" CR_TAB
4998 "lsl __tmp_reg__" CR_TAB
5004 return ("lsl %A0" CR_TAB
5005 "mov %A0,%B0" CR_TAB
5011 int reg0
= true_regnum (operands
[0]);
5012 int reg1
= true_regnum (operands
[1]);
5015 return *len
= 3, ("mov %A0,%B0" CR_TAB
5019 return *len
= 4, ("mov %A0,%B1" CR_TAB
5027 return ("mov %A0,%B0" CR_TAB
5029 "sbc %B0,%B0" CR_TAB
5034 return ("mov %A0,%B0" CR_TAB
5036 "sbc %B0,%B0" CR_TAB
5041 if (AVR_HAVE_MUL
&& ldi_ok
)
5044 return ("ldi %A0,0x20" CR_TAB
5045 "muls %B0,%A0" CR_TAB
5047 "sbc %B0,%B0" CR_TAB
5048 "clr __zero_reg__");
5050 if (optimize_size
&& scratch
)
5053 return ("mov %A0,%B0" CR_TAB
5055 "sbc %B0,%B0" CR_TAB
5061 if (AVR_HAVE_MUL
&& ldi_ok
)
5064 return ("ldi %A0,0x10" CR_TAB
5065 "muls %B0,%A0" CR_TAB
5067 "sbc %B0,%B0" CR_TAB
5068 "clr __zero_reg__");
5070 if (optimize_size
&& scratch
)
5073 return ("mov %A0,%B0" CR_TAB
5075 "sbc %B0,%B0" CR_TAB
5082 if (AVR_HAVE_MUL
&& ldi_ok
)
5085 return ("ldi %A0,0x08" CR_TAB
5086 "muls %B0,%A0" CR_TAB
5088 "sbc %B0,%B0" CR_TAB
5089 "clr __zero_reg__");
5092 break; /* scratch ? 5 : 7 */
5094 return ("mov %A0,%B0" CR_TAB
5096 "sbc %B0,%B0" CR_TAB
5105 return ("lsl %B0" CR_TAB
5106 "sbc %A0,%A0" CR_TAB
5108 "mov %B0,%A0" CR_TAB
5112 if (INTVAL (operands
[2]) < 16)
5118 return *len
= 3, ("lsl %B0" CR_TAB
5119 "sbc %A0,%A0" CR_TAB
5124 out_shift_with_cnt ("asr %B0" CR_TAB
5125 "ror %A0", insn
, operands
, len
, 2);
5130 /* 24-bit arithmetic shift right */
5133 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5135 int dest
= REGNO (op
[0]);
5136 int src
= REGNO (op
[1]);
5138 if (CONST_INT_P (op
[2]))
5143 switch (INTVAL (op
[2]))
5147 return avr_asm_len ("mov %A0,%B1" CR_TAB
5148 "mov %B0,%C1" CR_TAB
5151 "dec %C0", op
, plen
, 5);
5153 return avr_asm_len ("clr %C0" CR_TAB
5156 "mov %B0,%C1" CR_TAB
5157 "mov %A0,%B1", op
, plen
, 5);
5160 if (dest
!= src
+ 2)
5161 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5163 return avr_asm_len ("clr %B0" CR_TAB
5166 "mov %C0,%B0", op
, plen
, 4);
5169 if (INTVAL (op
[2]) < 24)
5175 return avr_asm_len ("lsl %C0" CR_TAB
5176 "sbc %A0,%A0" CR_TAB
5177 "mov %B0,%A0" CR_TAB
5178 "mov %C0,%A0", op
, plen
, 4);
5182 out_shift_with_cnt ("asr %C0" CR_TAB
5184 "ror %A0", insn
, op
, plen
, 3);
5189 /* 32bit arithmetic shift right ((signed long)x >> i) */
5192 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5194 if (GET_CODE (operands
[2]) == CONST_INT
)
5202 switch (INTVAL (operands
[2]))
5206 int reg0
= true_regnum (operands
[0]);
5207 int reg1
= true_regnum (operands
[1]);
5210 return ("mov %A0,%B1" CR_TAB
5211 "mov %B0,%C1" CR_TAB
5212 "mov %C0,%D1" CR_TAB
5217 return ("clr %D0" CR_TAB
5220 "mov %C0,%D1" CR_TAB
5221 "mov %B0,%C1" CR_TAB
5227 int reg0
= true_regnum (operands
[0]);
5228 int reg1
= true_regnum (operands
[1]);
5230 if (reg0
== reg1
+ 2)
5231 return *len
= 4, ("clr %D0" CR_TAB
5236 return *len
= 5, ("movw %A0,%C1" CR_TAB
5242 return *len
= 6, ("mov %B0,%D1" CR_TAB
5243 "mov %A0,%C1" CR_TAB
5251 return *len
= 6, ("mov %A0,%D1" CR_TAB
5255 "mov %B0,%D0" CR_TAB
5259 if (INTVAL (operands
[2]) < 32)
5266 return *len
= 4, ("lsl %D0" CR_TAB
5267 "sbc %A0,%A0" CR_TAB
5268 "mov %B0,%A0" CR_TAB
5271 return *len
= 5, ("lsl %D0" CR_TAB
5272 "sbc %A0,%A0" CR_TAB
5273 "mov %B0,%A0" CR_TAB
5274 "mov %C0,%A0" CR_TAB
5279 out_shift_with_cnt ("asr %D0" CR_TAB
5282 "ror %A0", insn
, operands
, len
, 4);
5286 /* 8bit logic shift right ((unsigned char)x >> i) */
5289 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5291 if (GET_CODE (operands
[2]) == CONST_INT
)
5298 switch (INTVAL (operands
[2]))
5301 if (INTVAL (operands
[2]) < 8)
5313 return ("lsr %0" CR_TAB
5317 return ("lsr %0" CR_TAB
5322 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5325 return ("swap %0" CR_TAB
5329 return ("lsr %0" CR_TAB
5335 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5338 return ("swap %0" CR_TAB
5343 return ("lsr %0" CR_TAB
5350 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5353 return ("swap %0" CR_TAB
5359 return ("lsr %0" CR_TAB
5368 return ("rol %0" CR_TAB
5373 else if (CONSTANT_P (operands
[2]))
5374 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5376 out_shift_with_cnt ("lsr %0",
5377 insn
, operands
, len
, 1);
5381 /* 16bit logic shift right ((unsigned short)x >> i) */
5384 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5386 if (GET_CODE (operands
[2]) == CONST_INT
)
5388 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5389 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5396 switch (INTVAL (operands
[2]))
5399 if (INTVAL (operands
[2]) < 16)
5403 return ("clr %B0" CR_TAB
5407 if (optimize_size
&& scratch
)
5412 return ("swap %B0" CR_TAB
5414 "andi %A0,0x0f" CR_TAB
5415 "eor %A0,%B0" CR_TAB
5416 "andi %B0,0x0f" CR_TAB
5422 return ("swap %B0" CR_TAB
5424 "ldi %3,0x0f" CR_TAB
5426 "eor %A0,%B0" CR_TAB
5430 break; /* optimize_size ? 6 : 8 */
5434 break; /* scratch ? 5 : 6 */
5438 return ("lsr %B0" CR_TAB
5442 "andi %A0,0x0f" CR_TAB
5443 "eor %A0,%B0" CR_TAB
5444 "andi %B0,0x0f" CR_TAB
5450 return ("lsr %B0" CR_TAB
5454 "ldi %3,0x0f" CR_TAB
5456 "eor %A0,%B0" CR_TAB
5464 break; /* scratch ? 5 : 6 */
5466 return ("clr __tmp_reg__" CR_TAB
5469 "rol __tmp_reg__" CR_TAB
5472 "rol __tmp_reg__" CR_TAB
5473 "mov %A0,%B0" CR_TAB
5474 "mov %B0,__tmp_reg__");
5478 return ("lsl %A0" CR_TAB
5479 "mov %A0,%B0" CR_TAB
5481 "sbc %B0,%B0" CR_TAB
5485 return *len
= 2, ("mov %A0,%B1" CR_TAB
5490 return ("mov %A0,%B0" CR_TAB
5496 return ("mov %A0,%B0" CR_TAB
5503 return ("mov %A0,%B0" CR_TAB
5513 return ("mov %A0,%B0" CR_TAB
5521 return ("mov %A0,%B0" CR_TAB
5524 "ldi %3,0x0f" CR_TAB
5528 return ("mov %A0,%B0" CR_TAB
5539 return ("mov %A0,%B0" CR_TAB
5545 if (AVR_HAVE_MUL
&& scratch
)
5548 return ("ldi %3,0x08" CR_TAB
5552 "clr __zero_reg__");
5554 if (optimize_size
&& scratch
)
5559 return ("mov %A0,%B0" CR_TAB
5563 "ldi %3,0x07" CR_TAB
5569 return ("set" CR_TAB
5574 "clr __zero_reg__");
5577 return ("mov %A0,%B0" CR_TAB
5586 if (AVR_HAVE_MUL
&& ldi_ok
)
5589 return ("ldi %A0,0x04" CR_TAB
5590 "mul %B0,%A0" CR_TAB
5593 "clr __zero_reg__");
5595 if (AVR_HAVE_MUL
&& scratch
)
5598 return ("ldi %3,0x04" CR_TAB
5602 "clr __zero_reg__");
5604 if (optimize_size
&& ldi_ok
)
5607 return ("mov %A0,%B0" CR_TAB
5608 "ldi %B0,6" "\n1:\t"
5613 if (optimize_size
&& scratch
)
5616 return ("clr %A0" CR_TAB
5625 return ("clr %A0" CR_TAB
5632 out_shift_with_cnt ("lsr %B0" CR_TAB
5633 "ror %A0", insn
, operands
, len
, 2);
5638 /* 24-bit logic shift right */
5641 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5643 int dest
= REGNO (op
[0]);
5644 int src
= REGNO (op
[1]);
5646 if (CONST_INT_P (op
[2]))
5651 switch (INTVAL (op
[2]))
5655 return avr_asm_len ("mov %A0,%B1" CR_TAB
5656 "mov %B0,%C1" CR_TAB
5657 "clr %C0", op
, plen
, 3);
5659 return avr_asm_len ("clr %C0" CR_TAB
5660 "mov %B0,%C1" CR_TAB
5661 "mov %A0,%B1", op
, plen
, 3);
5664 if (dest
!= src
+ 2)
5665 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5667 return avr_asm_len ("clr %B0" CR_TAB
5668 "clr %C0", op
, plen
, 2);
5671 if (INTVAL (op
[2]) < 24)
5677 return avr_asm_len ("clr %A0" CR_TAB
5681 "clr %C0", op
, plen
, 5);
5685 out_shift_with_cnt ("lsr %C0" CR_TAB
5687 "ror %A0", insn
, op
, plen
, 3);
5692 /* 32bit logic shift right ((unsigned int)x >> i) */
5695 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
5697 if (GET_CODE (operands
[2]) == CONST_INT
)
5705 switch (INTVAL (operands
[2]))
5708 if (INTVAL (operands
[2]) < 32)
5712 return *len
= 3, ("clr %D0" CR_TAB
5716 return ("clr %D0" CR_TAB
5723 int reg0
= true_regnum (operands
[0]);
5724 int reg1
= true_regnum (operands
[1]);
5727 return ("mov %A0,%B1" CR_TAB
5728 "mov %B0,%C1" CR_TAB
5729 "mov %C0,%D1" CR_TAB
5732 return ("clr %D0" CR_TAB
5733 "mov %C0,%D1" CR_TAB
5734 "mov %B0,%C1" CR_TAB
5740 int reg0
= true_regnum (operands
[0]);
5741 int reg1
= true_regnum (operands
[1]);
5743 if (reg0
== reg1
+ 2)
5744 return *len
= 2, ("clr %C0" CR_TAB
5747 return *len
= 3, ("movw %A0,%C1" CR_TAB
5751 return *len
= 4, ("mov %B0,%D1" CR_TAB
5752 "mov %A0,%C1" CR_TAB
5758 return *len
= 4, ("mov %A0,%D1" CR_TAB
5765 return ("clr %A0" CR_TAB
5774 out_shift_with_cnt ("lsr %D0" CR_TAB
5777 "ror %A0", insn
, operands
, len
, 4);
5782 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5784 XOP[0] = XOP[0] + XOP[2]
5786 and return "". If PLEN == NULL, print assembler instructions to perform the
5787 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5788 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5789 CODE == PLUS: perform addition by using ADD instructions.
5790 CODE == MINUS: perform addition by using SUB instructions.
5791 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5794 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
)
5796 /* MODE of the operation. */
5797 enum machine_mode mode
= GET_MODE (xop
[0]);
5799 /* Number of bytes to operate on. */
5800 int i
, n_bytes
= GET_MODE_SIZE (mode
);
5802 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5803 int clobber_val
= -1;
5805 /* op[0]: 8-bit destination register
5806 op[1]: 8-bit const int
5807 op[2]: 8-bit scratch register */
5810 /* Started the operation? Before starting the operation we may skip
5811 adding 0. This is no more true after the operation started because
5812 carry must be taken into account. */
5813 bool started
= false;
5815 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5818 /* Except in the case of ADIW with 16-bit register (see below)
5819 addition does not set cc0 in a usable way. */
5821 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
5824 xval
= simplify_unary_operation (NEG
, mode
, xval
, mode
);
5831 for (i
= 0; i
< n_bytes
; i
++)
5833 /* We operate byte-wise on the destination. */
5834 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
5835 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
5837 /* 8-bit value to operate with this byte. */
5838 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
5840 /* Registers R16..R31 can operate with immediate. */
5841 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
5844 op
[1] = gen_int_mode (val8
, QImode
);
5846 /* To get usable cc0 no low-bytes must have been skipped. */
5854 && test_hard_reg_class (ADDW_REGS
, reg8
))
5856 rtx xval16
= simplify_gen_subreg (HImode
, xval
, mode
, i
);
5857 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
5859 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5860 i.e. operate word-wise. */
5867 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
5870 if (n_bytes
== 2 && PLUS
== code
)
5882 avr_asm_len (code
== PLUS
5883 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5887 else if ((val8
== 1 || val8
== 0xff)
5889 && i
== n_bytes
- 1)
5891 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
5900 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
5902 if (clobber_val
!= (int) val8
)
5903 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
5904 clobber_val
= (int) val8
;
5906 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
5913 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
5916 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
5918 if (clobber_val
!= (int) val8
)
5919 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
5920 clobber_val
= (int) val8
;
5922 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
5934 } /* for all sub-bytes */
5936 /* No output doesn't change cc0. */
5938 if (plen
&& *plen
== 0)
5943 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5945 XOP[0] = XOP[0] + XOP[2]
5947 and return "". If PLEN == NULL, print assembler instructions to perform the
5948 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5949 words) printed with PLEN == NULL.
5950 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5951 condition code (with respect to XOP[0]). */
5954 avr_out_plus (rtx
*xop
, int *plen
, int *pcc
)
5956 int len_plus
, len_minus
;
5957 int cc_plus
, cc_minus
, cc_dummy
;
5962 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5964 avr_out_plus_1 (xop
, &len_plus
, PLUS
, &cc_plus
);
5965 avr_out_plus_1 (xop
, &len_minus
, MINUS
, &cc_minus
);
5967 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5971 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
5972 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
5974 else if (len_minus
<= len_plus
)
5975 avr_out_plus_1 (xop
, NULL
, MINUS
, pcc
);
5977 avr_out_plus_1 (xop
, NULL
, PLUS
, pcc
);
5983 /* Same as above but XOP has just 3 entries.
5984 Supply a dummy 4th operand. */
5987 avr_out_plus_noclobber (rtx
*xop
, int *plen
, int *pcc
)
5996 return avr_out_plus (op
, plen
, pcc
);
6000 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6003 avr_out_plus64 (rtx addend
, int *plen
)
6008 op
[0] = gen_rtx_REG (DImode
, 18);
6013 avr_out_plus_1 (op
, plen
, MINUS
, &cc_dummy
);
6018 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6019 time constant XOP[2]:
6021 XOP[0] = XOP[0] <op> XOP[2]
6023 and return "". If PLEN == NULL, print assembler instructions to perform the
6024 operation; otherwise, set *PLEN to the length of the instruction sequence
6025 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6026 register or SCRATCH if no clobber register is needed for the operation. */
6029 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6031 /* CODE and MODE of the operation. */
6032 enum rtx_code code
= GET_CODE (SET_SRC (single_set (insn
)));
6033 enum machine_mode mode
= GET_MODE (xop
[0]);
6035 /* Number of bytes to operate on. */
6036 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6038 /* Value of T-flag (0 or 1) or -1 if unknow. */
6041 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6042 int clobber_val
= -1;
6044 /* op[0]: 8-bit destination register
6045 op[1]: 8-bit const int
6046 op[2]: 8-bit clobber register or SCRATCH
6047 op[3]: 8-bit register containing 0xff or NULL_RTX */
6056 for (i
= 0; i
< n_bytes
; i
++)
6058 /* We operate byte-wise on the destination. */
6059 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6060 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6062 /* 8-bit value to operate with this byte. */
6063 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6065 /* Number of bits set in the current byte of the constant. */
6066 int pop8
= avr_popcount (val8
);
6068 /* Registers R16..R31 can operate with immediate. */
6069 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6072 op
[1] = GEN_INT (val8
);
6081 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6085 avr_asm_len ("set", op
, plen
, 1);
6088 op
[1] = GEN_INT (exact_log2 (val8
));
6089 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6093 if (op
[3] != NULL_RTX
)
6094 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6096 avr_asm_len ("clr %0" CR_TAB
6097 "dec %0", op
, plen
, 2);
6103 if (clobber_val
!= (int) val8
)
6104 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6105 clobber_val
= (int) val8
;
6107 avr_asm_len ("or %0,%2", op
, plen
, 1);
6117 avr_asm_len ("clr %0", op
, plen
, 1);
6119 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6123 avr_asm_len ("clt", op
, plen
, 1);
6126 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6127 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6131 if (clobber_val
!= (int) val8
)
6132 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6133 clobber_val
= (int) val8
;
6135 avr_asm_len ("and %0,%2", op
, plen
, 1);
6145 avr_asm_len ("com %0", op
, plen
, 1);
6146 else if (ld_reg_p
&& val8
== (1 << 7))
6147 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6150 if (clobber_val
!= (int) val8
)
6151 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6152 clobber_val
= (int) val8
;
6154 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6160 /* Unknown rtx_code */
6163 } /* for all sub-bytes */
6169 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6170 PLEN != NULL: Set *PLEN to the length of that sequence.
6174 avr_out_addto_sp (rtx
*op
, int *plen
)
6176 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6177 int addend
= INTVAL (op
[0]);
6184 if (flag_verbose_asm
|| flag_print_asm_name
)
6185 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
6187 while (addend
<= -pc_len
)
6190 avr_asm_len ("rcall .", op
, plen
, 1);
6193 while (addend
++ < 0)
6194 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
6196 else if (addend
> 0)
6198 if (flag_verbose_asm
|| flag_print_asm_name
)
6199 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
6201 while (addend
-- > 0)
6202 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
6209 /* Create RTL split patterns for byte sized rotate expressions. This
6210 produces a series of move instructions and considers overlap situations.
6211 Overlapping non-HImode operands need a scratch register. */
6214 avr_rotate_bytes (rtx operands
[])
6217 enum machine_mode mode
= GET_MODE (operands
[0]);
6218 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
6219 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
6220 int num
= INTVAL (operands
[2]);
6221 rtx scratch
= operands
[3];
6222 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6223 Word move if no scratch is needed, otherwise use size of scratch. */
6224 enum machine_mode move_mode
= QImode
;
6225 int move_size
, offset
, size
;
6229 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
6232 move_mode
= GET_MODE (scratch
);
6234 /* Force DI rotate to use QI moves since other DI moves are currently split
6235 into QI moves so forward propagation works better. */
6238 /* Make scratch smaller if needed. */
6239 if (SCRATCH
!= GET_CODE (scratch
)
6240 && HImode
== GET_MODE (scratch
)
6241 && QImode
== move_mode
)
6242 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
6244 move_size
= GET_MODE_SIZE (move_mode
);
6245 /* Number of bytes/words to rotate. */
6246 offset
= (num
>> 3) / move_size
;
6247 /* Number of moves needed. */
6248 size
= GET_MODE_SIZE (mode
) / move_size
;
6249 /* Himode byte swap is special case to avoid a scratch register. */
6250 if (mode
== HImode
&& same_reg
)
6252 /* HImode byte swap, using xor. This is as quick as using scratch. */
6254 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
6255 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
6256 if (!rtx_equal_p (dst
, src
))
6258 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6259 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
6260 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6265 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6266 /* Create linked list of moves to determine move order. */
6270 } move
[MAX_SIZE
+ 8];
6273 gcc_assert (size
<= MAX_SIZE
);
6274 /* Generate list of subreg moves. */
6275 for (i
= 0; i
< size
; i
++)
6278 int to
= (from
+ offset
) % size
;
6279 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
6280 mode
, from
* move_size
);
6281 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
6282 mode
, to
* move_size
);
6285 /* Mark dependence where a dst of one move is the src of another move.
6286 The first move is a conflict as it must wait until second is
6287 performed. We ignore moves to self - we catch this later. */
6289 for (i
= 0; i
< size
; i
++)
6290 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
6291 for (j
= 0; j
< size
; j
++)
6292 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
6294 /* The dst of move i is the src of move j. */
6301 /* Go through move list and perform non-conflicting moves. As each
6302 non-overlapping move is made, it may remove other conflicts
6303 so the process is repeated until no conflicts remain. */
6308 /* Emit move where dst is not also a src or we have used that
6310 for (i
= 0; i
< size
; i
++)
6311 if (move
[i
].src
!= NULL_RTX
)
6313 if (move
[i
].links
== -1
6314 || move
[move
[i
].links
].src
== NULL_RTX
)
6317 /* Ignore NOP moves to self. */
6318 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
6319 emit_move_insn (move
[i
].dst
, move
[i
].src
);
6321 /* Remove conflict from list. */
6322 move
[i
].src
= NULL_RTX
;
6328 /* Check for deadlock. This is when no moves occurred and we have
6329 at least one blocked move. */
6330 if (moves
== 0 && blocked
!= -1)
6332 /* Need to use scratch register to break deadlock.
6333 Add move to put dst of blocked move into scratch.
6334 When this move occurs, it will break chain deadlock.
6335 The scratch register is substituted for real move. */
6337 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
6339 move
[size
].src
= move
[blocked
].dst
;
6340 move
[size
].dst
= scratch
;
6341 /* Scratch move is never blocked. */
6342 move
[size
].links
= -1;
6343 /* Make sure we have valid link. */
6344 gcc_assert (move
[blocked
].links
!= -1);
6345 /* Replace src of blocking move with scratch reg. */
6346 move
[move
[blocked
].links
].src
= scratch
;
6347 /* Make dependent on scratch move occuring. */
6348 move
[blocked
].links
= size
;
6352 while (blocked
!= -1);
6357 /* Modifies the length assigned to instruction INSN
6358 LEN is the initially computed length of the insn. */
6361 adjust_insn_length (rtx insn
, int len
)
6363 rtx
*op
= recog_data
.operand
;
6364 enum attr_adjust_len adjust_len
;
6366 /* Some complex insns don't need length adjustment and therefore
6367 the length need not/must not be adjusted for these insns.
6368 It is easier to state this in an insn attribute "adjust_len" than
6369 to clutter up code here... */
6371 if (-1 == recog_memoized (insn
))
6376 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6378 adjust_len
= get_attr_adjust_len (insn
);
6380 if (adjust_len
== ADJUST_LEN_NO
)
6382 /* Nothing to adjust: The length from attribute "length" is fine.
6383 This is the default. */
6388 /* Extract insn's operands. */
6390 extract_constrain_insn_cached (insn
);
6392 /* Dispatch to right function. */
6396 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
6397 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
6398 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
6400 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
6402 case ADJUST_LEN_OUT_PLUS
: avr_out_plus (op
, &len
, NULL
); break;
6403 case ADJUST_LEN_PLUS64
: avr_out_plus64 (op
[0], &len
); break;
6404 case ADJUST_LEN_OUT_PLUS_NOCLOBBER
:
6405 avr_out_plus_noclobber (op
, &len
, NULL
); break;
6407 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
6409 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
6410 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
6411 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
6412 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
6413 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
6414 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
6416 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
6417 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
6418 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
6419 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
6420 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
6422 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
6423 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
6424 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
6426 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
6427 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
6428 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
6430 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
6431 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
6432 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
6434 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
6435 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
6436 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
6438 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
6440 case ADJUST_LEN_MAP_BITS
: avr_out_map_bits (insn
, op
, &len
); break;
6449 /* Return nonzero if register REG dead after INSN. */
6452 reg_unused_after (rtx insn
, rtx reg
)
6454 return (dead_or_set_p (insn
, reg
)
6455 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
6458 /* Return nonzero if REG is not used after INSN.
6459 We assume REG is a reload reg, and therefore does
6460 not live past labels. It may live past calls or jumps though. */
6463 _reg_unused_after (rtx insn
, rtx reg
)
6468 /* If the reg is set by this instruction, then it is safe for our
6469 case. Disregard the case where this is a store to memory, since
6470 we are checking a register used in the store address. */
6471 set
= single_set (insn
);
6472 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
6473 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6476 while ((insn
= NEXT_INSN (insn
)))
6479 code
= GET_CODE (insn
);
6482 /* If this is a label that existed before reload, then the register
6483 if dead here. However, if this is a label added by reorg, then
6484 the register may still be live here. We can't tell the difference,
6485 so we just ignore labels completely. */
6486 if (code
== CODE_LABEL
)
6494 if (code
== JUMP_INSN
)
6497 /* If this is a sequence, we must handle them all at once.
6498 We could have for instance a call that sets the target register,
6499 and an insn in a delay slot that uses the register. In this case,
6500 we must return 0. */
6501 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
6506 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
6508 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
6509 rtx set
= single_set (this_insn
);
6511 if (GET_CODE (this_insn
) == CALL_INSN
)
6513 else if (GET_CODE (this_insn
) == JUMP_INSN
)
6515 if (INSN_ANNULLED_BRANCH_P (this_insn
))
6520 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6522 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6524 if (GET_CODE (SET_DEST (set
)) != MEM
)
6530 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
6535 else if (code
== JUMP_INSN
)
6539 if (code
== CALL_INSN
)
6542 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
6543 if (GET_CODE (XEXP (tem
, 0)) == USE
6544 && REG_P (XEXP (XEXP (tem
, 0), 0))
6545 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
6547 if (call_used_regs
[REGNO (reg
)])
6551 set
= single_set (insn
);
6553 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6555 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6556 return GET_CODE (SET_DEST (set
)) != MEM
;
6557 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
6564 /* Return RTX that represents the lower 16 bits of a constant address.
6565 Unfortunately, simplify_gen_subreg does not handle this case. */
6568 avr_const_address_lo16 (rtx x
)
6572 switch (GET_CODE (x
))
6578 if (PLUS
== GET_CODE (XEXP (x
, 0))
6579 && SYMBOL_REF
== GET_CODE (XEXP (XEXP (x
, 0), 0))
6580 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
6582 HOST_WIDE_INT offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
6583 const char *name
= XSTR (XEXP (XEXP (x
, 0), 0), 0);
6585 lo16
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6586 lo16
= gen_rtx_CONST (Pmode
, plus_constant (lo16
, offset
));
6595 const char *name
= XSTR (x
, 0);
6597 return gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6601 avr_edump ("\n%?: %r\n", x
);
6606 /* Target hook for assembling integer objects. The AVR version needs
6607 special handling for references to certain labels. */
6610 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
6612 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
6613 && text_segment_operand (x
, VOIDmode
) )
6615 fputs ("\t.word\tgs(", asm_out_file
);
6616 output_addr_const (asm_out_file
, x
);
6617 fputs (")\n", asm_out_file
);
6621 else if (GET_MODE (x
) == PSImode
)
6623 default_assemble_integer (avr_const_address_lo16 (x
),
6624 GET_MODE_SIZE (HImode
), aligned_p
);
6626 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6627 " extension for hh8(", asm_out_file
);
6628 output_addr_const (asm_out_file
, x
);
6629 fputs (")\"\n", asm_out_file
);
6631 fputs ("\t.byte\t0\t" ASM_COMMENT_START
" hh8(", asm_out_file
);
6632 output_addr_const (asm_out_file
, x
);
6633 fputs (")\n", asm_out_file
);
6638 return default_assemble_integer (x
, size
, aligned_p
);
6642 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6645 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
6648 /* If the function has the 'signal' or 'interrupt' attribute, test to
6649 make sure that the name of the function is "__vector_NN" so as to
6650 catch when the user misspells the interrupt vector name. */
6652 if (cfun
->machine
->is_interrupt
)
6654 if (!STR_PREFIX_P (name
, "__vector"))
6656 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6657 "%qs appears to be a misspelled interrupt handler",
6661 else if (cfun
->machine
->is_signal
)
6663 if (!STR_PREFIX_P (name
, "__vector"))
6665 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6666 "%qs appears to be a misspelled signal handler",
6671 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
6672 ASM_OUTPUT_LABEL (file
, name
);
6676 /* Return value is nonzero if pseudos that have been
6677 assigned to registers of class CLASS would likely be spilled
6678 because registers of CLASS are needed for spill registers. */
6681 avr_class_likely_spilled_p (reg_class_t c
)
6683 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
6686 /* Valid attributes:
6687 progmem - put data to program memory;
6688 signal - make a function to be hardware interrupt. After function
6689 prologue interrupts are disabled;
6690 interrupt - make a function to be hardware interrupt. After function
6691 prologue interrupts are enabled;
6692 naked - don't generate function prologue/epilogue and `ret' command.
6694 Only `progmem' attribute valid for type. */
6696 /* Handle a "progmem" attribute; arguments as in
6697 struct attribute_spec.handler. */
6699 avr_handle_progmem_attribute (tree
*node
, tree name
,
6700 tree args ATTRIBUTE_UNUSED
,
6701 int flags ATTRIBUTE_UNUSED
,
6706 if (TREE_CODE (*node
) == TYPE_DECL
)
6708 /* This is really a decl attribute, not a type attribute,
6709 but try to handle it for GCC 3.0 backwards compatibility. */
6711 tree type
= TREE_TYPE (*node
);
6712 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
6713 tree newtype
= build_type_attribute_variant (type
, attr
);
6715 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
6716 TREE_TYPE (*node
) = newtype
;
6717 *no_add_attrs
= true;
6719 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
6721 *no_add_attrs
= false;
6725 warning (OPT_Wattributes
, "%qE attribute ignored",
6727 *no_add_attrs
= true;
6734 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6735 struct attribute_spec.handler. */
6738 avr_handle_fndecl_attribute (tree
*node
, tree name
,
6739 tree args ATTRIBUTE_UNUSED
,
6740 int flags ATTRIBUTE_UNUSED
,
6743 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6745 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
6747 *no_add_attrs
= true;
6754 avr_handle_fntype_attribute (tree
*node
, tree name
,
6755 tree args ATTRIBUTE_UNUSED
,
6756 int flags ATTRIBUTE_UNUSED
,
6759 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
6761 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
6763 *no_add_attrs
= true;
6770 /* AVR attributes. */
6771 static const struct attribute_spec
6772 avr_attribute_table
[] =
6774 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6775 affects_type_identity } */
6776 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
6778 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
6780 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
6782 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
6784 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
6786 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
6788 { NULL
, 0, 0, false, false, false, NULL
, false }
6792 /* Look if DECL shall be placed in program memory space by
6793 means of attribute `progmem' or some address-space qualifier.
6794 Return non-zero if DECL is data that must end up in Flash and
6795 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6797 Return 2 if DECL is located in 24-bit flash address-space
6798 Return 1 if DECL is located in 16-bit flash address-space
6799 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6800 Return 0 otherwise */
6803 avr_progmem_p (tree decl
, tree attributes
)
6807 if (TREE_CODE (decl
) != VAR_DECL
)
6810 if (avr_decl_memx_p (decl
))
6813 if (avr_decl_flash_p (decl
))
6817 != lookup_attribute ("progmem", attributes
))
6824 while (TREE_CODE (a
) == ARRAY_TYPE
);
6826 if (a
== error_mark_node
)
6829 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
6836 /* Scan type TYP for pointer references to address space ASn.
6837 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6838 the AS are also declared to be CONST.
6839 Otherwise, return the respective addres space, i.e. a value != 0. */
6842 avr_nonconst_pointer_addrspace (tree typ
)
6844 while (ARRAY_TYPE
== TREE_CODE (typ
))
6845 typ
= TREE_TYPE (typ
);
6847 if (POINTER_TYPE_P (typ
))
6849 tree target
= TREE_TYPE (typ
);
6851 /* Pointer to function: Test the function's return type. */
6853 if (FUNCTION_TYPE
== TREE_CODE (target
))
6854 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
6856 /* "Ordinary" pointers... */
6858 while (TREE_CODE (target
) == ARRAY_TYPE
)
6859 target
= TREE_TYPE (target
);
6861 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target
))
6862 && !TYPE_READONLY (target
))
6864 /* Pointers to non-generic address space must be const. */
6866 return TYPE_ADDR_SPACE (target
);
6869 /* Scan pointer's target type. */
6871 return avr_nonconst_pointer_addrspace (target
);
6874 return ADDR_SPACE_GENERIC
;
6878 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
6879 go along with CONST qualifier. Writing to these address spaces should
6880 be detected and complained about as early as possible. */
6883 avr_pgm_check_var_decl (tree node
)
6885 const char *reason
= NULL
;
6887 addr_space_t as
= ADDR_SPACE_GENERIC
;
6889 gcc_assert (as
== 0);
6891 if (avr_log
.progmem
)
6892 avr_edump ("%?: %t\n", node
);
6894 switch (TREE_CODE (node
))
6900 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
6901 reason
= "variable";
6905 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
6906 reason
= "function parameter";
6910 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
6911 reason
= "structure field";
6915 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
6917 reason
= "return type of function";
6921 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
6929 error ("pointer targeting address space %qs must be const in %qT",
6930 avr_addrspace
[as
].name
, node
);
6932 error ("pointer targeting address space %qs must be const in %s %q+D",
6933 avr_addrspace
[as
].name
, reason
, node
);
6936 return reason
== NULL
;
6940 /* Add the section attribute if the variable is in progmem. */
6943 avr_insert_attributes (tree node
, tree
*attributes
)
6945 avr_pgm_check_var_decl (node
);
6947 if (TREE_CODE (node
) == VAR_DECL
6948 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
6949 && avr_progmem_p (node
, *attributes
))
6953 /* For C++, we have to peel arrays in order to get correct
6954 determination of readonlyness. */
6957 node0
= TREE_TYPE (node0
);
6958 while (TREE_CODE (node0
) == ARRAY_TYPE
);
6960 if (error_mark_node
== node0
)
6963 if (!TYPE_READONLY (node0
)
6964 && !TREE_READONLY (node
))
6966 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
6967 const char *reason
= "__attribute__((progmem))";
6969 if (!ADDR_SPACE_GENERIC_P (as
))
6970 reason
= avr_addrspace
[as
].name
;
6972 if (avr_log
.progmem
)
6973 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
6975 error ("variable %q+D must be const in order to be put into"
6976 " read-only section by means of %qs", node
, reason
);
6982 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
6983 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
6984 /* Track need of __do_clear_bss. */
6987 avr_asm_output_aligned_decl_common (FILE * stream
,
6988 const_tree decl ATTRIBUTE_UNUSED
,
6990 unsigned HOST_WIDE_INT size
,
6991 unsigned int align
, bool local_p
)
6993 avr_need_clear_bss_p
= true;
6996 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
6998 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
7002 /* Unnamed section callback for data_section
7003 to track need of __do_copy_data. */
7006 avr_output_data_section_asm_op (const void *data
)
7008 avr_need_copy_data_p
= true;
7010 /* Dispatch to default. */
7011 output_section_asm_op (data
);
7015 /* Unnamed section callback for bss_section
7016 to track need of __do_clear_bss. */
7019 avr_output_bss_section_asm_op (const void *data
)
7021 avr_need_clear_bss_p
= true;
7023 /* Dispatch to default. */
7024 output_section_asm_op (data
);
7028 /* Unnamed section callback for progmem*.data sections. */
7031 avr_output_progmem_section_asm_op (const void *data
)
7033 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
7034 (const char*) data
);
7038 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7041 avr_asm_init_sections (void)
7045 /* Set up a section for jump tables. Alignment is handled by
7046 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7048 if (AVR_HAVE_JMP_CALL
)
7050 progmem_swtable_section
7051 = get_unnamed_section (0, output_section_asm_op
,
7052 "\t.section\t.progmem.gcc_sw_table"
7053 ",\"a\",@progbits");
7057 progmem_swtable_section
7058 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7059 "\t.section\t.progmem.gcc_sw_table"
7060 ",\"ax\",@progbits");
7063 for (n
= 0; n
< sizeof (progmem_section
) / sizeof (*progmem_section
); n
++)
7066 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
7067 progmem_section_prefix
[n
]);
7070 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7071 resp. `avr_need_copy_data_p'. */
7073 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7074 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7075 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
7079 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7082 avr_asm_function_rodata_section (tree decl
)
7084 /* If a function is unused and optimized out by -ffunction-sections
7085 and --gc-sections, ensure that the same will happen for its jump
7086 tables by putting them into individual sections. */
7091 /* Get the frodata section from the default function in varasm.c
7092 but treat function-associated data-like jump tables as code
7093 rather than as user defined data. AVR has no constant pools. */
7095 int fdata
= flag_data_sections
;
7097 flag_data_sections
= flag_function_sections
;
7098 frodata
= default_function_rodata_section (decl
);
7099 flag_data_sections
= fdata
;
7100 flags
= frodata
->common
.flags
;
7103 if (frodata
!= readonly_data_section
7104 && flags
& SECTION_NAMED
)
7106 /* Adjust section flags and replace section name prefix. */
7110 static const char* const prefix
[] =
7112 ".rodata", ".progmem.gcc_sw_table",
7113 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7116 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
7118 const char * old_prefix
= prefix
[i
];
7119 const char * new_prefix
= prefix
[i
+1];
7120 const char * name
= frodata
->named
.name
;
7122 if (STR_PREFIX_P (name
, old_prefix
))
7124 const char *rname
= ACONCAT ((new_prefix
,
7125 name
+ strlen (old_prefix
), NULL
));
7126 flags
&= ~SECTION_CODE
;
7127 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
7129 return get_section (rname
, flags
, frodata
->named
.decl
);
7134 return progmem_swtable_section
;
7138 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7139 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7142 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
7144 if (flags
& AVR_SECTION_PROGMEM
)
7146 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
7147 int segment
= avr_addrspace
[as
].segment
% avr_current_arch
->n_segments
;
7148 const char *old_prefix
= ".rodata";
7149 const char *new_prefix
= progmem_section_prefix
[segment
];
7151 if (STR_PREFIX_P (name
, old_prefix
))
7153 const char *sname
= ACONCAT ((new_prefix
,
7154 name
+ strlen (old_prefix
), NULL
));
7155 default_elf_asm_named_section (sname
, flags
, decl
);
7159 default_elf_asm_named_section (new_prefix
, flags
, decl
);
7163 if (!avr_need_copy_data_p
)
7164 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
7165 || STR_PREFIX_P (name
, ".rodata")
7166 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
7168 if (!avr_need_clear_bss_p
)
7169 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
7171 default_elf_asm_named_section (name
, flags
, decl
);
7175 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
7177 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
7179 if (STR_PREFIX_P (name
, ".noinit"))
7181 if (decl
&& TREE_CODE (decl
) == VAR_DECL
7182 && DECL_INITIAL (decl
) == NULL_TREE
)
7183 flags
|= SECTION_BSS
; /* @nobits */
7185 warning (0, "only uninitialized variables can be placed in the "
7189 if (decl
&& DECL_P (decl
)
7190 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7192 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7194 /* Attribute progmem puts data in generic address space.
7195 Set section flags as if it was in __flash to get the right
7196 section prefix in the remainder. */
7198 if (ADDR_SPACE_GENERIC_P (as
))
7199 as
= ADDR_SPACE_FLASH
;
7201 flags
|= as
* SECTION_MACH_DEP
;
7202 flags
&= ~SECTION_WRITE
;
7203 flags
&= ~SECTION_BSS
;
7210 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7213 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
7215 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7216 readily available, see PR34734. So we postpone the warning
7217 about uninitialized data in program memory section until here. */
7220 && decl
&& DECL_P (decl
)
7221 && NULL_TREE
== DECL_INITIAL (decl
)
7222 && !DECL_EXTERNAL (decl
)
7223 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7225 warning (OPT_Wuninitialized
,
7226 "uninitialized variable %q+D put into "
7227 "program memory area", decl
);
7230 default_encode_section_info (decl
, rtl
, new_decl_p
);
7232 if (decl
&& DECL_P (decl
)
7233 && TREE_CODE (decl
) != FUNCTION_DECL
7235 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
7237 rtx sym
= XEXP (rtl
, 0);
7238 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7240 /* PSTR strings are in generic space but located in flash:
7241 patch address space. */
7243 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7244 as
= ADDR_SPACE_FLASH
;
7246 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
7251 /* Implement `TARGET_ASM_SELECT_SECTION' */
7254 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
7256 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
7258 if (decl
&& DECL_P (decl
)
7259 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7261 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7262 int segment
= avr_addrspace
[as
].segment
% avr_current_arch
->n_segments
;
7264 if (sect
->common
.flags
& SECTION_NAMED
)
7266 const char * name
= sect
->named
.name
;
7267 const char * old_prefix
= ".rodata";
7268 const char * new_prefix
= progmem_section_prefix
[segment
];
7270 if (STR_PREFIX_P (name
, old_prefix
))
7272 const char *sname
= ACONCAT ((new_prefix
,
7273 name
+ strlen (old_prefix
), NULL
));
7274 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
7278 return progmem_section
[segment
];
7284 /* Implement `TARGET_ASM_FILE_START'. */
7285 /* Outputs some text at the start of each assembler file. */
7288 avr_file_start (void)
7290 int sfr_offset
= avr_current_arch
->sfr_offset
;
7292 if (avr_current_arch
->asm_only
)
7293 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
7295 default_file_start ();
7297 if (!AVR_HAVE_8BIT_SP
)
7298 fprintf (asm_out_file
,
7299 "__SP_H__ = 0x%02x\n",
7300 -sfr_offset
+ SP_ADDR
+ 1);
7302 fprintf (asm_out_file
,
7303 "__SP_L__ = 0x%02x\n"
7304 "__SREG__ = 0x%02x\n"
7305 "__RAMPZ__ = 0x%02x\n"
7306 "__tmp_reg__ = %d\n"
7307 "__zero_reg__ = %d\n",
7308 -sfr_offset
+ SP_ADDR
,
7309 -sfr_offset
+ SREG_ADDR
,
7310 -sfr_offset
+ RAMPZ_ADDR
,
7316 /* Implement `TARGET_ASM_FILE_END'. */
7317 /* Outputs to the stdio stream FILE some
7318 appropriate text to go at the end of an assembler file. */
7323 /* Output these only if there is anything in the
7324 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7325 input section(s) - some code size can be saved by not
7326 linking in the initialization code from libgcc if resp.
7327 sections are empty. */
7329 if (avr_need_copy_data_p
)
7330 fputs (".global __do_copy_data\n", asm_out_file
);
7332 if (avr_need_clear_bss_p
)
7333 fputs (".global __do_clear_bss\n", asm_out_file
);
7336 /* Choose the order in which to allocate hard registers for
7337 pseudo-registers local to a basic block.
7339 Store the desired register order in the array `reg_alloc_order'.
7340 Element 0 should be the register to allocate first; element 1, the
7341 next register; and so on. */
7344 order_regs_for_local_alloc (void)
7347 static const int order_0
[] = {
7355 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7359 static const int order_1
[] = {
7367 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7371 static const int order_2
[] = {
7380 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7385 const int *order
= (TARGET_ORDER_1
? order_1
:
7386 TARGET_ORDER_2
? order_2
:
7388 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
7389 reg_alloc_order
[i
] = order
[i
];
7393 /* Implement `TARGET_REGISTER_MOVE_COST' */
7396 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
7397 reg_class_t from
, reg_class_t to
)
7399 return (from
== STACK_REG
? 6
7400 : to
== STACK_REG
? 12
7405 /* Implement `TARGET_MEMORY_MOVE_COST' */
7408 avr_memory_move_cost (enum machine_mode mode
,
7409 reg_class_t rclass ATTRIBUTE_UNUSED
,
7410 bool in ATTRIBUTE_UNUSED
)
7412 return (mode
== QImode
? 2
7413 : mode
== HImode
? 4
7414 : mode
== SImode
? 8
7415 : mode
== SFmode
? 8
7420 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7421 cost of an RTX operand given its context. X is the rtx of the
7422 operand, MODE is its mode, and OUTER is the rtx_code of this
7423 operand's parent operator. */
7426 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
7427 int opno
, bool speed
)
7429 enum rtx_code code
= GET_CODE (x
);
7440 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7447 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
7451 /* Worker function for AVR backend's rtx_cost function.
7452 X is rtx expression whose cost is to be calculated.
7453 Return true if the complete cost has been computed.
7454 Return false if subexpressions should be scanned.
7455 In either case, *TOTAL contains the cost result. */
7458 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
7459 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
7461 enum rtx_code code
= (enum rtx_code
) codearg
;
7462 enum machine_mode mode
= GET_MODE (x
);
7472 /* Immediate constants are as cheap as registers. */
7477 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7485 *total
= COSTS_N_INSNS (1);
7491 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
7497 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7505 *total
= COSTS_N_INSNS (1);
7511 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7515 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7516 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7520 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
7521 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7522 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7526 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
7527 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7528 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7536 && MULT
== GET_CODE (XEXP (x
, 0))
7537 && register_operand (XEXP (x
, 1), QImode
))
7540 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7541 /* multiply-add with constant: will be split and load constant. */
7542 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7543 *total
= COSTS_N_INSNS (1) + *total
;
7546 *total
= COSTS_N_INSNS (1);
7547 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7548 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7553 && (MULT
== GET_CODE (XEXP (x
, 0))
7554 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
7555 && register_operand (XEXP (x
, 1), HImode
)
7556 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
7557 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
7560 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7561 /* multiply-add with constant: will be split and load constant. */
7562 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7563 *total
= COSTS_N_INSNS (1) + *total
;
7566 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7568 *total
= COSTS_N_INSNS (2);
7569 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7572 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7573 *total
= COSTS_N_INSNS (1);
7575 *total
= COSTS_N_INSNS (2);
7579 if (!CONST_INT_P (XEXP (x
, 1)))
7581 *total
= COSTS_N_INSNS (3);
7582 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7585 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7586 *total
= COSTS_N_INSNS (2);
7588 *total
= COSTS_N_INSNS (3);
7592 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7594 *total
= COSTS_N_INSNS (4);
7595 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7598 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7599 *total
= COSTS_N_INSNS (1);
7601 *total
= COSTS_N_INSNS (4);
7607 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7613 && register_operand (XEXP (x
, 0), QImode
)
7614 && MULT
== GET_CODE (XEXP (x
, 1)))
7617 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7618 /* multiply-sub with constant: will be split and load constant. */
7619 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7620 *total
= COSTS_N_INSNS (1) + *total
;
7625 && register_operand (XEXP (x
, 0), HImode
)
7626 && (MULT
== GET_CODE (XEXP (x
, 1))
7627 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
7628 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
7629 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
7632 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7633 /* multiply-sub with constant: will be split and load constant. */
7634 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7635 *total
= COSTS_N_INSNS (1) + *total
;
7641 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7642 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7643 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7644 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7648 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7649 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7650 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7658 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
7660 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7668 rtx op0
= XEXP (x
, 0);
7669 rtx op1
= XEXP (x
, 1);
7670 enum rtx_code code0
= GET_CODE (op0
);
7671 enum rtx_code code1
= GET_CODE (op1
);
7672 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
7673 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
7676 && (u8_operand (op1
, HImode
)
7677 || s8_operand (op1
, HImode
)))
7679 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
7683 && register_operand (op1
, HImode
))
7685 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
7688 else if (ex0
|| ex1
)
7690 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
7693 else if (register_operand (op0
, HImode
)
7694 && (u8_operand (op1
, HImode
)
7695 || s8_operand (op1
, HImode
)))
7697 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
7701 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
7704 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7711 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7721 /* Add some additional costs besides CALL like moves etc. */
7723 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
7727 /* Just a rough estimate. Even with -O2 we don't want bulky
7728 code expanded inline. */
7730 *total
= COSTS_N_INSNS (25);
7736 *total
= COSTS_N_INSNS (300);
7738 /* Add some additional costs besides CALL like moves etc. */
7739 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
7747 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7748 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7756 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7758 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
7759 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7760 /* For div/mod with const-int divisor we have at least the cost of
7761 loading the divisor. */
7762 if (CONST_INT_P (XEXP (x
, 1)))
7763 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7764 /* Add some overall penaly for clobbering and moving around registers */
7765 *total
+= COSTS_N_INSNS (2);
7772 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
7773 *total
= COSTS_N_INSNS (1);
7778 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
7779 *total
= COSTS_N_INSNS (3);
7784 if (CONST_INT_P (XEXP (x
, 1)))
7785 switch (INTVAL (XEXP (x
, 1)))
7789 *total
= COSTS_N_INSNS (5);
7792 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
7800 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7807 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7809 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
7810 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7815 val
= INTVAL (XEXP (x
, 1));
7817 *total
= COSTS_N_INSNS (3);
7818 else if (val
>= 0 && val
<= 7)
7819 *total
= COSTS_N_INSNS (val
);
7821 *total
= COSTS_N_INSNS (1);
7828 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
7829 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
7830 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
7832 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
7837 if (const1_rtx
== (XEXP (x
, 1))
7838 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
7840 *total
= COSTS_N_INSNS (2);
7844 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7846 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
7847 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7851 switch (INTVAL (XEXP (x
, 1)))
7858 *total
= COSTS_N_INSNS (2);
7861 *total
= COSTS_N_INSNS (3);
7867 *total
= COSTS_N_INSNS (4);
7872 *total
= COSTS_N_INSNS (5);
7875 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
7878 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
7881 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
7884 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
7885 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7891 if (!CONST_INT_P (XEXP (x
, 1)))
7893 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
7896 switch (INTVAL (XEXP (x
, 1)))
7904 *total
= COSTS_N_INSNS (3);
7907 *total
= COSTS_N_INSNS (5);
7910 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
7916 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7918 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
7919 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7923 switch (INTVAL (XEXP (x
, 1)))
7929 *total
= COSTS_N_INSNS (3);
7934 *total
= COSTS_N_INSNS (4);
7937 *total
= COSTS_N_INSNS (6);
7940 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
7943 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
7944 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7952 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7959 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7961 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
7962 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7967 val
= INTVAL (XEXP (x
, 1));
7969 *total
= COSTS_N_INSNS (4);
7971 *total
= COSTS_N_INSNS (2);
7972 else if (val
>= 0 && val
<= 7)
7973 *total
= COSTS_N_INSNS (val
);
7975 *total
= COSTS_N_INSNS (1);
7980 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7982 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
7983 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7987 switch (INTVAL (XEXP (x
, 1)))
7993 *total
= COSTS_N_INSNS (2);
7996 *total
= COSTS_N_INSNS (3);
8002 *total
= COSTS_N_INSNS (4);
8006 *total
= COSTS_N_INSNS (5);
8009 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8012 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8016 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8019 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8020 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8026 if (!CONST_INT_P (XEXP (x
, 1)))
8028 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8031 switch (INTVAL (XEXP (x
, 1)))
8037 *total
= COSTS_N_INSNS (3);
8041 *total
= COSTS_N_INSNS (5);
8044 *total
= COSTS_N_INSNS (4);
8047 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8053 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8055 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8056 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8060 switch (INTVAL (XEXP (x
, 1)))
8066 *total
= COSTS_N_INSNS (4);
8071 *total
= COSTS_N_INSNS (6);
8074 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8077 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
8080 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8081 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8089 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8096 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8098 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8099 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8104 val
= INTVAL (XEXP (x
, 1));
8106 *total
= COSTS_N_INSNS (3);
8107 else if (val
>= 0 && val
<= 7)
8108 *total
= COSTS_N_INSNS (val
);
8110 *total
= COSTS_N_INSNS (1);
8115 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8117 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8118 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8122 switch (INTVAL (XEXP (x
, 1)))
8129 *total
= COSTS_N_INSNS (2);
8132 *total
= COSTS_N_INSNS (3);
8137 *total
= COSTS_N_INSNS (4);
8141 *total
= COSTS_N_INSNS (5);
8147 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8150 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8154 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8157 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8158 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8164 if (!CONST_INT_P (XEXP (x
, 1)))
8166 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8169 switch (INTVAL (XEXP (x
, 1)))
8177 *total
= COSTS_N_INSNS (3);
8180 *total
= COSTS_N_INSNS (5);
8183 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8189 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8191 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8192 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8196 switch (INTVAL (XEXP (x
, 1)))
8202 *total
= COSTS_N_INSNS (4);
8205 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8210 *total
= COSTS_N_INSNS (4);
8213 *total
= COSTS_N_INSNS (6);
8216 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8217 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8225 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8229 switch (GET_MODE (XEXP (x
, 0)))
8232 *total
= COSTS_N_INSNS (1);
8233 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8234 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8238 *total
= COSTS_N_INSNS (2);
8239 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8240 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8241 else if (INTVAL (XEXP (x
, 1)) != 0)
8242 *total
+= COSTS_N_INSNS (1);
8246 *total
= COSTS_N_INSNS (3);
8247 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
8248 *total
+= COSTS_N_INSNS (2);
8252 *total
= COSTS_N_INSNS (4);
8253 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8254 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8255 else if (INTVAL (XEXP (x
, 1)) != 0)
8256 *total
+= COSTS_N_INSNS (3);
8262 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8267 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
8268 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8269 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8271 if (QImode
== mode
|| HImode
== mode
)
8273 *total
= COSTS_N_INSNS (2);
8286 /* Implement `TARGET_RTX_COSTS'. */
8289 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
8290 int opno
, int *total
, bool speed
)
8292 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
8293 opno
, total
, speed
);
8295 if (avr_log
.rtx_costs
)
8297 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8298 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
8305 /* Implement `TARGET_ADDRESS_COST'. */
8308 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
8312 if (GET_CODE (x
) == PLUS
8313 && CONST_INT_P (XEXP (x
, 1))
8314 && (REG_P (XEXP (x
, 0))
8315 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
8317 if (INTVAL (XEXP (x
, 1)) >= 61)
8320 else if (CONSTANT_ADDRESS_P (x
))
8323 && io_address_operand (x
, QImode
))
8327 if (avr_log
.address_cost
)
8328 avr_edump ("\n%?: %d = %r\n", cost
, x
);
8333 /* Test for extra memory constraint 'Q'.
8334 It's a memory address based on Y or Z pointer with valid displacement. */
8337 extra_constraint_Q (rtx x
)
8341 if (GET_CODE (XEXP (x
,0)) == PLUS
8342 && REG_P (XEXP (XEXP (x
,0), 0))
8343 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
8344 && (INTVAL (XEXP (XEXP (x
,0), 1))
8345 <= MAX_LD_OFFSET (GET_MODE (x
))))
8347 rtx xx
= XEXP (XEXP (x
,0), 0);
8348 int regno
= REGNO (xx
);
8350 ok
= (/* allocate pseudos */
8351 regno
>= FIRST_PSEUDO_REGISTER
8352 /* strictly check */
8353 || regno
== REG_Z
|| regno
== REG_Y
8354 /* XXX frame & arg pointer checks */
8355 || xx
== frame_pointer_rtx
8356 || xx
== arg_pointer_rtx
);
8358 if (avr_log
.constraints
)
8359 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8360 ok
, reload_completed
, reload_in_progress
, x
);
8366 /* Convert condition code CONDITION to the valid AVR condition code. */
8369 avr_normalize_condition (RTX_CODE condition
)
8386 /* Helper function for `avr_reorg'. */
8389 avr_compare_pattern (rtx insn
)
8391 rtx pattern
= single_set (insn
);
8394 && NONJUMP_INSN_P (insn
)
8395 && SET_DEST (pattern
) == cc0_rtx
8396 && GET_CODE (SET_SRC (pattern
)) == COMPARE
8397 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 0))
8398 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 1)))
8406 /* Helper function for `avr_reorg'. */
8408 /* Expansion of switch/case decision trees leads to code like
8410 cc0 = compare (Reg, Num)
8414 cc0 = compare (Reg, Num)
8418 The second comparison is superfluous and can be deleted.
8419 The second jump condition can be transformed from a
8420 "difficult" one to a "simple" one because "cc0 > 0" and
8421 "cc0 >= 0" will have the same effect here.
8423 This function relies on the way switch/case is being expaned
8424 as binary decision tree. For example code see PR 49903.
8426 Return TRUE if optimization performed.
8427 Return FALSE if nothing changed.
8429 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8431 We don't want to do this in text peephole because it is
8432 tedious to work out jump offsets there and the second comparison
8433 might have been transormed by `avr_reorg'.
8435 RTL peephole won't do because peephole2 does not scan across
8439 avr_reorg_remove_redundant_compare (rtx insn1
)
8441 rtx comp1
, ifelse1
, xcond1
, branch1
;
8442 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
8444 rtx jump
, target
, cond
;
8446 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8448 branch1
= next_nonnote_nondebug_insn (insn1
);
8449 if (!branch1
|| !JUMP_P (branch1
))
8452 insn2
= next_nonnote_nondebug_insn (branch1
);
8453 if (!insn2
|| !avr_compare_pattern (insn2
))
8456 branch2
= next_nonnote_nondebug_insn (insn2
);
8457 if (!branch2
|| !JUMP_P (branch2
))
8460 comp1
= avr_compare_pattern (insn1
);
8461 comp2
= avr_compare_pattern (insn2
);
8462 xcond1
= single_set (branch1
);
8463 xcond2
= single_set (branch2
);
8465 if (!comp1
|| !comp2
8466 || !rtx_equal_p (comp1
, comp2
)
8467 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
8468 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
8469 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
8470 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
8475 comp1
= SET_SRC (comp1
);
8476 ifelse1
= SET_SRC (xcond1
);
8477 ifelse2
= SET_SRC (xcond2
);
8479 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8481 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
8482 || !REG_P (XEXP (comp1
, 0))
8483 || !CONST_INT_P (XEXP (comp1
, 1))
8484 || XEXP (ifelse1
, 2) != pc_rtx
8485 || XEXP (ifelse2
, 2) != pc_rtx
8486 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
8487 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
8488 || !COMPARISON_P (XEXP (ifelse2
, 0))
8489 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
8490 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
8491 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
8492 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
8497 /* We filtered the insn sequence to look like
8503 (if_then_else (eq (cc0)
8512 (if_then_else (CODE (cc0)
8518 code
= GET_CODE (XEXP (ifelse2
, 0));
8520 /* Map GT/GTU to GE/GEU which is easier for AVR.
8521 The first two instructions compare/branch on EQ
8522 so we may replace the difficult
8524 if (x == VAL) goto L1;
8525 if (x > VAL) goto L2;
8529 if (x == VAL) goto L1;
8530 if (x >= VAL) goto L2;
8532 Similarly, replace LE/LEU by LT/LTU. */
8543 code
= avr_normalize_condition (code
);
8550 /* Wrap the branches into UNSPECs so they won't be changed or
8551 optimized in the remainder. */
8553 target
= XEXP (XEXP (ifelse1
, 1), 0);
8554 cond
= XEXP (ifelse1
, 0);
8555 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
8557 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
8559 target
= XEXP (XEXP (ifelse2
, 1), 0);
8560 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
8561 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
8563 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
8565 /* The comparisons in insn1 and insn2 are exactly the same;
8566 insn2 is superfluous so delete it. */
8568 delete_insn (insn2
);
8569 delete_insn (branch1
);
8570 delete_insn (branch2
);
8576 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8577 /* Optimize conditional jumps. */
8582 rtx insn
= get_insns();
8584 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
8586 rtx pattern
= avr_compare_pattern (insn
);
8592 && avr_reorg_remove_redundant_compare (insn
))
8597 if (compare_diff_p (insn
))
8599 /* Now we work under compare insn with difficult branch. */
8601 rtx next
= next_real_insn (insn
);
8602 rtx pat
= PATTERN (next
);
8604 pattern
= SET_SRC (pattern
);
8606 if (true_regnum (XEXP (pattern
, 0)) >= 0
8607 && true_regnum (XEXP (pattern
, 1)) >= 0)
8609 rtx x
= XEXP (pattern
, 0);
8610 rtx src
= SET_SRC (pat
);
8611 rtx t
= XEXP (src
,0);
8612 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8613 XEXP (pattern
, 0) = XEXP (pattern
, 1);
8614 XEXP (pattern
, 1) = x
;
8615 INSN_CODE (next
) = -1;
8617 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8618 && XEXP (pattern
, 1) == const0_rtx
)
8620 /* This is a tst insn, we can reverse it. */
8621 rtx src
= SET_SRC (pat
);
8622 rtx t
= XEXP (src
,0);
8624 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8625 XEXP (pattern
, 1) = XEXP (pattern
, 0);
8626 XEXP (pattern
, 0) = const0_rtx
;
8627 INSN_CODE (next
) = -1;
8628 INSN_CODE (insn
) = -1;
8630 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8631 && CONST_INT_P (XEXP (pattern
, 1)))
8633 rtx x
= XEXP (pattern
, 1);
8634 rtx src
= SET_SRC (pat
);
8635 rtx t
= XEXP (src
,0);
8636 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
8638 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
8640 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
8641 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
8642 INSN_CODE (next
) = -1;
8643 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.*/
/* R24/R25 is the base of the return value registers on AVR.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
8658 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8661 avr_function_value_regno_p (const unsigned int regno
)
8663 return (regno
== avr_ret_register ());
8666 /* Create an RTX representing the place where a
8667 library function returns a value of mode MODE. */
8670 avr_libcall_value (enum machine_mode mode
,
8671 const_rtx func ATTRIBUTE_UNUSED
)
8673 int offs
= GET_MODE_SIZE (mode
);
8676 offs
= (offs
+ 1) & ~1;
8678 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
8681 /* Create an RTX representing the place where a
8682 function returns a value of data type VALTYPE. */
8685 avr_function_value (const_tree type
,
8686 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
8687 bool outgoing ATTRIBUTE_UNUSED
)
8691 if (TYPE_MODE (type
) != BLKmode
)
8692 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
8694 offs
= int_size_in_bytes (type
);
8697 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
8698 offs
= GET_MODE_SIZE (SImode
);
8699 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
8700 offs
= GET_MODE_SIZE (DImode
);
8702 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
8706 test_hard_reg_class (enum reg_class rclass
, rtx x
)
8708 int regno
= true_regnum (x
);
8712 if (TEST_HARD_REG_CLASS (rclass
, regno
))
8719 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8720 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8723 avr_2word_insn_p (rtx insn
)
8725 if (avr_current_device
->errata_skip
8727 || 2 != get_attr_length (insn
))
8732 switch (INSN_CODE (insn
))
8737 case CODE_FOR_movqi_insn
:
8739 rtx set
= single_set (insn
);
8740 rtx src
= SET_SRC (set
);
8741 rtx dest
= SET_DEST (set
);
8743 /* Factor out LDS and STS from movqi_insn. */
8746 && (REG_P (src
) || src
== const0_rtx
))
8748 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
8750 else if (REG_P (dest
)
8753 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
8759 case CODE_FOR_call_insn
:
8760 case CODE_FOR_call_value_insn
:
8767 jump_over_one_insn_p (rtx insn
, rtx dest
)
8769 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
8772 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
8773 int dest_addr
= INSN_ADDRESSES (uid
);
8774 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
8776 return (jump_offset
== 1
8777 || (jump_offset
== 2
8778 && avr_2word_insn_p (next_active_insn (insn
))));
8781 /* Returns 1 if a value of mode MODE can be stored starting with hard
8782 register number REGNO. On the enhanced core, anything larger than
8783 1 byte must start in even numbered register for "movw" to work
8784 (this way we don't have to check for odd registers everywhere). */
8787 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
8789 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8790 Disallowing QI et al. in these regs might lead to code like
8791 (set (subreg:QI (reg:HI 28) n) ...)
8792 which will result in wrong code because reload does not
8793 handle SUBREGs of hard regsisters like this.
8794 This could be fixed in reload. However, it appears
8795 that fixing reload is not wanted by reload people. */
8797 /* Any GENERAL_REGS register can hold 8-bit values. */
8799 if (GET_MODE_SIZE (mode
) == 1)
8802 /* FIXME: Ideally, the following test is not needed.
8803 However, it turned out that it can reduce the number
8804 of spill fails. AVR and it's poor endowment with
8805 address registers is extreme stress test for reload. */
8807 if (GET_MODE_SIZE (mode
) >= 4
8811 /* All modes larger than 8 bits should start in an even register. */
8813 return !(regno
& 1);
8817 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8820 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
8821 addr_space_t as
, RTX_CODE outer_code
,
8822 RTX_CODE index_code ATTRIBUTE_UNUSED
)
8824 if (!ADDR_SPACE_GENERIC_P (as
))
8826 return POINTER_Z_REGS
;
8830 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
8832 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
8836 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8839 avr_regno_mode_code_ok_for_base_p (int regno
,
8840 enum machine_mode mode ATTRIBUTE_UNUSED
,
8841 addr_space_t as ATTRIBUTE_UNUSED
,
8842 RTX_CODE outer_code
,
8843 RTX_CODE index_code ATTRIBUTE_UNUSED
)
8847 if (!ADDR_SPACE_GENERIC_P (as
))
8849 if (regno
< FIRST_PSEUDO_REGISTER
8857 regno
= reg_renumber
[regno
];
8868 if (regno
< FIRST_PSEUDO_REGISTER
8872 || regno
== ARG_POINTER_REGNUM
))
8876 else if (reg_renumber
)
8878 regno
= reg_renumber
[regno
];
8883 || regno
== ARG_POINTER_REGNUM
)
8890 && PLUS
== outer_code
8900 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8901 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8902 CLOBBER_REG is a QI clobber register or NULL_RTX.
8903 LEN == NULL: output instructions.
8904 LEN != NULL: set *LEN to the length of the instruction sequence
8905 (in words) printed with LEN = NULL.
8906 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8907 If CLEAR_P is false, nothing is known about OP[0].
8909 The effect on cc0 is as follows:
8911 Load 0 to any register except ZERO_REG : NONE
8912 Load ld register with any value : NONE
8913 Anything else: : CLOBBER */
8916 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
8922 int clobber_val
= 1234;
8923 bool cooked_clobber_p
= false;
8925 enum machine_mode mode
= GET_MODE (dest
);
8926 int n
, n_bytes
= GET_MODE_SIZE (mode
);
8928 gcc_assert (REG_P (dest
)
8929 && CONSTANT_P (src
));
8934 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8935 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8937 if (REGNO (dest
) < 16
8938 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
8940 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
8943 /* We might need a clobber reg but don't have one. Look at the value to
8944 be loaded more closely. A clobber is only needed if it is a symbol
8945 or contains a byte that is neither 0, -1 or a power of 2. */
8947 if (NULL_RTX
== clobber_reg
8948 && !test_hard_reg_class (LD_REGS
, dest
)
8949 && (! (CONST_INT_P (src
) || CONST_DOUBLE_P (src
))
8950 || !avr_popcount_each_byte (src
, n_bytes
,
8951 (1 << 0) | (1 << 1) | (1 << 8))))
8953 /* We have no clobber register but need one. Cook one up.
8954 That's cheaper than loading from constant pool. */
8956 cooked_clobber_p
= true;
8957 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
8958 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
8961 /* Now start filling DEST from LSB to MSB. */
8963 for (n
= 0; n
< n_bytes
; n
++)
8966 bool done_byte
= false;
8970 /* Crop the n-th destination byte. */
8972 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
8973 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
8975 if (!CONST_INT_P (src
)
8976 && !CONST_DOUBLE_P (src
))
8978 static const char* const asm_code
[][2] =
8980 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
8981 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
8982 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
8983 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
8988 xop
[2] = clobber_reg
;
8990 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
8995 /* Crop the n-th source byte. */
8997 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
8998 ival
[n
] = INTVAL (xval
);
9000 /* Look if we can reuse the low word by means of MOVW. */
9006 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
9007 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
9009 if (INTVAL (lo16
) == INTVAL (hi16
))
9011 if (0 != INTVAL (lo16
)
9014 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
9021 /* Don't use CLR so that cc0 is set as expected. */
9026 avr_asm_len (ldreg_p
? "ldi %0,0"
9027 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
9028 : "mov %0,__zero_reg__",
9033 if (clobber_val
== ival
[n
]
9034 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
9039 /* LD_REGS can use LDI to move a constant value */
9045 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
9049 /* Try to reuse value already loaded in some lower byte. */
9051 for (j
= 0; j
< n
; j
++)
9052 if (ival
[j
] == ival
[n
])
9057 avr_asm_len ("mov %0,%1", xop
, len
, 1);
9065 /* Need no clobber reg for -1: Use CLR/DEC */
9070 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9072 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
9075 else if (1 == ival
[n
])
9078 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9080 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
9084 /* Use T flag or INC to manage powers of 2 if we have
9087 if (NULL_RTX
== clobber_reg
9088 && single_one_operand (xval
, QImode
))
9091 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
9093 gcc_assert (constm1_rtx
!= xop
[1]);
9098 avr_asm_len ("set", xop
, len
, 1);
9102 avr_asm_len ("clr %0", xop
, len
, 1);
9104 avr_asm_len ("bld %0,%1", xop
, len
, 1);
9108 /* We actually need the LD_REGS clobber reg. */
9110 gcc_assert (NULL_RTX
!= clobber_reg
);
9114 xop
[2] = clobber_reg
;
9115 clobber_val
= ival
[n
];
9117 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9118 "mov %0,%2", xop
, len
, 2);
9121 /* If we cooked up a clobber reg above, restore it. */
9123 if (cooked_clobber_p
)
9125 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
9130 /* Reload the constant OP[1] into the HI register OP[0].
9131 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9132 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9133 need a clobber reg or have to cook one up.
9135 PLEN == NULL: Output instructions.
9136 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9137 by the insns printed.
9142 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
9144 output_reload_in_const (op
, clobber_reg
, plen
, false);
9149 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9150 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9151 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9152 need a clobber reg or have to cook one up.
9154 LEN == NULL: Output instructions.
9156 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9157 by the insns printed.
9162 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
9165 && !test_hard_reg_class (LD_REGS
, op
[0])
9166 && (CONST_INT_P (op
[1])
9167 || CONST_DOUBLE_P (op
[1])))
9169 int len_clr
, len_noclr
;
9171 /* In some cases it is better to clear the destination beforehand, e.g.
9173 CLR R2 CLR R3 MOVW R4,R2 INC R2
9177 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9179 We find it too tedious to work that out in the print function.
9180 Instead, we call the print function twice to get the lengths of
9181 both methods and use the shortest one. */
9183 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
9184 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
9186 if (len_noclr
- len_clr
== 4)
9188 /* Default needs 4 CLR instructions: clear register beforehand. */
9190 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9191 "mov %B0,__zero_reg__" CR_TAB
9192 "movw %C0,%A0", &op
[0], len
, 3);
9194 output_reload_in_const (op
, clobber_reg
, len
, true);
9203 /* Default: destination not pre-cleared. */
9205 output_reload_in_const (op
, clobber_reg
, len
, false);
9210 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
9212 output_reload_in_const (op
, clobber_reg
, len
, false);
9217 avr_output_bld (rtx operands
[], int bit_nr
)
9219 static char s
[] = "bld %A0,0";
9221 s
[5] = 'A' + (bit_nr
>> 3);
9222 s
[8] = '0' + (bit_nr
& 7);
9223 output_asm_insn (s
, operands
);
9227 avr_output_addr_vec_elt (FILE *stream
, int value
)
9229 if (AVR_HAVE_JMP_CALL
)
9230 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
9232 fprintf (stream
, "\trjmp .L%d\n", value
);
9235 /* Returns true if SCRATCH are safe to be allocated as a scratch
9236 registers (for a define_peephole2) in the current function. */
9239 avr_hard_regno_scratch_ok (unsigned int regno
)
9241 /* Interrupt functions can only use registers that have already been saved
9242 by the prologue, even if they would normally be call-clobbered. */
9244 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9245 && !df_regs_ever_live_p (regno
))
9248 /* Don't allow hard registers that might be part of the frame pointer.
9249 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9250 and don't care for a frame pointer that spans more than one register. */
9252 if ((!reload_completed
|| frame_pointer_needed
)
9253 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
9261 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9264 avr_hard_regno_rename_ok (unsigned int old_reg
,
9265 unsigned int new_reg
)
9267 /* Interrupt functions can only use registers that have already been
9268 saved by the prologue, even if they would normally be
9271 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9272 && !df_regs_ever_live_p (new_reg
))
9275 /* Don't allow hard registers that might be part of the frame pointer.
9276 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9277 and don't care for a frame pointer that spans more than one register. */
9279 if ((!reload_completed
|| frame_pointer_needed
)
9280 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
9281 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
9289 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9290 or memory location in the I/O space (QImode only).
9292 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9293 Operand 1: register operand to test, or CONST_INT memory address.
9294 Operand 2: bit number.
9295 Operand 3: label to jump to if the test is true. */
9298 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
9300 enum rtx_code comp
= GET_CODE (operands
[0]);
9301 bool long_jump
= get_attr_length (insn
) >= 4;
9302 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
9306 else if (comp
== LT
)
9310 comp
= reverse_condition (comp
);
9312 switch (GET_CODE (operands
[1]))
9319 if (low_io_address_operand (operands
[1], QImode
))
9322 output_asm_insn ("sbis %i1,%2", operands
);
9324 output_asm_insn ("sbic %i1,%2", operands
);
9328 output_asm_insn ("in __tmp_reg__,%i1", operands
);
9330 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
9332 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
9335 break; /* CONST_INT */
9339 if (GET_MODE (operands
[1]) == QImode
)
9342 output_asm_insn ("sbrs %1,%2", operands
);
9344 output_asm_insn ("sbrc %1,%2", operands
);
9346 else /* HImode, PSImode or SImode */
9348 static char buf
[] = "sbrc %A1,0";
9349 unsigned int bit_nr
= UINTVAL (operands
[2]);
9351 buf
[3] = (comp
== EQ
) ? 's' : 'c';
9352 buf
[6] = 'A' + (bit_nr
/ 8);
9353 buf
[9] = '0' + (bit_nr
% 8);
9354 output_asm_insn (buf
, operands
);
9361 return ("rjmp .+4" CR_TAB
9370 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9373 avr_asm_out_ctor (rtx symbol
, int priority
)
9375 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
9376 default_ctor_section_asm_out_constructor (symbol
, priority
);
9379 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9382 avr_asm_out_dtor (rtx symbol
, int priority
)
9384 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
9385 default_dtor_section_asm_out_destructor (symbol
, priority
);
9388 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9391 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
9393 if (TYPE_MODE (type
) == BLKmode
)
9395 HOST_WIDE_INT size
= int_size_in_bytes (type
);
9396 return (size
== -1 || size
> 8);
9402 /* Worker function for CASE_VALUES_THRESHOLD. */
9405 avr_case_values_threshold (void)
9407 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
9411 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9413 static enum machine_mode
9414 avr_addr_space_address_mode (addr_space_t as
)
9416 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
9420 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9422 static enum machine_mode
9423 avr_addr_space_pointer_mode (addr_space_t as
)
9425 return avr_addr_space_address_mode (as
);
9429 /* Helper for following function. */
9432 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
9434 gcc_assert (REG_P (reg
));
9438 return REGNO (reg
) == REG_Z
;
9441 /* Avoid combine to propagate hard regs. */
9443 if (can_create_pseudo_p()
9444 && REGNO (reg
) < REG_Z
)
9453 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9456 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
9457 bool strict
, addr_space_t as
)
9466 case ADDR_SPACE_GENERIC
:
9467 return avr_legitimate_address_p (mode
, x
, strict
);
9469 case ADDR_SPACE_FLASH
:
9470 case ADDR_SPACE_FLASH1
:
9471 case ADDR_SPACE_FLASH2
:
9472 case ADDR_SPACE_FLASH3
:
9473 case ADDR_SPACE_FLASH4
:
9474 case ADDR_SPACE_FLASH5
:
9476 switch (GET_CODE (x
))
9479 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
9483 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
9492 case ADDR_SPACE_MEMX
:
9495 && can_create_pseudo_p());
9497 if (LO_SUM
== GET_CODE (x
))
9499 rtx hi
= XEXP (x
, 0);
9500 rtx lo
= XEXP (x
, 1);
9503 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
9505 && REGNO (lo
) == REG_Z
);
9511 if (avr_log
.legitimate_address_p
)
9513 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9514 "reload_completed=%d reload_in_progress=%d %s:",
9515 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
9516 reg_renumber
? "(reg_renumber)" : "");
9518 if (GET_CODE (x
) == PLUS
9519 && REG_P (XEXP (x
, 0))
9520 && CONST_INT_P (XEXP (x
, 1))
9521 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
9524 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
9525 true_regnum (XEXP (x
, 0)));
9528 avr_edump ("\n%r\n", x
);
9535 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9538 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
9539 enum machine_mode mode
, addr_space_t as
)
9541 if (ADDR_SPACE_GENERIC_P (as
))
9542 return avr_legitimize_address (x
, old_x
, mode
);
9544 if (avr_log
.legitimize_address
)
9546 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
9553 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9556 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
9558 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
9559 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
9561 if (avr_log
.progmem
)
9562 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9563 src
, type_from
, type_to
);
9565 /* Up-casting from 16-bit to 24-bit pointer. */
9567 if (as_from
!= ADDR_SPACE_MEMX
9568 && as_to
== ADDR_SPACE_MEMX
)
9572 rtx reg
= gen_reg_rtx (PSImode
);
9574 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
9575 sym
= XEXP (sym
, 0);
9577 /* Look at symbol flags: avr_encode_section_info set the flags
9578 also if attribute progmem was seen so that we get the right
9579 promotion for, e.g. PSTR-like strings that reside in generic space
9580 but are located in flash. In that case we patch the incoming
9583 if (SYMBOL_REF
== GET_CODE (sym
)
9584 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
9586 as_from
= ADDR_SPACE_FLASH
;
9589 /* Linearize memory: RAM has bit 23 set. */
9591 msb
= ADDR_SPACE_GENERIC_P (as_from
)
9593 : avr_addrspace
[as_from
].segment
% avr_current_arch
->n_segments
;
9595 src
= force_reg (Pmode
, src
);
9598 ? gen_zero_extendhipsi2 (reg
, src
)
9599 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
9604 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9606 if (as_from
== ADDR_SPACE_MEMX
9607 && as_to
!= ADDR_SPACE_MEMX
)
9609 rtx new_src
= gen_reg_rtx (Pmode
);
9611 src
= force_reg (PSImode
, src
);
9613 emit_move_insn (new_src
,
9614 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
9622 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9625 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
9626 addr_space_t superset ATTRIBUTE_UNUSED
)
9628 /* Allow any kind of pointer mess. */
9634 /* Worker function for movmemhi expander.
9635 XOP[0] Destination as MEM:BLK
9637 XOP[2] # Bytes to copy
9639 Return TRUE if the expansion is accomplished.
9640 Return FALSE if the operand compination is not supported. */
9643 avr_emit_movmemhi (rtx
*xop
)
9645 HOST_WIDE_INT count
;
9646 enum machine_mode loop_mode
;
9647 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
9648 rtx loop_reg
, addr0
, addr1
, a_src
, a_dest
, insn
, xas
, reg_x
;
9649 rtx a_hi8
= NULL_RTX
;
9651 if (avr_mem_flash_p (xop
[0]))
9654 if (!CONST_INT_P (xop
[2]))
9657 count
= INTVAL (xop
[2]);
9661 a_src
= XEXP (xop
[1], 0);
9662 a_dest
= XEXP (xop
[0], 0);
9664 if (PSImode
== GET_MODE (a_src
))
9666 gcc_assert (as
== ADDR_SPACE_MEMX
);
9668 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
9669 loop_reg
= gen_rtx_REG (loop_mode
, 24);
9670 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
9672 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
9673 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
9677 int segment
= avr_addrspace
[as
].segment
% avr_current_arch
->n_segments
;
9680 && avr_current_arch
->n_segments
> 1)
9682 a_hi8
= GEN_INT (segment
);
9683 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
9685 else if (!ADDR_SPACE_GENERIC_P (as
))
9687 as
= ADDR_SPACE_FLASH
;
9692 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
9693 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
9698 /* FIXME: Register allocator might come up with spill fails if it is left
9699 on its own. Thus, we allocate the pointer registers by hand:
9701 X = destination address */
9703 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
9704 addr1
= lpm_addr_reg_rtx
;
9706 reg_x
= gen_rtx_REG (HImode
, REG_X
);
9707 emit_move_insn (reg_x
, a_dest
);
9710 /* FIXME: Register allocator does a bad job and might spill address
9711 register(s) inside the loop leading to additional move instruction
9712 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9713 load and store as seperate insns. Instead, we perform the copy
9714 by means of one monolithic insn. */
9716 gcc_assert (TMP_REGNO
== LPM_REGNO
);
9718 if (as
!= ADDR_SPACE_MEMX
)
9720 /* Load instruction ([E]LPM or LD) is known at compile time:
9721 Do the copy-loop inline. */
9723 rtx (*fun
) (rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
)
9724 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
9726 insn
= fun (addr0
, addr1
, xas
, loop_reg
,
9727 addr0
, addr1
, tmp_reg_rtx
, loop_reg
);
9731 rtx loop_reg16
= gen_rtx_REG (HImode
, 24);
9732 rtx r23
= gen_rtx_REG (QImode
, 23);
9733 rtx (*fun
) (rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
)
9734 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
9736 emit_move_insn (r23
, a_hi8
);
9738 insn
= fun (addr0
, addr1
, xas
, loop_reg
, addr0
, addr1
,
9739 lpm_reg_rtx
, loop_reg16
, r23
, r23
, GEN_INT (RAMPZ_ADDR
));
9742 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
9749 /* Print assembler for movmem_qi, movmem_hi insns...
9753 $3, $7 : Loop register
9754 $6 : Scratch register
9756 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9758 $8, $9 : hh8 (& src)
9763 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*xop
, int *plen
)
9765 addr_space_t as
= (addr_space_t
) INTVAL (xop
[2]);
9766 enum machine_mode loop_mode
= GET_MODE (xop
[3]);
9768 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, xop
[3]);
9770 gcc_assert (REG_X
== REGNO (xop
[0])
9771 && REG_Z
== REGNO (xop
[1]));
9778 avr_asm_len ("0:", xop
, plen
, 0);
9780 /* Load with post-increment */
9787 case ADDR_SPACE_GENERIC
:
9789 avr_asm_len ("ld %6,%a1+", xop
, plen
, 1);
9792 case ADDR_SPACE_FLASH
:
9795 avr_asm_len ("lpm %6,%a1+", xop
, plen
, 1);
9797 avr_asm_len ("lpm" CR_TAB
9798 "adiw %1,1", xop
, plen
, 2);
9801 case ADDR_SPACE_FLASH1
:
9802 case ADDR_SPACE_FLASH2
:
9803 case ADDR_SPACE_FLASH3
:
9804 case ADDR_SPACE_FLASH4
:
9805 case ADDR_SPACE_FLASH5
:
9808 avr_asm_len ("elpm %6,%a1+", xop
, plen
, 1);
9810 avr_asm_len ("elpm" CR_TAB
9811 "adiw %1,1", xop
, plen
, 2);
9815 /* Store with post-increment */
9817 avr_asm_len ("st %a0+,%6", xop
, plen
, 1);
9819 /* Decrement loop-counter and set Z-flag */
9821 if (QImode
== loop_mode
)
9823 avr_asm_len ("dec %3", xop
, plen
, 1);
9827 avr_asm_len ("sbiw %3,1", xop
, plen
, 1);
9831 avr_asm_len ("subi %A3,1" CR_TAB
9832 "sbci %B3,0", xop
, plen
, 2);
9835 /* Loop until zero */
9837 return avr_asm_len ("brne 0b", xop
, plen
, 1);
9842 /* Helper for __builtin_avr_delay_cycles */
9845 avr_expand_delay_cycles (rtx operands0
)
9847 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
);
9848 unsigned HOST_WIDE_INT cycles_used
;
9849 unsigned HOST_WIDE_INT loop_count
;
9851 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
9853 loop_count
= ((cycles
- 9) / 6) + 1;
9854 cycles_used
= ((loop_count
- 1) * 6) + 9;
9855 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
)));
9856 cycles
-= cycles_used
;
9859 if (IN_RANGE (cycles
, 262145, 83886081))
9861 loop_count
= ((cycles
- 7) / 5) + 1;
9862 if (loop_count
> 0xFFFFFF)
9863 loop_count
= 0xFFFFFF;
9864 cycles_used
= ((loop_count
- 1) * 5) + 7;
9865 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
)));
9866 cycles
-= cycles_used
;
9869 if (IN_RANGE (cycles
, 768, 262144))
9871 loop_count
= ((cycles
- 5) / 4) + 1;
9872 if (loop_count
> 0xFFFF)
9873 loop_count
= 0xFFFF;
9874 cycles_used
= ((loop_count
- 1) * 4) + 5;
9875 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
)));
9876 cycles
-= cycles_used
;
9879 if (IN_RANGE (cycles
, 6, 767))
9881 loop_count
= cycles
/ 3;
9882 if (loop_count
> 255)
9884 cycles_used
= loop_count
* 3;
9885 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
)));
9886 cycles
-= cycles_used
;
9891 emit_insn (gen_nopv (GEN_INT(2)));
9897 emit_insn (gen_nopv (GEN_INT(1)));
9903 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9906 avr_double_int_push_digit (double_int val
, int base
,
9907 unsigned HOST_WIDE_INT digit
)
9910 ? double_int_lshift (val
, 32, 64, false)
9911 : double_int_mul (val
, uhwi_to_double_int (base
));
9913 return double_int_add (val
, uhwi_to_double_int (digit
));
9917 /* Compute the image of x under f, i.e. perform x --> f(x) */
9920 avr_map (double_int f
, int x
)
9922 return 0xf & double_int_to_uhwi (double_int_rshift (f
, 4*x
, 64, false));
9926 /* Return the map R that reverses the bits of byte B.
9928 R(0) = (0 7) o (1 6) o (2 5) o (3 4)
9929 R(1) = (8 15) o (9 14) o (10 13) o (11 12)
9931 Notice that R o R = id. */
9934 avr_revert_map (int b
)
9937 double_int r
= double_int_zero
;
9939 for (i
= 16-1; i
>= 0; i
--)
9940 r
= avr_double_int_push_digit (r
, 16, i
>> 3 == b
? i
^ 7 : i
);
9946 /* Return the map R that swaps bit-chunks of size SIZE in byte B.
9948 R(1,0) = (0 1) o (2 3) o (4 5) o (6 7)
9949 R(1,1) = (8 9) o (10 11) o (12 13) o (14 15)
9951 R(4,0) = (0 4) o (1 5) o (2 6) o (3 7)
9952 R(4,1) = (8 12) o (9 13) o (10 14) o (11 15)
9954 Notice that R o R = id. */
9957 avr_swap_map (int size
, int b
)
9960 double_int r
= double_int_zero
;
9962 for (i
= 16-1; i
>= 0; i
--)
9963 r
= avr_double_int_push_digit (r
, 16, i
^ (i
>> 3 == b
? size
: 0));
9969 /* Return Identity. */
9975 double_int r
= double_int_zero
;
9977 for (i
= 16-1; i
>= 0; i
--)
9978 r
= avr_double_int_push_digit (r
, 16, i
);
/* Signatures of basic bit-permutation maps recognized by avr_sig_map()
   below.  NOTE(review): the head of this enum was lost in extraction
   and has been reconstructed — verify the exact enumerators and their
   comments against the upstream sources.  */

enum
  {
    SIG_ID       = 0,
    /* Revert bit order of low (0) resp. high (1) byte.  */
    SIG_REVERT_0 = 1 << 4,
    SIG_SWAP1_0  = 1 << 5,
    SIG_REVERT_1 = 1 << 6,
    SIG_SWAP1_1  = 1 << 7,
    /* Swap nibbles of low (0) resp. high (1) byte.  */
    SIG_SWAP4_0  = 1 << 8,
    SIG_SWAP4_1  = 1 << 9
  };
9999 /* Return basic map with signature SIG. */
10002 avr_sig_map (int n ATTRIBUTE_UNUSED
, int sig
)
10004 if (sig
== SIG_ID
) return avr_id_map ();
10005 else if (sig
== SIG_REVERT_0
) return avr_revert_map (0);
10006 else if (sig
== SIG_REVERT_1
) return avr_revert_map (1);
10007 else if (sig
== SIG_SWAP1_0
) return avr_swap_map (1, 0);
10008 else if (sig
== SIG_SWAP1_1
) return avr_swap_map (1, 1);
10009 else if (sig
== SIG_SWAP4_0
) return avr_swap_map (4, 0);
10010 else if (sig
== SIG_SWAP4_1
) return avr_swap_map (4, 1);
10016 /* Return the Hamming distance between the B-th byte of A and C. */
10019 avr_map_hamming_byte (int n
, int b
, double_int a
, double_int c
, bool strict
)
10021 int i
, hamming
= 0;
10023 for (i
= 8*b
; i
< n
&& i
< 8*b
+ 8; i
++)
10025 int ai
= avr_map (a
, i
);
10026 int ci
= avr_map (c
, i
);
10028 hamming
+= ai
!= ci
&& (strict
|| (ai
< n
&& ci
< n
));
/* Return the non-strict Hamming distance between A and B.  */

#define avr_map_hamming_nonstrict(N,A,B)              \
  (+ avr_map_hamming_byte (N, 0, A, B, false)         \
   + avr_map_hamming_byte (N, 1, A, B, false))


/* Return TRUE iff A and B represent the same mapping.  */

#define avr_map_equal_p(N,A,B) (0 == avr_map_hamming_nonstrict (N, A, B))


/* Return TRUE iff A is a map of signature S.  Notice that there is no
   1:1 correspondence between maps and signatures and thus this is
   only supported for basic signatures recognized by avr_sig_map().  */

#define avr_map_sig_p(N,A,S) avr_map_equal_p (N, A, avr_sig_map (N, S))
10054 /* Swap odd/even bits of ld-reg %0: %0 = bit-swap (%0) */
10057 avr_out_swap_bits (rtx
*xop
, int *plen
)
10059 xop
[1] = tmp_reg_rtx
;
10061 return avr_asm_len ("mov %1,%0" CR_TAB
10062 "andi %0,0xaa" CR_TAB
10066 "or %0,%1", xop
, plen
, 6);
10069 /* Revert bit order: %0 = Revert (%1) with %0 != %1 and clobber %1 */
10072 avr_out_revert_bits (rtx
*xop
, int *plen
)
10074 return avr_asm_len ("inc __zero_reg__" "\n"
10075 "0:\tror %1" CR_TAB
10077 "lsl __zero_reg__" CR_TAB
10078 "brne 0b", xop
, plen
, 5);
10082 /* If OUT_P = true: Output BST/BLD instruction according to MAP.
10083 If OUT_P = false: Just dry-run and fix XOP[1] to resolve
10084 early-clobber conflicts if XOP[0] = XOP[1]. */
10087 avr_move_bits (rtx
*xop
, double_int map
, int n_bits
, bool out_p
, int *plen
)
10089 int bit_dest
, b
, clobber
= 0;
10091 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10092 int t_bit_src
= -1;
10094 if (!optimize
&& !out_p
)
10096 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
10097 xop
[1] = tmp_reg_rtx
;
10101 /* We order the operations according to the requested source bit b. */
10103 for (b
= 0; b
< n_bits
; b
++)
10104 for (bit_dest
= 0; bit_dest
< n_bits
; bit_dest
++)
10106 int bit_src
= avr_map (map
, bit_dest
);
10109 /* Same position: No need to copy as the caller did MOV. */
10110 || bit_dest
== bit_src
10111 /* Accessing bits 8..f for 8-bit version is void. */
10112 || bit_src
>= n_bits
)
10115 if (t_bit_src
!= bit_src
)
10117 /* Source bit is not yet in T: Store it to T. */
10119 t_bit_src
= bit_src
;
10123 xop
[2] = GEN_INT (bit_src
);
10124 avr_asm_len ("bst %T1%T2", xop
, plen
, 1);
10126 else if (clobber
& (1 << bit_src
))
10128 /* Bit to be read was written already: Backup input
10129 to resolve early-clobber conflict. */
10131 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
10132 xop
[1] = tmp_reg_rtx
;
10137 /* Load destination bit with T. */
10141 xop
[2] = GEN_INT (bit_dest
);
10142 avr_asm_len ("bld %T0%T2", xop
, plen
, 1);
10145 clobber
|= 1 << bit_dest
;
10150 /* Print assembler code for `map_bitsqi' and `map_bitshi'. */
10153 avr_out_map_bits (rtx insn
, rtx
*operands
, int *plen
)
10155 bool copy_0
, copy_1
;
10156 int n_bits
= GET_MODE_BITSIZE (GET_MODE (operands
[0]));
10157 double_int map
= rtx_to_double_int (operands
[1]);
10160 xop
[0] = operands
[0];
10161 xop
[1] = operands
[2];
10165 else if (flag_print_asm_name
)
10166 avr_fdump (asm_out_file
, ASM_COMMENT_START
"%X\n", map
);
10174 if (avr_map_sig_p (n_bits
, map
, SIG_SWAP1_0
))
10176 return avr_out_swap_bits (xop
, plen
);
10178 else if (avr_map_sig_p (n_bits
, map
, SIG_REVERT_0
))
10180 if (REGNO (xop
[0]) == REGNO (xop
[1])
10181 || !reg_unused_after (insn
, xop
[1]))
10183 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
10184 xop
[1] = tmp_reg_rtx
;
10187 return avr_out_revert_bits (xop
, plen
);
10197 /* Copy whole byte is cheaper than moving bits that stay at the same
10198 position. Some bits in a byte stay at the same position iff the
10199 strict Hamming distance to Identity is not 8. */
10201 copy_0
= 8 != avr_map_hamming_byte (n_bits
, 0, map
, avr_id_map(), true);
10202 copy_1
= 8 != avr_map_hamming_byte (n_bits
, 1, map
, avr_id_map(), true);
10204 /* Perform the move(s) just worked out. */
10208 if (REGNO (xop
[0]) == REGNO (xop
[1]))
10210 /* Fix early-clobber clashes.
10211 Notice XOP[0] hat no eary-clobber in its constraint. */
10213 avr_move_bits (xop
, map
, n_bits
, false, plen
);
10217 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
10220 else if (AVR_HAVE_MOVW
&& copy_0
&& copy_1
)
10222 avr_asm_len ("movw %A0,%A1", xop
, plen
, 1);
10227 avr_asm_len ("mov %A0,%A1", xop
, plen
, 1);
10230 avr_asm_len ("mov %B0,%B1", xop
, plen
, 1);
10233 /* Move individual bits. */
10235 avr_move_bits (xop
, map
, n_bits
, true, plen
);
/* IDs for all the AVR builtins.
   NOTE(review): only the last two enumerators survived extraction;
   the rest have been reconstructed from their uses below — verify
   names and ordering against the upstream sources.  */

enum avr_builtin_id
  {
    AVR_BUILTIN_NOP,
    AVR_BUILTIN_SEI,
    AVR_BUILTIN_CLI,
    AVR_BUILTIN_WDR,
    AVR_BUILTIN_SLEEP,
    AVR_BUILTIN_SWAP,
    AVR_BUILTIN_MAP8,
    AVR_BUILTIN_MAP16,
    AVR_BUILTIN_FMUL,
    AVR_BUILTIN_FMULS,
    AVR_BUILTIN_FMULSU,
    AVR_BUILTIN_DELAY_CYCLES
  };
10260 avr_init_builtin_int24 (void)
10262 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
10263 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
10265 (*lang_hooks
.types
.register_builtin_type
) (int24_type
, "__int24");
10266 (*lang_hooks
.types
.register_builtin_type
) (uint24_type
, "__uint24");
/* Register one machine-dependent builtin named NAME with function type
   TYPE and function code CODE.  Wrapped in do/while(0) so it behaves
   as a single statement.  */

#define DEF_BUILTIN(NAME, TYPE, CODE)                                   \
  do                                                                    \
    {                                                                   \
      add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,        \
                            NULL, NULL_TREE);                           \
    } while (0)
10277 /* Implement `TARGET_INIT_BUILTINS' */
10278 /* Set up all builtin functions for this target. */
10281 avr_init_builtins (void)
10283 tree void_ftype_void
10284 = build_function_type_list (void_type_node
, NULL_TREE
);
10285 tree uchar_ftype_uchar
10286 = build_function_type_list (unsigned_char_type_node
,
10287 unsigned_char_type_node
,
10289 tree uint_ftype_uchar_uchar
10290 = build_function_type_list (unsigned_type_node
,
10291 unsigned_char_type_node
,
10292 unsigned_char_type_node
,
10294 tree int_ftype_char_char
10295 = build_function_type_list (integer_type_node
,
10299 tree int_ftype_char_uchar
10300 = build_function_type_list (integer_type_node
,
10302 unsigned_char_type_node
,
10304 tree void_ftype_ulong
10305 = build_function_type_list (void_type_node
,
10306 long_unsigned_type_node
,
10309 tree uchar_ftype_ulong_uchar
10310 = build_function_type_list (unsigned_char_type_node
,
10311 long_unsigned_type_node
,
10312 unsigned_char_type_node
,
10315 tree uint_ftype_ullong_uint
10316 = build_function_type_list (unsigned_type_node
,
10317 long_long_unsigned_type_node
,
10318 unsigned_type_node
,
10321 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void
, AVR_BUILTIN_NOP
);
10322 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void
, AVR_BUILTIN_SEI
);
10323 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void
, AVR_BUILTIN_CLI
);
10324 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void
, AVR_BUILTIN_WDR
);
10325 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void
, AVR_BUILTIN_SLEEP
);
10326 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar
, AVR_BUILTIN_SWAP
);
10327 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong
,
10328 AVR_BUILTIN_DELAY_CYCLES
);
10330 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar
,
10332 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char
,
10333 AVR_BUILTIN_FMULS
);
10334 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar
,
10335 AVR_BUILTIN_FMULSU
);
10337 DEF_BUILTIN ("__builtin_avr_map8", uchar_ftype_ulong_uchar
,
10339 DEF_BUILTIN ("__builtin_avr_map16", uint_ftype_ullong_uint
,
10340 AVR_BUILTIN_MAP16
);
10342 avr_init_builtin_int24 ();
10347 struct avr_builtin_description
10349 const enum insn_code icode
;
10350 const char *const name
;
10351 const enum avr_builtin_id id
;
10354 static const struct avr_builtin_description
10357 { CODE_FOR_rotlqi3_4
, "__builtin_avr_swap", AVR_BUILTIN_SWAP
}
10360 static const struct avr_builtin_description
10363 { CODE_FOR_fmul
, "__builtin_avr_fmul", AVR_BUILTIN_FMUL
},
10364 { CODE_FOR_fmuls
, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS
},
10365 { CODE_FOR_fmulsu
, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU
},
10366 { CODE_FOR_map_bitsqi
, "__builtin_avr_map8", AVR_BUILTIN_MAP8
},
10367 { CODE_FOR_map_bitshi
, "__builtin_avr_map16", AVR_BUILTIN_MAP16
}
10370 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10373 avr_expand_unop_builtin (enum insn_code icode
, tree exp
,
10377 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10378 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10379 enum machine_mode op0mode
= GET_MODE (op0
);
10380 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10381 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10384 || GET_MODE (target
) != tmode
10385 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10387 target
= gen_reg_rtx (tmode
);
10390 if (op0mode
== SImode
&& mode0
== HImode
)
10393 op0
= gen_lowpart (HImode
, op0
);
10396 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
10398 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10399 op0
= copy_to_mode_reg (mode0
, op0
);
10401 pat
= GEN_FCN (icode
) (target
, op0
);
10411 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10414 avr_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
)
10417 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10418 tree arg1
= CALL_EXPR_ARG (exp
, 1);
10419 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10420 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10421 enum machine_mode op0mode
= GET_MODE (op0
);
10422 enum machine_mode op1mode
= GET_MODE (op1
);
10423 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10424 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10425 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
10428 || GET_MODE (target
) != tmode
10429 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10431 target
= gen_reg_rtx (tmode
);
10434 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
10437 op0
= gen_lowpart (HImode
, op0
);
10440 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
10443 op1
= gen_lowpart (HImode
, op1
);
10446 /* In case the insn wants input operands in modes different from
10447 the result, abort. */
10449 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
10450 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
10452 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10453 op0
= copy_to_mode_reg (mode0
, op0
);
10455 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
10456 op1
= copy_to_mode_reg (mode1
, op1
);
10458 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
10468 /* Expand an expression EXP that calls a built-in function,
10469 with result going to TARGET if that's convenient
10470 (and in mode MODE if that's convenient).
10471 SUBTARGET may be used as the target for computing one of EXP's operands.
10472 IGNORE is nonzero if the value is to be ignored. */
10475 avr_expand_builtin (tree exp
, rtx target
,
10476 rtx subtarget ATTRIBUTE_UNUSED
,
10477 enum machine_mode mode ATTRIBUTE_UNUSED
,
10478 int ignore ATTRIBUTE_UNUSED
)
10481 const struct avr_builtin_description
*d
;
10482 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
10483 const char* bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
10484 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
10490 case AVR_BUILTIN_NOP
:
10491 emit_insn (gen_nopv (GEN_INT(1)));
10494 case AVR_BUILTIN_SEI
:
10495 emit_insn (gen_enable_interrupt ());
10498 case AVR_BUILTIN_CLI
:
10499 emit_insn (gen_disable_interrupt ());
10502 case AVR_BUILTIN_WDR
:
10503 emit_insn (gen_wdr ());
10506 case AVR_BUILTIN_SLEEP
:
10507 emit_insn (gen_sleep ());
10510 case AVR_BUILTIN_DELAY_CYCLES
:
10512 arg0
= CALL_EXPR_ARG (exp
, 0);
10513 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10515 if (! CONST_INT_P (op0
))
10516 error ("%s expects a compile time integer constant", bname
);
10518 avr_expand_delay_cycles (op0
);
10522 case AVR_BUILTIN_MAP8
:
10524 arg0
= CALL_EXPR_ARG (exp
, 0);
10525 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10527 if (!CONST_INT_P (op0
))
10529 error ("%s expects a compile time long integer constant"
10530 " as first argument", bname
);
10535 case AVR_BUILTIN_MAP16
:
10537 arg0
= CALL_EXPR_ARG (exp
, 0);
10538 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10540 if (!const_double_operand (op0
, VOIDmode
))
10542 error ("%s expects a compile time long long integer constant"
10543 " as first argument", bname
);
10549 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
10551 return avr_expand_unop_builtin (d
->icode
, exp
, target
);
10553 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
10555 return avr_expand_binop_builtin (d
->icode
, exp
, target
);
10557 gcc_unreachable ();
10560 struct gcc_target targetm
= TARGET_INITIALIZER
;
10562 #include "gt-avr.h"