1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
/* Maximal allowed offset for an address in the LD command: the available
   displacement range minus the size of the access in MODE.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed) GET_MODE_SIZE (MODE))
/* Return true if STR starts with PREFIX and false, otherwise.
   Note: PREFIX is evaluated twice.  */
#define STR_PREFIX_P(STR,PREFIX) (strncmp (STR, PREFIX, strlen (PREFIX)) == 0)
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fixed to reference the macro parameter (SYM) instead of a hard-coded
   lowercase `sym', which only worked at call sites whose argument happened
   to be named exactly `sym'.  Wrapped in do/while(0) so the expansion
   behaves as a single statement.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)
/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Fixed to reference the macro parameter (SYM) instead of a hard-coded
   lowercase `sym', which only worked at call sites whose argument happened
   to be named exactly `sym'.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initializers must be used). */
83 const avr_addrspace_t avr_addrspace
[] =
85 { ADDR_SPACE_RAM
, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix
[6] =
107 /* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
112 /* SREG: The processor status */
115 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
121 /* RAMPZ: The high byte of 24-bit address used with ELPM */
124 /* SP: The stack pointer and its low and high byte */
129 static avr_addr_t avr_addr
;
132 /* Prototypes for local helper functions. */
134 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
135 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
136 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
137 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
138 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
139 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
141 static int avr_naked_function_p (tree
);
142 static int interrupt_function_p (tree
);
143 static int signal_function_p (tree
);
144 static int avr_OS_task_function_p (tree
);
145 static int avr_OS_main_function_p (tree
);
146 static int avr_regs_to_save (HARD_REG_SET
*);
147 static int get_sequence_length (rtx insns
);
148 static int sequent_regs_live (void);
149 static const char *ptrreg_to_str (int);
150 static const char *cond_string (enum rtx_code
);
151 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
152 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
154 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
155 static struct machine_function
* avr_init_machine_status (void);
158 /* Prototypes for hook implementors if needed before their implementation. */
160 static bool avr_rtx_costs (rtx
, int, int, int, int *, bool);
163 /* Allocate registers from r25 to r8 for parameters for function calls. */
164 #define FIRST_CUM_REG 26
166 /* Implicit target register of LPM instruction (R0) */
167 extern GTY(()) rtx lpm_reg_rtx
;
170 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
171 extern GTY(()) rtx lpm_addr_reg_rtx
;
172 rtx lpm_addr_reg_rtx
;
174 /* Temporary register RTX (reg:QI TMP_REGNO) */
175 extern GTY(()) rtx tmp_reg_rtx
;
178 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
179 extern GTY(()) rtx zero_reg_rtx
;
182 /* RTXs for all general purpose registers as QImode */
183 extern GTY(()) rtx all_regs_rtx
[32];
184 rtx all_regs_rtx
[32];
186 /* SREG, the processor status */
187 extern GTY(()) rtx sreg_rtx
;
190 /* RAMP* special function registers */
191 extern GTY(()) rtx rampd_rtx
;
192 extern GTY(()) rtx rampx_rtx
;
193 extern GTY(()) rtx rampy_rtx
;
194 extern GTY(()) rtx rampz_rtx
;
200 /* RTX containing the strings "" and "e", respectively */
201 static GTY(()) rtx xstring_empty
;
202 static GTY(()) rtx xstring_e
;
204 /* Preprocessor macros to define depending on MCU type. */
205 const char *avr_extra_arch_macro
;
207 /* Current architecture. */
208 const struct base_arch_s
*avr_current_arch
;
210 /* Current device. */
211 const struct mcu_type_s
*avr_current_device
;
213 /* Section to put switch tables in. */
214 static GTY(()) section
*progmem_swtable_section
;
216 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
217 or to address space __flash*. */
218 static GTY(()) section
*progmem_section
[6];
220 /* Condition for insns/expanders from avr-dimode.md. */
221 bool avr_have_dimode
= true;
223 /* To track if code will use .bss and/or .data. */
224 bool avr_need_clear_bss_p
= false;
225 bool avr_need_copy_data_p
= false;
228 /* Initialize the GCC target structure. */
229 #undef TARGET_ASM_ALIGNED_HI_OP
230 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
231 #undef TARGET_ASM_ALIGNED_SI_OP
232 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
233 #undef TARGET_ASM_UNALIGNED_HI_OP
234 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
235 #undef TARGET_ASM_UNALIGNED_SI_OP
236 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
237 #undef TARGET_ASM_INTEGER
238 #define TARGET_ASM_INTEGER avr_assemble_integer
239 #undef TARGET_ASM_FILE_START
240 #define TARGET_ASM_FILE_START avr_file_start
241 #undef TARGET_ASM_FILE_END
242 #define TARGET_ASM_FILE_END avr_file_end
244 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
245 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
246 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
247 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
249 #undef TARGET_FUNCTION_VALUE
250 #define TARGET_FUNCTION_VALUE avr_function_value
251 #undef TARGET_LIBCALL_VALUE
252 #define TARGET_LIBCALL_VALUE avr_libcall_value
253 #undef TARGET_FUNCTION_VALUE_REGNO_P
254 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
256 #undef TARGET_ATTRIBUTE_TABLE
257 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
258 #undef TARGET_INSERT_ATTRIBUTES
259 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
260 #undef TARGET_SECTION_TYPE_FLAGS
261 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
263 #undef TARGET_ASM_NAMED_SECTION
264 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
265 #undef TARGET_ASM_INIT_SECTIONS
266 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
267 #undef TARGET_ENCODE_SECTION_INFO
268 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
269 #undef TARGET_ASM_SELECT_SECTION
270 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
272 #undef TARGET_REGISTER_MOVE_COST
273 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
274 #undef TARGET_MEMORY_MOVE_COST
275 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
276 #undef TARGET_RTX_COSTS
277 #define TARGET_RTX_COSTS avr_rtx_costs
278 #undef TARGET_ADDRESS_COST
279 #define TARGET_ADDRESS_COST avr_address_cost
280 #undef TARGET_MACHINE_DEPENDENT_REORG
281 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
282 #undef TARGET_FUNCTION_ARG
283 #define TARGET_FUNCTION_ARG avr_function_arg
284 #undef TARGET_FUNCTION_ARG_ADVANCE
285 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
287 #undef TARGET_RETURN_IN_MEMORY
288 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
290 #undef TARGET_STRICT_ARGUMENT_NAMING
291 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
293 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
294 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
296 #undef TARGET_HARD_REGNO_SCRATCH_OK
297 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
298 #undef TARGET_CASE_VALUES_THRESHOLD
299 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
301 #undef TARGET_FRAME_POINTER_REQUIRED
302 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
303 #undef TARGET_CAN_ELIMINATE
304 #define TARGET_CAN_ELIMINATE avr_can_eliminate
306 #undef TARGET_CLASS_LIKELY_SPILLED_P
307 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
309 #undef TARGET_OPTION_OVERRIDE
310 #define TARGET_OPTION_OVERRIDE avr_option_override
312 #undef TARGET_CANNOT_MODIFY_JUMPS_P
313 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
315 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
316 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
318 #undef TARGET_INIT_BUILTINS
319 #define TARGET_INIT_BUILTINS avr_init_builtins
321 #undef TARGET_EXPAND_BUILTIN
322 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
324 #undef TARGET_FOLD_BUILTIN
325 #define TARGET_FOLD_BUILTIN avr_fold_builtin
327 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
328 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
330 #undef TARGET_SCALAR_MODE_SUPPORTED_P
331 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
333 #undef TARGET_ADDR_SPACE_SUBSET_P
334 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
336 #undef TARGET_ADDR_SPACE_CONVERT
337 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
339 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
340 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
342 #undef TARGET_ADDR_SPACE_POINTER_MODE
343 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
345 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
346 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
348 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
349 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
351 #undef TARGET_PRINT_OPERAND
352 #define TARGET_PRINT_OPERAND avr_print_operand
353 #undef TARGET_PRINT_OPERAND_ADDRESS
354 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
355 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
356 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
360 /* Custom function to count number of set bits. */
363 avr_popcount (unsigned int val
)
377 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
378 Return true if the least significant N_BYTES bytes of XVAL all have a
379 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
380 of integers which contains an integer N iff bit N of POP_MASK is set. */
383 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
387 enum machine_mode mode
= GET_MODE (xval
);
389 if (VOIDmode
== mode
)
392 for (i
= 0; i
< n_bytes
; i
++)
394 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
395 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
397 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
405 avr_option_override (void)
407 flag_delete_null_pointer_checks
= 0;
409 /* caller-save.c looks for call-clobbered hard registers that are assigned
410 to pseudos that cross calls and tries to save-restore them around calls
411 in order to reduce the number of stack slots needed.
413 This might lead to situations where reload is no longer able to cope
414 with the challenge of AVR's very few address registers and fails to
415 perform the requested spills. */
418 flag_caller_saves
= 0;
420 /* Unwind tables currently require a frame pointer for correctness,
421 see toplev.c:process_options(). */
423 if ((flag_unwind_tables
424 || flag_non_call_exceptions
425 || flag_asynchronous_unwind_tables
)
426 && !ACCUMULATE_OUTGOING_ARGS
)
428 flag_omit_frame_pointer
= 0;
431 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
432 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
433 avr_extra_arch_macro
= avr_current_device
->macro
;
435 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
437 /* SREG: Status Register containing flags like I (global IRQ) */
438 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
440 /* RAMPZ: Address' high part when loading via ELPM */
441 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
443 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
444 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
445 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
446 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
448 /* SP: Stack Pointer (SP_H:SP_L) */
449 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
450 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
452 init_machine_status
= avr_init_machine_status
;
454 avr_log_set_avr_log();
/* Set up the per-function backend structure; installed as
   init_machine_status by avr_option_override.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
466 /* Implement `INIT_EXPANDERS'. */
467 /* The function works like a singleton. */
470 avr_init_expanders (void)
474 for (regno
= 0; regno
< 32; regno
++)
475 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
477 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
478 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
479 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
481 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
483 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
484 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
485 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
486 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
487 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
489 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
490 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
494 /* Return register class for register R. */
497 avr_regno_reg_class (int r
)
499 static const enum reg_class reg_class_tab
[] =
503 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
504 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
505 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
506 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
508 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
509 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
511 ADDW_REGS
, ADDW_REGS
,
513 POINTER_X_REGS
, POINTER_X_REGS
,
515 POINTER_Y_REGS
, POINTER_Y_REGS
,
517 POINTER_Z_REGS
, POINTER_Z_REGS
,
523 return reg_class_tab
[r
];
530 avr_scalar_mode_supported_p (enum machine_mode mode
)
535 return default_scalar_mode_supported_p (mode
);
539 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
542 avr_decl_flash_p (tree decl
)
544 if (TREE_CODE (decl
) != VAR_DECL
545 || TREE_TYPE (decl
) == error_mark_node
)
550 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
554 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
555 address space and FALSE, otherwise. */
558 avr_decl_memx_p (tree decl
)
560 if (TREE_CODE (decl
) != VAR_DECL
561 || TREE_TYPE (decl
) == error_mark_node
)
566 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
570 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
573 avr_mem_flash_p (rtx x
)
576 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
580 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
581 address space and FALSE, otherwise. */
584 avr_mem_memx_p (rtx x
)
587 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
591 /* A helper for the subsequent function attribute used to dig for
592 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
595 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
597 if (FUNCTION_DECL
== TREE_CODE (func
))
599 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
604 func
= TREE_TYPE (func
);
607 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
608 || TREE_CODE (func
) == METHOD_TYPE
);
610 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
613 /* Return nonzero if FUNC is a naked function. */
616 avr_naked_function_p (tree func
)
618 return avr_lookup_function_attribute1 (func
, "naked");
621 /* Return nonzero if FUNC is an interrupt function as specified
622 by the "interrupt" attribute. */
625 interrupt_function_p (tree func
)
627 return avr_lookup_function_attribute1 (func
, "interrupt");
630 /* Return nonzero if FUNC is a signal function as specified
631 by the "signal" attribute. */
634 signal_function_p (tree func
)
636 return avr_lookup_function_attribute1 (func
, "signal");
639 /* Return nonzero if FUNC is an OS_task function. */
642 avr_OS_task_function_p (tree func
)
644 return avr_lookup_function_attribute1 (func
, "OS_task");
647 /* Return nonzero if FUNC is an OS_main function. */
650 avr_OS_main_function_p (tree func
)
652 return avr_lookup_function_attribute1 (func
, "OS_main");
656 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
659 avr_accumulate_outgoing_args (void)
662 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
664 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
665 what offset is correct. In some cases it is relative to
666 virtual_outgoing_args_rtx and in others it is relative to
667 virtual_stack_vars_rtx. For example code see
668 gcc.c-torture/execute/built-in-setjmp.c
669 gcc.c-torture/execute/builtins/sprintf-chk.c */
671 return (TARGET_ACCUMULATE_OUTGOING_ARGS
672 && !(cfun
->calls_setjmp
673 || cfun
->has_nonlocal_label
));
677 /* Report contribution of accumulated outgoing arguments to stack size. */
680 avr_outgoing_args_size (void)
682 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
697 /* Return the number of hard registers to push/pop in the prologue/epilogue
698 of the current function, and optionally store these registers in SET. */
701 avr_regs_to_save (HARD_REG_SET
*set
)
704 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
705 || signal_function_p (current_function_decl
));
708 CLEAR_HARD_REG_SET (*set
);
711 /* No need to save any registers if the function never returns or
712 has the "OS_task" or "OS_main" attribute. */
713 if (TREE_THIS_VOLATILE (current_function_decl
)
714 || cfun
->machine
->is_OS_task
715 || cfun
->machine
->is_OS_main
)
718 for (reg
= 0; reg
< 32; reg
++)
720 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
721 any global register variables. */
725 if ((int_or_sig_p
&& !current_function_is_leaf
&& call_used_regs
[reg
])
726 || (df_regs_ever_live_p (reg
)
727 && (int_or_sig_p
|| !call_used_regs
[reg
])
728 /* Don't record frame pointer registers here. They are treated
729 individually in prologue. */
730 && !(frame_pointer_needed
731 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
734 SET_HARD_REG_BIT (*set
, reg
);
741 /* Return true if register FROM can be eliminated via register TO. */
744 avr_can_eliminate (const int from
, const int to
)
746 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
747 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
748 || ((from
== FRAME_POINTER_REGNUM
749 || from
== FRAME_POINTER_REGNUM
+ 1)
750 && !frame_pointer_needed
));
753 /* Compute offset between arg_pointer and frame_pointer. */
756 avr_initial_elimination_offset (int from
, int to
)
758 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
762 int offset
= frame_pointer_needed
? 2 : 0;
763 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
765 offset
+= avr_regs_to_save (NULL
);
766 return (get_frame_size () + avr_outgoing_args_size()
767 + avr_pc_size
+ 1 + offset
);
771 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
772 frame pointer by +STARTING_FRAME_OFFSET.
773 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
774 avoids creating add/sub of offset in nonlocal goto and setjmp. */
777 avr_builtin_setjmp_frame_value (void)
779 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
780 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
783 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
784 This is return address of function. */
786 avr_return_addr_rtx (int count
, rtx tem
)
790 /* Can only return this function's return address. Others not supported. */
796 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
797 warning (0, "'builtin_return_address' contains only 2 bytes of address");
800 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
802 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
803 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
804 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
808 /* Return 1 if the function epilogue is just a single "ret". */
811 avr_simple_epilogue (void)
813 return (! frame_pointer_needed
814 && get_frame_size () == 0
815 && avr_outgoing_args_size() == 0
816 && avr_regs_to_save (NULL
) == 0
817 && ! interrupt_function_p (current_function_decl
)
818 && ! signal_function_p (current_function_decl
)
819 && ! avr_naked_function_p (current_function_decl
)
820 && ! TREE_THIS_VOLATILE (current_function_decl
));
823 /* This function checks sequence of live registers. */
826 sequent_regs_live (void)
832 for (reg
= 0; reg
< 18; ++reg
)
836 /* Don't recognize sequences that contain global register
845 if (!call_used_regs
[reg
])
847 if (df_regs_ever_live_p (reg
))
857 if (!frame_pointer_needed
)
859 if (df_regs_ever_live_p (REG_Y
))
867 if (df_regs_ever_live_p (REG_Y
+1))
880 return (cur_seq
== live_seq
) ? live_seq
: 0;
883 /* Obtain the length sequence of insns. */
886 get_sequence_length (rtx insns
)
891 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
892 length
+= get_attr_length (insn
);
897 /* Implement INCOMING_RETURN_ADDR_RTX. */
900 avr_incoming_return_addr_rtx (void)
902 /* The return address is at the top of the stack. Note that the push
903 was via post-decrement, which means the actual address is off by one. */
904 return gen_frame_mem (HImode
, plus_constant (stack_pointer_rtx
, 1));
907 /* Helper for expand_prologue. Emit a push of a byte register. */
910 emit_push_byte (unsigned regno
, bool frame_related_p
)
914 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
915 mem
= gen_frame_mem (QImode
, mem
);
916 reg
= gen_rtx_REG (QImode
, regno
);
918 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
920 RTX_FRAME_RELATED_P (insn
) = 1;
922 cfun
->machine
->stack_usage
++;
926 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
927 SFR is a MEM representing the memory location of the SFR.
928 If CLR_P then clear the SFR after the push using zero_reg. */
931 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
935 gcc_assert (MEM_P (sfr
));
937 /* IN __tmp_reg__, IO(SFR) */
938 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
940 RTX_FRAME_RELATED_P (insn
) = 1;
942 /* PUSH __tmp_reg__ */
943 emit_push_byte (TMP_REGNO
, frame_related_p
);
947 /* OUT IO(SFR), __zero_reg__ */
948 insn
= emit_move_insn (sfr
, const0_rtx
);
950 RTX_FRAME_RELATED_P (insn
) = 1;
955 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
958 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
959 int live_seq
= sequent_regs_live ();
961 bool minimize
= (TARGET_CALL_PROLOGUES
964 && !cfun
->machine
->is_OS_task
965 && !cfun
->machine
->is_OS_main
);
968 && (frame_pointer_needed
969 || avr_outgoing_args_size() > 8
970 || (AVR_2_BYTE_PC
&& live_seq
> 6)
974 int first_reg
, reg
, offset
;
976 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
977 gen_int_mode (size
, HImode
));
979 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
980 gen_int_mode (live_seq
+size
, HImode
));
981 insn
= emit_insn (pattern
);
982 RTX_FRAME_RELATED_P (insn
) = 1;
984 /* Describe the effect of the unspec_volatile call to prologue_saves.
985 Note that this formulation assumes that add_reg_note pushes the
986 notes to the front. Thus we build them in the reverse order of
987 how we want dwarf2out to process them. */
989 /* The function does always set frame_pointer_rtx, but whether that
990 is going to be permanent in the function is frame_pointer_needed. */
992 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
993 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
995 : stack_pointer_rtx
),
996 plus_constant (stack_pointer_rtx
,
997 -(size
+ live_seq
))));
999 /* Note that live_seq always contains r28+r29, but the other
1000 registers to be saved are all below 18. */
1002 first_reg
= 18 - (live_seq
- 2);
1004 for (reg
= 29, offset
= -live_seq
+ 1;
1006 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1010 m
= gen_rtx_MEM (QImode
, plus_constant (stack_pointer_rtx
, offset
));
1011 r
= gen_rtx_REG (QImode
, reg
);
1012 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1015 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1017 else /* !minimize */
1021 for (reg
= 0; reg
< 32; ++reg
)
1022 if (TEST_HARD_REG_BIT (set
, reg
))
1023 emit_push_byte (reg
, true);
1025 if (frame_pointer_needed
1026 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1028 /* Push frame pointer. Always be consistent about the
1029 ordering of pushes -- epilogue_restores expects the
1030 register pair to be pushed low byte first. */
1032 emit_push_byte (REG_Y
, true);
1033 emit_push_byte (REG_Y
+ 1, true);
1036 if (frame_pointer_needed
1039 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1040 RTX_FRAME_RELATED_P (insn
) = 1;
1045 /* Creating a frame can be done by direct manipulation of the
1046 stack or via the frame pointer. These two methods are:
1053 the optimum method depends on function type, stack and
1054 frame size. To avoid a complex logic, both methods are
1055 tested and shortest is selected.
1057 There is also the case where SIZE != 0 and no frame pointer is
1058 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1059 In that case, insn (*) is not needed.
1060 We use the X register as scratch. This is safe because in X
1062 In an interrupt routine, the case of SIZE != 0 together with
1063 !frame_pointer_needed can only occur if the function is not a
1064 leaf function and thus X has already been saved. */
1067 rtx fp_plus_insns
, fp
, my_fp
;
1069 gcc_assert (frame_pointer_needed
1071 || !current_function_is_leaf
);
1073 fp
= my_fp
= (frame_pointer_needed
1075 : gen_rtx_REG (Pmode
, REG_X
));
1077 if (AVR_HAVE_8BIT_SP
)
1079 /* The high byte (r29) does not change:
1080 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1082 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1085 /************ Method 1: Adjust frame pointer ************/
1089 /* Normally, the dwarf2out frame-related-expr interpreter does
1090 not expect to have the CFA change once the frame pointer is
1091 set up. Thus, we avoid marking the move insn below and
1092 instead indicate that the entire operation is complete after
1093 the frame pointer subtraction is done. */
1095 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1096 if (frame_pointer_needed
)
1098 RTX_FRAME_RELATED_P (insn
) = 1;
1099 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1100 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1103 insn
= emit_move_insn (my_fp
, plus_constant (my_fp
, -size
));
1104 if (frame_pointer_needed
)
1106 RTX_FRAME_RELATED_P (insn
) = 1;
1107 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1108 gen_rtx_SET (VOIDmode
, fp
,
1109 plus_constant (fp
, -size
)));
1112 /* Copy to stack pointer. Note that since we've already
1113 changed the CFA to the frame pointer this operation
1114 need not be annotated if frame pointer is needed.
1115 Always move through unspec, see PR50063.
1116 For meaning of irq_state see movhi_sp_r insn. */
1118 if (cfun
->machine
->is_interrupt
)
1121 if (TARGET_NO_INTERRUPTS
1122 || cfun
->machine
->is_signal
1123 || cfun
->machine
->is_OS_main
)
1126 if (AVR_HAVE_8BIT_SP
)
1129 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1130 fp
, GEN_INT (irq_state
)));
1131 if (!frame_pointer_needed
)
1133 RTX_FRAME_RELATED_P (insn
) = 1;
1134 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1135 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1136 plus_constant (stack_pointer_rtx
,
1140 fp_plus_insns
= get_insns ();
1143 /************ Method 2: Adjust Stack pointer ************/
1145 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1146 can only handle specific offsets. */
1148 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1154 insn
= emit_move_insn (stack_pointer_rtx
,
1155 plus_constant (stack_pointer_rtx
, -size
));
1156 RTX_FRAME_RELATED_P (insn
) = 1;
1157 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1158 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1159 plus_constant (stack_pointer_rtx
,
1161 if (frame_pointer_needed
)
1163 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1164 RTX_FRAME_RELATED_P (insn
) = 1;
1167 sp_plus_insns
= get_insns ();
1170 /************ Use shortest method ************/
1172 emit_insn (get_sequence_length (sp_plus_insns
)
1173 < get_sequence_length (fp_plus_insns
)
1179 emit_insn (fp_plus_insns
);
1182 cfun
->machine
->stack_usage
+= size
;
1183 } /* !minimize && size != 0 */
1188 /* Output function prologue. */
1191 expand_prologue (void)
1196 size
= get_frame_size() + avr_outgoing_args_size();
1198 /* Init cfun->machine. */
1199 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
1200 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
1201 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
1202 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
1203 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
1204 cfun
->machine
->stack_usage
= 0;
1206 /* Prologue: naked. */
1207 if (cfun
->machine
->is_naked
)
1212 avr_regs_to_save (&set
);
1214 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1216 /* Enable interrupts. */
1217 if (cfun
->machine
->is_interrupt
)
1218 emit_insn (gen_enable_interrupt ());
1220 /* Push zero reg. */
1221 emit_push_byte (ZERO_REGNO
, true);
1224 emit_push_byte (TMP_REGNO
, true);
1227 /* ??? There's no dwarf2 column reserved for SREG. */
1228 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1230 /* Clear zero reg. */
1231 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1233 /* Prevent any attempt to delete the setting of ZERO_REG! */
1234 emit_use (zero_reg_rtx
);
1236 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1237 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1240 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1243 && TEST_HARD_REG_BIT (set
, REG_X
)
1244 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1246 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1250 && (frame_pointer_needed
1251 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1252 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1254 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1258 && TEST_HARD_REG_BIT (set
, REG_Z
)
1259 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1261 emit_push_sfr (rampz_rtx
, false /* frame-related */, true /* clr */);
1263 } /* is_interrupt is_signal */
1265 avr_prologue_setup_frame (size
, set
);
1267 if (flag_stack_usage_info
)
1268 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1271 /* Output summary at end of function prologue. */
1274 avr_asm_function_end_prologue (FILE *file
)
1276 if (cfun
->machine
->is_naked
)
1278 fputs ("/* prologue: naked */\n", file
);
1282 if (cfun
->machine
->is_interrupt
)
1284 fputs ("/* prologue: Interrupt */\n", file
);
1286 else if (cfun
->machine
->is_signal
)
1288 fputs ("/* prologue: Signal */\n", file
);
1291 fputs ("/* prologue: function */\n", file
);
1294 if (ACCUMULATE_OUTGOING_ARGS
)
1295 fprintf (file
, "/* outgoing args size = %d */\n",
1296 avr_outgoing_args_size());
1298 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1300 fprintf (file
, "/* stack size = %d */\n",
1301 cfun
->machine
->stack_usage
);
1302 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1303 usage for offset so that SP + .L__stack_offset = return address. */
1304 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1308 /* Implement EPILOGUE_USES. */
1311 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1313 if (reload_completed
1315 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1320 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1323 emit_pop_byte (unsigned regno
)
1327 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1328 mem
= gen_frame_mem (QImode
, mem
);
1329 reg
= gen_rtx_REG (QImode
, regno
);
1331 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1334 /* Output RTL epilogue. */
1337 expand_epilogue (bool sibcall_p
)
1344 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1346 size
= get_frame_size() + avr_outgoing_args_size();
1348 /* epilogue: naked */
1349 if (cfun
->machine
->is_naked
)
1351 gcc_assert (!sibcall_p
);
1353 emit_jump_insn (gen_return ());
1357 avr_regs_to_save (&set
);
1358 live_seq
= sequent_regs_live ();
1360 minimize
= (TARGET_CALL_PROLOGUES
1363 && !cfun
->machine
->is_OS_task
1364 && !cfun
->machine
->is_OS_main
);
1368 || frame_pointer_needed
1371 /* Get rid of frame. */
1373 if (!frame_pointer_needed
)
1375 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1380 emit_move_insn (frame_pointer_rtx
,
1381 plus_constant (frame_pointer_rtx
, size
));
1384 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1390 /* Try two methods to adjust stack and select shortest. */
1396 gcc_assert (frame_pointer_needed
1398 || !current_function_is_leaf
);
1400 fp
= my_fp
= (frame_pointer_needed
1402 : gen_rtx_REG (Pmode
, REG_X
));
1404 if (AVR_HAVE_8BIT_SP
)
1406 /* The high byte (r29) does not change:
1407 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1409 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1412 /********** Method 1: Adjust fp register **********/
1416 if (!frame_pointer_needed
)
1417 emit_move_insn (fp
, stack_pointer_rtx
);
1419 emit_move_insn (my_fp
, plus_constant (my_fp
, size
));
1421 /* Copy to stack pointer. */
1423 if (TARGET_NO_INTERRUPTS
)
1426 if (AVR_HAVE_8BIT_SP
)
1429 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1430 GEN_INT (irq_state
)));
1432 fp_plus_insns
= get_insns ();
1435 /********** Method 2: Adjust Stack pointer **********/
1437 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1443 emit_move_insn (stack_pointer_rtx
,
1444 plus_constant (stack_pointer_rtx
, size
));
1446 sp_plus_insns
= get_insns ();
1449 /************ Use shortest method ************/
1451 emit_insn (get_sequence_length (sp_plus_insns
)
1452 < get_sequence_length (fp_plus_insns
)
1457 emit_insn (fp_plus_insns
);
1460 if (frame_pointer_needed
1461 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1463 /* Restore previous frame_pointer. See expand_prologue for
1464 rationale for not using pophi. */
1466 emit_pop_byte (REG_Y
+ 1);
1467 emit_pop_byte (REG_Y
);
1470 /* Restore used registers. */
1472 for (reg
= 31; reg
>= 0; --reg
)
1473 if (TEST_HARD_REG_BIT (set
, reg
))
1474 emit_pop_byte (reg
);
1478 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1479 The conditions to restore them must be tha same as in prologue. */
1482 && TEST_HARD_REG_BIT (set
, REG_X
)
1483 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1485 emit_pop_byte (TMP_REGNO
);
1486 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1490 && (frame_pointer_needed
1491 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1492 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1494 emit_pop_byte (TMP_REGNO
);
1495 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1499 && TEST_HARD_REG_BIT (set
, REG_Z
)
1500 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1502 emit_pop_byte (TMP_REGNO
);
1503 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1508 emit_pop_byte (TMP_REGNO
);
1509 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1512 /* Restore SREG using tmp_reg as scratch. */
1514 emit_pop_byte (TMP_REGNO
);
1515 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1517 /* Restore tmp REG. */
1518 emit_pop_byte (TMP_REGNO
);
1520 /* Restore zero REG. */
1521 emit_pop_byte (ZERO_REGNO
);
1525 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1537 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1540 avr_cannot_modify_jumps_p (void)
1543 /* Naked Functions must not have any instructions after
1544 their epilogue, see PR42240 */
1546 if (reload_completed
1548 && cfun
->machine
->is_naked
)
1557 /* Helper function for `avr_legitimate_address_p'. */
1560 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1561 RTX_CODE outer_code
, bool strict
)
1564 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1565 as
, outer_code
, UNKNOWN
)
1567 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1571 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1572 machine for a memory operand of mode MODE. */
1575 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1577 bool ok
= CONSTANT_ADDRESS_P (x
);
1579 switch (GET_CODE (x
))
1582 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1587 && REG_X
== REGNO (x
))
1595 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1596 GET_CODE (x
), strict
);
1601 rtx reg
= XEXP (x
, 0);
1602 rtx op1
= XEXP (x
, 1);
1605 && CONST_INT_P (op1
)
1606 && INTVAL (op1
) >= 0)
1608 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1613 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1616 if (reg
== frame_pointer_rtx
1617 || reg
== arg_pointer_rtx
)
1622 else if (frame_pointer_needed
1623 && reg
== frame_pointer_rtx
)
1635 if (avr_log
.legitimate_address_p
)
1637 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1638 "reload_completed=%d reload_in_progress=%d %s:",
1639 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1640 reg_renumber
? "(reg_renumber)" : "");
1642 if (GET_CODE (x
) == PLUS
1643 && REG_P (XEXP (x
, 0))
1644 && CONST_INT_P (XEXP (x
, 1))
1645 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1648 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1649 true_regnum (XEXP (x
, 0)));
1652 avr_edump ("\n%r\n", x
);
1659 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1660 now only a helper for avr_addr_space_legitimize_address. */
1661 /* Attempts to replace X with a valid
1662 memory address for an operand of mode MODE */
1665 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1667 bool big_offset_p
= false;
1671 if (GET_CODE (oldx
) == PLUS
1672 && REG_P (XEXP (oldx
, 0)))
1674 if (REG_P (XEXP (oldx
, 1)))
1675 x
= force_reg (GET_MODE (oldx
), oldx
);
1676 else if (CONST_INT_P (XEXP (oldx
, 1)))
1678 int offs
= INTVAL (XEXP (oldx
, 1));
1679 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1680 && offs
> MAX_LD_OFFSET (mode
))
1682 big_offset_p
= true;
1683 x
= force_reg (GET_MODE (oldx
), oldx
);
1688 if (avr_log
.legitimize_address
)
1690 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1693 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1700 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1701 /* This will allow register R26/27 to be used where it is no worse than normal
1702 base pointers R28/29 or R30/31. For example, if base offset is greater
1703 than 63 bytes or for R++ or --R addressing. */
1706 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1707 int opnum
, int type
, int addr_type
,
1708 int ind_levels ATTRIBUTE_UNUSED
,
1709 rtx (*mk_memloc
)(rtx
,int))
1713 if (avr_log
.legitimize_reload_address
)
1714 avr_edump ("\n%?:%m %r\n", mode
, x
);
1716 if (1 && (GET_CODE (x
) == POST_INC
1717 || GET_CODE (x
) == PRE_DEC
))
1719 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1720 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1721 opnum
, RELOAD_OTHER
);
1723 if (avr_log
.legitimize_reload_address
)
1724 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1725 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1730 if (GET_CODE (x
) == PLUS
1731 && REG_P (XEXP (x
, 0))
1732 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1733 && CONST_INT_P (XEXP (x
, 1))
1734 && INTVAL (XEXP (x
, 1)) >= 1)
1736 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1740 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1742 int regno
= REGNO (XEXP (x
, 0));
1743 rtx mem
= mk_memloc (x
, regno
);
1745 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1746 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1749 if (avr_log
.legitimize_reload_address
)
1750 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1751 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1753 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1754 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1757 if (avr_log
.legitimize_reload_address
)
1758 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1759 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1764 else if (! (frame_pointer_needed
1765 && XEXP (x
, 0) == frame_pointer_rtx
))
1767 push_reload (x
, NULL_RTX
, px
, NULL
,
1768 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1771 if (avr_log
.legitimize_reload_address
)
1772 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1773 POINTER_REGS
, x
, NULL_RTX
);
1783 /* Helper function to print assembler resp. track instruction
1784 sequence lengths. Always return "".
1787 Output assembler code from template TPL with operands supplied
1788 by OPERANDS. This is just forwarding to output_asm_insn.
1791 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1792 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1793 Don't output anything.
1797 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1801 output_asm_insn (tpl
, operands
);
1815 /* Return a pointer register name as a string. */
1818 ptrreg_to_str (int regno
)
1822 case REG_X
: return "X";
1823 case REG_Y
: return "Y";
1824 case REG_Z
: return "Z";
1826 output_operand_lossage ("address operand requires constraint for"
1827 " X, Y, or Z register");
1832 /* Return the condition name as a string.
1833 Used in conditional jump constructing */
1836 cond_string (enum rtx_code code
)
1845 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1850 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1866 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1867 /* Output ADDR to FILE as address. */
1870 avr_print_operand_address (FILE *file
, rtx addr
)
1872 switch (GET_CODE (addr
))
1875 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1879 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1883 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1887 if (CONSTANT_ADDRESS_P (addr
)
1888 && text_segment_operand (addr
, VOIDmode
))
1891 if (GET_CODE (x
) == CONST
)
1893 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1895 /* Assembler gs() will implant word address. Make offset
1896 a byte offset inside gs() for assembler. This is
1897 needed because the more logical (constant+gs(sym)) is not
1898 accepted by gas. For 128K and lower devices this is ok.
1899 For large devices it will create a Trampoline to offset
1900 from symbol which may not be what the user really wanted. */
1901 fprintf (file
, "gs(");
1902 output_addr_const (file
, XEXP (x
,0));
1903 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
1904 2 * INTVAL (XEXP (x
, 1)));
1906 if (warning (0, "pointer offset from symbol maybe incorrect"))
1908 output_addr_const (stderr
, addr
);
1909 fprintf(stderr
,"\n");
1914 fprintf (file
, "gs(");
1915 output_addr_const (file
, addr
);
1916 fprintf (file
, ")");
1920 output_addr_const (file
, addr
);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  /* `~' and `!' are the only punctuation codes handled by
     avr_print_operand (jump/call resp. EIJMP/EICALL selection).  */
  return code == '~' || code == '!';
}
1934 /* Implement `TARGET_PRINT_OPERAND'. */
1935 /* Output X as assembler operand to file FILE.
1936 For a description of supported %-codes, see top of avr.md. */
1939 avr_print_operand (FILE *file
, rtx x
, int code
)
1943 if (code
>= 'A' && code
<= 'D')
1948 if (!AVR_HAVE_JMP_CALL
)
1951 else if (code
== '!')
1953 if (AVR_HAVE_EIJMP_EICALL
)
1956 else if (code
== 't'
1959 static int t_regno
= -1;
1960 static int t_nbits
= -1;
1962 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
1964 t_regno
= REGNO (x
);
1965 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
1967 else if (CONST_INT_P (x
) && t_regno
>= 0
1968 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
1970 int bpos
= INTVAL (x
);
1972 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
1974 fprintf (file
, ",%d", bpos
% 8);
1979 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
1983 if (x
== zero_reg_rtx
)
1984 fprintf (file
, "__zero_reg__");
1986 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1988 else if (CONST_INT_P (x
))
1990 HOST_WIDE_INT ival
= INTVAL (x
);
1993 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
1994 else if (low_io_address_operand (x
, VOIDmode
)
1995 || high_io_address_operand (x
, VOIDmode
))
1997 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
1998 fprintf (file
, "__RAMPZ__");
1999 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2000 fprintf (file
, "__RAMPY__");
2001 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2002 fprintf (file
, "__RAMPX__");
2003 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2004 fprintf (file
, "__RAMPD__");
2005 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2006 fprintf (file
, "__CCP__");
2007 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2008 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2009 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2012 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2013 ival
- avr_current_arch
->sfr_offset
);
2017 fatal_insn ("bad address, not an I/O address:", x
);
2021 rtx addr
= XEXP (x
, 0);
2025 if (!CONSTANT_P (addr
))
2026 fatal_insn ("bad address, not a constant:", addr
);
2027 /* Assembler template with m-code is data - not progmem section */
2028 if (text_segment_operand (addr
, VOIDmode
))
2029 if (warning (0, "accessing data memory with"
2030 " program memory address"))
2032 output_addr_const (stderr
, addr
);
2033 fprintf(stderr
,"\n");
2035 output_addr_const (file
, addr
);
2037 else if (code
== 'i')
2039 avr_print_operand (file
, addr
, 'i');
2041 else if (code
== 'o')
2043 if (GET_CODE (addr
) != PLUS
)
2044 fatal_insn ("bad address, not (reg+disp):", addr
);
2046 avr_print_operand (file
, XEXP (addr
, 1), 0);
2048 else if (code
== 'p' || code
== 'r')
2050 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2051 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2054 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2056 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2058 else if (GET_CODE (addr
) == PLUS
)
2060 avr_print_operand_address (file
, XEXP (addr
,0));
2061 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2062 fatal_insn ("internal compiler error. Bad address:"
2065 avr_print_operand (file
, XEXP (addr
,1), code
);
2068 avr_print_operand_address (file
, addr
);
2070 else if (code
== 'i')
2072 fatal_insn ("bad address, not an I/O address:", x
);
2074 else if (code
== 'x')
2076 /* Constant progmem address - like used in jmp or call */
2077 if (0 == text_segment_operand (x
, VOIDmode
))
2078 if (warning (0, "accessing program memory"
2079 " with data memory address"))
2081 output_addr_const (stderr
, x
);
2082 fprintf(stderr
,"\n");
2084 /* Use normal symbol for direct address no linker trampoline needed */
2085 output_addr_const (file
, x
);
2087 else if (GET_CODE (x
) == CONST_DOUBLE
)
2091 if (GET_MODE (x
) != SFmode
)
2092 fatal_insn ("internal compiler error. Unknown mode:", x
);
2093 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2094 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2095 fprintf (file
, "0x%lx", val
);
2097 else if (GET_CODE (x
) == CONST_STRING
)
2098 fputs (XSTR (x
, 0), file
);
2099 else if (code
== 'j')
2100 fputs (cond_string (GET_CODE (x
)), file
);
2101 else if (code
== 'k')
2102 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2104 avr_print_operand_address (file
, x
);
2107 /* Update the condition code in the INSN. */
2110 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2113 enum attr_cc cc
= get_attr_cc (insn
);
2121 case CC_OUT_PLUS_NOCLOBBER
:
2124 rtx
*op
= recog_data
.operand
;
2127 /* Extract insn's operands. */
2128 extract_constrain_insn_cached (insn
);
2136 avr_out_plus (op
, &len_dummy
, &icc
);
2137 cc
= (enum attr_cc
) icc
;
2140 case CC_OUT_PLUS_NOCLOBBER
:
2141 avr_out_plus_noclobber (op
, &len_dummy
, &icc
);
2142 cc
= (enum attr_cc
) icc
;
2147 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2148 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2149 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2151 /* Any other "r,rL" combination does not alter cc0. */
2155 } /* inner switch */
2159 } /* outer swicth */
2164 /* Special values like CC_OUT_PLUS from above have been
2165 mapped to "standard" CC_* values so we never come here. */
2171 /* Insn does not affect CC at all. */
2179 set
= single_set (insn
);
2183 cc_status
.flags
|= CC_NO_OVERFLOW
;
2184 cc_status
.value1
= SET_DEST (set
);
2189 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2190 The V flag may or may not be known but that's ok because
2191 alter_cond will change tests to use EQ/NE. */
2192 set
= single_set (insn
);
2196 cc_status
.value1
= SET_DEST (set
);
2197 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2202 set
= single_set (insn
);
2205 cc_status
.value1
= SET_SRC (set
);
2209 /* Insn doesn't leave CC in a usable state. */
2215 /* Choose mode for jump insn:
2216 1 - relative jump in range -63 <= x <= 62 ;
2217 2 - relative jump in range -2046 <= x <= 2045 ;
2218 3 - absolute jump (only for ATmega[16]03). */
2221 avr_jump_mode (rtx x
, rtx insn
)
2223 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2224 ? XEXP (x
, 0) : x
));
2225 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2226 int jump_distance
= cur_addr
- dest_addr
;
2228 if (-63 <= jump_distance
&& jump_distance
<= 62)
2230 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2232 else if (AVR_HAVE_JMP_CALL
)
2238 /* return an AVR condition jump commands.
2239 X is a comparison RTX.
2240 LEN is a number returned by avr_jump_mode function.
2241 if REVERSE nonzero then condition code in X must be reversed. */
2244 ret_cond_branch (rtx x
, int len
, int reverse
)
2246 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2251 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2252 return (len
== 1 ? ("breq .+2" CR_TAB
2254 len
== 2 ? ("breq .+4" CR_TAB
2262 return (len
== 1 ? ("breq .+2" CR_TAB
2264 len
== 2 ? ("breq .+4" CR_TAB
2271 return (len
== 1 ? ("breq .+2" CR_TAB
2273 len
== 2 ? ("breq .+4" CR_TAB
2280 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2281 return (len
== 1 ? ("breq %0" CR_TAB
2283 len
== 2 ? ("breq .+2" CR_TAB
2290 return (len
== 1 ? ("breq %0" CR_TAB
2292 len
== 2 ? ("breq .+2" CR_TAB
2299 return (len
== 1 ? ("breq %0" CR_TAB
2301 len
== 2 ? ("breq .+2" CR_TAB
2315 return ("br%j1 .+2" CR_TAB
2318 return ("br%j1 .+4" CR_TAB
2329 return ("br%k1 .+2" CR_TAB
2332 return ("br%k1 .+4" CR_TAB
2340 /* Output insn cost for next insn. */
2343 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2344 int num_operands ATTRIBUTE_UNUSED
)
2346 if (avr_log
.rtx_costs
)
2348 rtx set
= single_set (insn
);
2351 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2352 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2354 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2355 rtx_cost (PATTERN (insn
), INSN
, 0,
2356 optimize_insn_for_speed_p()));
2360 /* Return 0 if undefined, 1 if always true or always false. */
2363 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2365 unsigned int max
= (mode
== QImode
? 0xff :
2366 mode
== HImode
? 0xffff :
2367 mode
== PSImode
? 0xffffff :
2368 mode
== SImode
? 0xffffffff : 0);
2369 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
2371 if (unsigned_condition (op
) != op
)
2374 if (max
!= (INTVAL (x
) & max
)
2375 && INTVAL (x
) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR the argument registers are r8 ... r25.  */

int
function_arg_regno_p(int r)
{
  return (r >= 8 && r <= 25);
}
2391 /* Initializing the variable cum for the state at the beginning
2392 of the argument list. */
2395 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2396 tree fndecl ATTRIBUTE_UNUSED
)
2399 cum
->regno
= FIRST_CUM_REG
;
2400 if (!libname
&& stdarg_p (fntype
))
2403 /* Assume the calle may be tail called */
2405 cfun
->machine
->sibcall_fails
= 0;
2408 /* Returns the number of registers to allocate for a function argument. */
2411 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2415 if (mode
== BLKmode
)
2416 size
= int_size_in_bytes (type
);
2418 size
= GET_MODE_SIZE (mode
);
2420 /* Align all function arguments to start in even-numbered registers.
2421 Odd-sized arguments leave holes above them. */
2423 return (size
+ 1) & ~1;
2426 /* Controls whether a function argument is passed
2427 in a register, and which register. */
2430 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2431 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2433 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2434 int bytes
= avr_num_arg_regs (mode
, type
);
2436 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2437 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2442 /* Update the summarizer variable CUM to advance past an argument
2443 in the argument list. */
2446 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2447 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2449 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2450 int bytes
= avr_num_arg_regs (mode
, type
);
2452 cum
->nregs
-= bytes
;
2453 cum
->regno
-= bytes
;
2455 /* A parameter is being passed in a call-saved register. As the original
2456 contents of these regs has to be restored before leaving the function,
2457 a function must not pass arguments in call-saved regs in order to get
2462 && !call_used_regs
[cum
->regno
])
2464 /* FIXME: We ship info on failing tail-call in struct machine_function.
2465 This uses internals of calls.c:expand_call() and the way args_so_far
2466 is used. targetm.function_ok_for_sibcall() needs to be extended to
2467 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2468 dependent so that such an extension is not wanted. */
2470 cfun
->machine
->sibcall_fails
= 1;
2473 /* Test if all registers needed by the ABI are actually available. If the
2474 user has fixed a GPR needed to pass an argument, an (implicit) function
2475 call will clobber that fixed register. See PR45099 for an example. */
2482 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2483 if (fixed_regs
[regno
])
2484 warning (0, "fixed register %s used to pass parameter to function",
2488 if (cum
->nregs
<= 0)
2491 cum
->regno
= FIRST_CUM_REG
;
2495 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2496 /* Decide whether we can make a sibling call to a function. DECL is the
2497 declaration of the function being targeted by the call and EXP is the
2498 CALL_EXPR representing the call. */
2501 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2505 /* Tail-calling must fail if callee-saved regs are used to pass
2506 function args. We must not tail-call when `epilogue_restores'
2507 is used. Unfortunately, we cannot tell at this point if that
2508 actually will happen or not, and we cannot step back from
2509 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2511 if (cfun
->machine
->sibcall_fails
2512 || TARGET_CALL_PROLOGUES
)
2517 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2521 decl_callee
= TREE_TYPE (decl_callee
);
2525 decl_callee
= fntype_callee
;
2527 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2528 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2530 decl_callee
= TREE_TYPE (decl_callee
);
2534 /* Ensure that caller and callee have compatible epilogues */
2536 if (interrupt_function_p (current_function_decl
)
2537 || signal_function_p (current_function_decl
)
2538 || avr_naked_function_p (decl_callee
)
2539 || avr_naked_function_p (current_function_decl
)
2540 /* FIXME: For OS_task and OS_main, we are over-conservative.
2541 This is due to missing documentation of these attributes
2542 and what they actually should do and should not do. */
2543 || (avr_OS_task_function_p (decl_callee
)
2544 != avr_OS_task_function_p (current_function_decl
))
2545 || (avr_OS_main_function_p (decl_callee
)
2546 != avr_OS_main_function_p (current_function_decl
)))
2554 /***********************************************************************
2555 Functions for outputting various mov's for a various modes
2556 ************************************************************************/
2558 /* Return true if a value of mode MODE is read from flash by
2559 __load_* function from libgcc. */
2562 avr_load_libgcc_p (rtx op
)
2564 enum machine_mode mode
= GET_MODE (op
);
2565 int n_bytes
= GET_MODE_SIZE (mode
);
2569 && avr_mem_flash_p (op
));
2572 /* Return true if a value of mode MODE is read by __xload_* function. */
2575 avr_xload_libgcc_p (enum machine_mode mode
)
2577 int n_bytes
= GET_MODE_SIZE (mode
);
2580 || avr_current_device
->n_flash
> 1);
2584 /* Find an unused d-register to be used as scratch in INSN.
2585 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2586 is a register, skip all possible return values that overlap EXCLUDE.
2587 The policy for the returned register is similar to that of
2588 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2591 Return a QImode d-register or NULL_RTX if nothing found. */
2594 avr_find_unused_d_reg (rtx insn
, rtx exclude
)
2597 bool isr_p
= (interrupt_function_p (current_function_decl
)
2598 || signal_function_p (current_function_decl
));
2600 for (regno
= 16; regno
< 32; regno
++)
2602 rtx reg
= all_regs_rtx
[regno
];
2605 && reg_overlap_mentioned_p (exclude
, reg
))
2606 || fixed_regs
[regno
])
2611 /* Try non-live register */
2613 if (!df_regs_ever_live_p (regno
)
2614 && (TREE_THIS_VOLATILE (current_function_decl
)
2615 || cfun
->machine
->is_OS_task
2616 || cfun
->machine
->is_OS_main
2617 || (!isr_p
&& call_used_regs
[regno
])))
2622 /* Any live register can be used if it is unused after.
2623 Prologue/epilogue will care for it as needed. */
2625 if (df_regs_ever_live_p (regno
)
2626 && reg_unused_after (insn
, reg
))
2636 /* Helper function for the next function in the case where only restricted
2637 version of LPM instruction is available. */
2640 avr_out_lpm_no_lpmx (rtx insn
, rtx
*xop
, int *plen
)
2644 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2647 regno_dest
= REGNO (dest
);
2649 /* The implicit target register of LPM. */
2650 xop
[3] = lpm_reg_rtx
;
2652 switch (GET_CODE (addr
))
2659 gcc_assert (REG_Z
== REGNO (addr
));
2667 avr_asm_len ("%4lpm", xop
, plen
, 1);
2669 if (regno_dest
!= LPM_REGNO
)
2670 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2675 if (REGNO (dest
) == REG_Z
)
2676 return avr_asm_len ("%4lpm" CR_TAB
2681 "pop %A0", xop
, plen
, 6);
2683 avr_asm_len ("%4lpm" CR_TAB
2687 "mov %B0,%3", xop
, plen
, 5);
2689 if (!reg_unused_after (insn
, addr
))
2690 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2699 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
2702 if (regno_dest
== LPM_REGNO
)
2703 avr_asm_len ("%4lpm" CR_TAB
2704 "adiw %2,1", xop
, plen
, 2);
2706 avr_asm_len ("%4lpm" CR_TAB
2708 "adiw %2,1", xop
, plen
, 3);
2711 avr_asm_len ("%4lpm" CR_TAB
2713 "adiw %2,1", xop
, plen
, 3);
2716 avr_asm_len ("%4lpm" CR_TAB
2718 "adiw %2,1", xop
, plen
, 3);
2721 avr_asm_len ("%4lpm" CR_TAB
2723 "adiw %2,1", xop
, plen
, 3);
2725 break; /* POST_INC */
2727 } /* switch CODE (addr) */
2733 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2734 OP[1] in AS1 to register OP[0].
2735 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2739 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2743 rtx src
= SET_SRC (single_set (insn
));
2745 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2749 addr_space_t as
= MEM_ADDR_SPACE (src
);
2756 warning (0, "writing to address space %qs not supported",
2757 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2762 addr
= XEXP (src
, 0);
2763 code
= GET_CODE (addr
);
2765 gcc_assert (REG_P (dest
));
2766 gcc_assert (REG
== code
|| POST_INC
== code
);
2770 xop
[2] = lpm_addr_reg_rtx
;
2771 xop
[4] = xstring_empty
;
2772 xop
[5] = tmp_reg_rtx
;
2774 regno_dest
= REGNO (dest
);
2776 segment
= avr_addrspace
[as
].segment
;
2778 /* Set RAMPZ as needed. */
2782 xop
[4] = GEN_INT (segment
);
2784 if (xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
),
2787 avr_asm_len ("ldi %3,%4" CR_TAB
2788 "out __RAMPZ__,%3", xop
, plen
, 2);
2790 else if (segment
== 1)
2792 avr_asm_len ("clr %5" CR_TAB
2794 "out __RAMPZ__,%5", xop
, plen
, 3);
2798 avr_asm_len ("mov %5,%2" CR_TAB
2800 "out __RAMPZ__,%2" CR_TAB
2801 "mov %2,%5", xop
, plen
, 4);
2806 if (!AVR_HAVE_ELPMX
)
2807 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2809 else if (!AVR_HAVE_LPMX
)
2811 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2814 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2816 switch (GET_CODE (addr
))
2823 gcc_assert (REG_Z
== REGNO (addr
));
2831 return avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
2834 if (REGNO (dest
) == REG_Z
)
2835 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2836 "%4lpm %B0,%a2" CR_TAB
2837 "mov %A0,%5", xop
, plen
, 3);
2840 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2841 "%4lpm %B0,%a2", xop
, plen
, 2);
2843 if (!reg_unused_after (insn
, addr
))
2844 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2851 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2852 "%4lpm %B0,%a2+" CR_TAB
2853 "%4lpm %C0,%a2", xop
, plen
, 3);
2855 if (!reg_unused_after (insn
, addr
))
2856 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
2862 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2863 "%4lpm %B0,%a2+", xop
, plen
, 2);
2865 if (REGNO (dest
) == REG_Z
- 2)
2866 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2867 "%4lpm %C0,%a2" CR_TAB
2868 "mov %D0,%5", xop
, plen
, 3);
2871 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2872 "%4lpm %D0,%a2", xop
, plen
, 2);
2874 if (!reg_unused_after (insn
, addr
))
2875 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
2885 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
2888 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
2889 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
2890 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
2891 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
2893 break; /* POST_INC */
2895 } /* switch CODE (addr) */
2901 /* Worker function for xload_8 insn. */
/* NOTE(review): this extracted text is corrupted -- statements are split
   across lines and some original lines are missing.  Code tokens are left
   byte-identical; only comments were added.
   Emits assembler (via avr_asm_len) to load one byte through the Z pointer,
   choosing "ld" vs. "lpm" at runtime depending on bit 7 of %1 ("sbrs %1,7")
   -- presumably a RAM-vs-Flash address-space dispatch; confirm against the
   xload_8 insn pattern.  PLEN, if non-NULL, accumulates the length.  */
2904 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
/* xop[2] = Z register; xop[3] = destination if LPMX available, else R0.  */
2910 xop
[2] = lpm_addr_reg_rtx
;
2911 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
2916 avr_asm_len ("ld %3,%a2" CR_TAB
2917 "sbrs %1,7", xop
, plen
, 2);
2919 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, 1);
/* Move result into the real destination if LPM had to target R0.  */
2921 if (REGNO (xop
[0]) != REGNO (xop
[3]))
2922 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
/* Output an 8-bit (QImode) move.  Dispatches on operand kinds:
   flash operand -> avr_out_lpm; reg<-reg; reg<-const via
   output_reload_in_const; reg<-mem via out_movqi_r_mr; mem<-reg via
   out_movqi_mr_r (with const0 replaced by the zero register).
   NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical, comments only.  */
2929 output_movqi (rtx insn
, rtx operands
[], int *l
)
2932 rtx dest
= operands
[0];
2933 rtx src
= operands
[1];
/* Reads/writes that touch Flash go through the LPM helper.  */
2936 if (avr_mem_flash_p (src
)
2937 || avr_mem_flash_p (dest
))
2939 return avr_out_lpm (insn
, operands
, real_l
);
2947 if (register_operand (dest
, QImode
))
2949 if (register_operand (src
, QImode
)) /* mov r,r */
2951 if (test_hard_reg_class (STACK_REG
, dest
))
2953 else if (test_hard_reg_class (STACK_REG
, src
))
2958 else if (CONSTANT_P (src
))
2960 output_reload_in_const (operands
, NULL_RTX
, real_l
, false);
2963 else if (GET_CODE (src
) == MEM
)
2964 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2966 else if (GET_CODE (dest
) == MEM
)
/* Storing constant 0 uses the fixed zero register instead.  */
2971 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
2973 return out_movqi_mr_r (insn
, xop
, real_l
);
/* Output a 16-bit (HImode) move.  Special-cases the stack pointer as
   destination (interrupt-safe SREG save/restore sequence unless
   TARGET_NO_INTERRUPTS) and as source; otherwise uses MOVW when available.
   Constants go through output_reload_inhi, memory through
   out_movhi_r_mr / out_movhi_mr_r.
   NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical, comments only.  */
2980 output_movhi (rtx insn
, rtx xop
[], int *plen
)
2985 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
2987 if (avr_mem_flash_p (src
)
2988 || avr_mem_flash_p (dest
))
2990 return avr_out_lpm (insn
, xop
, plen
);
2995 if (REG_P (src
)) /* mov r,r */
2997 if (test_hard_reg_class (STACK_REG
, dest
))
/* Writing SP: 8-bit SP devices only have SP_L.  */
2999 if (AVR_HAVE_8BIT_SP
)
3000 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
3003 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3004 "out __SP_H__,%B1", xop
, plen
, -2);
3006 /* Use simple load of SP if no interrupts are used. */
3008 return TARGET_NO_INTERRUPTS
3009 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3010 "out __SP_L__,%A1", xop
, plen
, -2)
/* Otherwise save/restore SREG so an interrupt cannot observe a
   half-updated stack pointer.  */
3012 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3014 "out __SP_H__,%B1" CR_TAB
3015 "out __SREG__,__tmp_reg__" CR_TAB
3016 "out __SP_L__,%A1", xop
, plen
, -5);
3018 else if (test_hard_reg_class (STACK_REG
, src
))
3020 return AVR_HAVE_8BIT_SP
3021 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3022 "clr %B0", xop
, plen
, -2)
3024 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3025 "in %B0,__SP_H__", xop
, plen
, -2);
3028 return AVR_HAVE_MOVW
3029 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
3031 : avr_asm_len ("mov %A0,%A1" CR_TAB
3032 "mov %B0,%B1", xop
, plen
, -2);
3034 else if (CONSTANT_P (src
))
3036 return output_reload_inhi (xop
, NULL
, plen
);
3038 else if (MEM_P (src
))
3040 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
3043 else if (MEM_P (dest
))
3048 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
3050 return out_movhi_mr_r (insn
, xop
, plen
);
3053 fatal_insn ("invalid insn:", insn
);
/* Output a QImode load register <- memory.  Handles constant addresses
   (IN for I/O range when optimizing, else LDS), reg+displacement (with
   Y-register frame adjustment when the displacement exceeds LDD's range,
   and an X-register fallback), plain LDD, and plain LD.
   NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical, comments only.  */
3059 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
3063 rtx x
= XEXP (src
, 0);
3065 if (CONSTANT_ADDRESS_P (x
))
3067 return optimize
> 0 && io_address_operand (x
, QImode
)
3068 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3069 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
3071 else if (GET_CODE (x
) == PLUS
3072 && REG_P (XEXP (x
, 0))
3073 && CONST_INT_P (XEXP (x
, 1)))
3075 /* memory access by reg+disp */
3077 int disp
= INTVAL (XEXP (x
, 1));
/* Displacement out of LDD range: only Y may be adjusted in place.  */
3079 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3081 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3082 fatal_insn ("incorrect insn:",insn
);
3084 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3085 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3086 "ldd %0,Y+63" CR_TAB
3087 "sbiw r28,%o1-63", op
, plen
, -3);
3089 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3090 "sbci r29,hi8(-%o1)" CR_TAB
3092 "subi r28,lo8(%o1)" CR_TAB
3093 "sbci r29,hi8(%o1)", op
, plen
, -5);
3095 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3097 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3098 it but I have this situation with extremal optimizing options. */
3100 avr_asm_len ("adiw r26,%o1" CR_TAB
3101 "ld %0,X", op
, plen
, -2);
/* Restore X unless it dies here or is clobbered by the load.  */
3103 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3104 && !reg_unused_after (insn
, XEXP (x
,0)))
3106 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3112 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3115 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
/* Output a HImode load register <- memory, for base-register, reg+disp,
   pre-decrement, post-increment, and constant addresses.  Uses __tmp_reg__
   when destination and base overlap, and special-cases the X register,
   which has no LDD form.
   NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical, comments only.  */
3119 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
3123 rtx base
= XEXP (src
, 0);
3124 int reg_dest
= true_regnum (dest
);
3125 int reg_base
= true_regnum (base
);
3126 /* "volatile" forces reading low byte first, even if less efficient,
3127 for correct operation with 16-bit I/O registers. */
3128 int mem_volatile_p
= MEM_VOLATILE_P (src
);
3132 if (reg_dest
== reg_base
) /* R = (R) */
3133 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3135 "mov %A0,__tmp_reg__", op
, plen
, -3);
3137 if (reg_base
!= REG_X
)
3138 return avr_asm_len ("ld %A0,%1" CR_TAB
3139 "ldd %B0,%1+1", op
, plen
, -2);
3141 avr_asm_len ("ld %A0,X+" CR_TAB
3142 "ld %B0,X", op
, plen
, -2);
3144 if (!reg_unused_after (insn
, base
))
3145 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3149 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3151 int disp
= INTVAL (XEXP (base
, 1));
3152 int reg_base
= true_regnum (XEXP (base
, 0));
3154 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3156 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3157 fatal_insn ("incorrect insn:",insn
);
3159 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3160 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3161 "ldd %A0,Y+62" CR_TAB
3162 "ldd %B0,Y+63" CR_TAB
3163 "sbiw r28,%o1-62", op
, plen
, -4)
3165 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3166 "sbci r29,hi8(-%o1)" CR_TAB
3168 "ldd %B0,Y+1" CR_TAB
3169 "subi r28,lo8(%o1)" CR_TAB
3170 "sbci r29,hi8(%o1)", op
, plen
, -6);
3173 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3174 it but I have this situation with extremal
3175 optimization options. */
3177 if (reg_base
== REG_X
)
3178 return reg_base
== reg_dest
3179 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3180 "ld __tmp_reg__,X+" CR_TAB
3182 "mov %A0,__tmp_reg__", op
, plen
, -4)
3184 : avr_asm_len ("adiw r26,%o1" CR_TAB
3187 "sbiw r26,%o1+1", op
, plen
, -4);
3189 return reg_base
== reg_dest
3190 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3191 "ldd %B0,%B1" CR_TAB
3192 "mov %A0,__tmp_reg__", op
, plen
, -3)
3194 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3195 "ldd %B0,%B1", op
, plen
, -2);
3197 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3199 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3200 fatal_insn ("incorrect insn:", insn
);
3202 if (!mem_volatile_p
)
3203 return avr_asm_len ("ld %B0,%1" CR_TAB
3204 "ld %A0,%1", op
, plen
, -2);
3206 return REGNO (XEXP (base
, 0)) == REG_X
3207 ? avr_asm_len ("sbiw r26,2" CR_TAB
3210 "sbiw r26,1", op
, plen
, -4)
3212 : avr_asm_len ("sbiw %r1,2" CR_TAB
3214 "ldd %B0,%p1+1", op
, plen
, -3);
3216 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3218 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3219 fatal_insn ("incorrect insn:", insn
);
3221 return avr_asm_len ("ld %A0,%1" CR_TAB
3222 "ld %B0,%1", op
, plen
, -2);
3224 else if (CONSTANT_ADDRESS_P (base
))
3226 return optimize
> 0 && io_address_operand (base
, HImode
)
3227 ? avr_asm_len ("in %A0,%i1" CR_TAB
3228 "in %B0,%i1+1", op
, plen
, -2)
3230 : avr_asm_len ("lds %A0,%m1" CR_TAB
3231 "lds %B0,%m1+1", op
, plen
, -4);
3234 fatal_insn ("unknown move insn:",insn
);
/* Output a SImode (4-byte) load register <- memory.  Note this function
   uses the older "*l = N, (string)" return style rather than avr_asm_len.
   Covers base-register (with X special cases and overlap handling via
   __tmp_reg__), reg+disp, pre-decrement, post-increment, and constant
   addresses.
   NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical, comments only.  */
3239 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3243 rtx base
= XEXP (src
, 0);
3244 int reg_dest
= true_regnum (dest
);
3245 int reg_base
= true_regnum (base
);
3253 if (reg_base
== REG_X
) /* (R26) */
3255 if (reg_dest
== REG_X
)
3256 /* "ld r26,-X" is undefined */
3257 return *l
=7, ("adiw r26,3" CR_TAB
3260 "ld __tmp_reg__,-X" CR_TAB
3263 "mov r27,__tmp_reg__");
3264 else if (reg_dest
== REG_X
- 2)
3265 return *l
=5, ("ld %A0,X+" CR_TAB
3267 "ld __tmp_reg__,X+" CR_TAB
3269 "mov %C0,__tmp_reg__");
3270 else if (reg_unused_after (insn
, base
))
3271 return *l
=4, ("ld %A0,X+" CR_TAB
3276 return *l
=5, ("ld %A0,X+" CR_TAB
/* Non-X base: LDD with overlapping-destination workarounds.  */
3284 if (reg_dest
== reg_base
)
3285 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3286 "ldd %C0,%1+2" CR_TAB
3287 "ldd __tmp_reg__,%1+1" CR_TAB
3289 "mov %B0,__tmp_reg__");
3290 else if (reg_base
== reg_dest
+ 2)
3291 return *l
=5, ("ld %A0,%1" CR_TAB
3292 "ldd %B0,%1+1" CR_TAB
3293 "ldd __tmp_reg__,%1+2" CR_TAB
3294 "ldd %D0,%1+3" CR_TAB
3295 "mov %C0,__tmp_reg__");
3297 return *l
=4, ("ld %A0,%1" CR_TAB
3298 "ldd %B0,%1+1" CR_TAB
3299 "ldd %C0,%1+2" CR_TAB
3303 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3305 int disp
= INTVAL (XEXP (base
, 1));
3307 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3309 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3310 fatal_insn ("incorrect insn:",insn
);
3312 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3313 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3314 "ldd %A0,Y+60" CR_TAB
3315 "ldd %B0,Y+61" CR_TAB
3316 "ldd %C0,Y+62" CR_TAB
3317 "ldd %D0,Y+63" CR_TAB
3320 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3321 "sbci r29,hi8(-%o1)" CR_TAB
3323 "ldd %B0,Y+1" CR_TAB
3324 "ldd %C0,Y+2" CR_TAB
3325 "ldd %D0,Y+3" CR_TAB
3326 "subi r28,lo8(%o1)" CR_TAB
3327 "sbci r29,hi8(%o1)");
3330 reg_base
= true_regnum (XEXP (base
, 0));
3331 if (reg_base
== REG_X
)
3334 if (reg_dest
== REG_X
)
3337 /* "ld r26,-X" is undefined */
3338 return ("adiw r26,%o1+3" CR_TAB
3341 "ld __tmp_reg__,-X" CR_TAB
3344 "mov r27,__tmp_reg__");
3347 if (reg_dest
== REG_X
- 2)
3348 return ("adiw r26,%o1" CR_TAB
3351 "ld __tmp_reg__,X+" CR_TAB
3353 "mov r26,__tmp_reg__");
3355 return ("adiw r26,%o1" CR_TAB
3362 if (reg_dest
== reg_base
)
3363 return *l
=5, ("ldd %D0,%D1" CR_TAB
3364 "ldd %C0,%C1" CR_TAB
3365 "ldd __tmp_reg__,%B1" CR_TAB
3366 "ldd %A0,%A1" CR_TAB
3367 "mov %B0,__tmp_reg__");
3368 else if (reg_dest
== reg_base
- 2)
3369 return *l
=5, ("ldd %A0,%A1" CR_TAB
3370 "ldd %B0,%B1" CR_TAB
3371 "ldd __tmp_reg__,%C1" CR_TAB
3372 "ldd %D0,%D1" CR_TAB
3373 "mov %C0,__tmp_reg__");
3374 return *l
=4, ("ldd %A0,%A1" CR_TAB
3375 "ldd %B0,%B1" CR_TAB
3376 "ldd %C0,%C1" CR_TAB
3379 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3380 return *l
=4, ("ld %D0,%1" CR_TAB
3384 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3385 return *l
=4, ("ld %A0,%1" CR_TAB
3389 else if (CONSTANT_ADDRESS_P (base
))
3390 return *l
=8, ("lds %A0,%m1" CR_TAB
3391 "lds %B0,%m1+1" CR_TAB
3392 "lds %C0,%m1+2" CR_TAB
3395 fatal_insn ("unknown move insn:",insn
);
/* Output a SImode (4-byte) store memory <- register.  Mirrors
   out_movsi_r_mr for the store direction: constant address (STS), base
   register (with X and source/base-overlap special cases using
   __tmp_reg__/__zero_reg__), reg+disp, pre-decrement, post-increment.
   NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical, comments only.  */
3400 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3404 rtx base
= XEXP (dest
, 0);
3405 int reg_base
= true_regnum (base
);
3406 int reg_src
= true_regnum (src
);
3412 if (CONSTANT_ADDRESS_P (base
))
3413 return *l
=8,("sts %m0,%A1" CR_TAB
3414 "sts %m0+1,%B1" CR_TAB
3415 "sts %m0+2,%C1" CR_TAB
3417 if (reg_base
> 0) /* (r) */
3419 if (reg_base
== REG_X
) /* (R26) */
3421 if (reg_src
== REG_X
)
3423 /* "st X+,r26" is undefined */
3424 if (reg_unused_after (insn
, base
))
3425 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3428 "st X+,__tmp_reg__" CR_TAB
3432 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3435 "st X+,__tmp_reg__" CR_TAB
3440 else if (reg_base
== reg_src
+ 2)
/* Source's upper half overlaps the base register -- stash it in
   __zero_reg__/__tmp_reg__ first, then restore __zero_reg__.  */
3442 if (reg_unused_after (insn
, base
))
3443 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3444 "mov __tmp_reg__,%D1" CR_TAB
3447 "st %0+,__zero_reg__" CR_TAB
3448 "st %0,__tmp_reg__" CR_TAB
3449 "clr __zero_reg__");
3451 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3452 "mov __tmp_reg__,%D1" CR_TAB
3455 "st %0+,__zero_reg__" CR_TAB
3456 "st %0,__tmp_reg__" CR_TAB
3457 "clr __zero_reg__" CR_TAB
3460 return *l
=5, ("st %0+,%A1" CR_TAB
3467 return *l
=4, ("st %0,%A1" CR_TAB
3468 "std %0+1,%B1" CR_TAB
3469 "std %0+2,%C1" CR_TAB
3472 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3474 int disp
= INTVAL (XEXP (base
, 1));
3475 reg_base
= REGNO (XEXP (base
, 0));
3476 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3478 if (reg_base
!= REG_Y
)
3479 fatal_insn ("incorrect insn:",insn
);
3481 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3482 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3483 "std Y+60,%A1" CR_TAB
3484 "std Y+61,%B1" CR_TAB
3485 "std Y+62,%C1" CR_TAB
3486 "std Y+63,%D1" CR_TAB
3489 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3490 "sbci r29,hi8(-%o0)" CR_TAB
3492 "std Y+1,%B1" CR_TAB
3493 "std Y+2,%C1" CR_TAB
3494 "std Y+3,%D1" CR_TAB
3495 "subi r28,lo8(%o0)" CR_TAB
3496 "sbci r29,hi8(%o0)");
3498 if (reg_base
== REG_X
)
3501 if (reg_src
== REG_X
)
3504 return ("mov __tmp_reg__,r26" CR_TAB
3505 "mov __zero_reg__,r27" CR_TAB
3506 "adiw r26,%o0" CR_TAB
3507 "st X+,__tmp_reg__" CR_TAB
3508 "st X+,__zero_reg__" CR_TAB
3511 "clr __zero_reg__" CR_TAB
3514 else if (reg_src
== REG_X
- 2)
3517 return ("mov __tmp_reg__,r26" CR_TAB
3518 "mov __zero_reg__,r27" CR_TAB
3519 "adiw r26,%o0" CR_TAB
3522 "st X+,__tmp_reg__" CR_TAB
3523 "st X,__zero_reg__" CR_TAB
3524 "clr __zero_reg__" CR_TAB
3528 return ("adiw r26,%o0" CR_TAB
3535 return *l
=4, ("std %A0,%A1" CR_TAB
3536 "std %B0,%B1" CR_TAB
3537 "std %C0,%C1" CR_TAB
3540 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3541 return *l
=4, ("st %0,%D1" CR_TAB
3545 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3546 return *l
=4, ("st %0,%A1" CR_TAB
3550 fatal_insn ("unknown move insn:",insn
);
/* Output a 4-byte move (SImode or SFmode).  Register-to-register uses
   MOVW pairs where available, ordering the byte moves by register number
   to handle overlap; constants go through output_reload_insisf, loads
   through out_movsi_r_mr, stores through out_movsi_mr_r (constant 0
   replaced by the zero register).
   NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical, comments only.  */
3555 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3558 rtx dest
= operands
[0];
3559 rtx src
= operands
[1];
3562 if (avr_mem_flash_p (src
)
3563 || avr_mem_flash_p (dest
))
3565 return avr_out_lpm (insn
, operands
, real_l
);
3571 if (register_operand (dest
, VOIDmode
))
3573 if (register_operand (src
, VOIDmode
)) /* mov r,r */
/* Copy high-to-low or low-to-high depending on overlap direction.  */
3575 if (true_regnum (dest
) > true_regnum (src
))
3580 return ("movw %C0,%C1" CR_TAB
3584 return ("mov %D0,%D1" CR_TAB
3585 "mov %C0,%C1" CR_TAB
3586 "mov %B0,%B1" CR_TAB
3594 return ("movw %A0,%A1" CR_TAB
3598 return ("mov %A0,%A1" CR_TAB
3599 "mov %B0,%B1" CR_TAB
3600 "mov %C0,%C1" CR_TAB
3604 else if (CONSTANT_P (src
))
3606 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3608 else if (GET_CODE (src
) == MEM
)
3609 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3611 else if (GET_CODE (dest
) == MEM
)
3615 if (src
== CONST0_RTX (GET_MODE (dest
)))
3616 operands
[1] = zero_reg_rtx
;
3618 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3621 output_asm_insn (templ
, operands
);
3626 fatal_insn ("invalid insn:", insn
);
3631 /* Handle loads of 24-bit types from memory to register. */
/* NOTE(review): this extracted text is corrupted -- statements are split
   across lines and some original lines are missing.  Code tokens are left
   byte-identical; only comments were added.
   Same case structure as out_movsi_r_mr but for 3-byte PSImode, using
   avr_asm_len with PLEN length accounting.  */
3634 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3638 rtx base
= XEXP (src
, 0);
3639 int reg_dest
= true_regnum (dest
);
3640 int reg_base
= true_regnum (base
);
3644 if (reg_base
== REG_X
) /* (R26) */
3646 if (reg_dest
== REG_X
)
3647 /* "ld r26,-X" is undefined */
3648 return avr_asm_len ("adiw r26,2" CR_TAB
3650 "ld __tmp_reg__,-X" CR_TAB
3653 "mov r27,__tmp_reg__", op
, plen
, -6);
3656 avr_asm_len ("ld %A0,X+" CR_TAB
3658 "ld %C0,X", op
, plen
, -3);
3660 if (reg_dest
!= REG_X
- 2
3661 && !reg_unused_after (insn
, base
))
3663 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3669 else /* reg_base != REG_X */
3671 if (reg_dest
== reg_base
)
3672 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3673 "ldd __tmp_reg__,%1+1" CR_TAB
3675 "mov %B0,__tmp_reg__", op
, plen
, -4);
3677 return avr_asm_len ("ld %A0,%1" CR_TAB
3678 "ldd %B0,%1+1" CR_TAB
3679 "ldd %C0,%1+2", op
, plen
, -3);
3682 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3684 int disp
= INTVAL (XEXP (base
, 1));
3686 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3688 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3689 fatal_insn ("incorrect insn:",insn
);
3691 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3692 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3693 "ldd %A0,Y+61" CR_TAB
3694 "ldd %B0,Y+62" CR_TAB
3695 "ldd %C0,Y+63" CR_TAB
3696 "sbiw r28,%o1-61", op
, plen
, -5);
3698 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3699 "sbci r29,hi8(-%o1)" CR_TAB
3701 "ldd %B0,Y+1" CR_TAB
3702 "ldd %C0,Y+2" CR_TAB
3703 "subi r28,lo8(%o1)" CR_TAB
3704 "sbci r29,hi8(%o1)", op
, plen
, -7);
3707 reg_base
= true_regnum (XEXP (base
, 0));
3708 if (reg_base
== REG_X
)
3711 if (reg_dest
== REG_X
)
3713 /* "ld r26,-X" is undefined */
3714 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3716 "ld __tmp_reg__,-X" CR_TAB
3719 "mov r27,__tmp_reg__", op
, plen
, -6);
3722 avr_asm_len ("adiw r26,%o1" CR_TAB
3725 "ld r26,X", op
, plen
, -4);
3727 if (reg_dest
!= REG_X
- 2)
3728 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3733 if (reg_dest
== reg_base
)
3734 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3735 "ldd __tmp_reg__,%B1" CR_TAB
3736 "ldd %A0,%A1" CR_TAB
3737 "mov %B0,__tmp_reg__", op
, plen
, -4);
3739 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3740 "ldd %B0,%B1" CR_TAB
3741 "ldd %C0,%C1", op
, plen
, -3);
3743 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3744 return avr_asm_len ("ld %C0,%1" CR_TAB
3746 "ld %A0,%1", op
, plen
, -3);
3747 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3748 return avr_asm_len ("ld %A0,%1" CR_TAB
3750 "ld %C0,%1", op
, plen
, -3);
3752 else if (CONSTANT_ADDRESS_P (base
))
3753 return avr_asm_len ("lds %A0,%m1" CR_TAB
3754 "lds %B0,%m1+1" CR_TAB
3755 "lds %C0,%m1+2", op
, plen
, -6);
3757 fatal_insn ("unknown move insn:",insn
);
3761 /* Handle store of 24-bit type from register or zero to memory. */
/* NOTE(review): this extracted text is corrupted -- statements are split
   across lines and some original lines are missing.  Code tokens are left
   byte-identical; only comments were added.
   Store counterpart of avr_out_load_psi; asserts no base/source overlap
   in the X-register cases instead of working around it.  */
3764 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3768 rtx base
= XEXP (dest
, 0);
3769 int reg_base
= true_regnum (base
);
3771 if (CONSTANT_ADDRESS_P (base
))
3772 return avr_asm_len ("sts %m0,%A1" CR_TAB
3773 "sts %m0+1,%B1" CR_TAB
3774 "sts %m0+2,%C1", op
, plen
, -6);
3776 if (reg_base
> 0) /* (r) */
3778 if (reg_base
== REG_X
) /* (R26) */
3780 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3782 avr_asm_len ("st %0+,%A1" CR_TAB
3784 "st %0,%C1", op
, plen
, -3);
3786 if (!reg_unused_after (insn
, base
))
3787 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3792 return avr_asm_len ("st %0,%A1" CR_TAB
3793 "std %0+1,%B1" CR_TAB
3794 "std %0+2,%C1", op
, plen
, -3);
3796 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3798 int disp
= INTVAL (XEXP (base
, 1));
3799 reg_base
= REGNO (XEXP (base
, 0));
3801 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3803 if (reg_base
!= REG_Y
)
3804 fatal_insn ("incorrect insn:",insn
);
3806 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3807 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3808 "std Y+61,%A1" CR_TAB
3809 "std Y+62,%B1" CR_TAB
3810 "std Y+63,%C1" CR_TAB
3811 "sbiw r28,%o0-60", op
, plen
, -5);
3813 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3814 "sbci r29,hi8(-%o0)" CR_TAB
3816 "std Y+1,%B1" CR_TAB
3817 "std Y+2,%C1" CR_TAB
3818 "subi r28,lo8(%o0)" CR_TAB
3819 "sbci r29,hi8(%o0)", op
, plen
, -7);
3821 if (reg_base
== REG_X
)
3824 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
3826 avr_asm_len ("adiw r26,%o0" CR_TAB
3829 "st X,%C1", op
, plen
, -4);
3831 if (!reg_unused_after (insn
, XEXP (base
, 0)))
3832 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
3837 return avr_asm_len ("std %A0,%A1" CR_TAB
3838 "std %B0,%B1" CR_TAB
3839 "std %C0,%C1", op
, plen
, -3);
3841 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3842 return avr_asm_len ("st %0,%C1" CR_TAB
3844 "st %0,%A1", op
, plen
, -3);
3845 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3846 return avr_asm_len ("st %0,%A1" CR_TAB
3848 "st %0,%C1", op
, plen
, -3);
3850 fatal_insn ("unknown move insn:",insn
);
3855 /* Move around 24-bit stuff. */
/* NOTE(review): this extracted text is corrupted -- statements are split
   across lines and some original lines are missing.  Code tokens are left
   byte-identical; only comments were added.
   Top-level PSImode move dispatcher: flash -> avr_out_lpm, reg<-reg
   (MOVW + MOV, ordered by register number to handle overlap), constants
   via avr_out_reload_inpsi, loads via avr_out_load_psi, stores via
   avr_out_store_psi.  */
3858 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
3863 if (avr_mem_flash_p (src
)
3864 || avr_mem_flash_p (dest
))
3866 return avr_out_lpm (insn
, op
, plen
);
3869 if (register_operand (dest
, VOIDmode
))
3871 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3873 if (true_regnum (dest
) > true_regnum (src
))
3875 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
3878 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
3880 return avr_asm_len ("mov %B0,%B1" CR_TAB
3881 "mov %A0,%A1", op
, plen
, 2);
3886 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
3888 avr_asm_len ("mov %A0,%A1" CR_TAB
3889 "mov %B0,%B1", op
, plen
, -2);
3891 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
3894 else if (CONSTANT_P (src
))
3896 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
3898 else if (MEM_P (src
))
3899 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
3901 else if (MEM_P (dest
))
3906 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3908 return avr_out_store_psi (insn
, xop
, plen
);
3911 fatal_insn ("invalid insn:", insn
);
/* Output a QImode store memory <- register.  Constant addresses use OUT
   for the I/O range when optimizing, else STS; reg+disp handles
   out-of-range displacements through Y-register adjustment and an
   X-register fallback with source/base overlap protection via
   __tmp_reg__.
   NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical, comments only.  */
3917 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
3921 rtx x
= XEXP (dest
, 0);
3923 if (CONSTANT_ADDRESS_P (x
))
3925 return optimize
> 0 && io_address_operand (x
, QImode
)
3926 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
3927 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
3929 else if (GET_CODE (x
) == PLUS
3930 && REG_P (XEXP (x
, 0))
3931 && CONST_INT_P (XEXP (x
, 1)))
3933 /* memory access by reg+disp */
3935 int disp
= INTVAL (XEXP (x
, 1));
3937 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
3939 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3940 fatal_insn ("incorrect insn:",insn
);
3942 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3943 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3944 "std Y+63,%1" CR_TAB
3945 "sbiw r28,%o0-63", op
, plen
, -3);
3947 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3948 "sbci r29,hi8(-%o0)" CR_TAB
3950 "subi r28,lo8(%o0)" CR_TAB
3951 "sbci r29,hi8(%o0)", op
, plen
, -5);
3953 else if (REGNO (XEXP (x
,0)) == REG_X
)
/* Copy source out of the way first if it overlaps X.  */
3955 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
3957 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3958 "adiw r26,%o0" CR_TAB
3959 "st X,__tmp_reg__", op
, plen
, -3);
3963 avr_asm_len ("adiw r26,%o0" CR_TAB
3964 "st X,%1", op
, plen
, -2);
3967 if (!reg_unused_after (insn
, XEXP (x
,0)))
3968 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
3973 return avr_asm_len ("std %0,%1", op
, plen
, -1);
3976 return avr_asm_len ("st %0,%1", op
, plen
, -1);
3980 /* Helper for the next function for XMEGA. It does the same
3981 but with low byte first. */
/* NOTE(review): this extracted text is corrupted -- statements are split
   across lines and some original lines are missing.  Code tokens are left
   byte-identical; only comments were added.  */
3984 avr_out_movhi_mr_r_xmega (rtx insn
, rtx op
[], int *plen
)
3988 rtx base
= XEXP (dest
, 0);
3989 int reg_base
= true_regnum (base
);
3990 int reg_src
= true_regnum (src
);
3992 /* "volatile" forces writing low byte first, even if less efficient,
3993 for correct operation with 16-bit I/O registers like SP. */
3994 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
3996 if (CONSTANT_ADDRESS_P (base
))
3997 return optimize
> 0 && io_address_operand (base
, HImode
)
3998 ? avr_asm_len ("out %i0,%A1" CR_TAB
3999 "out %i0+1,%B1", op
, plen
, -2)
4001 : avr_asm_len ("sts %m0,%A1" CR_TAB
4002 "sts %m0+1,%B1", op
, plen
, -4);
4006 if (reg_base
!= REG_X
)
4007 return avr_asm_len ("st %0,%A1" CR_TAB
4008 "std %0+1,%B1", op
, plen
, -2);
4010 if (reg_src
== REG_X
)
4011 /* "st X+,r26" and "st -X,r26" are undefined. */
4012 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4015 "st X,__tmp_reg__", op
, plen
, -4);
4017 avr_asm_len ("st X+,%A1" CR_TAB
4018 "st X,%B1", op
, plen
, -2);
4020 return reg_unused_after (insn
, base
)
4022 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
4024 else if (GET_CODE (base
) == PLUS
)
4026 int disp
= INTVAL (XEXP (base
, 1));
4027 reg_base
= REGNO (XEXP (base
, 0));
4028 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4030 if (reg_base
!= REG_Y
)
4031 fatal_insn ("incorrect insn:",insn
);
4033 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4034 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4035 "std Y+62,%A1" CR_TAB
4036 "std Y+63,%B1" CR_TAB
4037 "sbiw r28,%o0-62", op
, plen
, -4)
4039 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4040 "sbci r29,hi8(-%o0)" CR_TAB
4042 "std Y+1,%B1" CR_TAB
4043 "subi r28,lo8(%o0)" CR_TAB
4044 "sbci r29,hi8(%o0)", op
, plen
, -6);
4047 if (reg_base
!= REG_X
)
4048 return avr_asm_len ("std %A0,%A1" CR_TAB
4049 "std %B0,%B1", op
, plen
, -2);
4051 return reg_src
== REG_X
4052 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4053 "mov __zero_reg__,r27" CR_TAB
4054 "adiw r26,%o0" CR_TAB
4055 "st X+,__tmp_reg__" CR_TAB
4056 "st X,__zero_reg__" CR_TAB
4057 "clr __zero_reg__" CR_TAB
4058 "sbiw r26,%o0+1", op
, plen
, -7)
4060 : avr_asm_len ("adiw r26,%o0" CR_TAB
4063 "sbiw r26,%o0+1", op
, plen
, -4);
4065 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4067 if (!mem_volatile_p
)
4068 return avr_asm_len ("st %0,%B1" CR_TAB
4069 "st %0,%A1", op
, plen
, -2);
4071 return REGNO (XEXP (base
, 0)) == REG_X
4072 ? avr_asm_len ("sbiw r26,2" CR_TAB
4075 "sbiw r26,1", op
, plen
, -4)
4077 : avr_asm_len ("sbiw %r0,2" CR_TAB
4079 "std %p0+1,%B1", op
, plen
, -3);
4081 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4083 return avr_asm_len ("st %0,%A1" CR_TAB
4084 "st %0,%B1", op
, plen
, -2);
4087 fatal_insn ("unknown move insn:",insn
);
/* Output a HImode store memory <- register.  Non-XMEGA devices write the
   HIGH byte first (delegating to avr_out_movhi_mr_r_xmega for XMEGA,
   which writes low byte first).  Covers constant addresses (OUT/STS),
   base register with X special cases, reg+disp, pre-decrement, and
   post-increment.
   NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical, comments only.  */
4093 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
4097 rtx base
= XEXP (dest
, 0);
4098 int reg_base
= true_regnum (base
);
4099 int reg_src
= true_regnum (src
);
4102 /* "volatile" forces writing high-byte first (no-xmega) resp.
4103 low-byte first (xmega) even if less efficient, for correct
4104 operation with 16-bit I/O registers like. */
4107 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
4109 mem_volatile_p
= MEM_VOLATILE_P (dest
);
4111 if (CONSTANT_ADDRESS_P (base
))
4112 return optimize
> 0 && io_address_operand (base
, HImode
)
4113 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4114 "out %i0,%A1", op
, plen
, -2)
4116 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4117 "sts %m0,%A1", op
, plen
, -4);
4121 if (reg_base
!= REG_X
)
4122 return avr_asm_len ("std %0+1,%B1" CR_TAB
4123 "st %0,%A1", op
, plen
, -2);
4125 if (reg_src
== REG_X
)
4126 /* "st X+,r26" and "st -X,r26" are undefined. */
4127 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4128 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4131 "st X,__tmp_reg__", op
, plen
, -4)
4133 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4135 "st X,__tmp_reg__" CR_TAB
4137 "st X,r26", op
, plen
, -5);
4139 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
4140 ? avr_asm_len ("st X+,%A1" CR_TAB
4141 "st X,%B1", op
, plen
, -2)
4142 : avr_asm_len ("adiw r26,1" CR_TAB
4144 "st -X,%A1", op
, plen
, -3);
4146 else if (GET_CODE (base
) == PLUS
)
4148 int disp
= INTVAL (XEXP (base
, 1));
4149 reg_base
= REGNO (XEXP (base
, 0));
4150 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4152 if (reg_base
!= REG_Y
)
4153 fatal_insn ("incorrect insn:",insn
);
4155 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4156 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4157 "std Y+63,%B1" CR_TAB
4158 "std Y+62,%A1" CR_TAB
4159 "sbiw r28,%o0-62", op
, plen
, -4)
4161 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4162 "sbci r29,hi8(-%o0)" CR_TAB
4163 "std Y+1,%B1" CR_TAB
4165 "subi r28,lo8(%o0)" CR_TAB
4166 "sbci r29,hi8(%o0)", op
, plen
, -6);
4169 if (reg_base
!= REG_X
)
4170 return avr_asm_len ("std %B0,%B1" CR_TAB
4171 "std %A0,%A1", op
, plen
, -2);
4173 return reg_src
== REG_X
4174 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4175 "mov __zero_reg__,r27" CR_TAB
4176 "adiw r26,%o0+1" CR_TAB
4177 "st X,__zero_reg__" CR_TAB
4178 "st -X,__tmp_reg__" CR_TAB
4179 "clr __zero_reg__" CR_TAB
4180 "sbiw r26,%o0", op
, plen
, -7)
4182 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4185 "sbiw r26,%o0", op
, plen
, -4);
4187 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4189 return avr_asm_len ("st %0,%B1" CR_TAB
4190 "st %0,%A1", op
, plen
, -2);
4192 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4194 if (!mem_volatile_p
)
4195 return avr_asm_len ("st %0,%A1" CR_TAB
4196 "st %0,%B1", op
, plen
, -2);
4198 return REGNO (XEXP (base
, 0)) == REG_X
4199 ? avr_asm_len ("adiw r26,1" CR_TAB
4202 "adiw r26,2", op
, plen
, -4)
4204 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4206 "adiw %r0,2", op
, plen
, -3);
4208 fatal_insn ("unknown move insn:",insn
);
4212 /* Return 1 if frame pointer for current function required. */
/* NOTE(review): extraction is garbled (split lines); code left
   byte-identical.  Needed when the function uses alloca/setjmp, has
   nonlocal labels, takes all arguments on the stack (args.info.nregs == 0
   -- presumably "no register args"; confirm against avr CUMULATIVE_ARGS),
   or has a nonzero frame.  */
4215 avr_frame_pointer_required_p (void)
4217 return (cfun
->calls_alloca
4218 || cfun
->calls_setjmp
4219 || cfun
->has_nonlocal_label
4220 || crtl
->args
.info
.nregs
== 0
4221 || get_frame_size () > 0);
4224 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* NOTE(review): extraction is garbled (split/missing lines); code left
   byte-identical.  Looks at the next real insn: if it is a conditional
   jump (SET whose source is IF_THEN_ELSE), returns the comparison code
   of its condition.  */
4227 compare_condition (rtx insn
)
4229 rtx next
= next_real_insn (insn
);
4231 if (next
&& JUMP_P (next
))
4233 rtx pat
= PATTERN (next
);
4234 rtx src
= SET_SRC (pat
);
4236 if (IF_THEN_ELSE
== GET_CODE (src
))
4237 return GET_CODE (XEXP (src
, 0));
4244 /* Returns true iff INSN is a tst insn that only tests the sign. */
/* NOTE(review): extraction is garbled; code left byte-identical.
   True when the following jump's condition is GE or LT.  */
4247 compare_sign_p (rtx insn
)
4249 RTX_CODE cond
= compare_condition (insn
);
4250 return (cond
== GE
|| cond
== LT
);
4254 /* Returns true iff the next insn is a JUMP_INSN with a condition
4255 that needs to be swapped (GT, GTU, LE, LEU). */
/* NOTE(review): extraction is garbled; code left byte-identical.
   Despite the "true iff" comment, this returns the condition code itself
   (nonzero) or 0 -- callers apparently treat it as a boolean-or-code.  */
4258 compare_diff_p (rtx insn
)
4260 RTX_CODE cond
= compare_condition (insn
);
4261 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4264 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
/* NOTE(review): extraction is garbled; code left byte-identical.  */
4267 compare_eq_p (rtx insn
)
4269 RTX_CODE cond
= compare_condition (insn
);
4270 return (cond
== EQ
|| cond
== NE
);
4274 /* Output compare instruction
4276 compare (XOP[0], XOP[1])
4278 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4279 XOP[2] is an 8-bit scratch register as needed.
4281 PLEN == NULL: Output instructions.
4282 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4283 Don't output anything. */
/* NOTE(review): this extracted text is corrupted -- statements are split
   across lines and some original lines are missing.  Code tokens are left
   byte-identical; only comments were added.  */
4286 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4288 /* Register to compare and value to compare against. */
4292 /* MODE of the comparison. */
4293 enum machine_mode mode
= GET_MODE (xreg
);
4295 /* Number of bytes to operate on. */
4296 int i
, n_bytes
= GET_MODE_SIZE (mode
);
4298 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4299 int clobber_val
= -1;
4301 gcc_assert (REG_P (xreg
));
4302 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4303 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4308 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4309 against 0 by ORing the bytes. This is one instruction shorter.
4310 Notice that DImode comparisons are always against reg:DI 18
4311 and therefore don't use this. */
4313 if (!test_hard_reg_class (LD_REGS
, xreg
)
4314 && compare_eq_p (insn
)
4315 && reg_unused_after (insn
, xreg
))
4317 if (xval
== const1_rtx
)
4319 avr_asm_len ("dec %A0" CR_TAB
4320 "or %A0,%B0", xop
, plen
, 2);
4323 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4326 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4330 else if (xval
== constm1_rtx
)
4333 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4336 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4338 return avr_asm_len ("and %A0,%B0" CR_TAB
4339 "com %A0", xop
, plen
, 2);
/* General case: compare byte by byte, low to high.  */
4343 for (i
= 0; i
< n_bytes
; i
++)
4345 /* We compare byte-wise. */
4346 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4347 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4349 /* 8-bit value to compare with this byte. */
4350 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4352 /* Registers R16..R31 can operate with immediate. */
4353 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4356 xop
[1] = gen_int_mode (val8
, QImode
);
4358 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4361 && test_hard_reg_class (ADDW_REGS
, reg8
))
4363 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4365 if (IN_RANGE (val16
, 0, 63)
4367 || reg_unused_after (insn
, xreg
)))
4369 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4375 && IN_RANGE (val16
, -63, -1)
4376 && compare_eq_p (insn
)
4377 && reg_unused_after (insn
, xreg
))
4379 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4383 /* Comparing against 0 is easy. */
4388 ? "cp %0,__zero_reg__"
4389 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4393 /* Upper registers can compare and subtract-with-carry immediates.
4394 Notice that compare instructions do the same as respective subtract
4395 instruction; the only difference is that comparisons don't write
4396 the result back to the target register. */
4402 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4405 else if (reg_unused_after (insn
, xreg
))
4407 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4412 /* Must load the value into the scratch register. */
4414 gcc_assert (REG_P (xop
[2]));
/* Reuse the scratch if it already holds this byte value.  */
4416 if (clobber_val
!= (int) val8
)
4417 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4418 clobber_val
= (int) val8
;
4422 : "cpc %0,%2", xop
, plen
, 1);
4429 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4432 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4436 xop
[0] = gen_rtx_REG (DImode
, 18);
4440 return avr_out_compare (insn
, xop
, plen
);
4443 /* Output test instruction for HImode. */
4446 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4448 if (compare_sign_p (insn
))
4450 avr_asm_len ("tst %B0", op
, plen
, -1);
4452 else if (reg_unused_after (insn
, op
[0])
4453 && compare_eq_p (insn
))
4455 /* Faster than sbiw if we can clobber the operand. */
4456 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4460 avr_out_compare (insn
, op
, plen
);
4467 /* Output test instruction for PSImode. */
4470 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4472 if (compare_sign_p (insn
))
4474 avr_asm_len ("tst %C0", op
, plen
, -1);
4476 else if (reg_unused_after (insn
, op
[0])
4477 && compare_eq_p (insn
))
4479 /* Faster than sbiw if we can clobber the operand. */
4480 avr_asm_len ("or %A0,%B0" CR_TAB
4481 "or %A0,%C0", op
, plen
, -2);
4485 avr_out_compare (insn
, op
, plen
);
4492 /* Output test instruction for SImode. */
4495 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4497 if (compare_sign_p (insn
))
4499 avr_asm_len ("tst %D0", op
, plen
, -1);
4501 else if (reg_unused_after (insn
, op
[0])
4502 && compare_eq_p (insn
))
4504 /* Faster than sbiw if we can clobber the operand. */
4505 avr_asm_len ("or %A0,%B0" CR_TAB
4507 "or %A0,%D0", op
, plen
, -3);
4511 avr_out_compare (insn
, op
, plen
);
4518 /* Generate asm equivalent for various shifts. This only handles cases
4519 that are not already carefully hand-optimized in ?sh??i3_out.
4521 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4522 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4523 OPERANDS[3] is a QImode scratch register from LD regs if
4524 available and SCRATCH, otherwise (no scratch available)
4526 TEMPL is an assembler template that shifts by one position.
4527 T_LEN is the length of this template. */
4530 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4531 int *plen
, int t_len
)
4533 bool second_label
= true;
4534 bool saved_in_tmp
= false;
4535 bool use_zero_reg
= false;
4538 op
[0] = operands
[0];
4539 op
[1] = operands
[1];
4540 op
[2] = operands
[2];
4541 op
[3] = operands
[3];
4546 if (CONST_INT_P (operands
[2]))
4548 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4549 && REG_P (operands
[3]));
4550 int count
= INTVAL (operands
[2]);
4551 int max_len
= 10; /* If larger than this, always use a loop. */
4556 if (count
< 8 && !scratch
)
4557 use_zero_reg
= true;
4560 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4562 if (t_len
* count
<= max_len
)
4564 /* Output shifts inline with no loop - faster. */
4567 avr_asm_len (templ
, op
, plen
, t_len
);
4574 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4576 else if (use_zero_reg
)
4578 /* Hack to save one word: use __zero_reg__ as loop counter.
4579 Set one bit, then shift in a loop until it is 0 again. */
4581 op
[3] = zero_reg_rtx
;
4583 avr_asm_len ("set" CR_TAB
4584 "bld %3,%2-1", op
, plen
, 2);
4588 /* No scratch register available, use one from LD_REGS (saved in
4589 __tmp_reg__) that doesn't overlap with registers to shift. */
4591 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4592 op
[4] = tmp_reg_rtx
;
4593 saved_in_tmp
= true;
4595 avr_asm_len ("mov %4,%3" CR_TAB
4596 "ldi %3,%2", op
, plen
, 2);
4599 second_label
= false;
4601 else if (MEM_P (op
[2]))
4605 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4608 out_movqi_r_mr (insn
, op_mov
, plen
);
4610 else if (register_operand (op
[2], QImode
))
4614 if (!reg_unused_after (insn
, op
[2])
4615 || reg_overlap_mentioned_p (op
[0], op
[2]))
4617 op
[3] = tmp_reg_rtx
;
4618 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4622 fatal_insn ("bad shift insn:", insn
);
4625 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4627 avr_asm_len ("1:", op
, plen
, 0);
4628 avr_asm_len (templ
, op
, plen
, t_len
);
4631 avr_asm_len ("2:", op
, plen
, 0);
4633 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4634 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4637 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4641 /* 8bit shift left ((char)x << i) */
4644 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4646 if (GET_CODE (operands
[2]) == CONST_INT
)
4653 switch (INTVAL (operands
[2]))
4656 if (INTVAL (operands
[2]) < 8)
4668 return ("lsl %0" CR_TAB
4673 return ("lsl %0" CR_TAB
4678 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4681 return ("swap %0" CR_TAB
4685 return ("lsl %0" CR_TAB
4691 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4694 return ("swap %0" CR_TAB
4699 return ("lsl %0" CR_TAB
4706 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4709 return ("swap %0" CR_TAB
4715 return ("lsl %0" CR_TAB
4724 return ("ror %0" CR_TAB
4729 else if (CONSTANT_P (operands
[2]))
4730 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4732 out_shift_with_cnt ("lsl %0",
4733 insn
, operands
, len
, 1);
/* NOTE(review): this block is a lossy, line-mangled extraction of the
   upstream GCC AVR backend function ashlhi3_out (16-bit shift left by a
   constant or variable count); many original source lines are missing.
   Restore from upstream before editing -- the fragments are kept verbatim
   below.  */
4738 /* 16bit shift left ((short)x << i) */
4741 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4743 if (GET_CODE (operands
[2]) == CONST_INT
)
4745 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4746 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4753 switch (INTVAL (operands
[2]))
4756 if (INTVAL (operands
[2]) < 16)
4760 return ("clr %B0" CR_TAB
4764 if (optimize_size
&& scratch
)
4769 return ("swap %A0" CR_TAB
4771 "andi %B0,0xf0" CR_TAB
4772 "eor %B0,%A0" CR_TAB
4773 "andi %A0,0xf0" CR_TAB
4779 return ("swap %A0" CR_TAB
4781 "ldi %3,0xf0" CR_TAB
4783 "eor %B0,%A0" CR_TAB
4787 break; /* optimize_size ? 6 : 8 */
4791 break; /* scratch ? 5 : 6 */
4795 return ("lsl %A0" CR_TAB
4799 "andi %B0,0xf0" CR_TAB
4800 "eor %B0,%A0" CR_TAB
4801 "andi %A0,0xf0" CR_TAB
4807 return ("lsl %A0" CR_TAB
4811 "ldi %3,0xf0" CR_TAB
4813 "eor %B0,%A0" CR_TAB
4821 break; /* scratch ? 5 : 6 */
4823 return ("clr __tmp_reg__" CR_TAB
4826 "ror __tmp_reg__" CR_TAB
4829 "ror __tmp_reg__" CR_TAB
4830 "mov %B0,%A0" CR_TAB
4831 "mov %A0,__tmp_reg__");
4835 return ("lsr %B0" CR_TAB
4836 "mov %B0,%A0" CR_TAB
4842 return *len
= 2, ("mov %B0,%A1" CR_TAB
4847 return ("mov %B0,%A0" CR_TAB
4853 return ("mov %B0,%A0" CR_TAB
4860 return ("mov %B0,%A0" CR_TAB
4870 return ("mov %B0,%A0" CR_TAB
4878 return ("mov %B0,%A0" CR_TAB
4881 "ldi %3,0xf0" CR_TAB
4885 return ("mov %B0,%A0" CR_TAB
4896 return ("mov %B0,%A0" CR_TAB
4902 if (AVR_HAVE_MUL
&& scratch
)
4905 return ("ldi %3,0x20" CR_TAB
4909 "clr __zero_reg__");
4911 if (optimize_size
&& scratch
)
4916 return ("mov %B0,%A0" CR_TAB
4920 "ldi %3,0xe0" CR_TAB
4926 return ("set" CR_TAB
4931 "clr __zero_reg__");
4934 return ("mov %B0,%A0" CR_TAB
4943 if (AVR_HAVE_MUL
&& ldi_ok
)
4946 return ("ldi %B0,0x40" CR_TAB
4947 "mul %A0,%B0" CR_TAB
4950 "clr __zero_reg__");
4952 if (AVR_HAVE_MUL
&& scratch
)
4955 return ("ldi %3,0x40" CR_TAB
4959 "clr __zero_reg__");
4961 if (optimize_size
&& ldi_ok
)
4964 return ("mov %B0,%A0" CR_TAB
4965 "ldi %A0,6" "\n1:\t"
4970 if (optimize_size
&& scratch
)
4973 return ("clr %B0" CR_TAB
4982 return ("clr %B0" CR_TAB
4989 out_shift_with_cnt ("lsl %A0" CR_TAB
4990 "rol %B0", insn
, operands
, len
, 2);
4995 /* 24-bit shift left */
4998 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
5003 if (CONST_INT_P (op
[2]))
5005 switch (INTVAL (op
[2]))
5008 if (INTVAL (op
[2]) < 24)
5011 return avr_asm_len ("clr %A0" CR_TAB
5013 "clr %C0", op
, plen
, 3);
5017 int reg0
= REGNO (op
[0]);
5018 int reg1
= REGNO (op
[1]);
5021 return avr_asm_len ("mov %C0,%B1" CR_TAB
5022 "mov %B0,%A1" CR_TAB
5023 "clr %A0", op
, plen
, 3);
5025 return avr_asm_len ("clr %A0" CR_TAB
5026 "mov %B0,%A1" CR_TAB
5027 "mov %C0,%B1", op
, plen
, 3);
5032 int reg0
= REGNO (op
[0]);
5033 int reg1
= REGNO (op
[1]);
5035 if (reg0
+ 2 != reg1
)
5036 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
5038 return avr_asm_len ("clr %B0" CR_TAB
5039 "clr %A0", op
, plen
, 2);
5043 return avr_asm_len ("clr %C0" CR_TAB
5047 "clr %A0", op
, plen
, 5);
5051 out_shift_with_cnt ("lsl %A0" CR_TAB
5053 "rol %C0", insn
, op
, plen
, 3);
/* NOTE(review): this block is a lossy, line-mangled extraction of the
   upstream GCC AVR backend function ashlsi3_out (32-bit shift left);
   many original source lines are missing.  Restore from upstream before
   editing -- the fragments are kept verbatim below.  */
5058 /* 32bit shift left ((long)x << i) */
5061 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
5063 if (GET_CODE (operands
[2]) == CONST_INT
)
5071 switch (INTVAL (operands
[2]))
5074 if (INTVAL (operands
[2]) < 32)
5078 return *len
= 3, ("clr %D0" CR_TAB
5082 return ("clr %D0" CR_TAB
5089 int reg0
= true_regnum (operands
[0]);
5090 int reg1
= true_regnum (operands
[1]);
5093 return ("mov %D0,%C1" CR_TAB
5094 "mov %C0,%B1" CR_TAB
5095 "mov %B0,%A1" CR_TAB
5098 return ("clr %A0" CR_TAB
5099 "mov %B0,%A1" CR_TAB
5100 "mov %C0,%B1" CR_TAB
5106 int reg0
= true_regnum (operands
[0]);
5107 int reg1
= true_regnum (operands
[1]);
5108 if (reg0
+ 2 == reg1
)
5109 return *len
= 2, ("clr %B0" CR_TAB
5112 return *len
= 3, ("movw %C0,%A1" CR_TAB
5116 return *len
= 4, ("mov %C0,%A1" CR_TAB
5117 "mov %D0,%B1" CR_TAB
5124 return ("mov %D0,%A1" CR_TAB
5131 return ("clr %D0" CR_TAB
5140 out_shift_with_cnt ("lsl %A0" CR_TAB
5143 "rol %D0", insn
, operands
, len
, 4);
5147 /* 8bit arithmetic shift right ((signed char)x >> i) */
5150 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
5152 if (GET_CODE (operands
[2]) == CONST_INT
)
5159 switch (INTVAL (operands
[2]))
5167 return ("asr %0" CR_TAB
5172 return ("asr %0" CR_TAB
5178 return ("asr %0" CR_TAB
5185 return ("asr %0" CR_TAB
5193 return ("bst %0,6" CR_TAB
5199 if (INTVAL (operands
[2]) < 8)
5206 return ("lsl %0" CR_TAB
5210 else if (CONSTANT_P (operands
[2]))
5211 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5213 out_shift_with_cnt ("asr %0",
5214 insn
, operands
, len
, 1);
/* NOTE(review): this block is a lossy, line-mangled extraction of the
   upstream GCC AVR backend function ashrhi3_out (16-bit arithmetic shift
   right); many original source lines are missing.  Restore from upstream
   before editing -- the fragments are kept verbatim below.  */
5219 /* 16bit arithmetic shift right ((signed short)x >> i) */
5222 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
5224 if (GET_CODE (operands
[2]) == CONST_INT
)
5226 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5227 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5234 switch (INTVAL (operands
[2]))
5238 /* XXX try to optimize this too? */
5243 break; /* scratch ? 5 : 6 */
5245 return ("mov __tmp_reg__,%A0" CR_TAB
5246 "mov %A0,%B0" CR_TAB
5247 "lsl __tmp_reg__" CR_TAB
5249 "sbc %B0,%B0" CR_TAB
5250 "lsl __tmp_reg__" CR_TAB
5256 return ("lsl %A0" CR_TAB
5257 "mov %A0,%B0" CR_TAB
5263 int reg0
= true_regnum (operands
[0]);
5264 int reg1
= true_regnum (operands
[1]);
5267 return *len
= 3, ("mov %A0,%B0" CR_TAB
5271 return *len
= 4, ("mov %A0,%B1" CR_TAB
5279 return ("mov %A0,%B0" CR_TAB
5281 "sbc %B0,%B0" CR_TAB
5286 return ("mov %A0,%B0" CR_TAB
5288 "sbc %B0,%B0" CR_TAB
5293 if (AVR_HAVE_MUL
&& ldi_ok
)
5296 return ("ldi %A0,0x20" CR_TAB
5297 "muls %B0,%A0" CR_TAB
5299 "sbc %B0,%B0" CR_TAB
5300 "clr __zero_reg__");
5302 if (optimize_size
&& scratch
)
5305 return ("mov %A0,%B0" CR_TAB
5307 "sbc %B0,%B0" CR_TAB
5313 if (AVR_HAVE_MUL
&& ldi_ok
)
5316 return ("ldi %A0,0x10" CR_TAB
5317 "muls %B0,%A0" CR_TAB
5319 "sbc %B0,%B0" CR_TAB
5320 "clr __zero_reg__");
5322 if (optimize_size
&& scratch
)
5325 return ("mov %A0,%B0" CR_TAB
5327 "sbc %B0,%B0" CR_TAB
5334 if (AVR_HAVE_MUL
&& ldi_ok
)
5337 return ("ldi %A0,0x08" CR_TAB
5338 "muls %B0,%A0" CR_TAB
5340 "sbc %B0,%B0" CR_TAB
5341 "clr __zero_reg__");
5344 break; /* scratch ? 5 : 7 */
5346 return ("mov %A0,%B0" CR_TAB
5348 "sbc %B0,%B0" CR_TAB
5357 return ("lsl %B0" CR_TAB
5358 "sbc %A0,%A0" CR_TAB
5360 "mov %B0,%A0" CR_TAB
5364 if (INTVAL (operands
[2]) < 16)
5370 return *len
= 3, ("lsl %B0" CR_TAB
5371 "sbc %A0,%A0" CR_TAB
5376 out_shift_with_cnt ("asr %B0" CR_TAB
5377 "ror %A0", insn
, operands
, len
, 2);
5382 /* 24-bit arithmetic shift right */
5385 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5387 int dest
= REGNO (op
[0]);
5388 int src
= REGNO (op
[1]);
5390 if (CONST_INT_P (op
[2]))
5395 switch (INTVAL (op
[2]))
5399 return avr_asm_len ("mov %A0,%B1" CR_TAB
5400 "mov %B0,%C1" CR_TAB
5403 "dec %C0", op
, plen
, 5);
5405 return avr_asm_len ("clr %C0" CR_TAB
5408 "mov %B0,%C1" CR_TAB
5409 "mov %A0,%B1", op
, plen
, 5);
5412 if (dest
!= src
+ 2)
5413 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5415 return avr_asm_len ("clr %B0" CR_TAB
5418 "mov %C0,%B0", op
, plen
, 4);
5421 if (INTVAL (op
[2]) < 24)
5427 return avr_asm_len ("lsl %C0" CR_TAB
5428 "sbc %A0,%A0" CR_TAB
5429 "mov %B0,%A0" CR_TAB
5430 "mov %C0,%A0", op
, plen
, 4);
5434 out_shift_with_cnt ("asr %C0" CR_TAB
5436 "ror %A0", insn
, op
, plen
, 3);
/* NOTE(review): this block is a lossy, line-mangled extraction of the
   upstream GCC AVR backend function ashrsi3_out (32-bit arithmetic shift
   right); many original source lines are missing.  Restore from upstream
   before editing -- the fragments are kept verbatim below.  */
5441 /* 32bit arithmetic shift right ((signed long)x >> i) */
5444 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5446 if (GET_CODE (operands
[2]) == CONST_INT
)
5454 switch (INTVAL (operands
[2]))
5458 int reg0
= true_regnum (operands
[0]);
5459 int reg1
= true_regnum (operands
[1]);
5462 return ("mov %A0,%B1" CR_TAB
5463 "mov %B0,%C1" CR_TAB
5464 "mov %C0,%D1" CR_TAB
5469 return ("clr %D0" CR_TAB
5472 "mov %C0,%D1" CR_TAB
5473 "mov %B0,%C1" CR_TAB
5479 int reg0
= true_regnum (operands
[0]);
5480 int reg1
= true_regnum (operands
[1]);
5482 if (reg0
== reg1
+ 2)
5483 return *len
= 4, ("clr %D0" CR_TAB
5488 return *len
= 5, ("movw %A0,%C1" CR_TAB
5494 return *len
= 6, ("mov %B0,%D1" CR_TAB
5495 "mov %A0,%C1" CR_TAB
5503 return *len
= 6, ("mov %A0,%D1" CR_TAB
5507 "mov %B0,%D0" CR_TAB
5511 if (INTVAL (operands
[2]) < 32)
5518 return *len
= 4, ("lsl %D0" CR_TAB
5519 "sbc %A0,%A0" CR_TAB
5520 "mov %B0,%A0" CR_TAB
5523 return *len
= 5, ("lsl %D0" CR_TAB
5524 "sbc %A0,%A0" CR_TAB
5525 "mov %B0,%A0" CR_TAB
5526 "mov %C0,%A0" CR_TAB
5531 out_shift_with_cnt ("asr %D0" CR_TAB
5534 "ror %A0", insn
, operands
, len
, 4);
5538 /* 8bit logic shift right ((unsigned char)x >> i) */
5541 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5543 if (GET_CODE (operands
[2]) == CONST_INT
)
5550 switch (INTVAL (operands
[2]))
5553 if (INTVAL (operands
[2]) < 8)
5565 return ("lsr %0" CR_TAB
5569 return ("lsr %0" CR_TAB
5574 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5577 return ("swap %0" CR_TAB
5581 return ("lsr %0" CR_TAB
5587 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5590 return ("swap %0" CR_TAB
5595 return ("lsr %0" CR_TAB
5602 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5605 return ("swap %0" CR_TAB
5611 return ("lsr %0" CR_TAB
5620 return ("rol %0" CR_TAB
5625 else if (CONSTANT_P (operands
[2]))
5626 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5628 out_shift_with_cnt ("lsr %0",
5629 insn
, operands
, len
, 1);
/* NOTE(review): this block is a lossy, line-mangled extraction of the
   upstream GCC AVR backend function lshrhi3_out (16-bit logical shift
   right); many original source lines are missing.  Restore from upstream
   before editing -- the fragments are kept verbatim below.  */
5633 /* 16bit logic shift right ((unsigned short)x >> i) */
5636 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5638 if (GET_CODE (operands
[2]) == CONST_INT
)
5640 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5641 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5648 switch (INTVAL (operands
[2]))
5651 if (INTVAL (operands
[2]) < 16)
5655 return ("clr %B0" CR_TAB
5659 if (optimize_size
&& scratch
)
5664 return ("swap %B0" CR_TAB
5666 "andi %A0,0x0f" CR_TAB
5667 "eor %A0,%B0" CR_TAB
5668 "andi %B0,0x0f" CR_TAB
5674 return ("swap %B0" CR_TAB
5676 "ldi %3,0x0f" CR_TAB
5678 "eor %A0,%B0" CR_TAB
5682 break; /* optimize_size ? 6 : 8 */
5686 break; /* scratch ? 5 : 6 */
5690 return ("lsr %B0" CR_TAB
5694 "andi %A0,0x0f" CR_TAB
5695 "eor %A0,%B0" CR_TAB
5696 "andi %B0,0x0f" CR_TAB
5702 return ("lsr %B0" CR_TAB
5706 "ldi %3,0x0f" CR_TAB
5708 "eor %A0,%B0" CR_TAB
5716 break; /* scratch ? 5 : 6 */
5718 return ("clr __tmp_reg__" CR_TAB
5721 "rol __tmp_reg__" CR_TAB
5724 "rol __tmp_reg__" CR_TAB
5725 "mov %A0,%B0" CR_TAB
5726 "mov %B0,__tmp_reg__");
5730 return ("lsl %A0" CR_TAB
5731 "mov %A0,%B0" CR_TAB
5733 "sbc %B0,%B0" CR_TAB
5737 return *len
= 2, ("mov %A0,%B1" CR_TAB
5742 return ("mov %A0,%B0" CR_TAB
5748 return ("mov %A0,%B0" CR_TAB
5755 return ("mov %A0,%B0" CR_TAB
5765 return ("mov %A0,%B0" CR_TAB
5773 return ("mov %A0,%B0" CR_TAB
5776 "ldi %3,0x0f" CR_TAB
5780 return ("mov %A0,%B0" CR_TAB
5791 return ("mov %A0,%B0" CR_TAB
5797 if (AVR_HAVE_MUL
&& scratch
)
5800 return ("ldi %3,0x08" CR_TAB
5804 "clr __zero_reg__");
5806 if (optimize_size
&& scratch
)
5811 return ("mov %A0,%B0" CR_TAB
5815 "ldi %3,0x07" CR_TAB
5821 return ("set" CR_TAB
5826 "clr __zero_reg__");
5829 return ("mov %A0,%B0" CR_TAB
5838 if (AVR_HAVE_MUL
&& ldi_ok
)
5841 return ("ldi %A0,0x04" CR_TAB
5842 "mul %B0,%A0" CR_TAB
5845 "clr __zero_reg__");
5847 if (AVR_HAVE_MUL
&& scratch
)
5850 return ("ldi %3,0x04" CR_TAB
5854 "clr __zero_reg__");
5856 if (optimize_size
&& ldi_ok
)
5859 return ("mov %A0,%B0" CR_TAB
5860 "ldi %B0,6" "\n1:\t"
5865 if (optimize_size
&& scratch
)
5868 return ("clr %A0" CR_TAB
5877 return ("clr %A0" CR_TAB
5884 out_shift_with_cnt ("lsr %B0" CR_TAB
5885 "ror %A0", insn
, operands
, len
, 2);
5890 /* 24-bit logic shift right */
5893 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5895 int dest
= REGNO (op
[0]);
5896 int src
= REGNO (op
[1]);
5898 if (CONST_INT_P (op
[2]))
5903 switch (INTVAL (op
[2]))
5907 return avr_asm_len ("mov %A0,%B1" CR_TAB
5908 "mov %B0,%C1" CR_TAB
5909 "clr %C0", op
, plen
, 3);
5911 return avr_asm_len ("clr %C0" CR_TAB
5912 "mov %B0,%C1" CR_TAB
5913 "mov %A0,%B1", op
, plen
, 3);
5916 if (dest
!= src
+ 2)
5917 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5919 return avr_asm_len ("clr %B0" CR_TAB
5920 "clr %C0", op
, plen
, 2);
5923 if (INTVAL (op
[2]) < 24)
5929 return avr_asm_len ("clr %A0" CR_TAB
5933 "clr %C0", op
, plen
, 5);
5937 out_shift_with_cnt ("lsr %C0" CR_TAB
5939 "ror %A0", insn
, op
, plen
, 3);
/* NOTE(review): this block is a lossy, line-mangled extraction of the
   upstream GCC AVR backend function lshrsi3_out (32-bit logical shift
   right); many original source lines are missing.  Restore from upstream
   before editing -- the fragments are kept verbatim below.  */
5944 /* 32bit logic shift right ((unsigned int)x >> i) */
5947 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
5949 if (GET_CODE (operands
[2]) == CONST_INT
)
5957 switch (INTVAL (operands
[2]))
5960 if (INTVAL (operands
[2]) < 32)
5964 return *len
= 3, ("clr %D0" CR_TAB
5968 return ("clr %D0" CR_TAB
5975 int reg0
= true_regnum (operands
[0]);
5976 int reg1
= true_regnum (operands
[1]);
5979 return ("mov %A0,%B1" CR_TAB
5980 "mov %B0,%C1" CR_TAB
5981 "mov %C0,%D1" CR_TAB
5984 return ("clr %D0" CR_TAB
5985 "mov %C0,%D1" CR_TAB
5986 "mov %B0,%C1" CR_TAB
5992 int reg0
= true_regnum (operands
[0]);
5993 int reg1
= true_regnum (operands
[1]);
5995 if (reg0
== reg1
+ 2)
5996 return *len
= 2, ("clr %C0" CR_TAB
5999 return *len
= 3, ("movw %A0,%C1" CR_TAB
6003 return *len
= 4, ("mov %B0,%D1" CR_TAB
6004 "mov %A0,%C1" CR_TAB
6010 return *len
= 4, ("mov %A0,%D1" CR_TAB
6017 return ("clr %A0" CR_TAB
6026 out_shift_with_cnt ("lsr %D0" CR_TAB
6029 "ror %A0", insn
, operands
, len
, 4);
6034 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6036 XOP[0] = XOP[0] + XOP[2]
6038 and return "". If PLEN == NULL, print assembler instructions to perform the
6039 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6040 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
6041 CODE == PLUS: perform addition by using ADD instructions.
6042 CODE == MINUS: perform addition by using SUB instructions.
6043 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
6046 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
)
6048 /* MODE of the operation. */
6049 enum machine_mode mode
= GET_MODE (xop
[0]);
6051 /* Number of bytes to operate on. */
6052 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6054 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6055 int clobber_val
= -1;
6057 /* op[0]: 8-bit destination register
6058 op[1]: 8-bit const int
6059 op[2]: 8-bit scratch register */
6062 /* Started the operation? Before starting the operation we may skip
6063 adding 0. This is no more true after the operation started because
6064 carry must be taken into account. */
6065 bool started
= false;
6067 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6070 /* Except in the case of ADIW with 16-bit register (see below)
6071 addition does not set cc0 in a usable way. */
6073 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
6076 xval
= simplify_unary_operation (NEG
, mode
, xval
, mode
);
6083 for (i
= 0; i
< n_bytes
; i
++)
6085 /* We operate byte-wise on the destination. */
6086 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6087 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
6089 /* 8-bit value to operate with this byte. */
6090 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6092 /* Registers R16..R31 can operate with immediate. */
6093 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6096 op
[1] = gen_int_mode (val8
, QImode
);
6098 /* To get usable cc0 no low-bytes must have been skipped. */
6106 && test_hard_reg_class (ADDW_REGS
, reg8
))
6108 rtx xval16
= simplify_gen_subreg (HImode
, xval
, mode
, i
);
6109 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
6111 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6112 i.e. operate word-wise. */
6119 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
6122 if (n_bytes
== 2 && PLUS
== code
)
6134 avr_asm_len (code
== PLUS
6135 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6139 else if ((val8
== 1 || val8
== 0xff)
6141 && i
== n_bytes
- 1)
6143 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
6152 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6154 if (clobber_val
!= (int) val8
)
6155 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6156 clobber_val
= (int) val8
;
6158 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
6165 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
6168 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6170 if (clobber_val
!= (int) val8
)
6171 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6172 clobber_val
= (int) val8
;
6174 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
6186 } /* for all sub-bytes */
6188 /* No output doesn't change cc0. */
6190 if (plen
&& *plen
== 0)
6195 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6197 XOP[0] = XOP[0] + XOP[2]
6199 and return "". If PLEN == NULL, print assembler instructions to perform the
6200 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6201 words) printed with PLEN == NULL.
6202 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
6203 condition code (with respect to XOP[0]). */
6206 avr_out_plus (rtx
*xop
, int *plen
, int *pcc
)
6208 int len_plus
, len_minus
;
6209 int cc_plus
, cc_minus
, cc_dummy
;
6214 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6216 avr_out_plus_1 (xop
, &len_plus
, PLUS
, &cc_plus
);
6217 avr_out_plus_1 (xop
, &len_minus
, MINUS
, &cc_minus
);
6219 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6223 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
6224 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
6226 else if (len_minus
<= len_plus
)
6227 avr_out_plus_1 (xop
, NULL
, MINUS
, pcc
);
6229 avr_out_plus_1 (xop
, NULL
, PLUS
, pcc
);
6235 /* Same as above but XOP has just 3 entries.
6236 Supply a dummy 4th operand. */
6239 avr_out_plus_noclobber (rtx
*xop
, int *plen
, int *pcc
)
6248 return avr_out_plus (op
, plen
, pcc
);
6252 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6255 avr_out_plus64 (rtx addend
, int *plen
)
6260 op
[0] = gen_rtx_REG (DImode
, 18);
6265 avr_out_plus_1 (op
, plen
, MINUS
, &cc_dummy
);
6270 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6271 time constant XOP[2]:
6273 XOP[0] = XOP[0] <op> XOP[2]
6275 and return "". If PLEN == NULL, print assembler instructions to perform the
6276 operation; otherwise, set *PLEN to the length of the instruction sequence
6277 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6278 register or SCRATCH if no clobber register is needed for the operation. */
6281 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6283 /* CODE and MODE of the operation. */
6284 enum rtx_code code
= GET_CODE (SET_SRC (single_set (insn
)));
6285 enum machine_mode mode
= GET_MODE (xop
[0]);
6287 /* Number of bytes to operate on. */
6288 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6290 /* Value of T-flag (0 or 1) or -1 if unknow. */
6293 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6294 int clobber_val
= -1;
6296 /* op[0]: 8-bit destination register
6297 op[1]: 8-bit const int
6298 op[2]: 8-bit clobber register or SCRATCH
6299 op[3]: 8-bit register containing 0xff or NULL_RTX */
6308 for (i
= 0; i
< n_bytes
; i
++)
6310 /* We operate byte-wise on the destination. */
6311 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6312 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6314 /* 8-bit value to operate with this byte. */
6315 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6317 /* Number of bits set in the current byte of the constant. */
6318 int pop8
= avr_popcount (val8
);
6320 /* Registers R16..R31 can operate with immediate. */
6321 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6324 op
[1] = GEN_INT (val8
);
6333 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6337 avr_asm_len ("set", op
, plen
, 1);
6340 op
[1] = GEN_INT (exact_log2 (val8
));
6341 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6345 if (op
[3] != NULL_RTX
)
6346 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6348 avr_asm_len ("clr %0" CR_TAB
6349 "dec %0", op
, plen
, 2);
6355 if (clobber_val
!= (int) val8
)
6356 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6357 clobber_val
= (int) val8
;
6359 avr_asm_len ("or %0,%2", op
, plen
, 1);
6369 avr_asm_len ("clr %0", op
, plen
, 1);
6371 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6375 avr_asm_len ("clt", op
, plen
, 1);
6378 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6379 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6383 if (clobber_val
!= (int) val8
)
6384 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6385 clobber_val
= (int) val8
;
6387 avr_asm_len ("and %0,%2", op
, plen
, 1);
6397 avr_asm_len ("com %0", op
, plen
, 1);
6398 else if (ld_reg_p
&& val8
== (1 << 7))
6399 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6402 if (clobber_val
!= (int) val8
)
6403 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6404 clobber_val
= (int) val8
;
6406 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6412 /* Unknown rtx_code */
6415 } /* for all sub-bytes */
6421 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6422 PLEN != NULL: Set *PLEN to the length of that sequence.
6426 avr_out_addto_sp (rtx
*op
, int *plen
)
6428 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6429 int addend
= INTVAL (op
[0]);
6436 if (flag_verbose_asm
|| flag_print_asm_name
)
6437 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
6439 while (addend
<= -pc_len
)
6442 avr_asm_len ("rcall .", op
, plen
, 1);
6445 while (addend
++ < 0)
6446 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
6448 else if (addend
> 0)
6450 if (flag_verbose_asm
|| flag_print_asm_name
)
6451 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
6453 while (addend
-- > 0)
6454 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
6461 /* Create RTL split patterns for byte sized rotate expressions. This
6462 produces a series of move instructions and considers overlap situations.
6463 Overlapping non-HImode operands need a scratch register. */
6466 avr_rotate_bytes (rtx operands
[])
6469 enum machine_mode mode
= GET_MODE (operands
[0]);
6470 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
6471 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
6472 int num
= INTVAL (operands
[2]);
6473 rtx scratch
= operands
[3];
6474 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6475 Word move if no scratch is needed, otherwise use size of scratch. */
6476 enum machine_mode move_mode
= QImode
;
6477 int move_size
, offset
, size
;
6481 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
6484 move_mode
= GET_MODE (scratch
);
6486 /* Force DI rotate to use QI moves since other DI moves are currently split
6487 into QI moves so forward propagation works better. */
6490 /* Make scratch smaller if needed. */
6491 if (SCRATCH
!= GET_CODE (scratch
)
6492 && HImode
== GET_MODE (scratch
)
6493 && QImode
== move_mode
)
6494 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
6496 move_size
= GET_MODE_SIZE (move_mode
);
6497 /* Number of bytes/words to rotate. */
6498 offset
= (num
>> 3) / move_size
;
6499 /* Number of moves needed. */
6500 size
= GET_MODE_SIZE (mode
) / move_size
;
6501 /* Himode byte swap is special case to avoid a scratch register. */
6502 if (mode
== HImode
&& same_reg
)
6504 /* HImode byte swap, using xor. This is as quick as using scratch. */
6506 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
6507 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
6508 if (!rtx_equal_p (dst
, src
))
6510 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6511 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
6512 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6517 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6518 /* Create linked list of moves to determine move order. */
6522 } move
[MAX_SIZE
+ 8];
6525 gcc_assert (size
<= MAX_SIZE
);
6526 /* Generate list of subreg moves. */
6527 for (i
= 0; i
< size
; i
++)
6530 int to
= (from
+ offset
) % size
;
6531 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
6532 mode
, from
* move_size
);
6533 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
6534 mode
, to
* move_size
);
6537 /* Mark dependence where a dst of one move is the src of another move.
6538 The first move is a conflict as it must wait until second is
6539 performed. We ignore moves to self - we catch this later. */
6541 for (i
= 0; i
< size
; i
++)
6542 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
6543 for (j
= 0; j
< size
; j
++)
6544 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
6546 /* The dst of move i is the src of move j. */
6553 /* Go through move list and perform non-conflicting moves. As each
6554 non-overlapping move is made, it may remove other conflicts
6555 so the process is repeated until no conflicts remain. */
6560 /* Emit move where dst is not also a src or we have used that
6562 for (i
= 0; i
< size
; i
++)
6563 if (move
[i
].src
!= NULL_RTX
)
6565 if (move
[i
].links
== -1
6566 || move
[move
[i
].links
].src
== NULL_RTX
)
6569 /* Ignore NOP moves to self. */
6570 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
6571 emit_move_insn (move
[i
].dst
, move
[i
].src
);
6573 /* Remove conflict from list. */
6574 move
[i
].src
= NULL_RTX
;
6580 /* Check for deadlock. This is when no moves occurred and we have
6581 at least one blocked move. */
6582 if (moves
== 0 && blocked
!= -1)
6584 /* Need to use scratch register to break deadlock.
6585 Add move to put dst of blocked move into scratch.
6586 When this move occurs, it will break chain deadlock.
6587 The scratch register is substituted for real move. */
6589 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
6591 move
[size
].src
= move
[blocked
].dst
;
6592 move
[size
].dst
= scratch
;
6593 /* Scratch move is never blocked. */
6594 move
[size
].links
= -1;
6595 /* Make sure we have valid link. */
6596 gcc_assert (move
[blocked
].links
!= -1);
6597 /* Replace src of blocking move with scratch reg. */
6598 move
[move
[blocked
].links
].src
= scratch
;
6599 /* Make dependent on scratch move occuring. */
6600 move
[blocked
].links
= size
;
6604 while (blocked
!= -1);
6609 /* Modifies the length assigned to instruction INSN
6610 LEN is the initially computed length of the insn. */
6613 adjust_insn_length (rtx insn
, int len
)
6615 rtx
*op
= recog_data
.operand
;
6616 enum attr_adjust_len adjust_len
;
6618 /* Some complex insns don't need length adjustment and therefore
6619 the length need not/must not be adjusted for these insns.
6620 It is easier to state this in an insn attribute "adjust_len" than
6621 to clutter up code here... */
6623 if (-1 == recog_memoized (insn
))
6628 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6630 adjust_len
= get_attr_adjust_len (insn
);
6632 if (adjust_len
== ADJUST_LEN_NO
)
6634 /* Nothing to adjust: The length from attribute "length" is fine.
6635 This is the default. */
6640 /* Extract insn's operands. */
6642 extract_constrain_insn_cached (insn
);
6644 /* Dispatch to right function. */
6648 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
6649 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
6650 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
6652 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
6654 case ADJUST_LEN_OUT_PLUS
: avr_out_plus (op
, &len
, NULL
); break;
6655 case ADJUST_LEN_PLUS64
: avr_out_plus64 (op
[0], &len
); break;
6656 case ADJUST_LEN_OUT_PLUS_NOCLOBBER
:
6657 avr_out_plus_noclobber (op
, &len
, NULL
); break;
6659 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
6661 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
6662 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
6663 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
6664 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
6665 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
6666 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
6668 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
6669 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
6670 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
6671 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
6672 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
6674 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
6675 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
6676 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
6678 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
6679 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
6680 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
6682 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
6683 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
6684 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
6686 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
6687 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
6688 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
6690 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
6692 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
6701 /* Return nonzero if register REG dead after INSN. */
6704 reg_unused_after (rtx insn
, rtx reg
)
6706 return (dead_or_set_p (insn
, reg
)
6707 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
6710 /* Return nonzero if REG is not used after INSN.
6711 We assume REG is a reload reg, and therefore does
6712 not live past labels. It may live past calls or jumps though. */
6715 _reg_unused_after (rtx insn
, rtx reg
)
6720 /* If the reg is set by this instruction, then it is safe for our
6721 case. Disregard the case where this is a store to memory, since
6722 we are checking a register used in the store address. */
6723 set
= single_set (insn
);
6724 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
6725 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6728 while ((insn
= NEXT_INSN (insn
)))
6731 code
= GET_CODE (insn
);
6734 /* If this is a label that existed before reload, then the register
6735 if dead here. However, if this is a label added by reorg, then
6736 the register may still be live here. We can't tell the difference,
6737 so we just ignore labels completely. */
6738 if (code
== CODE_LABEL
)
6746 if (code
== JUMP_INSN
)
6749 /* If this is a sequence, we must handle them all at once.
6750 We could have for instance a call that sets the target register,
6751 and an insn in a delay slot that uses the register. In this case,
6752 we must return 0. */
6753 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
6758 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
6760 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
6761 rtx set
= single_set (this_insn
);
6763 if (GET_CODE (this_insn
) == CALL_INSN
)
6765 else if (GET_CODE (this_insn
) == JUMP_INSN
)
6767 if (INSN_ANNULLED_BRANCH_P (this_insn
))
6772 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6774 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6776 if (GET_CODE (SET_DEST (set
)) != MEM
)
6782 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
6787 else if (code
== JUMP_INSN
)
6791 if (code
== CALL_INSN
)
6794 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
6795 if (GET_CODE (XEXP (tem
, 0)) == USE
6796 && REG_P (XEXP (XEXP (tem
, 0), 0))
6797 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
6799 if (call_used_regs
[REGNO (reg
)])
6803 set
= single_set (insn
);
6805 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6807 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6808 return GET_CODE (SET_DEST (set
)) != MEM
;
6809 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
6816 /* Return RTX that represents the lower 16 bits of a constant address.
6817 Unfortunately, simplify_gen_subreg does not handle this case. */
6820 avr_const_address_lo16 (rtx x
)
6824 switch (GET_CODE (x
))
6830 if (PLUS
== GET_CODE (XEXP (x
, 0))
6831 && SYMBOL_REF
== GET_CODE (XEXP (XEXP (x
, 0), 0))
6832 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
6834 HOST_WIDE_INT offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
6835 const char *name
= XSTR (XEXP (XEXP (x
, 0), 0), 0);
6837 lo16
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6838 lo16
= gen_rtx_CONST (Pmode
, plus_constant (lo16
, offset
));
6847 const char *name
= XSTR (x
, 0);
6849 return gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6853 avr_edump ("\n%?: %r\n", x
);
6858 /* Target hook for assembling integer objects. The AVR version needs
6859 special handling for references to certain labels. */
6862 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
6864 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
6865 && text_segment_operand (x
, VOIDmode
) )
6867 fputs ("\t.word\tgs(", asm_out_file
);
6868 output_addr_const (asm_out_file
, x
);
6869 fputs (")\n", asm_out_file
);
6873 else if (GET_MODE (x
) == PSImode
)
6875 default_assemble_integer (avr_const_address_lo16 (x
),
6876 GET_MODE_SIZE (HImode
), aligned_p
);
6878 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6879 " extension for hh8(", asm_out_file
);
6880 output_addr_const (asm_out_file
, x
);
6881 fputs (")\"\n", asm_out_file
);
6883 fputs ("\t.byte\t0\t" ASM_COMMENT_START
" hh8(", asm_out_file
);
6884 output_addr_const (asm_out_file
, x
);
6885 fputs (")\n", asm_out_file
);
6890 return default_assemble_integer (x
, size
, aligned_p
);
6894 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6897 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
6900 /* If the function has the 'signal' or 'interrupt' attribute, test to
6901 make sure that the name of the function is "__vector_NN" so as to
6902 catch when the user misspells the interrupt vector name. */
6904 if (cfun
->machine
->is_interrupt
)
6906 if (!STR_PREFIX_P (name
, "__vector"))
6908 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6909 "%qs appears to be a misspelled interrupt handler",
6913 else if (cfun
->machine
->is_signal
)
6915 if (!STR_PREFIX_P (name
, "__vector"))
6917 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6918 "%qs appears to be a misspelled signal handler",
6923 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
6924 ASM_OUTPUT_LABEL (file
, name
);
6928 /* Return value is nonzero if pseudos that have been
6929 assigned to registers of class CLASS would likely be spilled
6930 because registers of CLASS are needed for spill registers. */
6933 avr_class_likely_spilled_p (reg_class_t c
)
6935 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
6938 /* Valid attributes:
6939 progmem - put data to program memory;
6940 signal - make a function to be hardware interrupt. After function
6941 prologue interrupts are disabled;
6942 interrupt - make a function to be hardware interrupt. After function
6943 prologue interrupts are enabled;
6944 naked - don't generate function prologue/epilogue and `ret' command.
6946 Only `progmem' attribute valid for type. */
6948 /* Handle a "progmem" attribute; arguments as in
6949 struct attribute_spec.handler. */
6951 avr_handle_progmem_attribute (tree
*node
, tree name
,
6952 tree args ATTRIBUTE_UNUSED
,
6953 int flags ATTRIBUTE_UNUSED
,
6958 if (TREE_CODE (*node
) == TYPE_DECL
)
6960 /* This is really a decl attribute, not a type attribute,
6961 but try to handle it for GCC 3.0 backwards compatibility. */
6963 tree type
= TREE_TYPE (*node
);
6964 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
6965 tree newtype
= build_type_attribute_variant (type
, attr
);
6967 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
6968 TREE_TYPE (*node
) = newtype
;
6969 *no_add_attrs
= true;
6971 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
6973 *no_add_attrs
= false;
6977 warning (OPT_Wattributes
, "%qE attribute ignored",
6979 *no_add_attrs
= true;
6986 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6987 struct attribute_spec.handler. */
6990 avr_handle_fndecl_attribute (tree
*node
, tree name
,
6991 tree args ATTRIBUTE_UNUSED
,
6992 int flags ATTRIBUTE_UNUSED
,
6995 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6997 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
6999 *no_add_attrs
= true;
7006 avr_handle_fntype_attribute (tree
*node
, tree name
,
7007 tree args ATTRIBUTE_UNUSED
,
7008 int flags ATTRIBUTE_UNUSED
,
7011 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
7013 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
7015 *no_add_attrs
= true;
7022 /* AVR attributes. */
7023 static const struct attribute_spec
7024 avr_attribute_table
[] =
7026 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7027 affects_type_identity } */
7028 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
7030 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7032 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7034 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7036 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7038 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7040 { NULL
, 0, 0, false, false, false, NULL
, false }
7044 /* Look if DECL shall be placed in program memory space by
7045 means of attribute `progmem' or some address-space qualifier.
7046 Return non-zero if DECL is data that must end up in Flash and
7047 zero if the data lives in RAM (.bss, .data, .rodata, ...).
7049 Return 2 if DECL is located in 24-bit flash address-space
7050 Return 1 if DECL is located in 16-bit flash address-space
7051 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
7052 Return 0 otherwise */
7055 avr_progmem_p (tree decl
, tree attributes
)
7059 if (TREE_CODE (decl
) != VAR_DECL
)
7062 if (avr_decl_memx_p (decl
))
7065 if (avr_decl_flash_p (decl
))
7069 != lookup_attribute ("progmem", attributes
))
7076 while (TREE_CODE (a
) == ARRAY_TYPE
);
7078 if (a
== error_mark_node
)
7081 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
7088 /* Scan type TYP for pointer references to address space ASn.
7089 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
7090 the AS are also declared to be CONST.
7091 Otherwise, return the respective addres space, i.e. a value != 0. */
7094 avr_nonconst_pointer_addrspace (tree typ
)
7096 while (ARRAY_TYPE
== TREE_CODE (typ
))
7097 typ
= TREE_TYPE (typ
);
7099 if (POINTER_TYPE_P (typ
))
7102 tree target
= TREE_TYPE (typ
);
7104 /* Pointer to function: Test the function's return type. */
7106 if (FUNCTION_TYPE
== TREE_CODE (target
))
7107 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
7109 /* "Ordinary" pointers... */
7111 while (TREE_CODE (target
) == ARRAY_TYPE
)
7112 target
= TREE_TYPE (target
);
7114 /* Pointers to non-generic address space must be const.
7115 Refuse address spaces outside the device's flash. */
7117 as
= TYPE_ADDR_SPACE (target
);
7119 if (!ADDR_SPACE_GENERIC_P (as
)
7120 && (!TYPE_READONLY (target
)
7121 || avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
))
7126 /* Scan pointer's target type. */
7128 return avr_nonconst_pointer_addrspace (target
);
7131 return ADDR_SPACE_GENERIC
;
7135 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
7136 go along with CONST qualifier. Writing to these address spaces should
7137 be detected and complained about as early as possible. */
7140 avr_pgm_check_var_decl (tree node
)
7142 const char *reason
= NULL
;
7144 addr_space_t as
= ADDR_SPACE_GENERIC
;
7146 gcc_assert (as
== 0);
7148 if (avr_log
.progmem
)
7149 avr_edump ("%?: %t\n", node
);
7151 switch (TREE_CODE (node
))
7157 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7158 reason
= "variable";
7162 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7163 reason
= "function parameter";
7167 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7168 reason
= "structure field";
7172 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
7174 reason
= "return type of function";
7178 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
7185 avr_edump ("%?: %s, %d, %d\n",
7186 avr_addrspace
[as
].name
,
7187 avr_addrspace
[as
].segment
, avr_current_device
->n_flash
);
7188 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7191 error ("%qT uses address space %qs beyond flash of %qs",
7192 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7194 error ("%s %q+D uses address space %qs beyond flash of %qs",
7195 reason
, node
, avr_addrspace
[as
].name
,
7196 avr_current_device
->name
);
7201 error ("pointer targeting address space %qs must be const in %qT",
7202 avr_addrspace
[as
].name
, node
);
7204 error ("pointer targeting address space %qs must be const"
7206 avr_addrspace
[as
].name
, reason
, node
);
7210 return reason
== NULL
;
7214 /* Add the section attribute if the variable is in progmem. */
7217 avr_insert_attributes (tree node
, tree
*attributes
)
7219 avr_pgm_check_var_decl (node
);
7221 if (TREE_CODE (node
) == VAR_DECL
7222 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
7223 && avr_progmem_p (node
, *attributes
))
7228 /* For C++, we have to peel arrays in order to get correct
7229 determination of readonlyness. */
7232 node0
= TREE_TYPE (node0
);
7233 while (TREE_CODE (node0
) == ARRAY_TYPE
);
7235 if (error_mark_node
== node0
)
7238 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
7240 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7242 error ("variable %q+D located in address space %qs"
7243 " beyond flash of %qs",
7244 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7247 if (!TYPE_READONLY (node0
)
7248 && !TREE_READONLY (node
))
7250 const char *reason
= "__attribute__((progmem))";
7252 if (!ADDR_SPACE_GENERIC_P (as
))
7253 reason
= avr_addrspace
[as
].name
;
7255 if (avr_log
.progmem
)
7256 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
7258 error ("variable %q+D must be const in order to be put into"
7259 " read-only section by means of %qs", node
, reason
);
7265 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7266 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7267 /* Track need of __do_clear_bss. */
7270 avr_asm_output_aligned_decl_common (FILE * stream
,
7271 const_tree decl ATTRIBUTE_UNUSED
,
7273 unsigned HOST_WIDE_INT size
,
7274 unsigned int align
, bool local_p
)
7276 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7277 There is no need to trigger __do_clear_bss code for them. */
7279 if (!STR_PREFIX_P (name
, "__gnu_lto"))
7280 avr_need_clear_bss_p
= true;
7283 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
7285 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
7289 /* Unnamed section callback for data_section
7290 to track need of __do_copy_data. */
7293 avr_output_data_section_asm_op (const void *data
)
7295 avr_need_copy_data_p
= true;
7297 /* Dispatch to default. */
7298 output_section_asm_op (data
);
7302 /* Unnamed section callback for bss_section
7303 to track need of __do_clear_bss. */
7306 avr_output_bss_section_asm_op (const void *data
)
7308 avr_need_clear_bss_p
= true;
7310 /* Dispatch to default. */
7311 output_section_asm_op (data
);
7315 /* Unnamed section callback for progmem*.data sections. */
7318 avr_output_progmem_section_asm_op (const void *data
)
7320 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
7321 (const char*) data
);
7325 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7328 avr_asm_init_sections (void)
7332 /* Set up a section for jump tables. Alignment is handled by
7333 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7335 if (AVR_HAVE_JMP_CALL
)
7337 progmem_swtable_section
7338 = get_unnamed_section (0, output_section_asm_op
,
7339 "\t.section\t.progmem.gcc_sw_table"
7340 ",\"a\",@progbits");
7344 progmem_swtable_section
7345 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7346 "\t.section\t.progmem.gcc_sw_table"
7347 ",\"ax\",@progbits");
7350 for (n
= 0; n
< sizeof (progmem_section
) / sizeof (*progmem_section
); n
++)
7353 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
7354 progmem_section_prefix
[n
]);
7357 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7358 resp. `avr_need_copy_data_p'. */
7360 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7361 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7362 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
7366 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7369 avr_asm_function_rodata_section (tree decl
)
7371 /* If a function is unused and optimized out by -ffunction-sections
7372 and --gc-sections, ensure that the same will happen for its jump
7373 tables by putting them into individual sections. */
7378 /* Get the frodata section from the default function in varasm.c
7379 but treat function-associated data-like jump tables as code
7380 rather than as user defined data. AVR has no constant pools. */
7382 int fdata
= flag_data_sections
;
7384 flag_data_sections
= flag_function_sections
;
7385 frodata
= default_function_rodata_section (decl
);
7386 flag_data_sections
= fdata
;
7387 flags
= frodata
->common
.flags
;
7390 if (frodata
!= readonly_data_section
7391 && flags
& SECTION_NAMED
)
7393 /* Adjust section flags and replace section name prefix. */
7397 static const char* const prefix
[] =
7399 ".rodata", ".progmem.gcc_sw_table",
7400 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7403 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
7405 const char * old_prefix
= prefix
[i
];
7406 const char * new_prefix
= prefix
[i
+1];
7407 const char * name
= frodata
->named
.name
;
7409 if (STR_PREFIX_P (name
, old_prefix
))
7411 const char *rname
= ACONCAT ((new_prefix
,
7412 name
+ strlen (old_prefix
), NULL
));
7413 flags
&= ~SECTION_CODE
;
7414 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
7416 return get_section (rname
, flags
, frodata
->named
.decl
);
7421 return progmem_swtable_section
;
7425 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7426 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7429 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
7431 if (flags
& AVR_SECTION_PROGMEM
)
7433 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
7434 int segment
= avr_addrspace
[as
].segment
;
7435 const char *old_prefix
= ".rodata";
7436 const char *new_prefix
= progmem_section_prefix
[segment
];
7438 if (STR_PREFIX_P (name
, old_prefix
))
7440 const char *sname
= ACONCAT ((new_prefix
,
7441 name
+ strlen (old_prefix
), NULL
));
7442 default_elf_asm_named_section (sname
, flags
, decl
);
7446 default_elf_asm_named_section (new_prefix
, flags
, decl
);
7450 if (!avr_need_copy_data_p
)
7451 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
7452 || STR_PREFIX_P (name
, ".rodata")
7453 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
7455 if (!avr_need_clear_bss_p
)
7456 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
7458 default_elf_asm_named_section (name
, flags
, decl
);
7462 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
7464 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
7466 if (STR_PREFIX_P (name
, ".noinit"))
7468 if (decl
&& TREE_CODE (decl
) == VAR_DECL
7469 && DECL_INITIAL (decl
) == NULL_TREE
)
7470 flags
|= SECTION_BSS
; /* @nobits */
7472 warning (0, "only uninitialized variables can be placed in the "
7476 if (decl
&& DECL_P (decl
)
7477 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7479 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7481 /* Attribute progmem puts data in generic address space.
7482 Set section flags as if it was in __flash to get the right
7483 section prefix in the remainder. */
7485 if (ADDR_SPACE_GENERIC_P (as
))
7486 as
= ADDR_SPACE_FLASH
;
7488 flags
|= as
* SECTION_MACH_DEP
;
7489 flags
&= ~SECTION_WRITE
;
7490 flags
&= ~SECTION_BSS
;
7497 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7500 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
7502 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7503 readily available, see PR34734. So we postpone the warning
7504 about uninitialized data in program memory section until here. */
7507 && decl
&& DECL_P (decl
)
7508 && NULL_TREE
== DECL_INITIAL (decl
)
7509 && !DECL_EXTERNAL (decl
)
7510 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7512 warning (OPT_Wuninitialized
,
7513 "uninitialized variable %q+D put into "
7514 "program memory area", decl
);
7517 default_encode_section_info (decl
, rtl
, new_decl_p
);
7519 if (decl
&& DECL_P (decl
)
7520 && TREE_CODE (decl
) != FUNCTION_DECL
7522 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
7524 rtx sym
= XEXP (rtl
, 0);
7525 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7527 /* PSTR strings are in generic space but located in flash:
7528 patch address space. */
7530 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7531 as
= ADDR_SPACE_FLASH
;
7533 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
7538 /* Implement `TARGET_ASM_SELECT_SECTION' */
7541 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
7543 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
7545 if (decl
&& DECL_P (decl
)
7546 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7548 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7549 int segment
= avr_addrspace
[as
].segment
;
7551 if (sect
->common
.flags
& SECTION_NAMED
)
7553 const char * name
= sect
->named
.name
;
7554 const char * old_prefix
= ".rodata";
7555 const char * new_prefix
= progmem_section_prefix
[segment
];
7557 if (STR_PREFIX_P (name
, old_prefix
))
7559 const char *sname
= ACONCAT ((new_prefix
,
7560 name
+ strlen (old_prefix
), NULL
));
7561 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
7565 return progmem_section
[segment
];
7571 /* Implement `TARGET_ASM_FILE_START'. */
7572 /* Outputs some text at the start of each assembler file. */
7575 avr_file_start (void)
7577 int sfr_offset
= avr_current_arch
->sfr_offset
;
7579 if (avr_current_arch
->asm_only
)
7580 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
7582 default_file_start ();
7584 /* Print I/O addresses of some SFRs used with IN and OUT. */
7586 if (!AVR_HAVE_8BIT_SP
)
7587 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
7589 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
7590 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
7592 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
7594 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
7596 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
7598 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
7600 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
7601 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
7602 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
7606 /* Implement `TARGET_ASM_FILE_END'. */
7607 /* Outputs to the stdio stream FILE some
7608 appropriate text to go at the end of an assembler file. */
7613 /* Output these only if there is anything in the
7614 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7615 input section(s) - some code size can be saved by not
7616 linking in the initialization code from libgcc if resp.
7617 sections are empty. */
7619 if (avr_need_copy_data_p
)
7620 fputs (".global __do_copy_data\n", asm_out_file
);
7622 if (avr_need_clear_bss_p
)
7623 fputs (".global __do_clear_bss\n", asm_out_file
);
7626 /* Choose the order in which to allocate hard registers for
7627 pseudo-registers local to a basic block.
7629 Store the desired register order in the array `reg_alloc_order'.
7630 Element 0 should be the register to allocate first; element 1, the
7631 next register; and so on. */
7634 order_regs_for_local_alloc (void)
7637 static const int order_0
[] = {
7645 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7649 static const int order_1
[] = {
7657 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7661 static const int order_2
[] = {
7670 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7675 const int *order
= (TARGET_ORDER_1
? order_1
:
7676 TARGET_ORDER_2
? order_2
:
7678 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
7679 reg_alloc_order
[i
] = order
[i
];
7683 /* Implement `TARGET_REGISTER_MOVE_COST' */
7686 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
7687 reg_class_t from
, reg_class_t to
)
7689 return (from
== STACK_REG
? 6
7690 : to
== STACK_REG
? 12
7695 /* Implement `TARGET_MEMORY_MOVE_COST' */
7698 avr_memory_move_cost (enum machine_mode mode
,
7699 reg_class_t rclass ATTRIBUTE_UNUSED
,
7700 bool in ATTRIBUTE_UNUSED
)
7702 return (mode
== QImode
? 2
7703 : mode
== HImode
? 4
7704 : mode
== SImode
? 8
7705 : mode
== SFmode
? 8
7710 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7711 cost of an RTX operand given its context. X is the rtx of the
7712 operand, MODE is its mode, and OUTER is the rtx_code of this
7713 operand's parent operator. */
7716 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
7717 int opno
, bool speed
)
7719 enum rtx_code code
= GET_CODE (x
);
7730 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7737 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
7741 /* Worker function for AVR backend's rtx_cost function.
7742 X is rtx expression whose cost is to be calculated.
7743 Return true if the complete cost has been computed.
7744 Return false if subexpressions should be scanned.
7745 In either case, *TOTAL contains the cost result. */
7748 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
7749 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
7751 enum rtx_code code
= (enum rtx_code
) codearg
;
7752 enum machine_mode mode
= GET_MODE (x
);
7762 /* Immediate constants are as cheap as registers. */
7767 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7775 *total
= COSTS_N_INSNS (1);
7781 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
7787 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7795 *total
= COSTS_N_INSNS (1);
7801 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7805 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7806 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7810 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
7811 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7812 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7816 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
7817 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7818 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7826 && MULT
== GET_CODE (XEXP (x
, 0))
7827 && register_operand (XEXP (x
, 1), QImode
))
7830 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7831 /* multiply-add with constant: will be split and load constant. */
7832 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7833 *total
= COSTS_N_INSNS (1) + *total
;
7836 *total
= COSTS_N_INSNS (1);
7837 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7838 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7843 && (MULT
== GET_CODE (XEXP (x
, 0))
7844 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
7845 && register_operand (XEXP (x
, 1), HImode
)
7846 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
7847 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
7850 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7851 /* multiply-add with constant: will be split and load constant. */
7852 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7853 *total
= COSTS_N_INSNS (1) + *total
;
7856 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7858 *total
= COSTS_N_INSNS (2);
7859 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7862 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7863 *total
= COSTS_N_INSNS (1);
7865 *total
= COSTS_N_INSNS (2);
7869 if (!CONST_INT_P (XEXP (x
, 1)))
7871 *total
= COSTS_N_INSNS (3);
7872 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7875 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7876 *total
= COSTS_N_INSNS (2);
7878 *total
= COSTS_N_INSNS (3);
7882 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7884 *total
= COSTS_N_INSNS (4);
7885 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7888 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7889 *total
= COSTS_N_INSNS (1);
7891 *total
= COSTS_N_INSNS (4);
7897 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7903 && register_operand (XEXP (x
, 0), QImode
)
7904 && MULT
== GET_CODE (XEXP (x
, 1)))
7907 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7908 /* multiply-sub with constant: will be split and load constant. */
7909 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7910 *total
= COSTS_N_INSNS (1) + *total
;
7915 && register_operand (XEXP (x
, 0), HImode
)
7916 && (MULT
== GET_CODE (XEXP (x
, 1))
7917 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
7918 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
7919 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
7922 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7923 /* multiply-sub with constant: will be split and load constant. */
7924 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7925 *total
= COSTS_N_INSNS (1) + *total
;
7931 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7932 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7933 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7934 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7938 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7939 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7940 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7948 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
7950 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7958 rtx op0
= XEXP (x
, 0);
7959 rtx op1
= XEXP (x
, 1);
7960 enum rtx_code code0
= GET_CODE (op0
);
7961 enum rtx_code code1
= GET_CODE (op1
);
7962 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
7963 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
7966 && (u8_operand (op1
, HImode
)
7967 || s8_operand (op1
, HImode
)))
7969 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
7973 && register_operand (op1
, HImode
))
7975 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
7978 else if (ex0
|| ex1
)
7980 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
7983 else if (register_operand (op0
, HImode
)
7984 && (u8_operand (op1
, HImode
)
7985 || s8_operand (op1
, HImode
)))
7987 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
7991 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
7994 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8001 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8011 /* Add some additional costs besides CALL like moves etc. */
8013 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
8017 /* Just a rough estimate. Even with -O2 we don't want bulky
8018 code expanded inline. */
8020 *total
= COSTS_N_INSNS (25);
8026 *total
= COSTS_N_INSNS (300);
8028 /* Add some additional costs besides CALL like moves etc. */
8029 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
8037 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8038 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8046 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8048 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
8049 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8050 /* For div/mod with const-int divisor we have at least the cost of
8051 loading the divisor. */
8052 if (CONST_INT_P (XEXP (x
, 1)))
8053 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8054 /* Add some overall penaly for clobbering and moving around registers */
8055 *total
+= COSTS_N_INSNS (2);
8062 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
8063 *total
= COSTS_N_INSNS (1);
8068 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
8069 *total
= COSTS_N_INSNS (3);
8074 if (CONST_INT_P (XEXP (x
, 1)))
8075 switch (INTVAL (XEXP (x
, 1)))
8079 *total
= COSTS_N_INSNS (5);
8082 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
8090 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8097 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8099 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8100 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8105 val
= INTVAL (XEXP (x
, 1));
8107 *total
= COSTS_N_INSNS (3);
8108 else if (val
>= 0 && val
<= 7)
8109 *total
= COSTS_N_INSNS (val
);
8111 *total
= COSTS_N_INSNS (1);
8118 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
8119 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
8120 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
8122 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
8127 if (const1_rtx
== (XEXP (x
, 1))
8128 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
8130 *total
= COSTS_N_INSNS (2);
8134 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8136 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8137 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8141 switch (INTVAL (XEXP (x
, 1)))
8148 *total
= COSTS_N_INSNS (2);
8151 *total
= COSTS_N_INSNS (3);
8157 *total
= COSTS_N_INSNS (4);
8162 *total
= COSTS_N_INSNS (5);
8165 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8168 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8171 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
8174 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8175 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8181 if (!CONST_INT_P (XEXP (x
, 1)))
8183 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8186 switch (INTVAL (XEXP (x
, 1)))
8194 *total
= COSTS_N_INSNS (3);
8197 *total
= COSTS_N_INSNS (5);
8200 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8206 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8208 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8209 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8213 switch (INTVAL (XEXP (x
, 1)))
8219 *total
= COSTS_N_INSNS (3);
8224 *total
= COSTS_N_INSNS (4);
8227 *total
= COSTS_N_INSNS (6);
8230 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8233 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8234 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8242 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8249 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8251 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8252 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8257 val
= INTVAL (XEXP (x
, 1));
8259 *total
= COSTS_N_INSNS (4);
8261 *total
= COSTS_N_INSNS (2);
8262 else if (val
>= 0 && val
<= 7)
8263 *total
= COSTS_N_INSNS (val
);
8265 *total
= COSTS_N_INSNS (1);
8270 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8272 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8273 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8277 switch (INTVAL (XEXP (x
, 1)))
8283 *total
= COSTS_N_INSNS (2);
8286 *total
= COSTS_N_INSNS (3);
8292 *total
= COSTS_N_INSNS (4);
8296 *total
= COSTS_N_INSNS (5);
8299 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8302 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8306 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8309 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8310 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8316 if (!CONST_INT_P (XEXP (x
, 1)))
8318 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8321 switch (INTVAL (XEXP (x
, 1)))
8327 *total
= COSTS_N_INSNS (3);
8331 *total
= COSTS_N_INSNS (5);
8334 *total
= COSTS_N_INSNS (4);
8337 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8343 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8345 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8346 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8350 switch (INTVAL (XEXP (x
, 1)))
8356 *total
= COSTS_N_INSNS (4);
8361 *total
= COSTS_N_INSNS (6);
8364 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8367 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
8370 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8371 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8379 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8386 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8388 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8389 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8394 val
= INTVAL (XEXP (x
, 1));
8396 *total
= COSTS_N_INSNS (3);
8397 else if (val
>= 0 && val
<= 7)
8398 *total
= COSTS_N_INSNS (val
);
8400 *total
= COSTS_N_INSNS (1);
8405 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8407 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8408 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8412 switch (INTVAL (XEXP (x
, 1)))
8419 *total
= COSTS_N_INSNS (2);
8422 *total
= COSTS_N_INSNS (3);
8427 *total
= COSTS_N_INSNS (4);
8431 *total
= COSTS_N_INSNS (5);
8437 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8440 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8444 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8447 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8448 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8454 if (!CONST_INT_P (XEXP (x
, 1)))
8456 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8459 switch (INTVAL (XEXP (x
, 1)))
8467 *total
= COSTS_N_INSNS (3);
8470 *total
= COSTS_N_INSNS (5);
8473 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8479 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8481 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8482 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8486 switch (INTVAL (XEXP (x
, 1)))
8492 *total
= COSTS_N_INSNS (4);
8495 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8500 *total
= COSTS_N_INSNS (4);
8503 *total
= COSTS_N_INSNS (6);
8506 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8507 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8515 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8519 switch (GET_MODE (XEXP (x
, 0)))
8522 *total
= COSTS_N_INSNS (1);
8523 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8524 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8528 *total
= COSTS_N_INSNS (2);
8529 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8530 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8531 else if (INTVAL (XEXP (x
, 1)) != 0)
8532 *total
+= COSTS_N_INSNS (1);
8536 *total
= COSTS_N_INSNS (3);
8537 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
8538 *total
+= COSTS_N_INSNS (2);
8542 *total
= COSTS_N_INSNS (4);
8543 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8544 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8545 else if (INTVAL (XEXP (x
, 1)) != 0)
8546 *total
+= COSTS_N_INSNS (3);
8552 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8557 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
8558 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8559 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8561 if (QImode
== mode
|| HImode
== mode
)
8563 *total
= COSTS_N_INSNS (2);
8576 /* Implement `TARGET_RTX_COSTS'. */
8579 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
8580 int opno
, int *total
, bool speed
)
8582 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
8583 opno
, total
, speed
);
8585 if (avr_log
.rtx_costs
)
8587 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8588 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
8595 /* Implement `TARGET_ADDRESS_COST'. */
8598 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
8602 if (GET_CODE (x
) == PLUS
8603 && CONST_INT_P (XEXP (x
, 1))
8604 && (REG_P (XEXP (x
, 0))
8605 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
8607 if (INTVAL (XEXP (x
, 1)) >= 61)
8610 else if (CONSTANT_ADDRESS_P (x
))
8613 && io_address_operand (x
, QImode
))
8617 if (avr_log
.address_cost
)
8618 avr_edump ("\n%?: %d = %r\n", cost
, x
);
8623 /* Test for extra memory constraint 'Q'.
8624 It's a memory address based on Y or Z pointer with valid displacement. */
8627 extra_constraint_Q (rtx x
)
8631 if (GET_CODE (XEXP (x
,0)) == PLUS
8632 && REG_P (XEXP (XEXP (x
,0), 0))
8633 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
8634 && (INTVAL (XEXP (XEXP (x
,0), 1))
8635 <= MAX_LD_OFFSET (GET_MODE (x
))))
8637 rtx xx
= XEXP (XEXP (x
,0), 0);
8638 int regno
= REGNO (xx
);
8640 ok
= (/* allocate pseudos */
8641 regno
>= FIRST_PSEUDO_REGISTER
8642 /* strictly check */
8643 || regno
== REG_Z
|| regno
== REG_Y
8644 /* XXX frame & arg pointer checks */
8645 || xx
== frame_pointer_rtx
8646 || xx
== arg_pointer_rtx
);
8648 if (avr_log
.constraints
)
8649 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8650 ok
, reload_completed
, reload_in_progress
, x
);
8656 /* Convert condition code CONDITION to the valid AVR condition code. */
8659 avr_normalize_condition (RTX_CODE condition
)
8676 /* Helper function for `avr_reorg'. */
8679 avr_compare_pattern (rtx insn
)
8681 rtx pattern
= single_set (insn
);
8684 && NONJUMP_INSN_P (insn
)
8685 && SET_DEST (pattern
) == cc0_rtx
8686 && GET_CODE (SET_SRC (pattern
)) == COMPARE
8687 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 0))
8688 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 1)))
8696 /* Helper function for `avr_reorg'. */
8698 /* Expansion of switch/case decision trees leads to code like
8700 cc0 = compare (Reg, Num)
8704 cc0 = compare (Reg, Num)
8708 The second comparison is superfluous and can be deleted.
8709 The second jump condition can be transformed from a
8710 "difficult" one to a "simple" one because "cc0 > 0" and
8711 "cc0 >= 0" will have the same effect here.
8713 This function relies on the way switch/case is being expaned
8714 as binary decision tree. For example code see PR 49903.
8716 Return TRUE if optimization performed.
8717 Return FALSE if nothing changed.
8719 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8721 We don't want to do this in text peephole because it is
8722 tedious to work out jump offsets there and the second comparison
8723 might have been transormed by `avr_reorg'.
8725 RTL peephole won't do because peephole2 does not scan across
8729 avr_reorg_remove_redundant_compare (rtx insn1
)
8731 rtx comp1
, ifelse1
, xcond1
, branch1
;
8732 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
8734 rtx jump
, target
, cond
;
8736 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8738 branch1
= next_nonnote_nondebug_insn (insn1
);
8739 if (!branch1
|| !JUMP_P (branch1
))
8742 insn2
= next_nonnote_nondebug_insn (branch1
);
8743 if (!insn2
|| !avr_compare_pattern (insn2
))
8746 branch2
= next_nonnote_nondebug_insn (insn2
);
8747 if (!branch2
|| !JUMP_P (branch2
))
8750 comp1
= avr_compare_pattern (insn1
);
8751 comp2
= avr_compare_pattern (insn2
);
8752 xcond1
= single_set (branch1
);
8753 xcond2
= single_set (branch2
);
8755 if (!comp1
|| !comp2
8756 || !rtx_equal_p (comp1
, comp2
)
8757 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
8758 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
8759 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
8760 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
8765 comp1
= SET_SRC (comp1
);
8766 ifelse1
= SET_SRC (xcond1
);
8767 ifelse2
= SET_SRC (xcond2
);
8769 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8771 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
8772 || !REG_P (XEXP (comp1
, 0))
8773 || !CONST_INT_P (XEXP (comp1
, 1))
8774 || XEXP (ifelse1
, 2) != pc_rtx
8775 || XEXP (ifelse2
, 2) != pc_rtx
8776 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
8777 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
8778 || !COMPARISON_P (XEXP (ifelse2
, 0))
8779 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
8780 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
8781 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
8782 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
8787 /* We filtered the insn sequence to look like
8793 (if_then_else (eq (cc0)
8802 (if_then_else (CODE (cc0)
8808 code
= GET_CODE (XEXP (ifelse2
, 0));
8810 /* Map GT/GTU to GE/GEU which is easier for AVR.
8811 The first two instructions compare/branch on EQ
8812 so we may replace the difficult
8814 if (x == VAL) goto L1;
8815 if (x > VAL) goto L2;
8819 if (x == VAL) goto L1;
8820 if (x >= VAL) goto L2;
8822 Similarly, replace LE/LEU by LT/LTU. */
8833 code
= avr_normalize_condition (code
);
8840 /* Wrap the branches into UNSPECs so they won't be changed or
8841 optimized in the remainder. */
8843 target
= XEXP (XEXP (ifelse1
, 1), 0);
8844 cond
= XEXP (ifelse1
, 0);
8845 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
8847 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
8849 target
= XEXP (XEXP (ifelse2
, 1), 0);
8850 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
8851 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
8853 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
8855 /* The comparisons in insn1 and insn2 are exactly the same;
8856 insn2 is superfluous so delete it. */
8858 delete_insn (insn2
);
8859 delete_insn (branch1
);
8860 delete_insn (branch2
);
8866 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8867 /* Optimize conditional jumps. */
8872 rtx insn
= get_insns();
8874 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
8876 rtx pattern
= avr_compare_pattern (insn
);
8882 && avr_reorg_remove_redundant_compare (insn
))
8887 if (compare_diff_p (insn
))
8889 /* Now we work under compare insn with difficult branch. */
8891 rtx next
= next_real_insn (insn
);
8892 rtx pat
= PATTERN (next
);
8894 pattern
= SET_SRC (pattern
);
8896 if (true_regnum (XEXP (pattern
, 0)) >= 0
8897 && true_regnum (XEXP (pattern
, 1)) >= 0)
8899 rtx x
= XEXP (pattern
, 0);
8900 rtx src
= SET_SRC (pat
);
8901 rtx t
= XEXP (src
,0);
8902 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8903 XEXP (pattern
, 0) = XEXP (pattern
, 1);
8904 XEXP (pattern
, 1) = x
;
8905 INSN_CODE (next
) = -1;
8907 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8908 && XEXP (pattern
, 1) == const0_rtx
)
8910 /* This is a tst insn, we can reverse it. */
8911 rtx src
= SET_SRC (pat
);
8912 rtx t
= XEXP (src
,0);
8914 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8915 XEXP (pattern
, 1) = XEXP (pattern
, 0);
8916 XEXP (pattern
, 0) = const0_rtx
;
8917 INSN_CODE (next
) = -1;
8918 INSN_CODE (insn
) = -1;
8920 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8921 && CONST_INT_P (XEXP (pattern
, 1)))
8923 rtx x
= XEXP (pattern
, 1);
8924 rtx src
= SET_SRC (pat
);
8925 rtx t
= XEXP (src
,0);
8926 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
8928 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
8930 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
8931 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
8932 INSN_CODE (next
) = -1;
8933 INSN_CODE (insn
) = -1;
8940 /* Returns register number for function return value.*/
8942 static inline unsigned int
8943 avr_ret_register (void)
8948 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8951 avr_function_value_regno_p (const unsigned int regno
)
8953 return (regno
== avr_ret_register ());
8956 /* Create an RTX representing the place where a
8957 library function returns a value of mode MODE. */
8960 avr_libcall_value (enum machine_mode mode
,
8961 const_rtx func ATTRIBUTE_UNUSED
)
8963 int offs
= GET_MODE_SIZE (mode
);
8966 offs
= (offs
+ 1) & ~1;
8968 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
8971 /* Create an RTX representing the place where a
8972 function returns a value of data type VALTYPE. */
8975 avr_function_value (const_tree type
,
8976 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
8977 bool outgoing ATTRIBUTE_UNUSED
)
8981 if (TYPE_MODE (type
) != BLKmode
)
8982 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
8984 offs
= int_size_in_bytes (type
);
8987 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
8988 offs
= GET_MODE_SIZE (SImode
);
8989 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
8990 offs
= GET_MODE_SIZE (DImode
);
8992 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
8996 test_hard_reg_class (enum reg_class rclass
, rtx x
)
8998 int regno
= true_regnum (x
);
9002 if (TEST_HARD_REG_CLASS (rclass
, regno
))
9009 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
9010 and thus is suitable to be skipped by CPSE, SBRC, etc. */
9013 avr_2word_insn_p (rtx insn
)
9015 if (avr_current_device
->errata_skip
9017 || 2 != get_attr_length (insn
))
9022 switch (INSN_CODE (insn
))
9027 case CODE_FOR_movqi_insn
:
9029 rtx set
= single_set (insn
);
9030 rtx src
= SET_SRC (set
);
9031 rtx dest
= SET_DEST (set
);
9033 /* Factor out LDS and STS from movqi_insn. */
9036 && (REG_P (src
) || src
== const0_rtx
))
9038 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
9040 else if (REG_P (dest
)
9043 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
9049 case CODE_FOR_call_insn
:
9050 case CODE_FOR_call_value_insn
:
9057 jump_over_one_insn_p (rtx insn
, rtx dest
)
9059 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
9062 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
9063 int dest_addr
= INSN_ADDRESSES (uid
);
9064 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
9066 return (jump_offset
== 1
9067 || (jump_offset
== 2
9068 && avr_2word_insn_p (next_active_insn (insn
))));
9071 /* Returns 1 if a value of mode MODE can be stored starting with hard
9072 register number REGNO. On the enhanced core, anything larger than
9073 1 byte must start in even numbered register for "movw" to work
9074 (this way we don't have to check for odd registers everywhere). */
9077 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
9079 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9080 Disallowing QI et al. in these regs might lead to code like
9081 (set (subreg:QI (reg:HI 28) n) ...)
9082 which will result in wrong code because reload does not
9083 handle SUBREGs of hard regsisters like this.
9084 This could be fixed in reload. However, it appears
9085 that fixing reload is not wanted by reload people. */
9087 /* Any GENERAL_REGS register can hold 8-bit values. */
9089 if (GET_MODE_SIZE (mode
) == 1)
9092 /* FIXME: Ideally, the following test is not needed.
9093 However, it turned out that it can reduce the number
9094 of spill fails. AVR and it's poor endowment with
9095 address registers is extreme stress test for reload. */
9097 if (GET_MODE_SIZE (mode
) >= 4
9101 /* All modes larger than 8 bits should start in an even register. */
9103 return !(regno
& 1);
9107 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
9110 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
9111 addr_space_t as
, RTX_CODE outer_code
,
9112 RTX_CODE index_code ATTRIBUTE_UNUSED
)
9114 if (!ADDR_SPACE_GENERIC_P (as
))
9116 return POINTER_Z_REGS
;
9120 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
9122 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
9126 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9129 avr_regno_mode_code_ok_for_base_p (int regno
,
9130 enum machine_mode mode ATTRIBUTE_UNUSED
,
9131 addr_space_t as ATTRIBUTE_UNUSED
,
9132 RTX_CODE outer_code
,
9133 RTX_CODE index_code ATTRIBUTE_UNUSED
)
9137 if (!ADDR_SPACE_GENERIC_P (as
))
9139 if (regno
< FIRST_PSEUDO_REGISTER
9147 regno
= reg_renumber
[regno
];
9158 if (regno
< FIRST_PSEUDO_REGISTER
9162 || regno
== ARG_POINTER_REGNUM
))
9166 else if (reg_renumber
)
9168 regno
= reg_renumber
[regno
];
9173 || regno
== ARG_POINTER_REGNUM
)
9180 && PLUS
== outer_code
9190 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9191 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9192 CLOBBER_REG is a QI clobber register or NULL_RTX.
9193 LEN == NULL: output instructions.
9194 LEN != NULL: set *LEN to the length of the instruction sequence
9195 (in words) printed with LEN = NULL.
9196 If CLEAR_P is true, OP[0] had been cleard to Zero already.
9197 If CLEAR_P is false, nothing is known about OP[0].
9199 The effect on cc0 is as follows:
9201 Load 0 to any register except ZERO_REG : NONE
9202 Load ld register with any value : NONE
9203 Anything else: : CLOBBER */
9206 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
9212 int clobber_val
= 1234;
9213 bool cooked_clobber_p
= false;
9215 enum machine_mode mode
= GET_MODE (dest
);
9216 int n
, n_bytes
= GET_MODE_SIZE (mode
);
9218 gcc_assert (REG_P (dest
)
9219 && CONSTANT_P (src
));
9224 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9225 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9227 if (REGNO (dest
) < 16
9228 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
9230 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
9233 /* We might need a clobber reg but don't have one. Look at the value to
9234 be loaded more closely. A clobber is only needed if it is a symbol
9235 or contains a byte that is neither 0, -1 or a power of 2. */
9237 if (NULL_RTX
== clobber_reg
9238 && !test_hard_reg_class (LD_REGS
, dest
)
9239 && (! (CONST_INT_P (src
) || CONST_DOUBLE_P (src
))
9240 || !avr_popcount_each_byte (src
, n_bytes
,
9241 (1 << 0) | (1 << 1) | (1 << 8))))
9243 /* We have no clobber register but need one. Cook one up.
9244 That's cheaper than loading from constant pool. */
9246 cooked_clobber_p
= true;
9247 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
9248 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
9251 /* Now start filling DEST from LSB to MSB. */
9253 for (n
= 0; n
< n_bytes
; n
++)
9256 bool done_byte
= false;
9260 /* Crop the n-th destination byte. */
9262 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
9263 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
9265 if (!CONST_INT_P (src
)
9266 && !CONST_DOUBLE_P (src
))
9268 static const char* const asm_code
[][2] =
9270 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
9271 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
9272 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
9273 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
9278 xop
[2] = clobber_reg
;
9280 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
9285 /* Crop the n-th source byte. */
9287 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
9288 ival
[n
] = INTVAL (xval
);
9290 /* Look if we can reuse the low word by means of MOVW. */
9296 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
9297 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
9299 if (INTVAL (lo16
) == INTVAL (hi16
))
9301 if (0 != INTVAL (lo16
)
9304 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
9311 /* Don't use CLR so that cc0 is set as expected. */
9316 avr_asm_len (ldreg_p
? "ldi %0,0"
9317 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
9318 : "mov %0,__zero_reg__",
9323 if (clobber_val
== ival
[n
]
9324 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
9329 /* LD_REGS can use LDI to move a constant value */
9335 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
9339 /* Try to reuse value already loaded in some lower byte. */
9341 for (j
= 0; j
< n
; j
++)
9342 if (ival
[j
] == ival
[n
])
9347 avr_asm_len ("mov %0,%1", xop
, len
, 1);
9355 /* Need no clobber reg for -1: Use CLR/DEC */
9360 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9362 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
9365 else if (1 == ival
[n
])
9368 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9370 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
9374 /* Use T flag or INC to manage powers of 2 if we have
9377 if (NULL_RTX
== clobber_reg
9378 && single_one_operand (xval
, QImode
))
9381 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
9383 gcc_assert (constm1_rtx
!= xop
[1]);
9388 avr_asm_len ("set", xop
, len
, 1);
9392 avr_asm_len ("clr %0", xop
, len
, 1);
9394 avr_asm_len ("bld %0,%1", xop
, len
, 1);
9398 /* We actually need the LD_REGS clobber reg. */
9400 gcc_assert (NULL_RTX
!= clobber_reg
);
9404 xop
[2] = clobber_reg
;
9405 clobber_val
= ival
[n
];
9407 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9408 "mov %0,%2", xop
, len
, 2);
9411 /* If we cooked up a clobber reg above, restore it. */
9413 if (cooked_clobber_p
)
9415 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
9420 /* Reload the constant OP[1] into the HI register OP[0].
9421 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9422 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9423 need a clobber reg or have to cook one up.
9425 PLEN == NULL: Output instructions.
9426 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9427 by the insns printed.
9432 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
9434 output_reload_in_const (op
, clobber_reg
, plen
, false);
9439 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9440 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9441 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9442 need a clobber reg or have to cook one up.
9444 LEN == NULL: Output instructions.
9446 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9447 by the insns printed.
9452 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
9455 && !test_hard_reg_class (LD_REGS
, op
[0])
9456 && (CONST_INT_P (op
[1])
9457 || CONST_DOUBLE_P (op
[1])))
9459 int len_clr
, len_noclr
;
9461 /* In some cases it is better to clear the destination beforehand, e.g.
9463 CLR R2 CLR R3 MOVW R4,R2 INC R2
9467 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9469 We find it too tedious to work that out in the print function.
9470 Instead, we call the print function twice to get the lengths of
9471 both methods and use the shortest one. */
9473 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
9474 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
9476 if (len_noclr
- len_clr
== 4)
9478 /* Default needs 4 CLR instructions: clear register beforehand. */
9480 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9481 "mov %B0,__zero_reg__" CR_TAB
9482 "movw %C0,%A0", &op
[0], len
, 3);
9484 output_reload_in_const (op
, clobber_reg
, len
, true);
9493 /* Default: destination not pre-cleared. */
9495 output_reload_in_const (op
, clobber_reg
, len
, false);
9500 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
9502 output_reload_in_const (op
, clobber_reg
, len
, false);
9508 avr_output_addr_vec_elt (FILE *stream
, int value
)
9510 if (AVR_HAVE_JMP_CALL
)
9511 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
9513 fprintf (stream
, "\trjmp .L%d\n", value
);
9516 /* Returns true if SCRATCH are safe to be allocated as a scratch
9517 registers (for a define_peephole2) in the current function. */
9520 avr_hard_regno_scratch_ok (unsigned int regno
)
9522 /* Interrupt functions can only use registers that have already been saved
9523 by the prologue, even if they would normally be call-clobbered. */
9525 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9526 && !df_regs_ever_live_p (regno
))
9529 /* Don't allow hard registers that might be part of the frame pointer.
9530 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9531 and don't care for a frame pointer that spans more than one register. */
9533 if ((!reload_completed
|| frame_pointer_needed
)
9534 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
9542 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9545 avr_hard_regno_rename_ok (unsigned int old_reg
,
9546 unsigned int new_reg
)
9548 /* Interrupt functions can only use registers that have already been
9549 saved by the prologue, even if they would normally be
9552 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9553 && !df_regs_ever_live_p (new_reg
))
9556 /* Don't allow hard registers that might be part of the frame pointer.
9557 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9558 and don't care for a frame pointer that spans more than one register. */
9560 if ((!reload_completed
|| frame_pointer_needed
)
9561 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
9562 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
9570 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9571 or memory location in the I/O space (QImode only).
9573 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9574 Operand 1: register operand to test, or CONST_INT memory address.
9575 Operand 2: bit number.
9576 Operand 3: label to jump to if the test is true. */
9579 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
9581 enum rtx_code comp
= GET_CODE (operands
[0]);
9582 bool long_jump
= get_attr_length (insn
) >= 4;
9583 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
9587 else if (comp
== LT
)
9591 comp
= reverse_condition (comp
);
9593 switch (GET_CODE (operands
[1]))
9600 if (low_io_address_operand (operands
[1], QImode
))
9603 output_asm_insn ("sbis %i1,%2", operands
);
9605 output_asm_insn ("sbic %i1,%2", operands
);
9609 output_asm_insn ("in __tmp_reg__,%i1", operands
);
9611 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
9613 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
9616 break; /* CONST_INT */
9621 output_asm_insn ("sbrs %T1%T2", operands
);
9623 output_asm_insn ("sbrc %T1%T2", operands
);
9629 return ("rjmp .+4" CR_TAB
9638 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9641 avr_asm_out_ctor (rtx symbol
, int priority
)
9643 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
9644 default_ctor_section_asm_out_constructor (symbol
, priority
);
9647 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9650 avr_asm_out_dtor (rtx symbol
, int priority
)
9652 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
9653 default_dtor_section_asm_out_destructor (symbol
, priority
);
9656 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9659 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
9661 if (TYPE_MODE (type
) == BLKmode
)
9663 HOST_WIDE_INT size
= int_size_in_bytes (type
);
9664 return (size
== -1 || size
> 8);
9670 /* Worker function for CASE_VALUES_THRESHOLD. */
9673 avr_case_values_threshold (void)
9675 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
9679 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9681 static enum machine_mode
9682 avr_addr_space_address_mode (addr_space_t as
)
9684 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
9688 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9690 static enum machine_mode
9691 avr_addr_space_pointer_mode (addr_space_t as
)
9693 return avr_addr_space_address_mode (as
);
9697 /* Helper for following function. */
9700 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
9702 gcc_assert (REG_P (reg
));
9706 return REGNO (reg
) == REG_Z
;
9709 /* Avoid combine to propagate hard regs. */
9711 if (can_create_pseudo_p()
9712 && REGNO (reg
) < REG_Z
)
9721 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9724 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
9725 bool strict
, addr_space_t as
)
9734 case ADDR_SPACE_GENERIC
:
9735 return avr_legitimate_address_p (mode
, x
, strict
);
9737 case ADDR_SPACE_FLASH
:
9738 case ADDR_SPACE_FLASH1
:
9739 case ADDR_SPACE_FLASH2
:
9740 case ADDR_SPACE_FLASH3
:
9741 case ADDR_SPACE_FLASH4
:
9742 case ADDR_SPACE_FLASH5
:
9744 switch (GET_CODE (x
))
9747 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
9751 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
9760 case ADDR_SPACE_MEMX
:
9763 && can_create_pseudo_p());
9765 if (LO_SUM
== GET_CODE (x
))
9767 rtx hi
= XEXP (x
, 0);
9768 rtx lo
= XEXP (x
, 1);
9771 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
9773 && REGNO (lo
) == REG_Z
);
9779 if (avr_log
.legitimate_address_p
)
9781 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9782 "reload_completed=%d reload_in_progress=%d %s:",
9783 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
9784 reg_renumber
? "(reg_renumber)" : "");
9786 if (GET_CODE (x
) == PLUS
9787 && REG_P (XEXP (x
, 0))
9788 && CONST_INT_P (XEXP (x
, 1))
9789 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
9792 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
9793 true_regnum (XEXP (x
, 0)));
9796 avr_edump ("\n%r\n", x
);
9803 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9806 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
9807 enum machine_mode mode
, addr_space_t as
)
9809 if (ADDR_SPACE_GENERIC_P (as
))
9810 return avr_legitimize_address (x
, old_x
, mode
);
9812 if (avr_log
.legitimize_address
)
9814 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
9821 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9824 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
9826 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
9827 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
9829 if (avr_log
.progmem
)
9830 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9831 src
, type_from
, type_to
);
9833 /* Up-casting from 16-bit to 24-bit pointer. */
9835 if (as_from
!= ADDR_SPACE_MEMX
9836 && as_to
== ADDR_SPACE_MEMX
)
9840 rtx reg
= gen_reg_rtx (PSImode
);
9842 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
9843 sym
= XEXP (sym
, 0);
9845 /* Look at symbol flags: avr_encode_section_info set the flags
9846 also if attribute progmem was seen so that we get the right
9847 promotion for, e.g. PSTR-like strings that reside in generic space
9848 but are located in flash. In that case we patch the incoming
9851 if (SYMBOL_REF
== GET_CODE (sym
)
9852 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
9854 as_from
= ADDR_SPACE_FLASH
;
9857 /* Linearize memory: RAM has bit 23 set. */
9859 msb
= ADDR_SPACE_GENERIC_P (as_from
)
9861 : avr_addrspace
[as_from
].segment
;
9863 src
= force_reg (Pmode
, src
);
9866 ? gen_zero_extendhipsi2 (reg
, src
)
9867 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
9872 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9874 if (as_from
== ADDR_SPACE_MEMX
9875 && as_to
!= ADDR_SPACE_MEMX
)
9877 rtx new_src
= gen_reg_rtx (Pmode
);
9879 src
= force_reg (PSImode
, src
);
9881 emit_move_insn (new_src
,
9882 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
9890 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9893 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
9894 addr_space_t superset ATTRIBUTE_UNUSED
)
9896 /* Allow any kind of pointer mess. */
9902 /* Worker function for movmemhi expander.
9903 XOP[0] Destination as MEM:BLK
9905 XOP[2] # Bytes to copy
9907 Return TRUE if the expansion is accomplished.
9908 Return FALSE if the operand compination is not supported. */
9911 avr_emit_movmemhi (rtx
*xop
)
9913 HOST_WIDE_INT count
;
9914 enum machine_mode loop_mode
;
9915 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
9916 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
9917 rtx a_hi8
= NULL_RTX
;
9919 if (avr_mem_flash_p (xop
[0]))
9922 if (!CONST_INT_P (xop
[2]))
9925 count
= INTVAL (xop
[2]);
9929 a_src
= XEXP (xop
[1], 0);
9930 a_dest
= XEXP (xop
[0], 0);
9932 if (PSImode
== GET_MODE (a_src
))
9934 gcc_assert (as
== ADDR_SPACE_MEMX
);
9936 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
9937 loop_reg
= gen_rtx_REG (loop_mode
, 24);
9938 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
9940 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
9941 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
9945 int segment
= avr_addrspace
[as
].segment
;
9948 && avr_current_device
->n_flash
> 1)
9950 a_hi8
= GEN_INT (segment
);
9951 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
9953 else if (!ADDR_SPACE_GENERIC_P (as
))
9955 as
= ADDR_SPACE_FLASH
;
9960 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
9961 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
9966 /* FIXME: Register allocator might come up with spill fails if it is left
9967 on its own. Thus, we allocate the pointer registers by hand:
9969 X = destination address */
9971 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
9972 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
9974 /* FIXME: Register allocator does a bad job and might spill address
9975 register(s) inside the loop leading to additional move instruction
9976 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9977 load and store as seperate insns. Instead, we perform the copy
9978 by means of one monolithic insn. */
9980 gcc_assert (TMP_REGNO
== LPM_REGNO
);
9982 if (as
!= ADDR_SPACE_MEMX
)
9984 /* Load instruction ([E]LPM or LD) is known at compile time:
9985 Do the copy-loop inline. */
9987 rtx (*fun
) (rtx
, rtx
, rtx
)
9988 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
9990 insn
= fun (xas
, loop_reg
, loop_reg
);
9994 rtx (*fun
) (rtx
, rtx
)
9995 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
9997 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
9999 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
10002 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
10009 /* Print assembler for movmem_qi, movmem_hi insns...
10011 $1, $2 : Loop register
10013 X : Destination address
10017 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
10019 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
10020 enum machine_mode loop_mode
= GET_MODE (op
[1]);
10021 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
10029 xop
[2] = tmp_reg_rtx
;
10033 avr_asm_len ("0:", xop
, plen
, 0);
10035 /* Load with post-increment */
10042 case ADDR_SPACE_GENERIC
:
10044 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
10047 case ADDR_SPACE_FLASH
:
10050 avr_asm_len ("lpm %2,%Z+", xop
, plen
, 1);
10052 avr_asm_len ("lpm" CR_TAB
10053 "adiw r30,1", xop
, plen
, 2);
10056 case ADDR_SPACE_FLASH1
:
10057 case ADDR_SPACE_FLASH2
:
10058 case ADDR_SPACE_FLASH3
:
10059 case ADDR_SPACE_FLASH4
:
10060 case ADDR_SPACE_FLASH5
:
10062 if (AVR_HAVE_ELPMX
)
10063 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
10065 avr_asm_len ("elpm" CR_TAB
10066 "adiw r30,1", xop
, plen
, 2);
10070 /* Store with post-increment */
10072 avr_asm_len ("st X+,%2", xop
, plen
, 1);
10074 /* Decrement loop-counter and set Z-flag */
10076 if (QImode
== loop_mode
)
10078 avr_asm_len ("dec %1", xop
, plen
, 1);
10082 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
10086 avr_asm_len ("subi %A1,1" CR_TAB
10087 "sbci %B1,0", xop
, plen
, 2);
10090 /* Loop until zero */
10092 return avr_asm_len ("brne 0b", xop
, plen
, 1);
10097 /* Helper for __builtin_avr_delay_cycles */
10100 avr_expand_delay_cycles (rtx operands0
)
10102 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
);
10103 unsigned HOST_WIDE_INT cycles_used
;
10104 unsigned HOST_WIDE_INT loop_count
;
10106 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
10108 loop_count
= ((cycles
- 9) / 6) + 1;
10109 cycles_used
= ((loop_count
- 1) * 6) + 9;
10110 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
)));
10111 cycles
-= cycles_used
;
10114 if (IN_RANGE (cycles
, 262145, 83886081))
10116 loop_count
= ((cycles
- 7) / 5) + 1;
10117 if (loop_count
> 0xFFFFFF)
10118 loop_count
= 0xFFFFFF;
10119 cycles_used
= ((loop_count
- 1) * 5) + 7;
10120 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
)));
10121 cycles
-= cycles_used
;
10124 if (IN_RANGE (cycles
, 768, 262144))
10126 loop_count
= ((cycles
- 5) / 4) + 1;
10127 if (loop_count
> 0xFFFF)
10128 loop_count
= 0xFFFF;
10129 cycles_used
= ((loop_count
- 1) * 4) + 5;
10130 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
)));
10131 cycles
-= cycles_used
;
10134 if (IN_RANGE (cycles
, 6, 767))
10136 loop_count
= cycles
/ 3;
10137 if (loop_count
> 255)
10139 cycles_used
= loop_count
* 3;
10140 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
)));
10141 cycles
-= cycles_used
;
10144 while (cycles
>= 2)
10146 emit_insn (gen_nopv (GEN_INT(2)));
10152 emit_insn (gen_nopv (GEN_INT(1)));
10158 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
10161 avr_double_int_push_digit (double_int val
, int base
,
10162 unsigned HOST_WIDE_INT digit
)
10165 ? double_int_lshift (val
, 32, 64, false)
10166 : double_int_mul (val
, uhwi_to_double_int (base
));
10168 return double_int_add (val
, uhwi_to_double_int (digit
));
10172 /* Compute the image of x under f, i.e. perform x --> f(x) */
10175 avr_map (double_int f
, int x
)
10177 return 0xf & double_int_to_uhwi (double_int_rshift (f
, 4*x
, 64, false));
10181 /* Return some metrics of map A. */
10185 /* Number of fixed points in { 0 ... 7 } */
10188 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10191 /* Mask representing the fixed points in { 0 ... 7 } */
10192 MAP_MASK_FIXED_0_7
,
10194 /* Size of the preimage of { 0 ... 7 } */
10197 /* Mask that represents the preimage of { f } */
10198 MAP_MASK_PREIMAGE_F
10202 avr_map_metric (double_int a
, int mode
)
10204 unsigned i
, metric
= 0;
10206 for (i
= 0; i
< 8; i
++)
10208 unsigned ai
= avr_map (a
, i
);
10210 if (mode
== MAP_FIXED_0_7
)
10212 else if (mode
== MAP_NONFIXED_0_7
)
10213 metric
+= ai
< 8 && ai
!= i
;
10214 else if (mode
== MAP_MASK_FIXED_0_7
)
10215 metric
|= ((unsigned) (ai
== i
)) << i
;
10216 else if (mode
== MAP_PREIMAGE_0_7
)
10218 else if (mode
== MAP_MASK_PREIMAGE_F
)
10219 metric
|= ((unsigned) (ai
== 0xf)) << i
;
10228 /* Return true if IVAL has a 0xf in its hexadecimal representation
10229 and false, otherwise. Only nibbles 0..7 are taken into account.
10230 Used as constraint helper for C0f and Cxf. */
10233 avr_has_nibble_0xf (rtx ival
)
10235 return 0 != avr_map_metric (rtx_to_double_int (ival
), MAP_MASK_PREIMAGE_F
);
10239 /* We have a set of bits that are mapped by a function F.
10240 Try to decompose F by means of a second function G so that
10246 cost (F o G^-1) + cost (G) < cost (F)
10248 Example: Suppose builtin insert_bits supplies us with the map
10249 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10250 nibble of the result, we can just as well rotate the bits before inserting
10251 them and use the map 0x7654ffff which is cheaper than the original map.
10252 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
10256 /* tree code of binary function G */
10257 enum tree_code code
;
10259 /* The constant second argument of G */
10262 /* G^-1, the inverse of G (*, arg) */
10265 /* The cost of appplying G (*, arg) */
10268 /* The composition F o G^-1 (*, arg) for some function F */
10271 /* For debug purpose only */
10275 static const avr_map_op_t avr_map_op
[] =
10277 { LROTATE_EXPR
, 0, 0x76543210, 0, { 0, 0 }, "id" },
10278 { LROTATE_EXPR
, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10279 { LROTATE_EXPR
, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10280 { LROTATE_EXPR
, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10281 { LROTATE_EXPR
, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10282 { LROTATE_EXPR
, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10283 { LROTATE_EXPR
, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10284 { LROTATE_EXPR
, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10285 { RSHIFT_EXPR
, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10286 { RSHIFT_EXPR
, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10287 { RSHIFT_EXPR
, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10288 { RSHIFT_EXPR
, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10289 { RSHIFT_EXPR
, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10290 { LSHIFT_EXPR
, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10291 { LSHIFT_EXPR
, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10295 /* Try to decompose F as F = (F o G^-1) o G as described above.
10296 The result is a struct representing F o G^-1 and G.
10297 If result.cost < 0 then such a decomposition does not exist. */
10299 static avr_map_op_t
10300 avr_map_decompose (double_int f
, const avr_map_op_t
*g
, bool val_const_p
)
10303 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
10304 avr_map_op_t f_ginv
= *g
;
10305 double_int ginv
= uhwi_to_double_int (g
->ginv
);
10309 /* Step 1: Computing F o G^-1 */
10311 for (i
= 7; i
>= 0; i
--)
10313 int x
= avr_map (f
, i
);
10317 x
= avr_map (ginv
, x
);
10319 /* The bit is no element of the image of G: no avail (cost = -1) */
10325 f_ginv
.map
= avr_double_int_push_digit (f_ginv
.map
, 16, x
);
10328 /* Step 2: Compute the cost of the operations.
10329 The overall cost of doing an operation prior to the insertion is
10330 the cost of the insertion plus the cost of the operation. */
10332 /* Step 2a: Compute cost of F o G^-1 */
10334 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
10336 /* The mapping consists only of fixed points and can be folded
10337 to AND/OR logic in the remainder. Reasonable cost is 3. */
10339 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
10345 /* Get the cost of the insn by calling the output worker with some
10346 fake values. Mimic effect of reloading xop[3]: Unused operands
10347 are mapped to 0 and used operands are reloaded to xop[0]. */
10349 xop
[0] = all_regs_rtx
[24];
10350 xop
[1] = gen_int_mode (double_int_to_uhwi (f_ginv
.map
), SImode
);
10351 xop
[2] = all_regs_rtx
[25];
10352 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
10354 avr_out_insert_bits (xop
, &f_ginv
.cost
);
10356 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
10359 /* Step 2b: Add cost of G */
10361 f_ginv
.cost
+= g
->cost
;
10363 if (avr_log
.builtin
)
10364 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
10370 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10371 XOP[0] and XOP[1] don't overlap.
10372 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10373 If FIXP_P = false: Just move the bit if its position in the destination
10374 is different to its source position. */
10377 avr_move_bits (rtx
*xop
, double_int map
, bool fixp_p
, int *plen
)
10381 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10382 int t_bit_src
= -1;
10384 /* We order the operations according to the requested source bit b. */
10386 for (b
= 0; b
< 8; b
++)
10387 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
10389 int bit_src
= avr_map (map
, bit_dest
);
10393 /* Same position: No need to copy as requested by FIXP_P. */
10394 || (bit_dest
== bit_src
&& !fixp_p
))
10397 if (t_bit_src
!= bit_src
)
10399 /* Source bit is not yet in T: Store it to T. */
10401 t_bit_src
= bit_src
;
10403 xop
[3] = GEN_INT (bit_src
);
10404 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
10407 /* Load destination bit with T. */
10409 xop
[3] = GEN_INT (bit_dest
);
10410 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
10415 /* PLEN == 0: Print assembler code for `insert_bits'.
10416 PLEN != 0: Compute code length in bytes.
10419 OP[1]: The mapping composed of nibbles. If nibble no. N is
10420 0: Bit N of result is copied from bit OP[2].0
10422 7: Bit N of result is copied from bit OP[2].7
10423 0xf: Bit N of result is copied from bit OP[3].N
10424 OP[2]: Bits to be inserted
10425 OP[3]: Target value */
10428 avr_out_insert_bits (rtx
*op
, int *plen
)
10430 double_int map
= rtx_to_double_int (op
[1]);
10431 unsigned mask_fixed
;
10432 bool fixp_p
= true;
10439 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
10443 else if (flag_print_asm_name
)
10444 fprintf (asm_out_file
,
10445 ASM_COMMENT_START
"map = 0x%08" HOST_LONG_FORMAT
"x\n",
10446 double_int_to_uhwi (map
) & GET_MODE_MASK (SImode
));
10448 /* If MAP has fixed points it might be better to initialize the result
10449 with the bits to be inserted instead of moving all bits by hand. */
10451 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
10453 if (REGNO (xop
[0]) == REGNO (xop
[1]))
10455 /* Avoid early-clobber conflicts */
10457 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
10458 xop
[1] = tmp_reg_rtx
;
10462 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
10464 /* XOP[2] is used and reloaded to XOP[0] already */
10466 int n_fix
= 0, n_nofix
= 0;
10468 gcc_assert (REG_P (xop
[2]));
10470 /* Get the code size of the bit insertions; once with all bits
10471 moved and once with fixed points omitted. */
10473 avr_move_bits (xop
, map
, true, &n_fix
);
10474 avr_move_bits (xop
, map
, false, &n_nofix
);
10476 if (fixp_p
&& n_fix
- n_nofix
> 3)
10478 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
10480 avr_asm_len ("eor %0,%1" CR_TAB
10481 "andi %0,%3" CR_TAB
10482 "eor %0,%1", xop
, plen
, 3);
10488 /* XOP[2] is unused */
10490 if (fixp_p
&& mask_fixed
)
10492 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
10497 /* Move/insert remaining bits. */
10499 avr_move_bits (xop
, map
, fixp_p
, plen
);
10505 /* IDs for all the AVR builtins. */
10507 enum avr_builtin_id
10510 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
10511 #include "builtins.def"
10518 avr_init_builtin_int24 (void)
10520 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
10521 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
10523 (*lang_hooks
.types
.register_builtin_type
) (int24_type
, "__int24");
10524 (*lang_hooks
.types
.register_builtin_type
) (uint24_type
, "__uint24");
10527 /* Implement `TARGET_INIT_BUILTINS' */
10528 /* Set up all builtin functions for this target. */
10531 avr_init_builtins (void)
10533 tree void_ftype_void
10534 = build_function_type_list (void_type_node
, NULL_TREE
);
10535 tree uchar_ftype_uchar
10536 = build_function_type_list (unsigned_char_type_node
,
10537 unsigned_char_type_node
,
10539 tree uint_ftype_uchar_uchar
10540 = build_function_type_list (unsigned_type_node
,
10541 unsigned_char_type_node
,
10542 unsigned_char_type_node
,
10544 tree int_ftype_char_char
10545 = build_function_type_list (integer_type_node
,
10549 tree int_ftype_char_uchar
10550 = build_function_type_list (integer_type_node
,
10552 unsigned_char_type_node
,
10554 tree void_ftype_ulong
10555 = build_function_type_list (void_type_node
,
10556 long_unsigned_type_node
,
10559 tree uchar_ftype_ulong_uchar_uchar
10560 = build_function_type_list (unsigned_char_type_node
,
10561 long_unsigned_type_node
,
10562 unsigned_char_type_node
,
10563 unsigned_char_type_node
,
10566 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) \
10567 add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
10568 #include "builtins.def"
10571 avr_init_builtin_int24 ();
10575 struct avr_builtin_description
10577 enum insn_code icode
;
10579 enum avr_builtin_id id
;
10583 static const struct avr_builtin_description
10587 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE) \
10588 { ICODE, NAME, ID, N_ARGS },
10589 #include "builtins.def"
10592 { CODE_FOR_nothing
, NULL
, 0, -1 }
10596 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10599 avr_expand_unop_builtin (enum insn_code icode
, tree exp
,
10603 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10604 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10605 enum machine_mode op0mode
= GET_MODE (op0
);
10606 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10607 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10610 || GET_MODE (target
) != tmode
10611 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10613 target
= gen_reg_rtx (tmode
);
10616 if (op0mode
== SImode
&& mode0
== HImode
)
10619 op0
= gen_lowpart (HImode
, op0
);
10622 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
10624 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10625 op0
= copy_to_mode_reg (mode0
, op0
);
10627 pat
= GEN_FCN (icode
) (target
, op0
);
10637 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10640 avr_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
)
10643 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10644 tree arg1
= CALL_EXPR_ARG (exp
, 1);
10645 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10646 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10647 enum machine_mode op0mode
= GET_MODE (op0
);
10648 enum machine_mode op1mode
= GET_MODE (op1
);
10649 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10650 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10651 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
10654 || GET_MODE (target
) != tmode
10655 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10657 target
= gen_reg_rtx (tmode
);
10660 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
10663 op0
= gen_lowpart (HImode
, op0
);
10666 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
10669 op1
= gen_lowpart (HImode
, op1
);
10672 /* In case the insn wants input operands in modes different from
10673 the result, abort. */
10675 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
10676 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
10678 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10679 op0
= copy_to_mode_reg (mode0
, op0
);
10681 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
10682 op1
= copy_to_mode_reg (mode1
, op1
);
10684 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
10693 /* Subroutine of avr_expand_builtin to take care of 3-operand insns. */
10696 avr_expand_triop_builtin (enum insn_code icode
, tree exp
, rtx target
)
10699 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10700 tree arg1
= CALL_EXPR_ARG (exp
, 1);
10701 tree arg2
= CALL_EXPR_ARG (exp
, 2);
10702 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10703 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10704 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10705 enum machine_mode op0mode
= GET_MODE (op0
);
10706 enum machine_mode op1mode
= GET_MODE (op1
);
10707 enum machine_mode op2mode
= GET_MODE (op2
);
10708 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10709 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10710 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
10711 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
10714 || GET_MODE (target
) != tmode
10715 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10717 target
= gen_reg_rtx (tmode
);
10720 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
10723 op0
= gen_lowpart (HImode
, op0
);
10726 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
10729 op1
= gen_lowpart (HImode
, op1
);
10732 if ((op2mode
== SImode
|| op2mode
== VOIDmode
) && mode2
== HImode
)
10735 op2
= gen_lowpart (HImode
, op2
);
10738 /* In case the insn wants input operands in modes different from
10739 the result, abort. */
10741 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
10742 && (op1mode
== mode1
|| op1mode
== VOIDmode
)
10743 && (op2mode
== mode2
|| op2mode
== VOIDmode
));
10745 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10746 op0
= copy_to_mode_reg (mode0
, op0
);
10748 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
10749 op1
= copy_to_mode_reg (mode1
, op1
);
10751 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
10752 op2
= copy_to_mode_reg (mode2
, op2
);
10754 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
10764 /* Expand an expression EXP that calls a built-in function,
10765 with result going to TARGET if that's convenient
10766 (and in mode MODE if that's convenient).
10767 SUBTARGET may be used as the target for computing one of EXP's operands.
10768 IGNORE is nonzero if the value is to be ignored. */
10771 avr_expand_builtin (tree exp
, rtx target
,
10772 rtx subtarget ATTRIBUTE_UNUSED
,
10773 enum machine_mode mode ATTRIBUTE_UNUSED
,
10774 int ignore ATTRIBUTE_UNUSED
)
10777 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
10778 const char* bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
10779 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
10785 case AVR_BUILTIN_NOP
:
10786 emit_insn (gen_nopv (GEN_INT(1)));
10789 case AVR_BUILTIN_DELAY_CYCLES
:
10791 arg0
= CALL_EXPR_ARG (exp
, 0);
10792 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10794 if (!CONST_INT_P (op0
))
10795 error ("%s expects a compile time integer constant", bname
);
10797 avr_expand_delay_cycles (op0
);
10802 case AVR_BUILTIN_INSERT_BITS
:
10804 arg0
= CALL_EXPR_ARG (exp
, 0);
10805 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10807 if (!CONST_INT_P (op0
))
10809 error ("%s expects a compile time long integer constant"
10810 " as first argument", bname
);
10816 for (i
= 0; avr_bdesc
[i
].name
; i
++)
10818 const struct avr_builtin_description
*d
= &avr_bdesc
[i
];
10824 emit_insn ((GEN_FCN (d
->icode
)) (target
));
10828 return avr_expand_unop_builtin (d
->icode
, exp
, target
);
10831 return avr_expand_binop_builtin (d
->icode
, exp
, target
);
10834 return avr_expand_triop_builtin (d
->icode
, exp
, target
);
10841 gcc_unreachable ();
10845 /* Implement `TARGET_FOLD_BUILTIN'. */
10848 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
10849 bool ignore ATTRIBUTE_UNUSED
)
10851 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
10852 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
10862 case AVR_BUILTIN_SWAP
:
10864 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
10865 build_int_cst (val_type
, 4));
10868 case AVR_BUILTIN_INSERT_BITS
:
10870 tree tbits
= arg
[1];
10871 tree tval
= arg
[2];
10873 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
10875 bool changed
= false;
10877 avr_map_op_t best_g
;
10879 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
10881 /* No constant as first argument: Don't fold this and run into
10882 error in avr_expand_builtin. */
10887 map
= tree_to_double_int (arg
[0]);
10888 tmap
= double_int_to_tree (map_type
, map
);
10890 if (TREE_CODE (tval
) != INTEGER_CST
10891 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
10893 /* There are no F in the map, i.e. 3rd operand is unused.
10894 Replace that argument with some constant to render
10895 respective input unused. */
10897 tval
= build_int_cst (val_type
, 0);
10901 if (TREE_CODE (tbits
) != INTEGER_CST
10902 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
10904 /* Similar for the bits to be inserted. If they are unused,
10905 we can just as well pass 0. */
10907 tbits
= build_int_cst (val_type
, 0);
10910 if (TREE_CODE (tbits
) == INTEGER_CST
)
10912 /* Inserting bits known at compile time is easy and can be
10913 performed by AND and OR with appropriate masks. */
10915 int bits
= TREE_INT_CST_LOW (tbits
);
10916 int mask_ior
= 0, mask_and
= 0xff;
10918 for (i
= 0; i
< 8; i
++)
10920 int mi
= avr_map (map
, i
);
10924 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
10925 else mask_and
&= ~(1 << i
);
10929 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
10930 build_int_cst (val_type
, mask_ior
));
10931 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
10932 build_int_cst (val_type
, mask_and
));
10936 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
10938 /* If bits don't change their position we can use vanilla logic
10939 to merge the two arguments. */
10941 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
10943 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
10944 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
10946 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
10947 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
10948 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
10951 /* Try to decomposing map to reduce overall cost. */
10953 if (avr_log
.builtin
)
10954 avr_edump ("\n%?: %X\n%?: ROL cost: ", map
);
10956 best_g
= avr_map_op
[0];
10957 best_g
.cost
= 1000;
10959 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
10962 = avr_map_decompose (map
, avr_map_op
+ i
,
10963 TREE_CODE (tval
) == INTEGER_CST
);
10965 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
10969 if (avr_log
.builtin
)
10972 if (best_g
.arg
== 0)
10973 /* No optimization found */
10976 /* Apply operation G to the 2nd argument. */
10978 if (avr_log
.builtin
)
10979 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
10980 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
10982 /* Do right-shifts arithmetically: They copy the MSB instead of
10983 shifting in a non-usable value (0) as with logic right-shift. */
10985 tbits
= fold_convert (signed_char_type_node
, tbits
);
10986 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
10987 build_int_cst (val_type
, best_g
.arg
));
10988 tbits
= fold_convert (val_type
, tbits
);
10990 /* Use map o G^-1 instead of original map to undo the effect of G. */
10992 tmap
= double_int_to_tree (map_type
, best_g
.map
);
10994 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
10995 } /* AVR_BUILTIN_INSERT_BITS */
11003 struct gcc_target targetm
= TARGET_INITIALIZER
;
11005 #include "gt-avr.h"