1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
/* Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fixed to reference the macro parameter SYM instead of a hard-coded
   lowercase `sym', which only worked when the call-site variable happened
   to be named `sym'.  Wrapped in do/while (0) so it behaves as a single
   statement.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Fixed to reference the macro parameter SYM instead of a hard-coded
   lowercase `sym'.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace
[] =
85 { ADDR_SPACE_RAM
, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix
[6] =
107 /* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
112 /* SREG: The processor status */
115 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
121 /* RAMPZ: The high byte of 24-bit address used with ELPM */
124 /* SP: The stack pointer and its low and high byte */
129 static avr_addr_t avr_addr
;
132 /* Prototypes for local helper functions. */
134 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
135 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
136 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
137 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
138 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
139 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
141 static int avr_naked_function_p (tree
);
142 static int interrupt_function_p (tree
);
143 static int signal_function_p (tree
);
144 static int avr_OS_task_function_p (tree
);
145 static int avr_OS_main_function_p (tree
);
146 static int avr_regs_to_save (HARD_REG_SET
*);
147 static int get_sequence_length (rtx insns
);
148 static int sequent_regs_live (void);
149 static const char *ptrreg_to_str (int);
150 static const char *cond_string (enum rtx_code
);
151 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
152 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
154 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
155 static struct machine_function
* avr_init_machine_status (void);
158 /* Prototypes for hook implementors if needed before their implementation. */
160 static bool avr_rtx_costs (rtx
, int, int, int, int *, bool);
163 /* Allocate registers from r25 to r8 for parameters for function calls. */
164 #define FIRST_CUM_REG 26
166 /* Implicit target register of LPM instruction (R0) */
167 extern GTY(()) rtx lpm_reg_rtx
;
170 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
171 extern GTY(()) rtx lpm_addr_reg_rtx
;
172 rtx lpm_addr_reg_rtx
;
174 /* Temporary register RTX (reg:QI TMP_REGNO) */
175 extern GTY(()) rtx tmp_reg_rtx
;
178 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
179 extern GTY(()) rtx zero_reg_rtx
;
182 /* RTXs for all general purpose registers as QImode */
183 extern GTY(()) rtx all_regs_rtx
[32];
184 rtx all_regs_rtx
[32];
186 /* SREG, the processor status */
187 extern GTY(()) rtx sreg_rtx
;
190 /* RAMP* special function registers */
191 extern GTY(()) rtx rampd_rtx
;
192 extern GTY(()) rtx rampx_rtx
;
193 extern GTY(()) rtx rampy_rtx
;
194 extern GTY(()) rtx rampz_rtx
;
200 /* RTX containing the strings "" and "e", respectively */
201 static GTY(()) rtx xstring_empty
;
202 static GTY(()) rtx xstring_e
;
204 /* Preprocessor macros to define depending on MCU type. */
205 const char *avr_extra_arch_macro
;
207 /* Current architecture. */
208 const struct base_arch_s
*avr_current_arch
;
210 /* Current device. */
211 const struct mcu_type_s
*avr_current_device
;
213 /* Section to put switch tables in. */
214 static GTY(()) section
*progmem_swtable_section
;
216 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
217 or to address space __flash*. */
218 static GTY(()) section
*progmem_section
[6];
220 /* Condition for insns/expanders from avr-dimode.md. */
221 bool avr_have_dimode
= true;
223 /* To track if code will use .bss and/or .data. */
224 bool avr_need_clear_bss_p
= false;
225 bool avr_need_copy_data_p
= false;
228 /* Initialize the GCC target structure. */
229 #undef TARGET_ASM_ALIGNED_HI_OP
230 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
231 #undef TARGET_ASM_ALIGNED_SI_OP
232 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
233 #undef TARGET_ASM_UNALIGNED_HI_OP
234 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
235 #undef TARGET_ASM_UNALIGNED_SI_OP
236 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
237 #undef TARGET_ASM_INTEGER
238 #define TARGET_ASM_INTEGER avr_assemble_integer
239 #undef TARGET_ASM_FILE_START
240 #define TARGET_ASM_FILE_START avr_file_start
241 #undef TARGET_ASM_FILE_END
242 #define TARGET_ASM_FILE_END avr_file_end
244 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
245 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
246 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
247 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
249 #undef TARGET_FUNCTION_VALUE
250 #define TARGET_FUNCTION_VALUE avr_function_value
251 #undef TARGET_LIBCALL_VALUE
252 #define TARGET_LIBCALL_VALUE avr_libcall_value
253 #undef TARGET_FUNCTION_VALUE_REGNO_P
254 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
256 #undef TARGET_ATTRIBUTE_TABLE
257 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
258 #undef TARGET_INSERT_ATTRIBUTES
259 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
260 #undef TARGET_SECTION_TYPE_FLAGS
261 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
263 #undef TARGET_ASM_NAMED_SECTION
264 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
265 #undef TARGET_ASM_INIT_SECTIONS
266 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
267 #undef TARGET_ENCODE_SECTION_INFO
268 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
269 #undef TARGET_ASM_SELECT_SECTION
270 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
272 #undef TARGET_REGISTER_MOVE_COST
273 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
274 #undef TARGET_MEMORY_MOVE_COST
275 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
276 #undef TARGET_RTX_COSTS
277 #define TARGET_RTX_COSTS avr_rtx_costs
278 #undef TARGET_ADDRESS_COST
279 #define TARGET_ADDRESS_COST avr_address_cost
280 #undef TARGET_MACHINE_DEPENDENT_REORG
281 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
282 #undef TARGET_FUNCTION_ARG
283 #define TARGET_FUNCTION_ARG avr_function_arg
284 #undef TARGET_FUNCTION_ARG_ADVANCE
285 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
287 #undef TARGET_RETURN_IN_MEMORY
288 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
290 #undef TARGET_STRICT_ARGUMENT_NAMING
291 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
293 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
294 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
296 #undef TARGET_HARD_REGNO_SCRATCH_OK
297 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
298 #undef TARGET_CASE_VALUES_THRESHOLD
299 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
301 #undef TARGET_FRAME_POINTER_REQUIRED
302 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
303 #undef TARGET_CAN_ELIMINATE
304 #define TARGET_CAN_ELIMINATE avr_can_eliminate
306 #undef TARGET_CLASS_LIKELY_SPILLED_P
307 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
309 #undef TARGET_OPTION_OVERRIDE
310 #define TARGET_OPTION_OVERRIDE avr_option_override
312 #undef TARGET_CANNOT_MODIFY_JUMPS_P
313 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
315 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
316 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
318 #undef TARGET_INIT_BUILTINS
319 #define TARGET_INIT_BUILTINS avr_init_builtins
321 #undef TARGET_EXPAND_BUILTIN
322 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
324 #undef TARGET_FOLD_BUILTIN
325 #define TARGET_FOLD_BUILTIN avr_fold_builtin
327 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
328 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
330 #undef TARGET_SCALAR_MODE_SUPPORTED_P
331 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
333 #undef TARGET_ADDR_SPACE_SUBSET_P
334 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
336 #undef TARGET_ADDR_SPACE_CONVERT
337 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
339 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
340 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
342 #undef TARGET_ADDR_SPACE_POINTER_MODE
343 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
345 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
346 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
348 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
349 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
351 #undef TARGET_PRINT_OPERAND
352 #define TARGET_PRINT_OPERAND avr_print_operand
353 #undef TARGET_PRINT_OPERAND_ADDRESS
354 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
355 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
356 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
360 /* Custom function to count number of set bits. */
363 avr_popcount (unsigned int val
)
377 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
378 Return true if the least significant N_BYTES bytes of XVAL all have a
379 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
380 of integers which contains an integer N iff bit N of POP_MASK is set. */
383 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
387 enum machine_mode mode
= GET_MODE (xval
);
389 if (VOIDmode
== mode
)
392 for (i
= 0; i
< n_bytes
; i
++)
394 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
395 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
397 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
405 avr_option_override (void)
407 flag_delete_null_pointer_checks
= 0;
409 /* caller-save.c looks for call-clobbered hard registers that are assigned
410 to pseudos that cross calls and tries to save-restore them around calls
411 in order to reduce the number of stack slots needed.
413 This might lead to situations where reload is no longer able to cope
414 with the challenge of AVR's very few address registers and fails to
415 perform the requested spills. */
418 flag_caller_saves
= 0;
420 /* Unwind tables currently require a frame pointer for correctness,
421 see toplev.c:process_options(). */
423 if ((flag_unwind_tables
424 || flag_non_call_exceptions
425 || flag_asynchronous_unwind_tables
)
426 && !ACCUMULATE_OUTGOING_ARGS
)
428 flag_omit_frame_pointer
= 0;
431 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
432 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
433 avr_extra_arch_macro
= avr_current_device
->macro
;
435 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
437 /* SREG: Status Register containing flags like I (global IRQ) */
438 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
440 /* RAMPZ: Address' high part when loading via ELPM */
441 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
443 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
444 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
445 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
446 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
448 /* SP: Stack Pointer (SP_H:SP_L) */
449 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
450 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
452 init_machine_status
= avr_init_machine_status
;
454 avr_log_set_avr_log();
/* Function to set up the backend function structure.  Installed as
   init_machine_status in avr_option_override; returns a zeroed,
   GC-allocated struct machine_function.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
466 /* Implement `INIT_EXPANDERS'. */
467 /* The function works like a singleton. */
470 avr_init_expanders (void)
474 for (regno
= 0; regno
< 32; regno
++)
475 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
477 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
478 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
479 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
481 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
483 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
484 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
485 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
486 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
487 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
489 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
490 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
494 /* Return register class for register R. */
497 avr_regno_reg_class (int r
)
499 static const enum reg_class reg_class_tab
[] =
503 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
504 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
505 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
506 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
508 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
509 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
511 ADDW_REGS
, ADDW_REGS
,
513 POINTER_X_REGS
, POINTER_X_REGS
,
515 POINTER_Y_REGS
, POINTER_Y_REGS
,
517 POINTER_Z_REGS
, POINTER_Z_REGS
,
523 return reg_class_tab
[r
];
530 avr_scalar_mode_supported_p (enum machine_mode mode
)
535 return default_scalar_mode_supported_p (mode
);
539 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
542 avr_decl_flash_p (tree decl
)
544 if (TREE_CODE (decl
) != VAR_DECL
545 || TREE_TYPE (decl
) == error_mark_node
)
550 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
554 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
555 address space and FALSE, otherwise. */
558 avr_decl_memx_p (tree decl
)
560 if (TREE_CODE (decl
) != VAR_DECL
561 || TREE_TYPE (decl
) == error_mark_node
)
566 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
570 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
573 avr_mem_flash_p (rtx x
)
576 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
580 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
581 address space and FALSE, otherwise. */
584 avr_mem_memx_p (rtx x
)
587 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
591 /* A helper for the subsequent function attribute used to dig for
592 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
595 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
597 if (FUNCTION_DECL
== TREE_CODE (func
))
599 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
604 func
= TREE_TYPE (func
);
607 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
608 || TREE_CODE (func
) == METHOD_TYPE
);
610 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
613 /* Return nonzero if FUNC is a naked function. */
616 avr_naked_function_p (tree func
)
618 return avr_lookup_function_attribute1 (func
, "naked");
621 /* Return nonzero if FUNC is an interrupt function as specified
622 by the "interrupt" attribute. */
625 interrupt_function_p (tree func
)
627 return avr_lookup_function_attribute1 (func
, "interrupt");
630 /* Return nonzero if FUNC is a signal function as specified
631 by the "signal" attribute. */
634 signal_function_p (tree func
)
636 return avr_lookup_function_attribute1 (func
, "signal");
639 /* Return nonzero if FUNC is an OS_task function. */
642 avr_OS_task_function_p (tree func
)
644 return avr_lookup_function_attribute1 (func
, "OS_task");
647 /* Return nonzero if FUNC is an OS_main function. */
650 avr_OS_main_function_p (tree func
)
652 return avr_lookup_function_attribute1 (func
, "OS_main");
656 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
659 avr_accumulate_outgoing_args (void)
662 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
664 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
665 what offset is correct. In some cases it is relative to
666 virtual_outgoing_args_rtx and in others it is relative to
667 virtual_stack_vars_rtx. For example code see
668 gcc.c-torture/execute/built-in-setjmp.c
669 gcc.c-torture/execute/builtins/sprintf-chk.c */
671 return (TARGET_ACCUMULATE_OUTGOING_ARGS
672 && !(cfun
->calls_setjmp
673 || cfun
->has_nonlocal_label
));
677 /* Report contribution of accumulated outgoing arguments to stack size. */
680 avr_outgoing_args_size (void)
682 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  The extra 1 skips the byte
   at FP itself (AVR pushes via post-decrement).  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
697 /* Return the number of hard registers to push/pop in the prologue/epilogue
698 of the current function, and optionally store these registers in SET. */
701 avr_regs_to_save (HARD_REG_SET
*set
)
704 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
705 || signal_function_p (current_function_decl
));
708 CLEAR_HARD_REG_SET (*set
);
711 /* No need to save any registers if the function never returns or
712 has the "OS_task" or "OS_main" attribute. */
713 if (TREE_THIS_VOLATILE (current_function_decl
)
714 || cfun
->machine
->is_OS_task
715 || cfun
->machine
->is_OS_main
)
718 for (reg
= 0; reg
< 32; reg
++)
720 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
721 any global register variables. */
725 if ((int_or_sig_p
&& !current_function_is_leaf
&& call_used_regs
[reg
])
726 || (df_regs_ever_live_p (reg
)
727 && (int_or_sig_p
|| !call_used_regs
[reg
])
728 /* Don't record frame pointer registers here. They are treated
729 individually in prologue. */
730 && !(frame_pointer_needed
731 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
734 SET_HARD_REG_BIT (*set
, reg
);
741 /* Return true if register FROM can be eliminated via register TO. */
744 avr_can_eliminate (const int from
, const int to
)
746 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
747 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
748 || ((from
== FRAME_POINTER_REGNUM
749 || from
== FRAME_POINTER_REGNUM
+ 1)
750 && !frame_pointer_needed
));
753 /* Compute offset between arg_pointer and frame_pointer. */
756 avr_initial_elimination_offset (int from
, int to
)
758 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
762 int offset
= frame_pointer_needed
? 2 : 0;
763 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
765 offset
+= avr_regs_to_save (NULL
);
766 return (get_frame_size () + avr_outgoing_args_size()
767 + avr_pc_size
+ 1 + offset
);
771 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
772 frame pointer by +STARTING_FRAME_OFFSET.
773 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
774 avoids creating add/sub of offset in nonlocal goto and setjmp. */
777 avr_builtin_setjmp_frame_value (void)
779 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
780 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
783 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
784 This is return address of function. */
786 avr_return_addr_rtx (int count
, rtx tem
)
790 /* Can only return this function's return address. Others not supported. */
796 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
797 warning (0, "'builtin_return_address' contains only 2 bytes of address");
800 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
802 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
803 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
804 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
808 /* Return 1 if the function epilogue is just a single "ret". */
811 avr_simple_epilogue (void)
813 return (! frame_pointer_needed
814 && get_frame_size () == 0
815 && avr_outgoing_args_size() == 0
816 && avr_regs_to_save (NULL
) == 0
817 && ! interrupt_function_p (current_function_decl
)
818 && ! signal_function_p (current_function_decl
)
819 && ! avr_naked_function_p (current_function_decl
)
820 && ! TREE_THIS_VOLATILE (current_function_decl
));
823 /* This function checks sequence of live registers. */
826 sequent_regs_live (void)
832 for (reg
= 0; reg
< 18; ++reg
)
836 /* Don't recognize sequences that contain global register
845 if (!call_used_regs
[reg
])
847 if (df_regs_ever_live_p (reg
))
857 if (!frame_pointer_needed
)
859 if (df_regs_ever_live_p (REG_Y
))
867 if (df_regs_ever_live_p (REG_Y
+1))
880 return (cur_seq
== live_seq
) ? live_seq
: 0;
883 /* Obtain the length sequence of insns. */
886 get_sequence_length (rtx insns
)
891 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
892 length
+= get_attr_length (insn
);
897 /* Implement INCOMING_RETURN_ADDR_RTX. */
900 avr_incoming_return_addr_rtx (void)
902 /* The return address is at the top of the stack. Note that the push
903 was via post-decrement, which means the actual address is off by one. */
904 return gen_frame_mem (HImode
, plus_constant (stack_pointer_rtx
, 1));
907 /* Helper for expand_prologue. Emit a push of a byte register. */
910 emit_push_byte (unsigned regno
, bool frame_related_p
)
914 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
915 mem
= gen_frame_mem (QImode
, mem
);
916 reg
= gen_rtx_REG (QImode
, regno
);
918 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
920 RTX_FRAME_RELATED_P (insn
) = 1;
922 cfun
->machine
->stack_usage
++;
926 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
927 SFR is a MEM representing the memory location of the SFR.
928 If CLR_P then clear the SFR after the push using zero_reg. */
931 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
935 gcc_assert (MEM_P (sfr
));
937 /* IN __tmp_reg__, IO(SFR) */
938 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
940 RTX_FRAME_RELATED_P (insn
) = 1;
942 /* PUSH __tmp_reg__ */
943 emit_push_byte (TMP_REGNO
, frame_related_p
);
947 /* OUT IO(SFR), __zero_reg__ */
948 insn
= emit_move_insn (sfr
, const0_rtx
);
950 RTX_FRAME_RELATED_P (insn
) = 1;
955 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
958 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
959 int live_seq
= sequent_regs_live ();
961 bool minimize
= (TARGET_CALL_PROLOGUES
964 && !cfun
->machine
->is_OS_task
965 && !cfun
->machine
->is_OS_main
);
968 && (frame_pointer_needed
969 || avr_outgoing_args_size() > 8
970 || (AVR_2_BYTE_PC
&& live_seq
> 6)
974 int first_reg
, reg
, offset
;
976 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
977 gen_int_mode (size
, HImode
));
979 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
980 gen_int_mode (live_seq
+size
, HImode
));
981 insn
= emit_insn (pattern
);
982 RTX_FRAME_RELATED_P (insn
) = 1;
984 /* Describe the effect of the unspec_volatile call to prologue_saves.
985 Note that this formulation assumes that add_reg_note pushes the
986 notes to the front. Thus we build them in the reverse order of
987 how we want dwarf2out to process them. */
989 /* The function does always set frame_pointer_rtx, but whether that
990 is going to be permanent in the function is frame_pointer_needed. */
992 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
993 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
995 : stack_pointer_rtx
),
996 plus_constant (stack_pointer_rtx
,
997 -(size
+ live_seq
))));
999 /* Note that live_seq always contains r28+r29, but the other
1000 registers to be saved are all below 18. */
1002 first_reg
= 18 - (live_seq
- 2);
1004 for (reg
= 29, offset
= -live_seq
+ 1;
1006 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1010 m
= gen_rtx_MEM (QImode
, plus_constant (stack_pointer_rtx
, offset
));
1011 r
= gen_rtx_REG (QImode
, reg
);
1012 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1015 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1017 else /* !minimize */
1021 for (reg
= 0; reg
< 32; ++reg
)
1022 if (TEST_HARD_REG_BIT (set
, reg
))
1023 emit_push_byte (reg
, true);
1025 if (frame_pointer_needed
1026 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1028 /* Push frame pointer. Always be consistent about the
1029 ordering of pushes -- epilogue_restores expects the
1030 register pair to be pushed low byte first. */
1032 emit_push_byte (REG_Y
, true);
1033 emit_push_byte (REG_Y
+ 1, true);
1036 if (frame_pointer_needed
1039 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1040 RTX_FRAME_RELATED_P (insn
) = 1;
1045 /* Creating a frame can be done by direct manipulation of the
1046 stack or via the frame pointer. These two methods are:
1053 the optimum method depends on function type, stack and
1054 frame size. To avoid a complex logic, both methods are
1055 tested and shortest is selected.
1057 There is also the case where SIZE != 0 and no frame pointer is
1058 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1059 In that case, insn (*) is not needed.
1060 We use the X register as scratch. This is safe because in X
1062 In an interrupt routine, the case of SIZE != 0 together with
1063 !frame_pointer_needed can only occur if the function is not a
1064 leaf function and thus X has already been saved. */
1067 rtx fp_plus_insns
, fp
, my_fp
;
1069 gcc_assert (frame_pointer_needed
1071 || !current_function_is_leaf
);
1073 fp
= my_fp
= (frame_pointer_needed
1075 : gen_rtx_REG (Pmode
, REG_X
));
1077 if (AVR_HAVE_8BIT_SP
)
1079 /* The high byte (r29) does not change:
1080 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1082 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1085 /************ Method 1: Adjust frame pointer ************/
1089 /* Normally, the dwarf2out frame-related-expr interpreter does
1090 not expect to have the CFA change once the frame pointer is
1091 set up. Thus, we avoid marking the move insn below and
1092 instead indicate that the entire operation is complete after
1093 the frame pointer subtraction is done. */
1095 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1096 if (frame_pointer_needed
)
1098 RTX_FRAME_RELATED_P (insn
) = 1;
1099 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1100 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1103 insn
= emit_move_insn (my_fp
, plus_constant (my_fp
, -size
));
1104 if (frame_pointer_needed
)
1106 RTX_FRAME_RELATED_P (insn
) = 1;
1107 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1108 gen_rtx_SET (VOIDmode
, fp
,
1109 plus_constant (fp
, -size
)));
1112 /* Copy to stack pointer. Note that since we've already
1113 changed the CFA to the frame pointer this operation
1114 need not be annotated if frame pointer is needed.
1115 Always move through unspec, see PR50063.
1116 For meaning of irq_state see movhi_sp_r insn. */
1118 if (cfun
->machine
->is_interrupt
)
1121 if (TARGET_NO_INTERRUPTS
1122 || cfun
->machine
->is_signal
1123 || cfun
->machine
->is_OS_main
)
1126 if (AVR_HAVE_8BIT_SP
)
1129 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1130 fp
, GEN_INT (irq_state
)));
1131 if (!frame_pointer_needed
)
1133 RTX_FRAME_RELATED_P (insn
) = 1;
1134 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1135 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1136 plus_constant (stack_pointer_rtx
,
1140 fp_plus_insns
= get_insns ();
1143 /************ Method 2: Adjust Stack pointer ************/
1145 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1146 can only handle specific offsets. */
1148 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1154 insn
= emit_move_insn (stack_pointer_rtx
,
1155 plus_constant (stack_pointer_rtx
, -size
));
1156 RTX_FRAME_RELATED_P (insn
) = 1;
1157 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1158 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1159 plus_constant (stack_pointer_rtx
,
1161 if (frame_pointer_needed
)
1163 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1164 RTX_FRAME_RELATED_P (insn
) = 1;
1167 sp_plus_insns
= get_insns ();
1170 /************ Use shortest method ************/
1172 emit_insn (get_sequence_length (sp_plus_insns
)
1173 < get_sequence_length (fp_plus_insns
)
1179 emit_insn (fp_plus_insns
);
1182 cfun
->machine
->stack_usage
+= size
;
1183 } /* !minimize && size != 0 */
1188 /* Output function prologue. */
1191 expand_prologue (void)
1196 size
= get_frame_size() + avr_outgoing_args_size();
1198 /* Init cfun->machine. */
1199 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
1200 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
1201 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
1202 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
1203 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
1204 cfun
->machine
->stack_usage
= 0;
1206 /* Prologue: naked. */
1207 if (cfun
->machine
->is_naked
)
1212 avr_regs_to_save (&set
);
1214 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1216 /* Enable interrupts. */
1217 if (cfun
->machine
->is_interrupt
)
1218 emit_insn (gen_enable_interrupt ());
1220 /* Push zero reg. */
1221 emit_push_byte (ZERO_REGNO
, true);
1224 emit_push_byte (TMP_REGNO
, true);
1227 /* ??? There's no dwarf2 column reserved for SREG. */
1228 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1230 /* Clear zero reg. */
1231 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1233 /* Prevent any attempt to delete the setting of ZERO_REG! */
1234 emit_use (zero_reg_rtx
);
1236 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1237 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1240 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1243 && TEST_HARD_REG_BIT (set
, REG_X
)
1244 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1246 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1250 && (frame_pointer_needed
1251 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1252 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1254 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1258 && TEST_HARD_REG_BIT (set
, REG_Z
)
1259 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1261 emit_push_sfr (rampz_rtx
, false /* frame-related */, true /* clr */);
1263 } /* is_interrupt is_signal */
1265 avr_prologue_setup_frame (size
, set
);
1267 if (flag_stack_usage_info
)
1268 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1271 /* Output summary at end of function prologue. */
1274 avr_asm_function_end_prologue (FILE *file
)
1276 if (cfun
->machine
->is_naked
)
1278 fputs ("/* prologue: naked */\n", file
);
1282 if (cfun
->machine
->is_interrupt
)
1284 fputs ("/* prologue: Interrupt */\n", file
);
1286 else if (cfun
->machine
->is_signal
)
1288 fputs ("/* prologue: Signal */\n", file
);
1291 fputs ("/* prologue: function */\n", file
);
1294 if (ACCUMULATE_OUTGOING_ARGS
)
1295 fprintf (file
, "/* outgoing args size = %d */\n",
1296 avr_outgoing_args_size());
1298 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1300 fprintf (file
, "/* stack size = %d */\n",
1301 cfun
->machine
->stack_usage
);
1302 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1303 usage for offset so that SP + .L__stack_offset = return address. */
1304 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1308 /* Implement EPILOGUE_USES. */
1311 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1313 if (reload_completed
1315 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1320 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1323 emit_pop_byte (unsigned regno
)
1327 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1328 mem
= gen_frame_mem (QImode
, mem
);
1329 reg
= gen_rtx_REG (QImode
, regno
);
1331 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1334 /* Output RTL epilogue. */
1337 expand_epilogue (bool sibcall_p
)
1344 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1346 size
= get_frame_size() + avr_outgoing_args_size();
1348 /* epilogue: naked */
1349 if (cfun
->machine
->is_naked
)
1351 gcc_assert (!sibcall_p
);
1353 emit_jump_insn (gen_return ());
1357 avr_regs_to_save (&set
);
1358 live_seq
= sequent_regs_live ();
1360 minimize
= (TARGET_CALL_PROLOGUES
1363 && !cfun
->machine
->is_OS_task
1364 && !cfun
->machine
->is_OS_main
);
1368 || frame_pointer_needed
1371 /* Get rid of frame. */
1373 if (!frame_pointer_needed
)
1375 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1380 emit_move_insn (frame_pointer_rtx
,
1381 plus_constant (frame_pointer_rtx
, size
));
1384 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1390 /* Try two methods to adjust stack and select shortest. */
1396 gcc_assert (frame_pointer_needed
1398 || !current_function_is_leaf
);
1400 fp
= my_fp
= (frame_pointer_needed
1402 : gen_rtx_REG (Pmode
, REG_X
));
1404 if (AVR_HAVE_8BIT_SP
)
1406 /* The high byte (r29) does not change:
1407 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1409 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1412 /********** Method 1: Adjust fp register **********/
1416 if (!frame_pointer_needed
)
1417 emit_move_insn (fp
, stack_pointer_rtx
);
1419 emit_move_insn (my_fp
, plus_constant (my_fp
, size
));
1421 /* Copy to stack pointer. */
1423 if (TARGET_NO_INTERRUPTS
)
1426 if (AVR_HAVE_8BIT_SP
)
1429 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1430 GEN_INT (irq_state
)));
1432 fp_plus_insns
= get_insns ();
1435 /********** Method 2: Adjust Stack pointer **********/
1437 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1443 emit_move_insn (stack_pointer_rtx
,
1444 plus_constant (stack_pointer_rtx
, size
));
1446 sp_plus_insns
= get_insns ();
1449 /************ Use shortest method ************/
1451 emit_insn (get_sequence_length (sp_plus_insns
)
1452 < get_sequence_length (fp_plus_insns
)
1457 emit_insn (fp_plus_insns
);
1460 if (frame_pointer_needed
1461 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1463 /* Restore previous frame_pointer. See expand_prologue for
1464 rationale for not using pophi. */
1466 emit_pop_byte (REG_Y
+ 1);
1467 emit_pop_byte (REG_Y
);
1470 /* Restore used registers. */
1472 for (reg
= 31; reg
>= 0; --reg
)
1473 if (TEST_HARD_REG_BIT (set
, reg
))
1474 emit_pop_byte (reg
);
1478 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1479 The conditions to restore them must be tha same as in prologue. */
1482 && TEST_HARD_REG_BIT (set
, REG_X
)
1483 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1485 emit_pop_byte (TMP_REGNO
);
1486 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1490 && (frame_pointer_needed
1491 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1492 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1494 emit_pop_byte (TMP_REGNO
);
1495 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1499 && TEST_HARD_REG_BIT (set
, REG_Z
)
1500 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1502 emit_pop_byte (TMP_REGNO
);
1503 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1508 emit_pop_byte (TMP_REGNO
);
1509 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1512 /* Restore SREG using tmp_reg as scratch. */
1514 emit_pop_byte (TMP_REGNO
);
1515 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1517 /* Restore tmp REG. */
1518 emit_pop_byte (TMP_REGNO
);
1520 /* Restore zero REG. */
1521 emit_pop_byte (ZERO_REGNO
);
1525 emit_jump_insn (gen_return ());
1528 /* Output summary messages at beginning of function epilogue. */
1531 avr_asm_function_begin_epilogue (FILE *file
)
1533 fprintf (file
, "/* epilogue start */\n");
1537 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1540 avr_cannot_modify_jumps_p (void)
1543 /* Naked Functions must not have any instructions after
1544 their epilogue, see PR42240 */
1546 if (reload_completed
1548 && cfun
->machine
->is_naked
)
1557 /* Helper function for `avr_legitimate_address_p'. */
1560 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1561 RTX_CODE outer_code
, bool strict
)
1564 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1565 as
, outer_code
, UNKNOWN
)
1567 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1571 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1572 machine for a memory operand of mode MODE. */
1575 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1577 bool ok
= CONSTANT_ADDRESS_P (x
);
1579 switch (GET_CODE (x
))
1582 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1587 && REG_X
== REGNO (x
))
1595 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1596 GET_CODE (x
), strict
);
1601 rtx reg
= XEXP (x
, 0);
1602 rtx op1
= XEXP (x
, 1);
1605 && CONST_INT_P (op1
)
1606 && INTVAL (op1
) >= 0)
1608 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1613 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1616 if (reg
== frame_pointer_rtx
1617 || reg
== arg_pointer_rtx
)
1622 else if (frame_pointer_needed
1623 && reg
== frame_pointer_rtx
)
1635 if (avr_log
.legitimate_address_p
)
1637 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1638 "reload_completed=%d reload_in_progress=%d %s:",
1639 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1640 reg_renumber
? "(reg_renumber)" : "");
1642 if (GET_CODE (x
) == PLUS
1643 && REG_P (XEXP (x
, 0))
1644 && CONST_INT_P (XEXP (x
, 1))
1645 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1648 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1649 true_regnum (XEXP (x
, 0)));
1652 avr_edump ("\n%r\n", x
);
1659 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1660 now only a helper for avr_addr_space_legitimize_address. */
1661 /* Attempts to replace X with a valid
1662 memory address for an operand of mode MODE */
1665 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1667 bool big_offset_p
= false;
1671 if (GET_CODE (oldx
) == PLUS
1672 && REG_P (XEXP (oldx
, 0)))
1674 if (REG_P (XEXP (oldx
, 1)))
1675 x
= force_reg (GET_MODE (oldx
), oldx
);
1676 else if (CONST_INT_P (XEXP (oldx
, 1)))
1678 int offs
= INTVAL (XEXP (oldx
, 1));
1679 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1680 && offs
> MAX_LD_OFFSET (mode
))
1682 big_offset_p
= true;
1683 x
= force_reg (GET_MODE (oldx
), oldx
);
1688 if (avr_log
.legitimize_address
)
1690 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1693 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1700 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1701 /* This will allow register R26/27 to be used where it is no worse than normal
1702 base pointers R28/29 or R30/31. For example, if base offset is greater
1703 than 63 bytes or for R++ or --R addressing. */
1706 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1707 int opnum
, int type
, int addr_type
,
1708 int ind_levels ATTRIBUTE_UNUSED
,
1709 rtx (*mk_memloc
)(rtx
,int))
1713 if (avr_log
.legitimize_reload_address
)
1714 avr_edump ("\n%?:%m %r\n", mode
, x
);
1716 if (1 && (GET_CODE (x
) == POST_INC
1717 || GET_CODE (x
) == PRE_DEC
))
1719 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1720 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1721 opnum
, RELOAD_OTHER
);
1723 if (avr_log
.legitimize_reload_address
)
1724 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1725 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1730 if (GET_CODE (x
) == PLUS
1731 && REG_P (XEXP (x
, 0))
1732 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1733 && CONST_INT_P (XEXP (x
, 1))
1734 && INTVAL (XEXP (x
, 1)) >= 1)
1736 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1740 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1742 int regno
= REGNO (XEXP (x
, 0));
1743 rtx mem
= mk_memloc (x
, regno
);
1745 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1746 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1749 if (avr_log
.legitimize_reload_address
)
1750 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1751 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1753 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1754 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1757 if (avr_log
.legitimize_reload_address
)
1758 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1759 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1764 else if (! (frame_pointer_needed
1765 && XEXP (x
, 0) == frame_pointer_rtx
))
1767 push_reload (x
, NULL_RTX
, px
, NULL
,
1768 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1771 if (avr_log
.legitimize_reload_address
)
1772 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1773 POINTER_REGS
, x
, NULL_RTX
);
1783 /* Helper function to print assembler resp. track instruction
1784 sequence lengths. Always return "".
1787 Output assembler code from template TPL with operands supplied
1788 by OPERANDS. This is just forwarding to output_asm_insn.
1791 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1792 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1793 Don't output anything.
1797 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1801 output_asm_insn (tpl
, operands
);
1815 /* Return a pointer register name as a string. */
1818 ptrreg_to_str (int regno
)
1822 case REG_X
: return "X";
1823 case REG_Y
: return "Y";
1824 case REG_Z
: return "Z";
1826 output_operand_lossage ("address operand requires constraint for"
1827 " X, Y, or Z register");
1832 /* Return the condition name as a string.
1833 Used in conditional jump constructing */
1836 cond_string (enum rtx_code code
)
1845 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1850 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1866 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1867 /* Output ADDR to FILE as address. */
1870 avr_print_operand_address (FILE *file
, rtx addr
)
1872 switch (GET_CODE (addr
))
1875 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1879 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1883 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1887 if (CONSTANT_ADDRESS_P (addr
)
1888 && text_segment_operand (addr
, VOIDmode
))
1891 if (GET_CODE (x
) == CONST
)
1893 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1895 /* Assembler gs() will implant word address. Make offset
1896 a byte offset inside gs() for assembler. This is
1897 needed because the more logical (constant+gs(sym)) is not
1898 accepted by gas. For 128K and lower devices this is ok.
1899 For large devices it will create a Trampoline to offset
1900 from symbol which may not be what the user really wanted. */
1901 fprintf (file
, "gs(");
1902 output_addr_const (file
, XEXP (x
,0));
1903 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
1904 2 * INTVAL (XEXP (x
, 1)));
1906 if (warning (0, "pointer offset from symbol maybe incorrect"))
1908 output_addr_const (stderr
, addr
);
1909 fprintf(stderr
,"\n");
1914 fprintf (file
, "gs(");
1915 output_addr_const (file
, addr
);
1916 fprintf (file
, ")");
1920 output_addr_const (file
, addr
);
1925 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1928 avr_print_operand_punct_valid_p (unsigned char code
)
1930 return code
== '~' || code
== '!';
1934 /* Implement `TARGET_PRINT_OPERAND'. */
1935 /* Output X as assembler operand to file FILE.
1936 For a description of supported %-codes, see top of avr.md. */
1939 avr_print_operand (FILE *file
, rtx x
, int code
)
1943 if (code
>= 'A' && code
<= 'D')
1948 if (!AVR_HAVE_JMP_CALL
)
1951 else if (code
== '!')
1953 if (AVR_HAVE_EIJMP_EICALL
)
1956 else if (code
== 't'
1959 static int t_regno
= -1;
1960 static int t_nbits
= -1;
1962 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
1964 t_regno
= REGNO (x
);
1965 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
1967 else if (CONST_INT_P (x
) && t_regno
>= 0
1968 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
1970 int bpos
= INTVAL (x
);
1972 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
1974 fprintf (file
, ",%d", bpos
% 8);
1979 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
1983 if (x
== zero_reg_rtx
)
1984 fprintf (file
, "__zero_reg__");
1986 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1988 else if (CONST_INT_P (x
))
1990 HOST_WIDE_INT ival
= INTVAL (x
);
1993 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
1994 else if (low_io_address_operand (x
, VOIDmode
)
1995 || high_io_address_operand (x
, VOIDmode
))
1997 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
1998 fprintf (file
, "__RAMPZ__");
1999 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2000 fprintf (file
, "__RAMPY__");
2001 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2002 fprintf (file
, "__RAMPX__");
2003 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2004 fprintf (file
, "__RAMPD__");
2005 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2006 fprintf (file
, "__CCP__");
2007 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2008 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2009 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2012 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2013 ival
- avr_current_arch
->sfr_offset
);
2017 fatal_insn ("bad address, not an I/O address:", x
);
2021 rtx addr
= XEXP (x
, 0);
2025 if (!CONSTANT_P (addr
))
2026 fatal_insn ("bad address, not a constant:", addr
);
2027 /* Assembler template with m-code is data - not progmem section */
2028 if (text_segment_operand (addr
, VOIDmode
))
2029 if (warning (0, "accessing data memory with"
2030 " program memory address"))
2032 output_addr_const (stderr
, addr
);
2033 fprintf(stderr
,"\n");
2035 output_addr_const (file
, addr
);
2037 else if (code
== 'i')
2039 avr_print_operand (file
, addr
, 'i');
2041 else if (code
== 'o')
2043 if (GET_CODE (addr
) != PLUS
)
2044 fatal_insn ("bad address, not (reg+disp):", addr
);
2046 avr_print_operand (file
, XEXP (addr
, 1), 0);
2048 else if (code
== 'p' || code
== 'r')
2050 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2051 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2054 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2056 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2058 else if (GET_CODE (addr
) == PLUS
)
2060 avr_print_operand_address (file
, XEXP (addr
,0));
2061 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2062 fatal_insn ("internal compiler error. Bad address:"
2065 avr_print_operand (file
, XEXP (addr
,1), code
);
2068 avr_print_operand_address (file
, addr
);
2070 else if (code
== 'i')
2072 fatal_insn ("bad address, not an I/O address:", x
);
2074 else if (code
== 'x')
2076 /* Constant progmem address - like used in jmp or call */
2077 if (0 == text_segment_operand (x
, VOIDmode
))
2078 if (warning (0, "accessing program memory"
2079 " with data memory address"))
2081 output_addr_const (stderr
, x
);
2082 fprintf(stderr
,"\n");
2084 /* Use normal symbol for direct address no linker trampoline needed */
2085 output_addr_const (file
, x
);
2087 else if (GET_CODE (x
) == CONST_DOUBLE
)
2091 if (GET_MODE (x
) != SFmode
)
2092 fatal_insn ("internal compiler error. Unknown mode:", x
);
2093 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2094 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2095 fprintf (file
, "0x%lx", val
);
2097 else if (GET_CODE (x
) == CONST_STRING
)
2098 fputs (XSTR (x
, 0), file
);
2099 else if (code
== 'j')
2100 fputs (cond_string (GET_CODE (x
)), file
);
2101 else if (code
== 'k')
2102 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2104 avr_print_operand_address (file
, x
);
2107 /* Update the condition code in the INSN. */
2110 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2113 enum attr_cc cc
= get_attr_cc (insn
);
2121 case CC_OUT_PLUS_NOCLOBBER
:
2124 rtx
*op
= recog_data
.operand
;
2127 /* Extract insn's operands. */
2128 extract_constrain_insn_cached (insn
);
2136 avr_out_plus (op
, &len_dummy
, &icc
);
2137 cc
= (enum attr_cc
) icc
;
2140 case CC_OUT_PLUS_NOCLOBBER
:
2141 avr_out_plus_noclobber (op
, &len_dummy
, &icc
);
2142 cc
= (enum attr_cc
) icc
;
2147 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2148 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2149 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2151 /* Any other "r,rL" combination does not alter cc0. */
2155 } /* inner switch */
2159 } /* outer swicth */
2164 /* Special values like CC_OUT_PLUS from above have been
2165 mapped to "standard" CC_* values so we never come here. */
2171 /* Insn does not affect CC at all. */
2179 set
= single_set (insn
);
2183 cc_status
.flags
|= CC_NO_OVERFLOW
;
2184 cc_status
.value1
= SET_DEST (set
);
2189 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2190 The V flag may or may not be known but that's ok because
2191 alter_cond will change tests to use EQ/NE. */
2192 set
= single_set (insn
);
2196 cc_status
.value1
= SET_DEST (set
);
2197 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2202 set
= single_set (insn
);
2205 cc_status
.value1
= SET_SRC (set
);
2209 /* Insn doesn't leave CC in a usable state. */
2215 /* Choose mode for jump insn:
2216 1 - relative jump in range -63 <= x <= 62 ;
2217 2 - relative jump in range -2046 <= x <= 2045 ;
2218 3 - absolute jump (only for ATmega[16]03). */
2221 avr_jump_mode (rtx x
, rtx insn
)
2223 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2224 ? XEXP (x
, 0) : x
));
2225 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2226 int jump_distance
= cur_addr
- dest_addr
;
2228 if (-63 <= jump_distance
&& jump_distance
<= 62)
2230 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2232 else if (AVR_HAVE_JMP_CALL
)
2238 /* return an AVR condition jump commands.
2239 X is a comparison RTX.
2240 LEN is a number returned by avr_jump_mode function.
2241 if REVERSE nonzero then condition code in X must be reversed. */
2244 ret_cond_branch (rtx x
, int len
, int reverse
)
2246 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2251 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2252 return (len
== 1 ? ("breq .+2" CR_TAB
2254 len
== 2 ? ("breq .+4" CR_TAB
2262 return (len
== 1 ? ("breq .+2" CR_TAB
2264 len
== 2 ? ("breq .+4" CR_TAB
2271 return (len
== 1 ? ("breq .+2" CR_TAB
2273 len
== 2 ? ("breq .+4" CR_TAB
2280 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2281 return (len
== 1 ? ("breq %0" CR_TAB
2283 len
== 2 ? ("breq .+2" CR_TAB
2290 return (len
== 1 ? ("breq %0" CR_TAB
2292 len
== 2 ? ("breq .+2" CR_TAB
2299 return (len
== 1 ? ("breq %0" CR_TAB
2301 len
== 2 ? ("breq .+2" CR_TAB
2315 return ("br%j1 .+2" CR_TAB
2318 return ("br%j1 .+4" CR_TAB
2329 return ("br%k1 .+2" CR_TAB
2332 return ("br%k1 .+4" CR_TAB
2340 /* Output insn cost for next insn. */
2343 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2344 int num_operands ATTRIBUTE_UNUSED
)
2346 if (avr_log
.rtx_costs
)
2348 rtx set
= single_set (insn
);
2351 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2352 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2354 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2355 rtx_cost (PATTERN (insn
), INSN
, 0,
2356 optimize_insn_for_speed_p()));
2360 /* Return 0 if undefined, 1 if always true or always false. */
2363 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2365 unsigned int max
= (mode
== QImode
? 0xff :
2366 mode
== HImode
? 0xffff :
2367 mode
== PSImode
? 0xffffff :
2368 mode
== SImode
? 0xffffffff : 0);
2369 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
2371 if (unsigned_condition (op
) != op
)
2374 if (max
!= (INTVAL (x
) & max
)
2375 && INTVAL (x
) != 0xff)
2382 /* Returns nonzero if REGNO is the number of a hard
2383 register in which function arguments are sometimes passed. */
2386 function_arg_regno_p(int r
)
2388 return (r
>= 8 && r
<= 25);
2391 /* Initializing the variable cum for the state at the beginning
2392 of the argument list. */
2395 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2396 tree fndecl ATTRIBUTE_UNUSED
)
2399 cum
->regno
= FIRST_CUM_REG
;
2400 if (!libname
&& stdarg_p (fntype
))
2403 /* Assume the calle may be tail called */
2405 cfun
->machine
->sibcall_fails
= 0;
2408 /* Returns the number of registers to allocate for a function argument. */
2411 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2415 if (mode
== BLKmode
)
2416 size
= int_size_in_bytes (type
);
2418 size
= GET_MODE_SIZE (mode
);
2420 /* Align all function arguments to start in even-numbered registers.
2421 Odd-sized arguments leave holes above them. */
2423 return (size
+ 1) & ~1;
2426 /* Controls whether a function argument is passed
2427 in a register, and which register. */
2430 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2431 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2433 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2434 int bytes
= avr_num_arg_regs (mode
, type
);
2436 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2437 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2442 /* Update the summarizer variable CUM to advance past an argument
2443 in the argument list. */
2446 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2447 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2449 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2450 int bytes
= avr_num_arg_regs (mode
, type
);
2452 cum
->nregs
-= bytes
;
2453 cum
->regno
-= bytes
;
2455 /* A parameter is being passed in a call-saved register. As the original
2456 contents of these regs has to be restored before leaving the function,
2457 a function must not pass arguments in call-saved regs in order to get
2462 && !call_used_regs
[cum
->regno
])
2464 /* FIXME: We ship info on failing tail-call in struct machine_function.
2465 This uses internals of calls.c:expand_call() and the way args_so_far
2466 is used. targetm.function_ok_for_sibcall() needs to be extended to
2467 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2468 dependent so that such an extension is not wanted. */
2470 cfun
->machine
->sibcall_fails
= 1;
2473 /* Test if all registers needed by the ABI are actually available. If the
2474 user has fixed a GPR needed to pass an argument, an (implicit) function
2475 call will clobber that fixed register. See PR45099 for an example. */
2482 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2483 if (fixed_regs
[regno
])
2484 warning (0, "fixed register %s used to pass parameter to function",
2488 if (cum
->nregs
<= 0)
2491 cum
->regno
= FIRST_CUM_REG
;
2495 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2496 /* Decide whether we can make a sibling call to a function. DECL is the
2497 declaration of the function being targeted by the call and EXP is the
2498 CALL_EXPR representing the call. */
2501 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2505 /* Tail-calling must fail if callee-saved regs are used to pass
2506 function args. We must not tail-call when `epilogue_restores'
2507 is used. Unfortunately, we cannot tell at this point if that
2508 actually will happen or not, and we cannot step back from
2509 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2511 if (cfun
->machine
->sibcall_fails
2512 || TARGET_CALL_PROLOGUES
)
2517 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2521 decl_callee
= TREE_TYPE (decl_callee
);
2525 decl_callee
= fntype_callee
;
2527 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2528 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2530 decl_callee
= TREE_TYPE (decl_callee
);
2534 /* Ensure that caller and callee have compatible epilogues */
2536 if (interrupt_function_p (current_function_decl
)
2537 || signal_function_p (current_function_decl
)
2538 || avr_naked_function_p (decl_callee
)
2539 || avr_naked_function_p (current_function_decl
)
2540 /* FIXME: For OS_task and OS_main, we are over-conservative.
2541 This is due to missing documentation of these attributes
2542 and what they actually should do and should not do. */
2543 || (avr_OS_task_function_p (decl_callee
)
2544 != avr_OS_task_function_p (current_function_decl
))
2545 || (avr_OS_main_function_p (decl_callee
)
2546 != avr_OS_main_function_p (current_function_decl
)))
2554 /***********************************************************************
2555 Functions for outputting various mov's for a various modes
2556 ************************************************************************/
2558 /* Return true if a value of mode MODE is read from flash by
2559 __load_* function from libgcc. */
2562 avr_load_libgcc_p (rtx op
)
2564 enum machine_mode mode
= GET_MODE (op
);
2565 int n_bytes
= GET_MODE_SIZE (mode
);
2569 && avr_mem_flash_p (op
));
2572 /* Return true if a value of mode MODE is read by __xload_* function. */
2575 avr_xload_libgcc_p (enum machine_mode mode
)
2577 int n_bytes
= GET_MODE_SIZE (mode
);
2580 || avr_current_arch
->n_segments
> 1);
2584 /* Find an unused d-register to be used as scratch in INSN.
2585 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2586 is a register, skip all possible return values that overlap EXCLUDE.
2587 The policy for the returned register is similar to that of
2588 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2591 Return a QImode d-register or NULL_RTX if nothing found. */
2594 avr_find_unused_d_reg (rtx insn
, rtx exclude
)
2597 bool isr_p
= (interrupt_function_p (current_function_decl
)
2598 || signal_function_p (current_function_decl
));
2600 for (regno
= 16; regno
< 32; regno
++)
2602 rtx reg
= all_regs_rtx
[regno
];
2605 && reg_overlap_mentioned_p (exclude
, reg
))
2606 || fixed_regs
[regno
])
2611 /* Try non-live register */
2613 if (!df_regs_ever_live_p (regno
)
2614 && (TREE_THIS_VOLATILE (current_function_decl
)
2615 || cfun
->machine
->is_OS_task
2616 || cfun
->machine
->is_OS_main
2617 || (!isr_p
&& call_used_regs
[regno
])))
2622 /* Any live register can be used if it is unused after.
2623 Prologue/epilogue will care for it as needed. */
2625 if (df_regs_ever_live_p (regno
)
2626 && reg_unused_after (insn
, reg
))
2636 /* Helper function for the next function in the case where only restricted
2637 version of LPM instruction is available. */
2640 avr_out_lpm_no_lpmx (rtx insn
, rtx
*xop
, int *plen
)
2644 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2647 regno_dest
= REGNO (dest
);
2649 /* The implicit target register of LPM. */
2650 xop
[3] = lpm_reg_rtx
;
2652 switch (GET_CODE (addr
))
2659 gcc_assert (REG_Z
== REGNO (addr
));
2667 avr_asm_len ("%4lpm", xop
, plen
, 1);
2669 if (regno_dest
!= LPM_REGNO
)
2670 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2675 if (REGNO (dest
) == REG_Z
)
2676 return avr_asm_len ("%4lpm" CR_TAB
2681 "pop %A0", xop
, plen
, 6);
2683 avr_asm_len ("%4lpm" CR_TAB
2687 "mov %B0,%3", xop
, plen
, 5);
2689 if (!reg_unused_after (insn
, addr
))
2690 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2699 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
2702 if (regno_dest
== LPM_REGNO
)
2703 avr_asm_len ("%4lpm" CR_TAB
2704 "adiw %2,1", xop
, plen
, 2);
2706 avr_asm_len ("%4lpm" CR_TAB
2708 "adiw %2,1", xop
, plen
, 3);
2711 avr_asm_len ("%4lpm" CR_TAB
2713 "adiw %2,1", xop
, plen
, 3);
2716 avr_asm_len ("%4lpm" CR_TAB
2718 "adiw %2,1", xop
, plen
, 3);
2721 avr_asm_len ("%4lpm" CR_TAB
2723 "adiw %2,1", xop
, plen
, 3);
2725 break; /* POST_INC */
2727 } /* switch CODE (addr) */
2733 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2734 OP[1] in AS1 to register OP[0].
2735 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2739 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2743 rtx src
= SET_SRC (single_set (insn
));
2745 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2749 addr_space_t as
= MEM_ADDR_SPACE (src
);
2756 warning (0, "writing to address space %qs not supported",
2757 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2762 addr
= XEXP (src
, 0);
2763 code
= GET_CODE (addr
);
2765 gcc_assert (REG_P (dest
));
2766 gcc_assert (REG
== code
|| POST_INC
== code
);
2770 xop
[2] = lpm_addr_reg_rtx
;
2771 xop
[4] = xstring_empty
;
2772 xop
[5] = tmp_reg_rtx
;
2774 regno_dest
= REGNO (dest
);
2776 /* Cut down segment number to a number the device actually supports.
2777 We do this late to preserve the address space's name for diagnostics. */
2779 segment
= avr_addrspace
[as
].segment
% avr_current_arch
->n_segments
;
2781 /* Set RAMPZ as needed. */
2785 xop
[4] = GEN_INT (segment
);
2787 if (xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
),
2790 avr_asm_len ("ldi %3,%4" CR_TAB
2791 "out __RAMPZ__,%3", xop
, plen
, 2);
2793 else if (segment
== 1)
2795 avr_asm_len ("clr %5" CR_TAB
2797 "out __RAMPZ__,%5", xop
, plen
, 3);
2801 avr_asm_len ("mov %5,%2" CR_TAB
2803 "out __RAMPZ__,%2" CR_TAB
2804 "mov %2,%5", xop
, plen
, 4);
2809 if (!AVR_HAVE_ELPMX
)
2810 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2812 else if (!AVR_HAVE_LPMX
)
2814 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2817 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2819 switch (GET_CODE (addr
))
2826 gcc_assert (REG_Z
== REGNO (addr
));
2834 return avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
2837 if (REGNO (dest
) == REG_Z
)
2838 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2839 "%4lpm %B0,%a2" CR_TAB
2840 "mov %A0,%5", xop
, plen
, 3);
2843 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2844 "%4lpm %B0,%a2", xop
, plen
, 2);
2846 if (!reg_unused_after (insn
, addr
))
2847 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2854 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2855 "%4lpm %B0,%a2+" CR_TAB
2856 "%4lpm %C0,%a2", xop
, plen
, 3);
2858 if (!reg_unused_after (insn
, addr
))
2859 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
2865 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2866 "%4lpm %B0,%a2+", xop
, plen
, 2);
2868 if (REGNO (dest
) == REG_Z
- 2)
2869 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2870 "%4lpm %C0,%a2" CR_TAB
2871 "mov %D0,%5", xop
, plen
, 3);
2874 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2875 "%4lpm %D0,%a2", xop
, plen
, 2);
2877 if (!reg_unused_after (insn
, addr
))
2878 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
2888 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
2891 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
2892 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
2893 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
2894 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
2896 break; /* POST_INC */
2898 } /* switch CODE (addr) */
2904 /* Worker function for xload_8 insn. */
2907 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
2913 xop
[2] = lpm_addr_reg_rtx
;
2914 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
2919 avr_asm_len ("ld %3,%a2" CR_TAB
2920 "sbrs %1,7", xop
, plen
, 2);
2922 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, 1);
2924 if (REGNO (xop
[0]) != REGNO (xop
[3]))
2925 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2932 output_movqi (rtx insn
, rtx operands
[], int *l
)
2935 rtx dest
= operands
[0];
2936 rtx src
= operands
[1];
2939 if (avr_mem_flash_p (src
)
2940 || avr_mem_flash_p (dest
))
2942 return avr_out_lpm (insn
, operands
, real_l
);
2950 if (register_operand (dest
, QImode
))
2952 if (register_operand (src
, QImode
)) /* mov r,r */
2954 if (test_hard_reg_class (STACK_REG
, dest
))
2956 else if (test_hard_reg_class (STACK_REG
, src
))
2961 else if (CONSTANT_P (src
))
2963 output_reload_in_const (operands
, NULL_RTX
, real_l
, false);
2966 else if (GET_CODE (src
) == MEM
)
2967 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2969 else if (GET_CODE (dest
) == MEM
)
2974 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
2976 return out_movqi_mr_r (insn
, xop
, real_l
);
2983 output_movhi (rtx insn
, rtx xop
[], int *plen
)
2988 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
2990 if (avr_mem_flash_p (src
)
2991 || avr_mem_flash_p (dest
))
2993 return avr_out_lpm (insn
, xop
, plen
);
2998 if (REG_P (src
)) /* mov r,r */
3000 if (test_hard_reg_class (STACK_REG
, dest
))
3002 if (AVR_HAVE_8BIT_SP
)
3003 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
3006 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3007 "out __SP_H__,%B1", xop
, plen
, -2);
3009 /* Use simple load of SP if no interrupts are used. */
3011 return TARGET_NO_INTERRUPTS
3012 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3013 "out __SP_L__,%A1", xop
, plen
, -2)
3015 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3017 "out __SP_H__,%B1" CR_TAB
3018 "out __SREG__,__tmp_reg__" CR_TAB
3019 "out __SP_L__,%A1", xop
, plen
, -5);
3021 else if (test_hard_reg_class (STACK_REG
, src
))
3023 return AVR_HAVE_8BIT_SP
3024 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3025 "clr %B0", xop
, plen
, -2)
3027 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3028 "in %B0,__SP_H__", xop
, plen
, -2);
3031 return AVR_HAVE_MOVW
3032 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
3034 : avr_asm_len ("mov %A0,%A1" CR_TAB
3035 "mov %B0,%B1", xop
, plen
, -2);
3037 else if (CONSTANT_P (src
))
3039 return output_reload_inhi (xop
, NULL
, plen
);
3041 else if (MEM_P (src
))
3043 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
3046 else if (MEM_P (dest
))
3051 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
3053 return out_movhi_mr_r (insn
, xop
, plen
);
3056 fatal_insn ("invalid insn:", insn
);
3062 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
3066 rtx x
= XEXP (src
, 0);
3068 if (CONSTANT_ADDRESS_P (x
))
3070 return optimize
> 0 && io_address_operand (x
, QImode
)
3071 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3072 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
3074 else if (GET_CODE (x
) == PLUS
3075 && REG_P (XEXP (x
, 0))
3076 && CONST_INT_P (XEXP (x
, 1)))
3078 /* memory access by reg+disp */
3080 int disp
= INTVAL (XEXP (x
, 1));
3082 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3084 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3085 fatal_insn ("incorrect insn:",insn
);
3087 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3088 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3089 "ldd %0,Y+63" CR_TAB
3090 "sbiw r28,%o1-63", op
, plen
, -3);
3092 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3093 "sbci r29,hi8(-%o1)" CR_TAB
3095 "subi r28,lo8(%o1)" CR_TAB
3096 "sbci r29,hi8(%o1)", op
, plen
, -5);
3098 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3100 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3101 it but I have this situation with extremal optimizing options. */
3103 avr_asm_len ("adiw r26,%o1" CR_TAB
3104 "ld %0,X", op
, plen
, -2);
3106 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3107 && !reg_unused_after (insn
, XEXP (x
,0)))
3109 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3115 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3118 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
3122 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
3126 rtx base
= XEXP (src
, 0);
3127 int reg_dest
= true_regnum (dest
);
3128 int reg_base
= true_regnum (base
);
3129 /* "volatile" forces reading low byte first, even if less efficient,
3130 for correct operation with 16-bit I/O registers. */
3131 int mem_volatile_p
= MEM_VOLATILE_P (src
);
3135 if (reg_dest
== reg_base
) /* R = (R) */
3136 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3138 "mov %A0,__tmp_reg__", op
, plen
, -3);
3140 if (reg_base
!= REG_X
)
3141 return avr_asm_len ("ld %A0,%1" CR_TAB
3142 "ldd %B0,%1+1", op
, plen
, -2);
3144 avr_asm_len ("ld %A0,X+" CR_TAB
3145 "ld %B0,X", op
, plen
, -2);
3147 if (!reg_unused_after (insn
, base
))
3148 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3152 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3154 int disp
= INTVAL (XEXP (base
, 1));
3155 int reg_base
= true_regnum (XEXP (base
, 0));
3157 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3159 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3160 fatal_insn ("incorrect insn:",insn
);
3162 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3163 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3164 "ldd %A0,Y+62" CR_TAB
3165 "ldd %B0,Y+63" CR_TAB
3166 "sbiw r28,%o1-62", op
, plen
, -4)
3168 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3169 "sbci r29,hi8(-%o1)" CR_TAB
3171 "ldd %B0,Y+1" CR_TAB
3172 "subi r28,lo8(%o1)" CR_TAB
3173 "sbci r29,hi8(%o1)", op
, plen
, -6);
3176 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3177 it but I have this situation with extremal
3178 optimization options. */
3180 if (reg_base
== REG_X
)
3181 return reg_base
== reg_dest
3182 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3183 "ld __tmp_reg__,X+" CR_TAB
3185 "mov %A0,__tmp_reg__", op
, plen
, -4)
3187 : avr_asm_len ("adiw r26,%o1" CR_TAB
3190 "sbiw r26,%o1+1", op
, plen
, -4);
3192 return reg_base
== reg_dest
3193 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3194 "ldd %B0,%B1" CR_TAB
3195 "mov %A0,__tmp_reg__", op
, plen
, -3)
3197 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3198 "ldd %B0,%B1", op
, plen
, -2);
3200 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3202 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3203 fatal_insn ("incorrect insn:", insn
);
3205 if (!mem_volatile_p
)
3206 return avr_asm_len ("ld %B0,%1" CR_TAB
3207 "ld %A0,%1", op
, plen
, -2);
3209 return REGNO (XEXP (base
, 0)) == REG_X
3210 ? avr_asm_len ("sbiw r26,2" CR_TAB
3213 "sbiw r26,1", op
, plen
, -4)
3215 : avr_asm_len ("sbiw %r1,2" CR_TAB
3217 "ldd %B0,%p1+1", op
, plen
, -3);
3219 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3221 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3222 fatal_insn ("incorrect insn:", insn
);
3224 return avr_asm_len ("ld %A0,%1" CR_TAB
3225 "ld %B0,%1", op
, plen
, -2);
3227 else if (CONSTANT_ADDRESS_P (base
))
3229 return optimize
> 0 && io_address_operand (base
, HImode
)
3230 ? avr_asm_len ("in %A0,%i1" CR_TAB
3231 "in %B0,%i1+1", op
, plen
, -2)
3233 : avr_asm_len ("lds %A0,%m1" CR_TAB
3234 "lds %B0,%m1+1", op
, plen
, -4);
3237 fatal_insn ("unknown move insn:",insn
);
3242 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3246 rtx base
= XEXP (src
, 0);
3247 int reg_dest
= true_regnum (dest
);
3248 int reg_base
= true_regnum (base
);
3256 if (reg_base
== REG_X
) /* (R26) */
3258 if (reg_dest
== REG_X
)
3259 /* "ld r26,-X" is undefined */
3260 return *l
=7, ("adiw r26,3" CR_TAB
3263 "ld __tmp_reg__,-X" CR_TAB
3266 "mov r27,__tmp_reg__");
3267 else if (reg_dest
== REG_X
- 2)
3268 return *l
=5, ("ld %A0,X+" CR_TAB
3270 "ld __tmp_reg__,X+" CR_TAB
3272 "mov %C0,__tmp_reg__");
3273 else if (reg_unused_after (insn
, base
))
3274 return *l
=4, ("ld %A0,X+" CR_TAB
3279 return *l
=5, ("ld %A0,X+" CR_TAB
3287 if (reg_dest
== reg_base
)
3288 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3289 "ldd %C0,%1+2" CR_TAB
3290 "ldd __tmp_reg__,%1+1" CR_TAB
3292 "mov %B0,__tmp_reg__");
3293 else if (reg_base
== reg_dest
+ 2)
3294 return *l
=5, ("ld %A0,%1" CR_TAB
3295 "ldd %B0,%1+1" CR_TAB
3296 "ldd __tmp_reg__,%1+2" CR_TAB
3297 "ldd %D0,%1+3" CR_TAB
3298 "mov %C0,__tmp_reg__");
3300 return *l
=4, ("ld %A0,%1" CR_TAB
3301 "ldd %B0,%1+1" CR_TAB
3302 "ldd %C0,%1+2" CR_TAB
3306 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3308 int disp
= INTVAL (XEXP (base
, 1));
3310 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3312 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3313 fatal_insn ("incorrect insn:",insn
);
3315 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3316 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3317 "ldd %A0,Y+60" CR_TAB
3318 "ldd %B0,Y+61" CR_TAB
3319 "ldd %C0,Y+62" CR_TAB
3320 "ldd %D0,Y+63" CR_TAB
3323 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3324 "sbci r29,hi8(-%o1)" CR_TAB
3326 "ldd %B0,Y+1" CR_TAB
3327 "ldd %C0,Y+2" CR_TAB
3328 "ldd %D0,Y+3" CR_TAB
3329 "subi r28,lo8(%o1)" CR_TAB
3330 "sbci r29,hi8(%o1)");
3333 reg_base
= true_regnum (XEXP (base
, 0));
3334 if (reg_base
== REG_X
)
3337 if (reg_dest
== REG_X
)
3340 /* "ld r26,-X" is undefined */
3341 return ("adiw r26,%o1+3" CR_TAB
3344 "ld __tmp_reg__,-X" CR_TAB
3347 "mov r27,__tmp_reg__");
3350 if (reg_dest
== REG_X
- 2)
3351 return ("adiw r26,%o1" CR_TAB
3354 "ld __tmp_reg__,X+" CR_TAB
3356 "mov r26,__tmp_reg__");
3358 return ("adiw r26,%o1" CR_TAB
3365 if (reg_dest
== reg_base
)
3366 return *l
=5, ("ldd %D0,%D1" CR_TAB
3367 "ldd %C0,%C1" CR_TAB
3368 "ldd __tmp_reg__,%B1" CR_TAB
3369 "ldd %A0,%A1" CR_TAB
3370 "mov %B0,__tmp_reg__");
3371 else if (reg_dest
== reg_base
- 2)
3372 return *l
=5, ("ldd %A0,%A1" CR_TAB
3373 "ldd %B0,%B1" CR_TAB
3374 "ldd __tmp_reg__,%C1" CR_TAB
3375 "ldd %D0,%D1" CR_TAB
3376 "mov %C0,__tmp_reg__");
3377 return *l
=4, ("ldd %A0,%A1" CR_TAB
3378 "ldd %B0,%B1" CR_TAB
3379 "ldd %C0,%C1" CR_TAB
3382 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3383 return *l
=4, ("ld %D0,%1" CR_TAB
3387 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3388 return *l
=4, ("ld %A0,%1" CR_TAB
3392 else if (CONSTANT_ADDRESS_P (base
))
3393 return *l
=8, ("lds %A0,%m1" CR_TAB
3394 "lds %B0,%m1+1" CR_TAB
3395 "lds %C0,%m1+2" CR_TAB
3398 fatal_insn ("unknown move insn:",insn
);
3403 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3407 rtx base
= XEXP (dest
, 0);
3408 int reg_base
= true_regnum (base
);
3409 int reg_src
= true_regnum (src
);
3415 if (CONSTANT_ADDRESS_P (base
))
3416 return *l
=8,("sts %m0,%A1" CR_TAB
3417 "sts %m0+1,%B1" CR_TAB
3418 "sts %m0+2,%C1" CR_TAB
3420 if (reg_base
> 0) /* (r) */
3422 if (reg_base
== REG_X
) /* (R26) */
3424 if (reg_src
== REG_X
)
3426 /* "st X+,r26" is undefined */
3427 if (reg_unused_after (insn
, base
))
3428 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3431 "st X+,__tmp_reg__" CR_TAB
3435 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3438 "st X+,__tmp_reg__" CR_TAB
3443 else if (reg_base
== reg_src
+ 2)
3445 if (reg_unused_after (insn
, base
))
3446 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3447 "mov __tmp_reg__,%D1" CR_TAB
3450 "st %0+,__zero_reg__" CR_TAB
3451 "st %0,__tmp_reg__" CR_TAB
3452 "clr __zero_reg__");
3454 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3455 "mov __tmp_reg__,%D1" CR_TAB
3458 "st %0+,__zero_reg__" CR_TAB
3459 "st %0,__tmp_reg__" CR_TAB
3460 "clr __zero_reg__" CR_TAB
3463 return *l
=5, ("st %0+,%A1" CR_TAB
3470 return *l
=4, ("st %0,%A1" CR_TAB
3471 "std %0+1,%B1" CR_TAB
3472 "std %0+2,%C1" CR_TAB
3475 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3477 int disp
= INTVAL (XEXP (base
, 1));
3478 reg_base
= REGNO (XEXP (base
, 0));
3479 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3481 if (reg_base
!= REG_Y
)
3482 fatal_insn ("incorrect insn:",insn
);
3484 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3485 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3486 "std Y+60,%A1" CR_TAB
3487 "std Y+61,%B1" CR_TAB
3488 "std Y+62,%C1" CR_TAB
3489 "std Y+63,%D1" CR_TAB
3492 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3493 "sbci r29,hi8(-%o0)" CR_TAB
3495 "std Y+1,%B1" CR_TAB
3496 "std Y+2,%C1" CR_TAB
3497 "std Y+3,%D1" CR_TAB
3498 "subi r28,lo8(%o0)" CR_TAB
3499 "sbci r29,hi8(%o0)");
3501 if (reg_base
== REG_X
)
3504 if (reg_src
== REG_X
)
3507 return ("mov __tmp_reg__,r26" CR_TAB
3508 "mov __zero_reg__,r27" CR_TAB
3509 "adiw r26,%o0" CR_TAB
3510 "st X+,__tmp_reg__" CR_TAB
3511 "st X+,__zero_reg__" CR_TAB
3514 "clr __zero_reg__" CR_TAB
3517 else if (reg_src
== REG_X
- 2)
3520 return ("mov __tmp_reg__,r26" CR_TAB
3521 "mov __zero_reg__,r27" CR_TAB
3522 "adiw r26,%o0" CR_TAB
3525 "st X+,__tmp_reg__" CR_TAB
3526 "st X,__zero_reg__" CR_TAB
3527 "clr __zero_reg__" CR_TAB
3531 return ("adiw r26,%o0" CR_TAB
3538 return *l
=4, ("std %A0,%A1" CR_TAB
3539 "std %B0,%B1" CR_TAB
3540 "std %C0,%C1" CR_TAB
3543 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3544 return *l
=4, ("st %0,%D1" CR_TAB
3548 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3549 return *l
=4, ("st %0,%A1" CR_TAB
3553 fatal_insn ("unknown move insn:",insn
);
3558 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3561 rtx dest
= operands
[0];
3562 rtx src
= operands
[1];
3565 if (avr_mem_flash_p (src
)
3566 || avr_mem_flash_p (dest
))
3568 return avr_out_lpm (insn
, operands
, real_l
);
3574 if (register_operand (dest
, VOIDmode
))
3576 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3578 if (true_regnum (dest
) > true_regnum (src
))
3583 return ("movw %C0,%C1" CR_TAB
3587 return ("mov %D0,%D1" CR_TAB
3588 "mov %C0,%C1" CR_TAB
3589 "mov %B0,%B1" CR_TAB
3597 return ("movw %A0,%A1" CR_TAB
3601 return ("mov %A0,%A1" CR_TAB
3602 "mov %B0,%B1" CR_TAB
3603 "mov %C0,%C1" CR_TAB
3607 else if (CONSTANT_P (src
))
3609 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3611 else if (GET_CODE (src
) == MEM
)
3612 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3614 else if (GET_CODE (dest
) == MEM
)
3618 if (src
== CONST0_RTX (GET_MODE (dest
)))
3619 operands
[1] = zero_reg_rtx
;
3621 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3624 output_asm_insn (templ
, operands
);
3629 fatal_insn ("invalid insn:", insn
);
3634 /* Handle loads of 24-bit types from memory to register. */
3637 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3641 rtx base
= XEXP (src
, 0);
3642 int reg_dest
= true_regnum (dest
);
3643 int reg_base
= true_regnum (base
);
3647 if (reg_base
== REG_X
) /* (R26) */
3649 if (reg_dest
== REG_X
)
3650 /* "ld r26,-X" is undefined */
3651 return avr_asm_len ("adiw r26,2" CR_TAB
3653 "ld __tmp_reg__,-X" CR_TAB
3656 "mov r27,__tmp_reg__", op
, plen
, -6);
3659 avr_asm_len ("ld %A0,X+" CR_TAB
3661 "ld %C0,X", op
, plen
, -3);
3663 if (reg_dest
!= REG_X
- 2
3664 && !reg_unused_after (insn
, base
))
3666 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3672 else /* reg_base != REG_X */
3674 if (reg_dest
== reg_base
)
3675 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3676 "ldd __tmp_reg__,%1+1" CR_TAB
3678 "mov %B0,__tmp_reg__", op
, plen
, -4);
3680 return avr_asm_len ("ld %A0,%1" CR_TAB
3681 "ldd %B0,%1+1" CR_TAB
3682 "ldd %C0,%1+2", op
, plen
, -3);
3685 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3687 int disp
= INTVAL (XEXP (base
, 1));
3689 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3691 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3692 fatal_insn ("incorrect insn:",insn
);
3694 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3695 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3696 "ldd %A0,Y+61" CR_TAB
3697 "ldd %B0,Y+62" CR_TAB
3698 "ldd %C0,Y+63" CR_TAB
3699 "sbiw r28,%o1-61", op
, plen
, -5);
3701 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3702 "sbci r29,hi8(-%o1)" CR_TAB
3704 "ldd %B0,Y+1" CR_TAB
3705 "ldd %C0,Y+2" CR_TAB
3706 "subi r28,lo8(%o1)" CR_TAB
3707 "sbci r29,hi8(%o1)", op
, plen
, -7);
3710 reg_base
= true_regnum (XEXP (base
, 0));
3711 if (reg_base
== REG_X
)
3714 if (reg_dest
== REG_X
)
3716 /* "ld r26,-X" is undefined */
3717 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3719 "ld __tmp_reg__,-X" CR_TAB
3722 "mov r27,__tmp_reg__", op
, plen
, -6);
3725 avr_asm_len ("adiw r26,%o1" CR_TAB
3728 "ld r26,X", op
, plen
, -4);
3730 if (reg_dest
!= REG_X
- 2)
3731 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3736 if (reg_dest
== reg_base
)
3737 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3738 "ldd __tmp_reg__,%B1" CR_TAB
3739 "ldd %A0,%A1" CR_TAB
3740 "mov %B0,__tmp_reg__", op
, plen
, -4);
3742 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3743 "ldd %B0,%B1" CR_TAB
3744 "ldd %C0,%C1", op
, plen
, -3);
3746 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3747 return avr_asm_len ("ld %C0,%1" CR_TAB
3749 "ld %A0,%1", op
, plen
, -3);
3750 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3751 return avr_asm_len ("ld %A0,%1" CR_TAB
3753 "ld %C0,%1", op
, plen
, -3);
3755 else if (CONSTANT_ADDRESS_P (base
))
3756 return avr_asm_len ("lds %A0,%m1" CR_TAB
3757 "lds %B0,%m1+1" CR_TAB
3758 "lds %C0,%m1+2", op
, plen
, -6);
3760 fatal_insn ("unknown move insn:",insn
);
3764 /* Handle store of 24-bit type from register or zero to memory. */
3767 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3771 rtx base
= XEXP (dest
, 0);
3772 int reg_base
= true_regnum (base
);
3774 if (CONSTANT_ADDRESS_P (base
))
3775 return avr_asm_len ("sts %m0,%A1" CR_TAB
3776 "sts %m0+1,%B1" CR_TAB
3777 "sts %m0+2,%C1", op
, plen
, -6);
3779 if (reg_base
> 0) /* (r) */
3781 if (reg_base
== REG_X
) /* (R26) */
3783 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3785 avr_asm_len ("st %0+,%A1" CR_TAB
3787 "st %0,%C1", op
, plen
, -3);
3789 if (!reg_unused_after (insn
, base
))
3790 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3795 return avr_asm_len ("st %0,%A1" CR_TAB
3796 "std %0+1,%B1" CR_TAB
3797 "std %0+2,%C1", op
, plen
, -3);
3799 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3801 int disp
= INTVAL (XEXP (base
, 1));
3802 reg_base
= REGNO (XEXP (base
, 0));
3804 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3806 if (reg_base
!= REG_Y
)
3807 fatal_insn ("incorrect insn:",insn
);
3809 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3810 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3811 "std Y+61,%A1" CR_TAB
3812 "std Y+62,%B1" CR_TAB
3813 "std Y+63,%C1" CR_TAB
3814 "sbiw r28,%o0-60", op
, plen
, -5);
3816 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3817 "sbci r29,hi8(-%o0)" CR_TAB
3819 "std Y+1,%B1" CR_TAB
3820 "std Y+2,%C1" CR_TAB
3821 "subi r28,lo8(%o0)" CR_TAB
3822 "sbci r29,hi8(%o0)", op
, plen
, -7);
3824 if (reg_base
== REG_X
)
3827 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
3829 avr_asm_len ("adiw r26,%o0" CR_TAB
3832 "st X,%C1", op
, plen
, -4);
3834 if (!reg_unused_after (insn
, XEXP (base
, 0)))
3835 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
3840 return avr_asm_len ("std %A0,%A1" CR_TAB
3841 "std %B0,%B1" CR_TAB
3842 "std %C0,%C1", op
, plen
, -3);
3844 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3845 return avr_asm_len ("st %0,%C1" CR_TAB
3847 "st %0,%A1", op
, plen
, -3);
3848 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3849 return avr_asm_len ("st %0,%A1" CR_TAB
3851 "st %0,%C1", op
, plen
, -3);
3853 fatal_insn ("unknown move insn:",insn
);
3858 /* Move around 24-bit stuff. */
3861 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
3866 if (avr_mem_flash_p (src
)
3867 || avr_mem_flash_p (dest
))
3869 return avr_out_lpm (insn
, op
, plen
);
3872 if (register_operand (dest
, VOIDmode
))
3874 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3876 if (true_regnum (dest
) > true_regnum (src
))
3878 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
3881 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
3883 return avr_asm_len ("mov %B0,%B1" CR_TAB
3884 "mov %A0,%A1", op
, plen
, 2);
3889 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
3891 avr_asm_len ("mov %A0,%A1" CR_TAB
3892 "mov %B0,%B1", op
, plen
, -2);
3894 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
3897 else if (CONSTANT_P (src
))
3899 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
3901 else if (MEM_P (src
))
3902 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
3904 else if (MEM_P (dest
))
3909 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3911 return avr_out_store_psi (insn
, xop
, plen
);
3914 fatal_insn ("invalid insn:", insn
);
3920 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
3924 rtx x
= XEXP (dest
, 0);
3926 if (CONSTANT_ADDRESS_P (x
))
3928 return optimize
> 0 && io_address_operand (x
, QImode
)
3929 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
3930 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
3932 else if (GET_CODE (x
) == PLUS
3933 && REG_P (XEXP (x
, 0))
3934 && CONST_INT_P (XEXP (x
, 1)))
3936 /* memory access by reg+disp */
3938 int disp
= INTVAL (XEXP (x
, 1));
3940 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
3942 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3943 fatal_insn ("incorrect insn:",insn
);
3945 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3946 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3947 "std Y+63,%1" CR_TAB
3948 "sbiw r28,%o0-63", op
, plen
, -3);
3950 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3951 "sbci r29,hi8(-%o0)" CR_TAB
3953 "subi r28,lo8(%o0)" CR_TAB
3954 "sbci r29,hi8(%o0)", op
, plen
, -5);
3956 else if (REGNO (XEXP (x
,0)) == REG_X
)
3958 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
3960 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3961 "adiw r26,%o0" CR_TAB
3962 "st X,__tmp_reg__", op
, plen
, -3);
3966 avr_asm_len ("adiw r26,%o0" CR_TAB
3967 "st X,%1", op
, plen
, -2);
3970 if (!reg_unused_after (insn
, XEXP (x
,0)))
3971 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
3976 return avr_asm_len ("std %0,%1", op
, plen
, -1);
3979 return avr_asm_len ("st %0,%1", op
, plen
, -1);
3983 /* Helper for the next function for XMEGA. It does the same
3984 but with low byte first. */
3987 avr_out_movhi_mr_r_xmega (rtx insn
, rtx op
[], int *plen
)
3991 rtx base
= XEXP (dest
, 0);
3992 int reg_base
= true_regnum (base
);
3993 int reg_src
= true_regnum (src
);
3995 /* "volatile" forces writing low byte first, even if less efficient,
3996 for correct operation with 16-bit I/O registers like SP. */
3997 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
3999 if (CONSTANT_ADDRESS_P (base
))
4000 return optimize
> 0 && io_address_operand (base
, HImode
)
4001 ? avr_asm_len ("out %i0,%A1" CR_TAB
4002 "out %i0+1,%B1", op
, plen
, -2)
4004 : avr_asm_len ("sts %m0,%A1" CR_TAB
4005 "sts %m0+1,%B1", op
, plen
, -4);
4009 if (reg_base
!= REG_X
)
4010 return avr_asm_len ("st %0,%A1" CR_TAB
4011 "std %0+1,%B1", op
, plen
, -2);
4013 if (reg_src
== REG_X
)
4014 /* "st X+,r26" and "st -X,r26" are undefined. */
4015 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4018 "st X,__tmp_reg__", op
, plen
, -4);
4020 avr_asm_len ("st X+,%A1" CR_TAB
4021 "st X,%B1", op
, plen
, -2);
4023 return reg_unused_after (insn
, base
)
4025 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
4027 else if (GET_CODE (base
) == PLUS
)
4029 int disp
= INTVAL (XEXP (base
, 1));
4030 reg_base
= REGNO (XEXP (base
, 0));
4031 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4033 if (reg_base
!= REG_Y
)
4034 fatal_insn ("incorrect insn:",insn
);
4036 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4037 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4038 "std Y+62,%A1" CR_TAB
4039 "std Y+63,%B1" CR_TAB
4040 "sbiw r28,%o0-62", op
, plen
, -4)
4042 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4043 "sbci r29,hi8(-%o0)" CR_TAB
4045 "std Y+1,%B1" CR_TAB
4046 "subi r28,lo8(%o0)" CR_TAB
4047 "sbci r29,hi8(%o0)", op
, plen
, -6);
4050 if (reg_base
!= REG_X
)
4051 return avr_asm_len ("std %A0,%A1" CR_TAB
4052 "std %B0,%B1", op
, plen
, -2);
4054 return reg_src
== REG_X
4055 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4056 "mov __zero_reg__,r27" CR_TAB
4057 "adiw r26,%o0" CR_TAB
4058 "st X+,__tmp_reg__" CR_TAB
4059 "st X,__zero_reg__" CR_TAB
4060 "clr __zero_reg__" CR_TAB
4061 "sbiw r26,%o0+1", op
, plen
, -7)
4063 : avr_asm_len ("adiw r26,%o0" CR_TAB
4066 "sbiw r26,%o0+1", op
, plen
, -4);
4068 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4070 if (!mem_volatile_p
)
4071 return avr_asm_len ("st %0,%B1" CR_TAB
4072 "st %0,%A1", op
, plen
, -2);
4074 return REGNO (XEXP (base
, 0)) == REG_X
4075 ? avr_asm_len ("sbiw r26,2" CR_TAB
4078 "sbiw r26,1", op
, plen
, -4)
4080 : avr_asm_len ("sbiw %r0,2" CR_TAB
4082 "std %p0+1,%B1", op
, plen
, -3);
4084 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4086 return avr_asm_len ("st %0,%A1" CR_TAB
4087 "st %0,%B1", op
, plen
, -2);
4090 fatal_insn ("unknown move insn:",insn
);
4096 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
4100 rtx base
= XEXP (dest
, 0);
4101 int reg_base
= true_regnum (base
);
4102 int reg_src
= true_regnum (src
);
4105 /* "volatile" forces writing high-byte first (no-xmega) resp.
4106 low-byte first (xmega) even if less efficient, for correct
4107 operation with 16-bit I/O registers like. */
4110 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
4112 mem_volatile_p
= MEM_VOLATILE_P (dest
);
4114 if (CONSTANT_ADDRESS_P (base
))
4115 return optimize
> 0 && io_address_operand (base
, HImode
)
4116 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4117 "out %i0,%A1", op
, plen
, -2)
4119 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4120 "sts %m0,%A1", op
, plen
, -4);
4124 if (reg_base
!= REG_X
)
4125 return avr_asm_len ("std %0+1,%B1" CR_TAB
4126 "st %0,%A1", op
, plen
, -2);
4128 if (reg_src
== REG_X
)
4129 /* "st X+,r26" and "st -X,r26" are undefined. */
4130 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4131 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4134 "st X,__tmp_reg__", op
, plen
, -4)
4136 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4138 "st X,__tmp_reg__" CR_TAB
4140 "st X,r26", op
, plen
, -5);
4142 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
4143 ? avr_asm_len ("st X+,%A1" CR_TAB
4144 "st X,%B1", op
, plen
, -2)
4145 : avr_asm_len ("adiw r26,1" CR_TAB
4147 "st -X,%A1", op
, plen
, -3);
4149 else if (GET_CODE (base
) == PLUS
)
4151 int disp
= INTVAL (XEXP (base
, 1));
4152 reg_base
= REGNO (XEXP (base
, 0));
4153 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4155 if (reg_base
!= REG_Y
)
4156 fatal_insn ("incorrect insn:",insn
);
4158 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4159 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4160 "std Y+63,%B1" CR_TAB
4161 "std Y+62,%A1" CR_TAB
4162 "sbiw r28,%o0-62", op
, plen
, -4)
4164 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4165 "sbci r29,hi8(-%o0)" CR_TAB
4166 "std Y+1,%B1" CR_TAB
4168 "subi r28,lo8(%o0)" CR_TAB
4169 "sbci r29,hi8(%o0)", op
, plen
, -6);
4172 if (reg_base
!= REG_X
)
4173 return avr_asm_len ("std %B0,%B1" CR_TAB
4174 "std %A0,%A1", op
, plen
, -2);
4176 return reg_src
== REG_X
4177 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4178 "mov __zero_reg__,r27" CR_TAB
4179 "adiw r26,%o0+1" CR_TAB
4180 "st X,__zero_reg__" CR_TAB
4181 "st -X,__tmp_reg__" CR_TAB
4182 "clr __zero_reg__" CR_TAB
4183 "sbiw r26,%o0", op
, plen
, -7)
4185 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4188 "sbiw r26,%o0", op
, plen
, -4);
4190 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4192 return avr_asm_len ("st %0,%B1" CR_TAB
4193 "st %0,%A1", op
, plen
, -2);
4195 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4197 if (!mem_volatile_p
)
4198 return avr_asm_len ("st %0,%A1" CR_TAB
4199 "st %0,%B1", op
, plen
, -2);
4201 return REGNO (XEXP (base
, 0)) == REG_X
4202 ? avr_asm_len ("adiw r26,1" CR_TAB
4205 "adiw r26,2", op
, plen
, -4)
4207 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4209 "adiw %r0,2", op
, plen
, -3);
4211 fatal_insn ("unknown move insn:",insn
);
4215 /* Return 1 if frame pointer for current function required. */
4218 avr_frame_pointer_required_p (void)
4220 return (cfun
->calls_alloca
4221 || cfun
->calls_setjmp
4222 || cfun
->has_nonlocal_label
4223 || crtl
->args
.info
.nregs
== 0
4224 || get_frame_size () > 0);
4227 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4230 compare_condition (rtx insn
)
4232 rtx next
= next_real_insn (insn
);
4234 if (next
&& JUMP_P (next
))
4236 rtx pat
= PATTERN (next
);
4237 rtx src
= SET_SRC (pat
);
4239 if (IF_THEN_ELSE
== GET_CODE (src
))
4240 return GET_CODE (XEXP (src
, 0));
4247 /* Returns true iff INSN is a tst insn that only tests the sign. */
4250 compare_sign_p (rtx insn
)
4252 RTX_CODE cond
= compare_condition (insn
);
4253 return (cond
== GE
|| cond
== LT
);
4257 /* Returns true iff the next insn is a JUMP_INSN with a condition
4258 that needs to be swapped (GT, GTU, LE, LEU). */
4261 compare_diff_p (rtx insn
)
4263 RTX_CODE cond
= compare_condition (insn
);
4264 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4267 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4270 compare_eq_p (rtx insn
)
4272 RTX_CODE cond
= compare_condition (insn
);
4273 return (cond
== EQ
|| cond
== NE
);
4277 /* Output compare instruction
4279 compare (XOP[0], XOP[1])
4281 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4282 XOP[2] is an 8-bit scratch register as needed.
4284 PLEN == NULL: Output instructions.
4285 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4286 Don't output anything. */
4289 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4291 /* Register to compare and value to compare against. */
4295 /* MODE of the comparison. */
4296 enum machine_mode mode
= GET_MODE (xreg
);
4298 /* Number of bytes to operate on. */
4299 int i
, n_bytes
= GET_MODE_SIZE (mode
);
4301 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4302 int clobber_val
= -1;
4304 gcc_assert (REG_P (xreg
));
4305 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4306 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4311 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4312 against 0 by ORing the bytes. This is one instruction shorter.
4313 Notice that DImode comparisons are always against reg:DI 18
4314 and therefore don't use this. */
4316 if (!test_hard_reg_class (LD_REGS
, xreg
)
4317 && compare_eq_p (insn
)
4318 && reg_unused_after (insn
, xreg
))
4320 if (xval
== const1_rtx
)
4322 avr_asm_len ("dec %A0" CR_TAB
4323 "or %A0,%B0", xop
, plen
, 2);
4326 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4329 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4333 else if (xval
== constm1_rtx
)
4336 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4339 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4341 return avr_asm_len ("and %A0,%B0" CR_TAB
4342 "com %A0", xop
, plen
, 2);
4346 for (i
= 0; i
< n_bytes
; i
++)
4348 /* We compare byte-wise. */
4349 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4350 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4352 /* 8-bit value to compare with this byte. */
4353 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4355 /* Registers R16..R31 can operate with immediate. */
4356 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4359 xop
[1] = gen_int_mode (val8
, QImode
);
4361 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4364 && test_hard_reg_class (ADDW_REGS
, reg8
))
4366 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4368 if (IN_RANGE (val16
, 0, 63)
4370 || reg_unused_after (insn
, xreg
)))
4372 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4378 && IN_RANGE (val16
, -63, -1)
4379 && compare_eq_p (insn
)
4380 && reg_unused_after (insn
, xreg
))
4382 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4386 /* Comparing against 0 is easy. */
4391 ? "cp %0,__zero_reg__"
4392 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4396 /* Upper registers can compare and subtract-with-carry immediates.
4397 Notice that compare instructions do the same as respective subtract
4398 instruction; the only difference is that comparisons don't write
4399 the result back to the target register. */
4405 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4408 else if (reg_unused_after (insn
, xreg
))
4410 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4415 /* Must load the value into the scratch register. */
4417 gcc_assert (REG_P (xop
[2]));
4419 if (clobber_val
!= (int) val8
)
4420 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4421 clobber_val
= (int) val8
;
4425 : "cpc %0,%2", xop
, plen
, 1);
4432 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4435 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4439 xop
[0] = gen_rtx_REG (DImode
, 18);
4443 return avr_out_compare (insn
, xop
, plen
);
4446 /* Output test instruction for HImode. */
4449 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4451 if (compare_sign_p (insn
))
4453 avr_asm_len ("tst %B0", op
, plen
, -1);
4455 else if (reg_unused_after (insn
, op
[0])
4456 && compare_eq_p (insn
))
4458 /* Faster than sbiw if we can clobber the operand. */
4459 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4463 avr_out_compare (insn
, op
, plen
);
4470 /* Output test instruction for PSImode. */
4473 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4475 if (compare_sign_p (insn
))
4477 avr_asm_len ("tst %C0", op
, plen
, -1);
4479 else if (reg_unused_after (insn
, op
[0])
4480 && compare_eq_p (insn
))
4482 /* Faster than sbiw if we can clobber the operand. */
4483 avr_asm_len ("or %A0,%B0" CR_TAB
4484 "or %A0,%C0", op
, plen
, -2);
4488 avr_out_compare (insn
, op
, plen
);
4495 /* Output test instruction for SImode. */
4498 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4500 if (compare_sign_p (insn
))
4502 avr_asm_len ("tst %D0", op
, plen
, -1);
4504 else if (reg_unused_after (insn
, op
[0])
4505 && compare_eq_p (insn
))
4507 /* Faster than sbiw if we can clobber the operand. */
4508 avr_asm_len ("or %A0,%B0" CR_TAB
4510 "or %A0,%D0", op
, plen
, -3);
4514 avr_out_compare (insn
, op
, plen
);
4521 /* Generate asm equivalent for various shifts. This only handles cases
4522 that are not already carefully hand-optimized in ?sh??i3_out.
4524 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4525 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4526 OPERANDS[3] is a QImode scratch register from LD regs if
4527 available and SCRATCH, otherwise (no scratch available)
4529 TEMPL is an assembler template that shifts by one position.
4530 T_LEN is the length of this template. */
4533 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4534 int *plen
, int t_len
)
4536 bool second_label
= true;
4537 bool saved_in_tmp
= false;
4538 bool use_zero_reg
= false;
4541 op
[0] = operands
[0];
4542 op
[1] = operands
[1];
4543 op
[2] = operands
[2];
4544 op
[3] = operands
[3];
4549 if (CONST_INT_P (operands
[2]))
4551 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4552 && REG_P (operands
[3]));
4553 int count
= INTVAL (operands
[2]);
4554 int max_len
= 10; /* If larger than this, always use a loop. */
4559 if (count
< 8 && !scratch
)
4560 use_zero_reg
= true;
4563 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4565 if (t_len
* count
<= max_len
)
4567 /* Output shifts inline with no loop - faster. */
4570 avr_asm_len (templ
, op
, plen
, t_len
);
4577 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4579 else if (use_zero_reg
)
4581 /* Hack to save one word: use __zero_reg__ as loop counter.
4582 Set one bit, then shift in a loop until it is 0 again. */
4584 op
[3] = zero_reg_rtx
;
4586 avr_asm_len ("set" CR_TAB
4587 "bld %3,%2-1", op
, plen
, 2);
4591 /* No scratch register available, use one from LD_REGS (saved in
4592 __tmp_reg__) that doesn't overlap with registers to shift. */
4594 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4595 op
[4] = tmp_reg_rtx
;
4596 saved_in_tmp
= true;
4598 avr_asm_len ("mov %4,%3" CR_TAB
4599 "ldi %3,%2", op
, plen
, 2);
4602 second_label
= false;
4604 else if (MEM_P (op
[2]))
4608 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4611 out_movqi_r_mr (insn
, op_mov
, plen
);
4613 else if (register_operand (op
[2], QImode
))
4617 if (!reg_unused_after (insn
, op
[2])
4618 || reg_overlap_mentioned_p (op
[0], op
[2]))
4620 op
[3] = tmp_reg_rtx
;
4621 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4625 fatal_insn ("bad shift insn:", insn
);
4628 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4630 avr_asm_len ("1:", op
, plen
, 0);
4631 avr_asm_len (templ
, op
, plen
, t_len
);
4634 avr_asm_len ("2:", op
, plen
, 0);
4636 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4637 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4640 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4644 /* 8bit shift left ((char)x << i) */
4647 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4649 if (GET_CODE (operands
[2]) == CONST_INT
)
4656 switch (INTVAL (operands
[2]))
4659 if (INTVAL (operands
[2]) < 8)
4671 return ("lsl %0" CR_TAB
4676 return ("lsl %0" CR_TAB
4681 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4684 return ("swap %0" CR_TAB
4688 return ("lsl %0" CR_TAB
4694 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4697 return ("swap %0" CR_TAB
4702 return ("lsl %0" CR_TAB
4709 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4712 return ("swap %0" CR_TAB
4718 return ("lsl %0" CR_TAB
4727 return ("ror %0" CR_TAB
4732 else if (CONSTANT_P (operands
[2]))
4733 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4735 out_shift_with_cnt ("lsl %0",
4736 insn
, operands
, len
, 1);
4741 /* 16bit shift left ((short)x << i) */
4744 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4746 if (GET_CODE (operands
[2]) == CONST_INT
)
4748 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4749 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4756 switch (INTVAL (operands
[2]))
4759 if (INTVAL (operands
[2]) < 16)
4763 return ("clr %B0" CR_TAB
4767 if (optimize_size
&& scratch
)
4772 return ("swap %A0" CR_TAB
4774 "andi %B0,0xf0" CR_TAB
4775 "eor %B0,%A0" CR_TAB
4776 "andi %A0,0xf0" CR_TAB
4782 return ("swap %A0" CR_TAB
4784 "ldi %3,0xf0" CR_TAB
4786 "eor %B0,%A0" CR_TAB
4790 break; /* optimize_size ? 6 : 8 */
4794 break; /* scratch ? 5 : 6 */
4798 return ("lsl %A0" CR_TAB
4802 "andi %B0,0xf0" CR_TAB
4803 "eor %B0,%A0" CR_TAB
4804 "andi %A0,0xf0" CR_TAB
4810 return ("lsl %A0" CR_TAB
4814 "ldi %3,0xf0" CR_TAB
4816 "eor %B0,%A0" CR_TAB
4824 break; /* scratch ? 5 : 6 */
4826 return ("clr __tmp_reg__" CR_TAB
4829 "ror __tmp_reg__" CR_TAB
4832 "ror __tmp_reg__" CR_TAB
4833 "mov %B0,%A0" CR_TAB
4834 "mov %A0,__tmp_reg__");
4838 return ("lsr %B0" CR_TAB
4839 "mov %B0,%A0" CR_TAB
4845 return *len
= 2, ("mov %B0,%A1" CR_TAB
4850 return ("mov %B0,%A0" CR_TAB
4856 return ("mov %B0,%A0" CR_TAB
4863 return ("mov %B0,%A0" CR_TAB
4873 return ("mov %B0,%A0" CR_TAB
4881 return ("mov %B0,%A0" CR_TAB
4884 "ldi %3,0xf0" CR_TAB
4888 return ("mov %B0,%A0" CR_TAB
4899 return ("mov %B0,%A0" CR_TAB
4905 if (AVR_HAVE_MUL
&& scratch
)
4908 return ("ldi %3,0x20" CR_TAB
4912 "clr __zero_reg__");
4914 if (optimize_size
&& scratch
)
4919 return ("mov %B0,%A0" CR_TAB
4923 "ldi %3,0xe0" CR_TAB
4929 return ("set" CR_TAB
4934 "clr __zero_reg__");
4937 return ("mov %B0,%A0" CR_TAB
4946 if (AVR_HAVE_MUL
&& ldi_ok
)
4949 return ("ldi %B0,0x40" CR_TAB
4950 "mul %A0,%B0" CR_TAB
4953 "clr __zero_reg__");
4955 if (AVR_HAVE_MUL
&& scratch
)
4958 return ("ldi %3,0x40" CR_TAB
4962 "clr __zero_reg__");
4964 if (optimize_size
&& ldi_ok
)
4967 return ("mov %B0,%A0" CR_TAB
4968 "ldi %A0,6" "\n1:\t"
4973 if (optimize_size
&& scratch
)
4976 return ("clr %B0" CR_TAB
4985 return ("clr %B0" CR_TAB
4992 out_shift_with_cnt ("lsl %A0" CR_TAB
4993 "rol %B0", insn
, operands
, len
, 2);
4998 /* 24-bit shift left */
5001 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
5006 if (CONST_INT_P (op
[2]))
5008 switch (INTVAL (op
[2]))
5011 if (INTVAL (op
[2]) < 24)
5014 return avr_asm_len ("clr %A0" CR_TAB
5016 "clr %C0", op
, plen
, 3);
5020 int reg0
= REGNO (op
[0]);
5021 int reg1
= REGNO (op
[1]);
5024 return avr_asm_len ("mov %C0,%B1" CR_TAB
5025 "mov %B0,%A1" CR_TAB
5026 "clr %A0", op
, plen
, 3);
5028 return avr_asm_len ("clr %A0" CR_TAB
5029 "mov %B0,%A1" CR_TAB
5030 "mov %C0,%B1", op
, plen
, 3);
5035 int reg0
= REGNO (op
[0]);
5036 int reg1
= REGNO (op
[1]);
5038 if (reg0
+ 2 != reg1
)
5039 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
5041 return avr_asm_len ("clr %B0" CR_TAB
5042 "clr %A0", op
, plen
, 2);
5046 return avr_asm_len ("clr %C0" CR_TAB
5050 "clr %A0", op
, plen
, 5);
5054 out_shift_with_cnt ("lsl %A0" CR_TAB
5056 "rol %C0", insn
, op
, plen
, 3);
5061 /* 32bit shift left ((long)x << i) */
5064 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
5066 if (GET_CODE (operands
[2]) == CONST_INT
)
5074 switch (INTVAL (operands
[2]))
5077 if (INTVAL (operands
[2]) < 32)
5081 return *len
= 3, ("clr %D0" CR_TAB
5085 return ("clr %D0" CR_TAB
5092 int reg0
= true_regnum (operands
[0]);
5093 int reg1
= true_regnum (operands
[1]);
5096 return ("mov %D0,%C1" CR_TAB
5097 "mov %C0,%B1" CR_TAB
5098 "mov %B0,%A1" CR_TAB
5101 return ("clr %A0" CR_TAB
5102 "mov %B0,%A1" CR_TAB
5103 "mov %C0,%B1" CR_TAB
5109 int reg0
= true_regnum (operands
[0]);
5110 int reg1
= true_regnum (operands
[1]);
5111 if (reg0
+ 2 == reg1
)
5112 return *len
= 2, ("clr %B0" CR_TAB
5115 return *len
= 3, ("movw %C0,%A1" CR_TAB
5119 return *len
= 4, ("mov %C0,%A1" CR_TAB
5120 "mov %D0,%B1" CR_TAB
5127 return ("mov %D0,%A1" CR_TAB
5134 return ("clr %D0" CR_TAB
5143 out_shift_with_cnt ("lsl %A0" CR_TAB
5146 "rol %D0", insn
, operands
, len
, 4);
5150 /* 8bit arithmetic shift right ((signed char)x >> i) */
5153 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
5155 if (GET_CODE (operands
[2]) == CONST_INT
)
5162 switch (INTVAL (operands
[2]))
5170 return ("asr %0" CR_TAB
5175 return ("asr %0" CR_TAB
5181 return ("asr %0" CR_TAB
5188 return ("asr %0" CR_TAB
5196 return ("bst %0,6" CR_TAB
5202 if (INTVAL (operands
[2]) < 8)
5209 return ("lsl %0" CR_TAB
5213 else if (CONSTANT_P (operands
[2]))
5214 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5216 out_shift_with_cnt ("asr %0",
5217 insn
, operands
, len
, 1);
5222 /* 16bit arithmetic shift right ((signed short)x >> i) */
5225 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
5227 if (GET_CODE (operands
[2]) == CONST_INT
)
5229 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5230 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5237 switch (INTVAL (operands
[2]))
5241 /* XXX try to optimize this too? */
5246 break; /* scratch ? 5 : 6 */
5248 return ("mov __tmp_reg__,%A0" CR_TAB
5249 "mov %A0,%B0" CR_TAB
5250 "lsl __tmp_reg__" CR_TAB
5252 "sbc %B0,%B0" CR_TAB
5253 "lsl __tmp_reg__" CR_TAB
5259 return ("lsl %A0" CR_TAB
5260 "mov %A0,%B0" CR_TAB
5266 int reg0
= true_regnum (operands
[0]);
5267 int reg1
= true_regnum (operands
[1]);
5270 return *len
= 3, ("mov %A0,%B0" CR_TAB
5274 return *len
= 4, ("mov %A0,%B1" CR_TAB
5282 return ("mov %A0,%B0" CR_TAB
5284 "sbc %B0,%B0" CR_TAB
5289 return ("mov %A0,%B0" CR_TAB
5291 "sbc %B0,%B0" CR_TAB
5296 if (AVR_HAVE_MUL
&& ldi_ok
)
5299 return ("ldi %A0,0x20" CR_TAB
5300 "muls %B0,%A0" CR_TAB
5302 "sbc %B0,%B0" CR_TAB
5303 "clr __zero_reg__");
5305 if (optimize_size
&& scratch
)
5308 return ("mov %A0,%B0" CR_TAB
5310 "sbc %B0,%B0" CR_TAB
5316 if (AVR_HAVE_MUL
&& ldi_ok
)
5319 return ("ldi %A0,0x10" CR_TAB
5320 "muls %B0,%A0" CR_TAB
5322 "sbc %B0,%B0" CR_TAB
5323 "clr __zero_reg__");
5325 if (optimize_size
&& scratch
)
5328 return ("mov %A0,%B0" CR_TAB
5330 "sbc %B0,%B0" CR_TAB
5337 if (AVR_HAVE_MUL
&& ldi_ok
)
5340 return ("ldi %A0,0x08" CR_TAB
5341 "muls %B0,%A0" CR_TAB
5343 "sbc %B0,%B0" CR_TAB
5344 "clr __zero_reg__");
5347 break; /* scratch ? 5 : 7 */
5349 return ("mov %A0,%B0" CR_TAB
5351 "sbc %B0,%B0" CR_TAB
5360 return ("lsl %B0" CR_TAB
5361 "sbc %A0,%A0" CR_TAB
5363 "mov %B0,%A0" CR_TAB
5367 if (INTVAL (operands
[2]) < 16)
5373 return *len
= 3, ("lsl %B0" CR_TAB
5374 "sbc %A0,%A0" CR_TAB
5379 out_shift_with_cnt ("asr %B0" CR_TAB
5380 "ror %A0", insn
, operands
, len
, 2);
5385 /* 24-bit arithmetic shift right */
5388 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5390 int dest
= REGNO (op
[0]);
5391 int src
= REGNO (op
[1]);
5393 if (CONST_INT_P (op
[2]))
5398 switch (INTVAL (op
[2]))
5402 return avr_asm_len ("mov %A0,%B1" CR_TAB
5403 "mov %B0,%C1" CR_TAB
5406 "dec %C0", op
, plen
, 5);
5408 return avr_asm_len ("clr %C0" CR_TAB
5411 "mov %B0,%C1" CR_TAB
5412 "mov %A0,%B1", op
, plen
, 5);
5415 if (dest
!= src
+ 2)
5416 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5418 return avr_asm_len ("clr %B0" CR_TAB
5421 "mov %C0,%B0", op
, plen
, 4);
5424 if (INTVAL (op
[2]) < 24)
5430 return avr_asm_len ("lsl %C0" CR_TAB
5431 "sbc %A0,%A0" CR_TAB
5432 "mov %B0,%A0" CR_TAB
5433 "mov %C0,%A0", op
, plen
, 4);
5437 out_shift_with_cnt ("asr %C0" CR_TAB
5439 "ror %A0", insn
, op
, plen
, 3);
5444 /* 32bit arithmetic shift right ((signed long)x >> i) */
5447 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5449 if (GET_CODE (operands
[2]) == CONST_INT
)
5457 switch (INTVAL (operands
[2]))
5461 int reg0
= true_regnum (operands
[0]);
5462 int reg1
= true_regnum (operands
[1]);
5465 return ("mov %A0,%B1" CR_TAB
5466 "mov %B0,%C1" CR_TAB
5467 "mov %C0,%D1" CR_TAB
5472 return ("clr %D0" CR_TAB
5475 "mov %C0,%D1" CR_TAB
5476 "mov %B0,%C1" CR_TAB
5482 int reg0
= true_regnum (operands
[0]);
5483 int reg1
= true_regnum (operands
[1]);
5485 if (reg0
== reg1
+ 2)
5486 return *len
= 4, ("clr %D0" CR_TAB
5491 return *len
= 5, ("movw %A0,%C1" CR_TAB
5497 return *len
= 6, ("mov %B0,%D1" CR_TAB
5498 "mov %A0,%C1" CR_TAB
5506 return *len
= 6, ("mov %A0,%D1" CR_TAB
5510 "mov %B0,%D0" CR_TAB
5514 if (INTVAL (operands
[2]) < 32)
5521 return *len
= 4, ("lsl %D0" CR_TAB
5522 "sbc %A0,%A0" CR_TAB
5523 "mov %B0,%A0" CR_TAB
5526 return *len
= 5, ("lsl %D0" CR_TAB
5527 "sbc %A0,%A0" CR_TAB
5528 "mov %B0,%A0" CR_TAB
5529 "mov %C0,%A0" CR_TAB
5534 out_shift_with_cnt ("asr %D0" CR_TAB
5537 "ror %A0", insn
, operands
, len
, 4);
5541 /* 8bit logic shift right ((unsigned char)x >> i) */
5544 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5546 if (GET_CODE (operands
[2]) == CONST_INT
)
5553 switch (INTVAL (operands
[2]))
5556 if (INTVAL (operands
[2]) < 8)
5568 return ("lsr %0" CR_TAB
5572 return ("lsr %0" CR_TAB
5577 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5580 return ("swap %0" CR_TAB
5584 return ("lsr %0" CR_TAB
5590 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5593 return ("swap %0" CR_TAB
5598 return ("lsr %0" CR_TAB
5605 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5608 return ("swap %0" CR_TAB
5614 return ("lsr %0" CR_TAB
5623 return ("rol %0" CR_TAB
5628 else if (CONSTANT_P (operands
[2]))
5629 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5631 out_shift_with_cnt ("lsr %0",
5632 insn
, operands
, len
, 1);
5636 /* 16bit logic shift right ((unsigned short)x >> i) */
5639 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5641 if (GET_CODE (operands
[2]) == CONST_INT
)
5643 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5644 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5651 switch (INTVAL (operands
[2]))
5654 if (INTVAL (operands
[2]) < 16)
5658 return ("clr %B0" CR_TAB
5662 if (optimize_size
&& scratch
)
5667 return ("swap %B0" CR_TAB
5669 "andi %A0,0x0f" CR_TAB
5670 "eor %A0,%B0" CR_TAB
5671 "andi %B0,0x0f" CR_TAB
5677 return ("swap %B0" CR_TAB
5679 "ldi %3,0x0f" CR_TAB
5681 "eor %A0,%B0" CR_TAB
5685 break; /* optimize_size ? 6 : 8 */
5689 break; /* scratch ? 5 : 6 */
5693 return ("lsr %B0" CR_TAB
5697 "andi %A0,0x0f" CR_TAB
5698 "eor %A0,%B0" CR_TAB
5699 "andi %B0,0x0f" CR_TAB
5705 return ("lsr %B0" CR_TAB
5709 "ldi %3,0x0f" CR_TAB
5711 "eor %A0,%B0" CR_TAB
5719 break; /* scratch ? 5 : 6 */
5721 return ("clr __tmp_reg__" CR_TAB
5724 "rol __tmp_reg__" CR_TAB
5727 "rol __tmp_reg__" CR_TAB
5728 "mov %A0,%B0" CR_TAB
5729 "mov %B0,__tmp_reg__");
5733 return ("lsl %A0" CR_TAB
5734 "mov %A0,%B0" CR_TAB
5736 "sbc %B0,%B0" CR_TAB
5740 return *len
= 2, ("mov %A0,%B1" CR_TAB
5745 return ("mov %A0,%B0" CR_TAB
5751 return ("mov %A0,%B0" CR_TAB
5758 return ("mov %A0,%B0" CR_TAB
5768 return ("mov %A0,%B0" CR_TAB
5776 return ("mov %A0,%B0" CR_TAB
5779 "ldi %3,0x0f" CR_TAB
5783 return ("mov %A0,%B0" CR_TAB
5794 return ("mov %A0,%B0" CR_TAB
5800 if (AVR_HAVE_MUL
&& scratch
)
5803 return ("ldi %3,0x08" CR_TAB
5807 "clr __zero_reg__");
5809 if (optimize_size
&& scratch
)
5814 return ("mov %A0,%B0" CR_TAB
5818 "ldi %3,0x07" CR_TAB
5824 return ("set" CR_TAB
5829 "clr __zero_reg__");
5832 return ("mov %A0,%B0" CR_TAB
5841 if (AVR_HAVE_MUL
&& ldi_ok
)
5844 return ("ldi %A0,0x04" CR_TAB
5845 "mul %B0,%A0" CR_TAB
5848 "clr __zero_reg__");
5850 if (AVR_HAVE_MUL
&& scratch
)
5853 return ("ldi %3,0x04" CR_TAB
5857 "clr __zero_reg__");
5859 if (optimize_size
&& ldi_ok
)
5862 return ("mov %A0,%B0" CR_TAB
5863 "ldi %B0,6" "\n1:\t"
5868 if (optimize_size
&& scratch
)
5871 return ("clr %A0" CR_TAB
5880 return ("clr %A0" CR_TAB
5887 out_shift_with_cnt ("lsr %B0" CR_TAB
5888 "ror %A0", insn
, operands
, len
, 2);
5893 /* 24-bit logic shift right */
5896 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5898 int dest
= REGNO (op
[0]);
5899 int src
= REGNO (op
[1]);
5901 if (CONST_INT_P (op
[2]))
5906 switch (INTVAL (op
[2]))
5910 return avr_asm_len ("mov %A0,%B1" CR_TAB
5911 "mov %B0,%C1" CR_TAB
5912 "clr %C0", op
, plen
, 3);
5914 return avr_asm_len ("clr %C0" CR_TAB
5915 "mov %B0,%C1" CR_TAB
5916 "mov %A0,%B1", op
, plen
, 3);
5919 if (dest
!= src
+ 2)
5920 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5922 return avr_asm_len ("clr %B0" CR_TAB
5923 "clr %C0", op
, plen
, 2);
5926 if (INTVAL (op
[2]) < 24)
5932 return avr_asm_len ("clr %A0" CR_TAB
5936 "clr %C0", op
, plen
, 5);
5940 out_shift_with_cnt ("lsr %C0" CR_TAB
5942 "ror %A0", insn
, op
, plen
, 3);
5947 /* 32bit logic shift right ((unsigned int)x >> i) */
5950 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
5952 if (GET_CODE (operands
[2]) == CONST_INT
)
5960 switch (INTVAL (operands
[2]))
5963 if (INTVAL (operands
[2]) < 32)
5967 return *len
= 3, ("clr %D0" CR_TAB
5971 return ("clr %D0" CR_TAB
5978 int reg0
= true_regnum (operands
[0]);
5979 int reg1
= true_regnum (operands
[1]);
5982 return ("mov %A0,%B1" CR_TAB
5983 "mov %B0,%C1" CR_TAB
5984 "mov %C0,%D1" CR_TAB
5987 return ("clr %D0" CR_TAB
5988 "mov %C0,%D1" CR_TAB
5989 "mov %B0,%C1" CR_TAB
5995 int reg0
= true_regnum (operands
[0]);
5996 int reg1
= true_regnum (operands
[1]);
5998 if (reg0
== reg1
+ 2)
5999 return *len
= 2, ("clr %C0" CR_TAB
6002 return *len
= 3, ("movw %A0,%C1" CR_TAB
6006 return *len
= 4, ("mov %B0,%D1" CR_TAB
6007 "mov %A0,%C1" CR_TAB
6013 return *len
= 4, ("mov %A0,%D1" CR_TAB
6020 return ("clr %A0" CR_TAB
6029 out_shift_with_cnt ("lsr %D0" CR_TAB
6032 "ror %A0", insn
, operands
, len
, 4);
6037 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6039 XOP[0] = XOP[0] + XOP[2]
6041 and return "". If PLEN == NULL, print assembler instructions to perform the
6042 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6043 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
6044 CODE == PLUS: perform addition by using ADD instructions.
6045 CODE == MINUS: perform addition by using SUB instructions.
6046 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
6049 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
)
6051 /* MODE of the operation. */
6052 enum machine_mode mode
= GET_MODE (xop
[0]);
6054 /* Number of bytes to operate on. */
6055 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6057 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6058 int clobber_val
= -1;
6060 /* op[0]: 8-bit destination register
6061 op[1]: 8-bit const int
6062 op[2]: 8-bit scratch register */
6065 /* Started the operation? Before starting the operation we may skip
6066 adding 0. This is no more true after the operation started because
6067 carry must be taken into account. */
6068 bool started
= false;
6070 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6073 /* Except in the case of ADIW with 16-bit register (see below)
6074 addition does not set cc0 in a usable way. */
6076 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
6079 xval
= simplify_unary_operation (NEG
, mode
, xval
, mode
);
6086 for (i
= 0; i
< n_bytes
; i
++)
6088 /* We operate byte-wise on the destination. */
6089 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6090 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
6092 /* 8-bit value to operate with this byte. */
6093 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6095 /* Registers R16..R31 can operate with immediate. */
6096 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6099 op
[1] = gen_int_mode (val8
, QImode
);
6101 /* To get usable cc0 no low-bytes must have been skipped. */
6109 && test_hard_reg_class (ADDW_REGS
, reg8
))
6111 rtx xval16
= simplify_gen_subreg (HImode
, xval
, mode
, i
);
6112 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
6114 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6115 i.e. operate word-wise. */
6122 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
6125 if (n_bytes
== 2 && PLUS
== code
)
6137 avr_asm_len (code
== PLUS
6138 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6142 else if ((val8
== 1 || val8
== 0xff)
6144 && i
== n_bytes
- 1)
6146 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
6155 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6157 if (clobber_val
!= (int) val8
)
6158 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6159 clobber_val
= (int) val8
;
6161 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
6168 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
6171 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6173 if (clobber_val
!= (int) val8
)
6174 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6175 clobber_val
= (int) val8
;
6177 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
6189 } /* for all sub-bytes */
6191 /* No output doesn't change cc0. */
6193 if (plen
&& *plen
== 0)
6198 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6200 XOP[0] = XOP[0] + XOP[2]
6202 and return "". If PLEN == NULL, print assembler instructions to perform the
6203 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6204 words) printed with PLEN == NULL.
6205 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
6206 condition code (with respect to XOP[0]). */
6209 avr_out_plus (rtx
*xop
, int *plen
, int *pcc
)
6211 int len_plus
, len_minus
;
6212 int cc_plus
, cc_minus
, cc_dummy
;
6217 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6219 avr_out_plus_1 (xop
, &len_plus
, PLUS
, &cc_plus
);
6220 avr_out_plus_1 (xop
, &len_minus
, MINUS
, &cc_minus
);
6222 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6226 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
6227 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
6229 else if (len_minus
<= len_plus
)
6230 avr_out_plus_1 (xop
, NULL
, MINUS
, pcc
);
6232 avr_out_plus_1 (xop
, NULL
, PLUS
, pcc
);
6238 /* Same as above but XOP has just 3 entries.
6239 Supply a dummy 4th operand. */
6242 avr_out_plus_noclobber (rtx
*xop
, int *plen
, int *pcc
)
6251 return avr_out_plus (op
, plen
, pcc
);
6255 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6258 avr_out_plus64 (rtx addend
, int *plen
)
6263 op
[0] = gen_rtx_REG (DImode
, 18);
6268 avr_out_plus_1 (op
, plen
, MINUS
, &cc_dummy
);
6273 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6274 time constant XOP[2]:
6276 XOP[0] = XOP[0] <op> XOP[2]
6278 and return "". If PLEN == NULL, print assembler instructions to perform the
6279 operation; otherwise, set *PLEN to the length of the instruction sequence
6280 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6281 register or SCRATCH if no clobber register is needed for the operation. */
6284 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6286 /* CODE and MODE of the operation. */
6287 enum rtx_code code
= GET_CODE (SET_SRC (single_set (insn
)));
6288 enum machine_mode mode
= GET_MODE (xop
[0]);
6290 /* Number of bytes to operate on. */
6291 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6293 /* Value of T-flag (0 or 1) or -1 if unknow. */
6296 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6297 int clobber_val
= -1;
6299 /* op[0]: 8-bit destination register
6300 op[1]: 8-bit const int
6301 op[2]: 8-bit clobber register or SCRATCH
6302 op[3]: 8-bit register containing 0xff or NULL_RTX */
6311 for (i
= 0; i
< n_bytes
; i
++)
6313 /* We operate byte-wise on the destination. */
6314 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6315 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6317 /* 8-bit value to operate with this byte. */
6318 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6320 /* Number of bits set in the current byte of the constant. */
6321 int pop8
= avr_popcount (val8
);
6323 /* Registers R16..R31 can operate with immediate. */
6324 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6327 op
[1] = GEN_INT (val8
);
6336 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6340 avr_asm_len ("set", op
, plen
, 1);
6343 op
[1] = GEN_INT (exact_log2 (val8
));
6344 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6348 if (op
[3] != NULL_RTX
)
6349 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6351 avr_asm_len ("clr %0" CR_TAB
6352 "dec %0", op
, plen
, 2);
6358 if (clobber_val
!= (int) val8
)
6359 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6360 clobber_val
= (int) val8
;
6362 avr_asm_len ("or %0,%2", op
, plen
, 1);
6372 avr_asm_len ("clr %0", op
, plen
, 1);
6374 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6378 avr_asm_len ("clt", op
, plen
, 1);
6381 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6382 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6386 if (clobber_val
!= (int) val8
)
6387 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6388 clobber_val
= (int) val8
;
6390 avr_asm_len ("and %0,%2", op
, plen
, 1);
6400 avr_asm_len ("com %0", op
, plen
, 1);
6401 else if (ld_reg_p
&& val8
== (1 << 7))
6402 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6405 if (clobber_val
!= (int) val8
)
6406 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6407 clobber_val
= (int) val8
;
6409 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6415 /* Unknown rtx_code */
6418 } /* for all sub-bytes */
6424 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6425 PLEN != NULL: Set *PLEN to the length of that sequence.
6429 avr_out_addto_sp (rtx
*op
, int *plen
)
6431 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6432 int addend
= INTVAL (op
[0]);
6439 if (flag_verbose_asm
|| flag_print_asm_name
)
6440 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
6442 while (addend
<= -pc_len
)
6445 avr_asm_len ("rcall .", op
, plen
, 1);
6448 while (addend
++ < 0)
6449 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
6451 else if (addend
> 0)
6453 if (flag_verbose_asm
|| flag_print_asm_name
)
6454 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
6456 while (addend
-- > 0)
6457 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
6464 /* Create RTL split patterns for byte sized rotate expressions. This
6465 produces a series of move instructions and considers overlap situations.
6466 Overlapping non-HImode operands need a scratch register. */
6469 avr_rotate_bytes (rtx operands
[])
6472 enum machine_mode mode
= GET_MODE (operands
[0]);
6473 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
6474 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
6475 int num
= INTVAL (operands
[2]);
6476 rtx scratch
= operands
[3];
6477 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6478 Word move if no scratch is needed, otherwise use size of scratch. */
6479 enum machine_mode move_mode
= QImode
;
6480 int move_size
, offset
, size
;
6484 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
6487 move_mode
= GET_MODE (scratch
);
6489 /* Force DI rotate to use QI moves since other DI moves are currently split
6490 into QI moves so forward propagation works better. */
6493 /* Make scratch smaller if needed. */
6494 if (SCRATCH
!= GET_CODE (scratch
)
6495 && HImode
== GET_MODE (scratch
)
6496 && QImode
== move_mode
)
6497 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
6499 move_size
= GET_MODE_SIZE (move_mode
);
6500 /* Number of bytes/words to rotate. */
6501 offset
= (num
>> 3) / move_size
;
6502 /* Number of moves needed. */
6503 size
= GET_MODE_SIZE (mode
) / move_size
;
6504 /* Himode byte swap is special case to avoid a scratch register. */
6505 if (mode
== HImode
&& same_reg
)
6507 /* HImode byte swap, using xor. This is as quick as using scratch. */
6509 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
6510 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
6511 if (!rtx_equal_p (dst
, src
))
6513 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6514 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
6515 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6520 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6521 /* Create linked list of moves to determine move order. */
6525 } move
[MAX_SIZE
+ 8];
6528 gcc_assert (size
<= MAX_SIZE
);
6529 /* Generate list of subreg moves. */
6530 for (i
= 0; i
< size
; i
++)
6533 int to
= (from
+ offset
) % size
;
6534 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
6535 mode
, from
* move_size
);
6536 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
6537 mode
, to
* move_size
);
6540 /* Mark dependence where a dst of one move is the src of another move.
6541 The first move is a conflict as it must wait until second is
6542 performed. We ignore moves to self - we catch this later. */
6544 for (i
= 0; i
< size
; i
++)
6545 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
6546 for (j
= 0; j
< size
; j
++)
6547 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
6549 /* The dst of move i is the src of move j. */
6556 /* Go through move list and perform non-conflicting moves. As each
6557 non-overlapping move is made, it may remove other conflicts
6558 so the process is repeated until no conflicts remain. */
6563 /* Emit move where dst is not also a src or we have used that
6565 for (i
= 0; i
< size
; i
++)
6566 if (move
[i
].src
!= NULL_RTX
)
6568 if (move
[i
].links
== -1
6569 || move
[move
[i
].links
].src
== NULL_RTX
)
6572 /* Ignore NOP moves to self. */
6573 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
6574 emit_move_insn (move
[i
].dst
, move
[i
].src
);
6576 /* Remove conflict from list. */
6577 move
[i
].src
= NULL_RTX
;
6583 /* Check for deadlock. This is when no moves occurred and we have
6584 at least one blocked move. */
6585 if (moves
== 0 && blocked
!= -1)
6587 /* Need to use scratch register to break deadlock.
6588 Add move to put dst of blocked move into scratch.
6589 When this move occurs, it will break chain deadlock.
6590 The scratch register is substituted for real move. */
6592 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
6594 move
[size
].src
= move
[blocked
].dst
;
6595 move
[size
].dst
= scratch
;
6596 /* Scratch move is never blocked. */
6597 move
[size
].links
= -1;
6598 /* Make sure we have valid link. */
6599 gcc_assert (move
[blocked
].links
!= -1);
6600 /* Replace src of blocking move with scratch reg. */
6601 move
[move
[blocked
].links
].src
= scratch
;
6602 /* Make dependent on scratch move occuring. */
6603 move
[blocked
].links
= size
;
6607 while (blocked
!= -1);
6612 /* Modifies the length assigned to instruction INSN
6613 LEN is the initially computed length of the insn. */
6616 adjust_insn_length (rtx insn
, int len
)
6618 rtx
*op
= recog_data
.operand
;
6619 enum attr_adjust_len adjust_len
;
6621 /* Some complex insns don't need length adjustment and therefore
6622 the length need not/must not be adjusted for these insns.
6623 It is easier to state this in an insn attribute "adjust_len" than
6624 to clutter up code here... */
6626 if (-1 == recog_memoized (insn
))
6631 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6633 adjust_len
= get_attr_adjust_len (insn
);
6635 if (adjust_len
== ADJUST_LEN_NO
)
6637 /* Nothing to adjust: The length from attribute "length" is fine.
6638 This is the default. */
6643 /* Extract insn's operands. */
6645 extract_constrain_insn_cached (insn
);
6647 /* Dispatch to right function. */
6651 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
6652 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
6653 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
6655 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
6657 case ADJUST_LEN_OUT_PLUS
: avr_out_plus (op
, &len
, NULL
); break;
6658 case ADJUST_LEN_PLUS64
: avr_out_plus64 (op
[0], &len
); break;
6659 case ADJUST_LEN_OUT_PLUS_NOCLOBBER
:
6660 avr_out_plus_noclobber (op
, &len
, NULL
); break;
6662 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
6664 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
6665 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
6666 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
6667 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
6668 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
6669 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
6671 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
6672 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
6673 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
6674 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
6675 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
6677 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
6678 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
6679 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
6681 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
6682 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
6683 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
6685 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
6686 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
6687 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
6689 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
6690 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
6691 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
6693 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
6695 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
6704 /* Return nonzero if register REG dead after INSN. */
6707 reg_unused_after (rtx insn
, rtx reg
)
6709 return (dead_or_set_p (insn
, reg
)
6710 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
6713 /* Return nonzero if REG is not used after INSN.
6714 We assume REG is a reload reg, and therefore does
6715 not live past labels. It may live past calls or jumps though. */
6718 _reg_unused_after (rtx insn
, rtx reg
)
6723 /* If the reg is set by this instruction, then it is safe for our
6724 case. Disregard the case where this is a store to memory, since
6725 we are checking a register used in the store address. */
6726 set
= single_set (insn
);
6727 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
6728 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6731 while ((insn
= NEXT_INSN (insn
)))
6734 code
= GET_CODE (insn
);
6737 /* If this is a label that existed before reload, then the register
6738 if dead here. However, if this is a label added by reorg, then
6739 the register may still be live here. We can't tell the difference,
6740 so we just ignore labels completely. */
6741 if (code
== CODE_LABEL
)
6749 if (code
== JUMP_INSN
)
6752 /* If this is a sequence, we must handle them all at once.
6753 We could have for instance a call that sets the target register,
6754 and an insn in a delay slot that uses the register. In this case,
6755 we must return 0. */
6756 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
6761 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
6763 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
6764 rtx set
= single_set (this_insn
);
6766 if (GET_CODE (this_insn
) == CALL_INSN
)
6768 else if (GET_CODE (this_insn
) == JUMP_INSN
)
6770 if (INSN_ANNULLED_BRANCH_P (this_insn
))
6775 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6777 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6779 if (GET_CODE (SET_DEST (set
)) != MEM
)
6785 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
6790 else if (code
== JUMP_INSN
)
6794 if (code
== CALL_INSN
)
6797 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
6798 if (GET_CODE (XEXP (tem
, 0)) == USE
6799 && REG_P (XEXP (XEXP (tem
, 0), 0))
6800 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
6802 if (call_used_regs
[REGNO (reg
)])
6806 set
= single_set (insn
);
6808 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6810 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6811 return GET_CODE (SET_DEST (set
)) != MEM
;
6812 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
6819 /* Return RTX that represents the lower 16 bits of a constant address.
6820 Unfortunately, simplify_gen_subreg does not handle this case. */
6823 avr_const_address_lo16 (rtx x
)
6827 switch (GET_CODE (x
))
6833 if (PLUS
== GET_CODE (XEXP (x
, 0))
6834 && SYMBOL_REF
== GET_CODE (XEXP (XEXP (x
, 0), 0))
6835 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
6837 HOST_WIDE_INT offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
6838 const char *name
= XSTR (XEXP (XEXP (x
, 0), 0), 0);
6840 lo16
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6841 lo16
= gen_rtx_CONST (Pmode
, plus_constant (lo16
, offset
));
6850 const char *name
= XSTR (x
, 0);
6852 return gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6856 avr_edump ("\n%?: %r\n", x
);
6861 /* Target hook for assembling integer objects. The AVR version needs
6862 special handling for references to certain labels. */
6865 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
6867 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
6868 && text_segment_operand (x
, VOIDmode
) )
6870 fputs ("\t.word\tgs(", asm_out_file
);
6871 output_addr_const (asm_out_file
, x
);
6872 fputs (")\n", asm_out_file
);
6876 else if (GET_MODE (x
) == PSImode
)
6878 default_assemble_integer (avr_const_address_lo16 (x
),
6879 GET_MODE_SIZE (HImode
), aligned_p
);
6881 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6882 " extension for hh8(", asm_out_file
);
6883 output_addr_const (asm_out_file
, x
);
6884 fputs (")\"\n", asm_out_file
);
6886 fputs ("\t.byte\t0\t" ASM_COMMENT_START
" hh8(", asm_out_file
);
6887 output_addr_const (asm_out_file
, x
);
6888 fputs (")\n", asm_out_file
);
6893 return default_assemble_integer (x
, size
, aligned_p
);
6897 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6900 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
6903 /* If the function has the 'signal' or 'interrupt' attribute, test to
6904 make sure that the name of the function is "__vector_NN" so as to
6905 catch when the user misspells the interrupt vector name. */
6907 if (cfun
->machine
->is_interrupt
)
6909 if (!STR_PREFIX_P (name
, "__vector"))
6911 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6912 "%qs appears to be a misspelled interrupt handler",
6916 else if (cfun
->machine
->is_signal
)
6918 if (!STR_PREFIX_P (name
, "__vector"))
6920 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6921 "%qs appears to be a misspelled signal handler",
6926 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
6927 ASM_OUTPUT_LABEL (file
, name
);
6931 /* Return value is nonzero if pseudos that have been
6932 assigned to registers of class CLASS would likely be spilled
6933 because registers of CLASS are needed for spill registers. */
6936 avr_class_likely_spilled_p (reg_class_t c
)
6938 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
6941 /* Valid attributes:
6942 progmem - put data to program memory;
6943 signal - make a function to be hardware interrupt. After function
6944 prologue interrupts are disabled;
6945 interrupt - make a function to be hardware interrupt. After function
6946 prologue interrupts are enabled;
6947 naked - don't generate function prologue/epilogue and `ret' command.
6949 Only `progmem' attribute valid for type. */
6951 /* Handle a "progmem" attribute; arguments as in
6952 struct attribute_spec.handler. */
6954 avr_handle_progmem_attribute (tree
*node
, tree name
,
6955 tree args ATTRIBUTE_UNUSED
,
6956 int flags ATTRIBUTE_UNUSED
,
6961 if (TREE_CODE (*node
) == TYPE_DECL
)
6963 /* This is really a decl attribute, not a type attribute,
6964 but try to handle it for GCC 3.0 backwards compatibility. */
6966 tree type
= TREE_TYPE (*node
);
6967 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
6968 tree newtype
= build_type_attribute_variant (type
, attr
);
6970 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
6971 TREE_TYPE (*node
) = newtype
;
6972 *no_add_attrs
= true;
6974 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
6976 *no_add_attrs
= false;
6980 warning (OPT_Wattributes
, "%qE attribute ignored",
6982 *no_add_attrs
= true;
6989 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6990 struct attribute_spec.handler. */
6993 avr_handle_fndecl_attribute (tree
*node
, tree name
,
6994 tree args ATTRIBUTE_UNUSED
,
6995 int flags ATTRIBUTE_UNUSED
,
6998 if (TREE_CODE (*node
) != FUNCTION_DECL
)
7000 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
7002 *no_add_attrs
= true;
7009 avr_handle_fntype_attribute (tree
*node
, tree name
,
7010 tree args ATTRIBUTE_UNUSED
,
7011 int flags ATTRIBUTE_UNUSED
,
7014 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
7016 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
7018 *no_add_attrs
= true;
7025 /* AVR attributes. */
7026 static const struct attribute_spec
7027 avr_attribute_table
[] =
7029 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7030 affects_type_identity } */
7031 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
7033 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7035 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7037 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7039 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7041 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7043 { NULL
, 0, 0, false, false, false, NULL
, false }
7047 /* Look if DECL shall be placed in program memory space by
7048 means of attribute `progmem' or some address-space qualifier.
7049 Return non-zero if DECL is data that must end up in Flash and
7050 zero if the data lives in RAM (.bss, .data, .rodata, ...).
7052 Return 2 if DECL is located in 24-bit flash address-space
7053 Return 1 if DECL is located in 16-bit flash address-space
7054 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
7055 Return 0 otherwise */
7058 avr_progmem_p (tree decl
, tree attributes
)
7062 if (TREE_CODE (decl
) != VAR_DECL
)
7065 if (avr_decl_memx_p (decl
))
7068 if (avr_decl_flash_p (decl
))
7072 != lookup_attribute ("progmem", attributes
))
7079 while (TREE_CODE (a
) == ARRAY_TYPE
);
7081 if (a
== error_mark_node
)
7084 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
7091 /* Scan type TYP for pointer references to address space ASn.
7092 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
7093 the AS are also declared to be CONST.
7094 Otherwise, return the respective addres space, i.e. a value != 0. */
7097 avr_nonconst_pointer_addrspace (tree typ
)
7099 while (ARRAY_TYPE
== TREE_CODE (typ
))
7100 typ
= TREE_TYPE (typ
);
7102 if (POINTER_TYPE_P (typ
))
7104 tree target
= TREE_TYPE (typ
);
7106 /* Pointer to function: Test the function's return type. */
7108 if (FUNCTION_TYPE
== TREE_CODE (target
))
7109 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
7111 /* "Ordinary" pointers... */
7113 while (TREE_CODE (target
) == ARRAY_TYPE
)
7114 target
= TREE_TYPE (target
);
7116 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target
))
7117 && !TYPE_READONLY (target
))
7119 /* Pointers to non-generic address space must be const. */
7121 return TYPE_ADDR_SPACE (target
);
7124 /* Scan pointer's target type. */
7126 return avr_nonconst_pointer_addrspace (target
);
7129 return ADDR_SPACE_GENERIC
;
7133 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
7134 go along with CONST qualifier. Writing to these address spaces should
7135 be detected and complained about as early as possible. */
7138 avr_pgm_check_var_decl (tree node
)
7140 const char *reason
= NULL
;
7142 addr_space_t as
= ADDR_SPACE_GENERIC
;
7144 gcc_assert (as
== 0);
7146 if (avr_log
.progmem
)
7147 avr_edump ("%?: %t\n", node
);
7149 switch (TREE_CODE (node
))
7155 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7156 reason
= "variable";
7160 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7161 reason
= "function parameter";
7165 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7166 reason
= "structure field";
7170 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
7172 reason
= "return type of function";
7176 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
7184 error ("pointer targeting address space %qs must be const in %qT",
7185 avr_addrspace
[as
].name
, node
);
7187 error ("pointer targeting address space %qs must be const in %s %q+D",
7188 avr_addrspace
[as
].name
, reason
, node
);
7191 return reason
== NULL
;
7195 /* Add the section attribute if the variable is in progmem. */
7198 avr_insert_attributes (tree node
, tree
*attributes
)
7200 avr_pgm_check_var_decl (node
);
7202 if (TREE_CODE (node
) == VAR_DECL
7203 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
7204 && avr_progmem_p (node
, *attributes
))
7208 /* For C++, we have to peel arrays in order to get correct
7209 determination of readonlyness. */
7212 node0
= TREE_TYPE (node0
);
7213 while (TREE_CODE (node0
) == ARRAY_TYPE
);
7215 if (error_mark_node
== node0
)
7218 if (!TYPE_READONLY (node0
)
7219 && !TREE_READONLY (node
))
7221 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
7222 const char *reason
= "__attribute__((progmem))";
7224 if (!ADDR_SPACE_GENERIC_P (as
))
7225 reason
= avr_addrspace
[as
].name
;
7227 if (avr_log
.progmem
)
7228 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
7230 error ("variable %q+D must be const in order to be put into"
7231 " read-only section by means of %qs", node
, reason
);
7237 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7238 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7239 /* Track need of __do_clear_bss. */
7242 avr_asm_output_aligned_decl_common (FILE * stream
,
7243 const_tree decl ATTRIBUTE_UNUSED
,
7245 unsigned HOST_WIDE_INT size
,
7246 unsigned int align
, bool local_p
)
7248 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7249 There is no need to trigger __do_clear_bss code for them. */
7251 if (!STR_PREFIX_P (name
, "__gnu_lto"))
7252 avr_need_clear_bss_p
= true;
7255 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
7257 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
7261 /* Unnamed section callback for data_section
7262 to track need of __do_copy_data. */
7265 avr_output_data_section_asm_op (const void *data
)
7267 avr_need_copy_data_p
= true;
7269 /* Dispatch to default. */
7270 output_section_asm_op (data
);
7274 /* Unnamed section callback for bss_section
7275 to track need of __do_clear_bss. */
7278 avr_output_bss_section_asm_op (const void *data
)
7280 avr_need_clear_bss_p
= true;
7282 /* Dispatch to default. */
7283 output_section_asm_op (data
);
7287 /* Unnamed section callback for progmem*.data sections. */
7290 avr_output_progmem_section_asm_op (const void *data
)
7292 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
7293 (const char*) data
);
7297 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7300 avr_asm_init_sections (void)
7304 /* Set up a section for jump tables. Alignment is handled by
7305 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7307 if (AVR_HAVE_JMP_CALL
)
7309 progmem_swtable_section
7310 = get_unnamed_section (0, output_section_asm_op
,
7311 "\t.section\t.progmem.gcc_sw_table"
7312 ",\"a\",@progbits");
7316 progmem_swtable_section
7317 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7318 "\t.section\t.progmem.gcc_sw_table"
7319 ",\"ax\",@progbits");
7322 for (n
= 0; n
< sizeof (progmem_section
) / sizeof (*progmem_section
); n
++)
7325 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
7326 progmem_section_prefix
[n
]);
7329 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7330 resp. `avr_need_copy_data_p'. */
7332 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7333 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7334 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
7338 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7341 avr_asm_function_rodata_section (tree decl
)
7343 /* If a function is unused and optimized out by -ffunction-sections
7344 and --gc-sections, ensure that the same will happen for its jump
7345 tables by putting them into individual sections. */
7350 /* Get the frodata section from the default function in varasm.c
7351 but treat function-associated data-like jump tables as code
7352 rather than as user defined data. AVR has no constant pools. */
7354 int fdata
= flag_data_sections
;
7356 flag_data_sections
= flag_function_sections
;
7357 frodata
= default_function_rodata_section (decl
);
7358 flag_data_sections
= fdata
;
7359 flags
= frodata
->common
.flags
;
7362 if (frodata
!= readonly_data_section
7363 && flags
& SECTION_NAMED
)
7365 /* Adjust section flags and replace section name prefix. */
7369 static const char* const prefix
[] =
7371 ".rodata", ".progmem.gcc_sw_table",
7372 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7375 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
7377 const char * old_prefix
= prefix
[i
];
7378 const char * new_prefix
= prefix
[i
+1];
7379 const char * name
= frodata
->named
.name
;
7381 if (STR_PREFIX_P (name
, old_prefix
))
7383 const char *rname
= ACONCAT ((new_prefix
,
7384 name
+ strlen (old_prefix
), NULL
));
7385 flags
&= ~SECTION_CODE
;
7386 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
7388 return get_section (rname
, flags
, frodata
->named
.decl
);
7393 return progmem_swtable_section
;
7397 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7398 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7401 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
7403 if (flags
& AVR_SECTION_PROGMEM
)
7405 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
7406 int segment
= avr_addrspace
[as
].segment
% avr_current_arch
->n_segments
;
7407 const char *old_prefix
= ".rodata";
7408 const char *new_prefix
= progmem_section_prefix
[segment
];
7410 if (STR_PREFIX_P (name
, old_prefix
))
7412 const char *sname
= ACONCAT ((new_prefix
,
7413 name
+ strlen (old_prefix
), NULL
));
7414 default_elf_asm_named_section (sname
, flags
, decl
);
7418 default_elf_asm_named_section (new_prefix
, flags
, decl
);
7422 if (!avr_need_copy_data_p
)
7423 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
7424 || STR_PREFIX_P (name
, ".rodata")
7425 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
7427 if (!avr_need_clear_bss_p
)
7428 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
7430 default_elf_asm_named_section (name
, flags
, decl
);
7434 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
7436 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
7438 if (STR_PREFIX_P (name
, ".noinit"))
7440 if (decl
&& TREE_CODE (decl
) == VAR_DECL
7441 && DECL_INITIAL (decl
) == NULL_TREE
)
7442 flags
|= SECTION_BSS
; /* @nobits */
7444 warning (0, "only uninitialized variables can be placed in the "
7448 if (decl
&& DECL_P (decl
)
7449 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7451 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7453 /* Attribute progmem puts data in generic address space.
7454 Set section flags as if it was in __flash to get the right
7455 section prefix in the remainder. */
7457 if (ADDR_SPACE_GENERIC_P (as
))
7458 as
= ADDR_SPACE_FLASH
;
7460 flags
|= as
* SECTION_MACH_DEP
;
7461 flags
&= ~SECTION_WRITE
;
7462 flags
&= ~SECTION_BSS
;
7469 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7472 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
7474 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7475 readily available, see PR34734. So we postpone the warning
7476 about uninitialized data in program memory section until here. */
7479 && decl
&& DECL_P (decl
)
7480 && NULL_TREE
== DECL_INITIAL (decl
)
7481 && !DECL_EXTERNAL (decl
)
7482 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7484 warning (OPT_Wuninitialized
,
7485 "uninitialized variable %q+D put into "
7486 "program memory area", decl
);
7489 default_encode_section_info (decl
, rtl
, new_decl_p
);
7491 if (decl
&& DECL_P (decl
)
7492 && TREE_CODE (decl
) != FUNCTION_DECL
7494 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
7496 rtx sym
= XEXP (rtl
, 0);
7497 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7499 /* PSTR strings are in generic space but located in flash:
7500 patch address space. */
7502 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7503 as
= ADDR_SPACE_FLASH
;
7505 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
7510 /* Implement `TARGET_ASM_SELECT_SECTION' */
7513 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
7515 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
7517 if (decl
&& DECL_P (decl
)
7518 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7520 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7521 int segment
= avr_addrspace
[as
].segment
% avr_current_arch
->n_segments
;
7523 if (sect
->common
.flags
& SECTION_NAMED
)
7525 const char * name
= sect
->named
.name
;
7526 const char * old_prefix
= ".rodata";
7527 const char * new_prefix
= progmem_section_prefix
[segment
];
7529 if (STR_PREFIX_P (name
, old_prefix
))
7531 const char *sname
= ACONCAT ((new_prefix
,
7532 name
+ strlen (old_prefix
), NULL
));
7533 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
7537 return progmem_section
[segment
];
7543 /* Implement `TARGET_ASM_FILE_START'. */
7544 /* Outputs some text at the start of each assembler file. */
7547 avr_file_start (void)
7549 int sfr_offset
= avr_current_arch
->sfr_offset
;
7551 if (avr_current_arch
->asm_only
)
7552 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
7554 default_file_start ();
7556 /* Print I/O addresses of some SFRs used with IN and OUT. */
7558 if (!AVR_HAVE_8BIT_SP
)
7559 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
7561 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
7562 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
7564 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
7566 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
7568 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
7570 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
7572 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
7573 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
7574 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
7578 /* Implement `TARGET_ASM_FILE_END'. */
7579 /* Outputs to the stdio stream FILE some
7580 appropriate text to go at the end of an assembler file. */
7585 /* Output these only if there is anything in the
7586 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7587 input section(s) - some code size can be saved by not
7588 linking in the initialization code from libgcc if resp.
7589 sections are empty. */
7591 if (avr_need_copy_data_p
)
7592 fputs (".global __do_copy_data\n", asm_out_file
);
7594 if (avr_need_clear_bss_p
)
7595 fputs (".global __do_clear_bss\n", asm_out_file
);
7598 /* Choose the order in which to allocate hard registers for
7599 pseudo-registers local to a basic block.
7601 Store the desired register order in the array `reg_alloc_order'.
7602 Element 0 should be the register to allocate first; element 1, the
7603 next register; and so on. */
7606 order_regs_for_local_alloc (void)
7609 static const int order_0
[] = {
7617 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7621 static const int order_1
[] = {
7629 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7633 static const int order_2
[] = {
7642 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7647 const int *order
= (TARGET_ORDER_1
? order_1
:
7648 TARGET_ORDER_2
? order_2
:
7650 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
7651 reg_alloc_order
[i
] = order
[i
];
7655 /* Implement `TARGET_REGISTER_MOVE_COST' */
7658 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
7659 reg_class_t from
, reg_class_t to
)
7661 return (from
== STACK_REG
? 6
7662 : to
== STACK_REG
? 12
7667 /* Implement `TARGET_MEMORY_MOVE_COST' */
7670 avr_memory_move_cost (enum machine_mode mode
,
7671 reg_class_t rclass ATTRIBUTE_UNUSED
,
7672 bool in ATTRIBUTE_UNUSED
)
7674 return (mode
== QImode
? 2
7675 : mode
== HImode
? 4
7676 : mode
== SImode
? 8
7677 : mode
== SFmode
? 8
7682 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7683 cost of an RTX operand given its context. X is the rtx of the
7684 operand, MODE is its mode, and OUTER is the rtx_code of this
7685 operand's parent operator. */
7688 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
7689 int opno
, bool speed
)
7691 enum rtx_code code
= GET_CODE (x
);
7702 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7709 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
7713 /* Worker function for AVR backend's rtx_cost function.
7714 X is rtx expression whose cost is to be calculated.
7715 Return true if the complete cost has been computed.
7716 Return false if subexpressions should be scanned.
7717 In either case, *TOTAL contains the cost result. */
7720 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
7721 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
7723 enum rtx_code code
= (enum rtx_code
) codearg
;
7724 enum machine_mode mode
= GET_MODE (x
);
7734 /* Immediate constants are as cheap as registers. */
7739 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7747 *total
= COSTS_N_INSNS (1);
7753 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
7759 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7767 *total
= COSTS_N_INSNS (1);
7773 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7777 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7778 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7782 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
7783 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7784 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7788 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
7789 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7790 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7798 && MULT
== GET_CODE (XEXP (x
, 0))
7799 && register_operand (XEXP (x
, 1), QImode
))
7802 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7803 /* multiply-add with constant: will be split and load constant. */
7804 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7805 *total
= COSTS_N_INSNS (1) + *total
;
7808 *total
= COSTS_N_INSNS (1);
7809 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7810 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7815 && (MULT
== GET_CODE (XEXP (x
, 0))
7816 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
7817 && register_operand (XEXP (x
, 1), HImode
)
7818 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
7819 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
7822 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7823 /* multiply-add with constant: will be split and load constant. */
7824 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7825 *total
= COSTS_N_INSNS (1) + *total
;
7828 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7830 *total
= COSTS_N_INSNS (2);
7831 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7834 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7835 *total
= COSTS_N_INSNS (1);
7837 *total
= COSTS_N_INSNS (2);
7841 if (!CONST_INT_P (XEXP (x
, 1)))
7843 *total
= COSTS_N_INSNS (3);
7844 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7847 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7848 *total
= COSTS_N_INSNS (2);
7850 *total
= COSTS_N_INSNS (3);
7854 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7856 *total
= COSTS_N_INSNS (4);
7857 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7860 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7861 *total
= COSTS_N_INSNS (1);
7863 *total
= COSTS_N_INSNS (4);
7869 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7875 && register_operand (XEXP (x
, 0), QImode
)
7876 && MULT
== GET_CODE (XEXP (x
, 1)))
7879 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7880 /* multiply-sub with constant: will be split and load constant. */
7881 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7882 *total
= COSTS_N_INSNS (1) + *total
;
7887 && register_operand (XEXP (x
, 0), HImode
)
7888 && (MULT
== GET_CODE (XEXP (x
, 1))
7889 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
7890 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
7891 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
7894 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7895 /* multiply-sub with constant: will be split and load constant. */
7896 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7897 *total
= COSTS_N_INSNS (1) + *total
;
7903 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7904 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7905 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7906 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7910 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7911 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7912 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7920 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
7922 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7930 rtx op0
= XEXP (x
, 0);
7931 rtx op1
= XEXP (x
, 1);
7932 enum rtx_code code0
= GET_CODE (op0
);
7933 enum rtx_code code1
= GET_CODE (op1
);
7934 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
7935 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
7938 && (u8_operand (op1
, HImode
)
7939 || s8_operand (op1
, HImode
)))
7941 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
7945 && register_operand (op1
, HImode
))
7947 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
7950 else if (ex0
|| ex1
)
7952 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
7955 else if (register_operand (op0
, HImode
)
7956 && (u8_operand (op1
, HImode
)
7957 || s8_operand (op1
, HImode
)))
7959 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
7963 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
7966 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7973 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7983 /* Add some additional costs besides CALL like moves etc. */
7985 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
7989 /* Just a rough estimate. Even with -O2 we don't want bulky
7990 code expanded inline. */
7992 *total
= COSTS_N_INSNS (25);
7998 *total
= COSTS_N_INSNS (300);
8000 /* Add some additional costs besides CALL like moves etc. */
8001 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
8009 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8010 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8018 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8020 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
8021 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8022 /* For div/mod with const-int divisor we have at least the cost of
8023 loading the divisor. */
8024 if (CONST_INT_P (XEXP (x
, 1)))
8025 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8026 /* Add some overall penaly for clobbering and moving around registers */
8027 *total
+= COSTS_N_INSNS (2);
8034 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
8035 *total
= COSTS_N_INSNS (1);
8040 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
8041 *total
= COSTS_N_INSNS (3);
8046 if (CONST_INT_P (XEXP (x
, 1)))
8047 switch (INTVAL (XEXP (x
, 1)))
8051 *total
= COSTS_N_INSNS (5);
8054 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
8062 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8069 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8071 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8072 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8077 val
= INTVAL (XEXP (x
, 1));
8079 *total
= COSTS_N_INSNS (3);
8080 else if (val
>= 0 && val
<= 7)
8081 *total
= COSTS_N_INSNS (val
);
8083 *total
= COSTS_N_INSNS (1);
8090 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
8091 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
8092 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
8094 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
8099 if (const1_rtx
== (XEXP (x
, 1))
8100 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
8102 *total
= COSTS_N_INSNS (2);
8106 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8108 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8109 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8113 switch (INTVAL (XEXP (x
, 1)))
8120 *total
= COSTS_N_INSNS (2);
8123 *total
= COSTS_N_INSNS (3);
8129 *total
= COSTS_N_INSNS (4);
8134 *total
= COSTS_N_INSNS (5);
8137 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8140 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8143 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
8146 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8147 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8153 if (!CONST_INT_P (XEXP (x
, 1)))
8155 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8158 switch (INTVAL (XEXP (x
, 1)))
8166 *total
= COSTS_N_INSNS (3);
8169 *total
= COSTS_N_INSNS (5);
8172 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8178 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8180 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8181 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8185 switch (INTVAL (XEXP (x
, 1)))
8191 *total
= COSTS_N_INSNS (3);
8196 *total
= COSTS_N_INSNS (4);
8199 *total
= COSTS_N_INSNS (6);
8202 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8205 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8206 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8214 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8221 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8223 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8224 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8229 val
= INTVAL (XEXP (x
, 1));
8231 *total
= COSTS_N_INSNS (4);
8233 *total
= COSTS_N_INSNS (2);
8234 else if (val
>= 0 && val
<= 7)
8235 *total
= COSTS_N_INSNS (val
);
8237 *total
= COSTS_N_INSNS (1);
8242 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8244 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8245 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8249 switch (INTVAL (XEXP (x
, 1)))
8255 *total
= COSTS_N_INSNS (2);
8258 *total
= COSTS_N_INSNS (3);
8264 *total
= COSTS_N_INSNS (4);
8268 *total
= COSTS_N_INSNS (5);
8271 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8274 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8278 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8281 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8282 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8288 if (!CONST_INT_P (XEXP (x
, 1)))
8290 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8293 switch (INTVAL (XEXP (x
, 1)))
8299 *total
= COSTS_N_INSNS (3);
8303 *total
= COSTS_N_INSNS (5);
8306 *total
= COSTS_N_INSNS (4);
8309 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8315 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8317 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8318 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8322 switch (INTVAL (XEXP (x
, 1)))
8328 *total
= COSTS_N_INSNS (4);
8333 *total
= COSTS_N_INSNS (6);
8336 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8339 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
8342 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8343 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8351 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8358 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8360 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8361 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8366 val
= INTVAL (XEXP (x
, 1));
8368 *total
= COSTS_N_INSNS (3);
8369 else if (val
>= 0 && val
<= 7)
8370 *total
= COSTS_N_INSNS (val
);
8372 *total
= COSTS_N_INSNS (1);
8377 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8379 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8380 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8384 switch (INTVAL (XEXP (x
, 1)))
8391 *total
= COSTS_N_INSNS (2);
8394 *total
= COSTS_N_INSNS (3);
8399 *total
= COSTS_N_INSNS (4);
8403 *total
= COSTS_N_INSNS (5);
8409 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8412 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8416 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8419 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8420 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8426 if (!CONST_INT_P (XEXP (x
, 1)))
8428 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8431 switch (INTVAL (XEXP (x
, 1)))
8439 *total
= COSTS_N_INSNS (3);
8442 *total
= COSTS_N_INSNS (5);
8445 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8451 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8453 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8454 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8458 switch (INTVAL (XEXP (x
, 1)))
8464 *total
= COSTS_N_INSNS (4);
8467 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8472 *total
= COSTS_N_INSNS (4);
8475 *total
= COSTS_N_INSNS (6);
8478 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8479 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8487 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8491 switch (GET_MODE (XEXP (x
, 0)))
8494 *total
= COSTS_N_INSNS (1);
8495 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8496 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8500 *total
= COSTS_N_INSNS (2);
8501 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8502 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8503 else if (INTVAL (XEXP (x
, 1)) != 0)
8504 *total
+= COSTS_N_INSNS (1);
8508 *total
= COSTS_N_INSNS (3);
8509 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
8510 *total
+= COSTS_N_INSNS (2);
8514 *total
= COSTS_N_INSNS (4);
8515 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8516 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8517 else if (INTVAL (XEXP (x
, 1)) != 0)
8518 *total
+= COSTS_N_INSNS (3);
8524 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8529 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
8530 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8531 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8533 if (QImode
== mode
|| HImode
== mode
)
8535 *total
= COSTS_N_INSNS (2);
8548 /* Implement `TARGET_RTX_COSTS'. */
8551 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
8552 int opno
, int *total
, bool speed
)
8554 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
8555 opno
, total
, speed
);
8557 if (avr_log
.rtx_costs
)
8559 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8560 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
8567 /* Implement `TARGET_ADDRESS_COST'. */
8570 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
8574 if (GET_CODE (x
) == PLUS
8575 && CONST_INT_P (XEXP (x
, 1))
8576 && (REG_P (XEXP (x
, 0))
8577 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
8579 if (INTVAL (XEXP (x
, 1)) >= 61)
8582 else if (CONSTANT_ADDRESS_P (x
))
8585 && io_address_operand (x
, QImode
))
8589 if (avr_log
.address_cost
)
8590 avr_edump ("\n%?: %d = %r\n", cost
, x
);
8595 /* Test for extra memory constraint 'Q'.
8596 It's a memory address based on Y or Z pointer with valid displacement. */
8599 extra_constraint_Q (rtx x
)
8603 if (GET_CODE (XEXP (x
,0)) == PLUS
8604 && REG_P (XEXP (XEXP (x
,0), 0))
8605 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
8606 && (INTVAL (XEXP (XEXP (x
,0), 1))
8607 <= MAX_LD_OFFSET (GET_MODE (x
))))
8609 rtx xx
= XEXP (XEXP (x
,0), 0);
8610 int regno
= REGNO (xx
);
8612 ok
= (/* allocate pseudos */
8613 regno
>= FIRST_PSEUDO_REGISTER
8614 /* strictly check */
8615 || regno
== REG_Z
|| regno
== REG_Y
8616 /* XXX frame & arg pointer checks */
8617 || xx
== frame_pointer_rtx
8618 || xx
== arg_pointer_rtx
);
8620 if (avr_log
.constraints
)
8621 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8622 ok
, reload_completed
, reload_in_progress
, x
);
8628 /* Convert condition code CONDITION to the valid AVR condition code. */
8631 avr_normalize_condition (RTX_CODE condition
)
8648 /* Helper function for `avr_reorg'. */
8651 avr_compare_pattern (rtx insn
)
8653 rtx pattern
= single_set (insn
);
8656 && NONJUMP_INSN_P (insn
)
8657 && SET_DEST (pattern
) == cc0_rtx
8658 && GET_CODE (SET_SRC (pattern
)) == COMPARE
8659 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 0))
8660 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 1)))
8668 /* Helper function for `avr_reorg'. */
8670 /* Expansion of switch/case decision trees leads to code like
8672 cc0 = compare (Reg, Num)
8676 cc0 = compare (Reg, Num)
8680 The second comparison is superfluous and can be deleted.
8681 The second jump condition can be transformed from a
8682 "difficult" one to a "simple" one because "cc0 > 0" and
8683 "cc0 >= 0" will have the same effect here.
8685 This function relies on the way switch/case is being expaned
8686 as binary decision tree. For example code see PR 49903.
8688 Return TRUE if optimization performed.
8689 Return FALSE if nothing changed.
8691 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8693 We don't want to do this in text peephole because it is
8694 tedious to work out jump offsets there and the second comparison
8695 might have been transormed by `avr_reorg'.
8697 RTL peephole won't do because peephole2 does not scan across
8701 avr_reorg_remove_redundant_compare (rtx insn1
)
8703 rtx comp1
, ifelse1
, xcond1
, branch1
;
8704 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
8706 rtx jump
, target
, cond
;
8708 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8710 branch1
= next_nonnote_nondebug_insn (insn1
);
8711 if (!branch1
|| !JUMP_P (branch1
))
8714 insn2
= next_nonnote_nondebug_insn (branch1
);
8715 if (!insn2
|| !avr_compare_pattern (insn2
))
8718 branch2
= next_nonnote_nondebug_insn (insn2
);
8719 if (!branch2
|| !JUMP_P (branch2
))
8722 comp1
= avr_compare_pattern (insn1
);
8723 comp2
= avr_compare_pattern (insn2
);
8724 xcond1
= single_set (branch1
);
8725 xcond2
= single_set (branch2
);
8727 if (!comp1
|| !comp2
8728 || !rtx_equal_p (comp1
, comp2
)
8729 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
8730 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
8731 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
8732 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
8737 comp1
= SET_SRC (comp1
);
8738 ifelse1
= SET_SRC (xcond1
);
8739 ifelse2
= SET_SRC (xcond2
);
8741 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8743 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
8744 || !REG_P (XEXP (comp1
, 0))
8745 || !CONST_INT_P (XEXP (comp1
, 1))
8746 || XEXP (ifelse1
, 2) != pc_rtx
8747 || XEXP (ifelse2
, 2) != pc_rtx
8748 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
8749 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
8750 || !COMPARISON_P (XEXP (ifelse2
, 0))
8751 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
8752 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
8753 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
8754 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
8759 /* We filtered the insn sequence to look like
8765 (if_then_else (eq (cc0)
8774 (if_then_else (CODE (cc0)
8780 code
= GET_CODE (XEXP (ifelse2
, 0));
8782 /* Map GT/GTU to GE/GEU which is easier for AVR.
8783 The first two instructions compare/branch on EQ
8784 so we may replace the difficult
8786 if (x == VAL) goto L1;
8787 if (x > VAL) goto L2;
8791 if (x == VAL) goto L1;
8792 if (x >= VAL) goto L2;
8794 Similarly, replace LE/LEU by LT/LTU. */
8805 code
= avr_normalize_condition (code
);
8812 /* Wrap the branches into UNSPECs so they won't be changed or
8813 optimized in the remainder. */
8815 target
= XEXP (XEXP (ifelse1
, 1), 0);
8816 cond
= XEXP (ifelse1
, 0);
8817 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
8819 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
8821 target
= XEXP (XEXP (ifelse2
, 1), 0);
8822 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
8823 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
8825 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
8827 /* The comparisons in insn1 and insn2 are exactly the same;
8828 insn2 is superfluous so delete it. */
8830 delete_insn (insn2
);
8831 delete_insn (branch1
);
8832 delete_insn (branch2
);
8838 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8839 /* Optimize conditional jumps. */
8844 rtx insn
= get_insns();
8846 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
8848 rtx pattern
= avr_compare_pattern (insn
);
8854 && avr_reorg_remove_redundant_compare (insn
))
8859 if (compare_diff_p (insn
))
8861 /* Now we work under compare insn with difficult branch. */
8863 rtx next
= next_real_insn (insn
);
8864 rtx pat
= PATTERN (next
);
8866 pattern
= SET_SRC (pattern
);
8868 if (true_regnum (XEXP (pattern
, 0)) >= 0
8869 && true_regnum (XEXP (pattern
, 1)) >= 0)
8871 rtx x
= XEXP (pattern
, 0);
8872 rtx src
= SET_SRC (pat
);
8873 rtx t
= XEXP (src
,0);
8874 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8875 XEXP (pattern
, 0) = XEXP (pattern
, 1);
8876 XEXP (pattern
, 1) = x
;
8877 INSN_CODE (next
) = -1;
8879 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8880 && XEXP (pattern
, 1) == const0_rtx
)
8882 /* This is a tst insn, we can reverse it. */
8883 rtx src
= SET_SRC (pat
);
8884 rtx t
= XEXP (src
,0);
8886 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8887 XEXP (pattern
, 1) = XEXP (pattern
, 0);
8888 XEXP (pattern
, 0) = const0_rtx
;
8889 INSN_CODE (next
) = -1;
8890 INSN_CODE (insn
) = -1;
8892 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8893 && CONST_INT_P (XEXP (pattern
, 1)))
8895 rtx x
= XEXP (pattern
, 1);
8896 rtx src
= SET_SRC (pat
);
8897 rtx t
= XEXP (src
,0);
8898 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
8900 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
8902 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
8903 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
8904 INSN_CODE (next
) = -1;
8905 INSN_CODE (insn
) = -1;
8912 /* Returns register number for function return value.*/
8914 static inline unsigned int
8915 avr_ret_register (void)
8920 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8923 avr_function_value_regno_p (const unsigned int regno
)
8925 return (regno
== avr_ret_register ());
8928 /* Create an RTX representing the place where a
8929 library function returns a value of mode MODE. */
8932 avr_libcall_value (enum machine_mode mode
,
8933 const_rtx func ATTRIBUTE_UNUSED
)
8935 int offs
= GET_MODE_SIZE (mode
);
8938 offs
= (offs
+ 1) & ~1;
8940 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
8943 /* Create an RTX representing the place where a
8944 function returns a value of data type VALTYPE. */
8947 avr_function_value (const_tree type
,
8948 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
8949 bool outgoing ATTRIBUTE_UNUSED
)
8953 if (TYPE_MODE (type
) != BLKmode
)
8954 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
8956 offs
= int_size_in_bytes (type
);
8959 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
8960 offs
= GET_MODE_SIZE (SImode
);
8961 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
8962 offs
= GET_MODE_SIZE (DImode
);
8964 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
8968 test_hard_reg_class (enum reg_class rclass
, rtx x
)
8970 int regno
= true_regnum (x
);
8974 if (TEST_HARD_REG_CLASS (rclass
, regno
))
8981 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8982 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8985 avr_2word_insn_p (rtx insn
)
8987 if (avr_current_device
->errata_skip
8989 || 2 != get_attr_length (insn
))
8994 switch (INSN_CODE (insn
))
8999 case CODE_FOR_movqi_insn
:
9001 rtx set
= single_set (insn
);
9002 rtx src
= SET_SRC (set
);
9003 rtx dest
= SET_DEST (set
);
9005 /* Factor out LDS and STS from movqi_insn. */
9008 && (REG_P (src
) || src
== const0_rtx
))
9010 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
9012 else if (REG_P (dest
)
9015 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
9021 case CODE_FOR_call_insn
:
9022 case CODE_FOR_call_value_insn
:
9029 jump_over_one_insn_p (rtx insn
, rtx dest
)
9031 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
9034 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
9035 int dest_addr
= INSN_ADDRESSES (uid
);
9036 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
9038 return (jump_offset
== 1
9039 || (jump_offset
== 2
9040 && avr_2word_insn_p (next_active_insn (insn
))));
9043 /* Returns 1 if a value of mode MODE can be stored starting with hard
9044 register number REGNO. On the enhanced core, anything larger than
9045 1 byte must start in even numbered register for "movw" to work
9046 (this way we don't have to check for odd registers everywhere). */
9049 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
9051 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9052 Disallowing QI et al. in these regs might lead to code like
9053 (set (subreg:QI (reg:HI 28) n) ...)
9054 which will result in wrong code because reload does not
9055 handle SUBREGs of hard regsisters like this.
9056 This could be fixed in reload. However, it appears
9057 that fixing reload is not wanted by reload people. */
9059 /* Any GENERAL_REGS register can hold 8-bit values. */
9061 if (GET_MODE_SIZE (mode
) == 1)
9064 /* FIXME: Ideally, the following test is not needed.
9065 However, it turned out that it can reduce the number
9066 of spill fails. AVR and it's poor endowment with
9067 address registers is extreme stress test for reload. */
9069 if (GET_MODE_SIZE (mode
) >= 4
9073 /* All modes larger than 8 bits should start in an even register. */
9075 return !(regno
& 1);
9079 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
9082 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
9083 addr_space_t as
, RTX_CODE outer_code
,
9084 RTX_CODE index_code ATTRIBUTE_UNUSED
)
9086 if (!ADDR_SPACE_GENERIC_P (as
))
9088 return POINTER_Z_REGS
;
9092 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
9094 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
9098 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9101 avr_regno_mode_code_ok_for_base_p (int regno
,
9102 enum machine_mode mode ATTRIBUTE_UNUSED
,
9103 addr_space_t as ATTRIBUTE_UNUSED
,
9104 RTX_CODE outer_code
,
9105 RTX_CODE index_code ATTRIBUTE_UNUSED
)
9109 if (!ADDR_SPACE_GENERIC_P (as
))
9111 if (regno
< FIRST_PSEUDO_REGISTER
9119 regno
= reg_renumber
[regno
];
9130 if (regno
< FIRST_PSEUDO_REGISTER
9134 || regno
== ARG_POINTER_REGNUM
))
9138 else if (reg_renumber
)
9140 regno
= reg_renumber
[regno
];
9145 || regno
== ARG_POINTER_REGNUM
)
9152 && PLUS
== outer_code
9162 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9163 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9164 CLOBBER_REG is a QI clobber register or NULL_RTX.
9165 LEN == NULL: output instructions.
9166 LEN != NULL: set *LEN to the length of the instruction sequence
9167 (in words) printed with LEN = NULL.
9168 If CLEAR_P is true, OP[0] had been cleard to Zero already.
9169 If CLEAR_P is false, nothing is known about OP[0].
9171 The effect on cc0 is as follows:
9173 Load 0 to any register except ZERO_REG : NONE
9174 Load ld register with any value : NONE
9175 Anything else: : CLOBBER */
9178 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
9184 int clobber_val
= 1234;
9185 bool cooked_clobber_p
= false;
9187 enum machine_mode mode
= GET_MODE (dest
);
9188 int n
, n_bytes
= GET_MODE_SIZE (mode
);
9190 gcc_assert (REG_P (dest
)
9191 && CONSTANT_P (src
));
9196 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9197 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9199 if (REGNO (dest
) < 16
9200 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
9202 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
9205 /* We might need a clobber reg but don't have one. Look at the value to
9206 be loaded more closely. A clobber is only needed if it is a symbol
9207 or contains a byte that is neither 0, -1 or a power of 2. */
9209 if (NULL_RTX
== clobber_reg
9210 && !test_hard_reg_class (LD_REGS
, dest
)
9211 && (! (CONST_INT_P (src
) || CONST_DOUBLE_P (src
))
9212 || !avr_popcount_each_byte (src
, n_bytes
,
9213 (1 << 0) | (1 << 1) | (1 << 8))))
9215 /* We have no clobber register but need one. Cook one up.
9216 That's cheaper than loading from constant pool. */
9218 cooked_clobber_p
= true;
9219 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
9220 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
9223 /* Now start filling DEST from LSB to MSB. */
9225 for (n
= 0; n
< n_bytes
; n
++)
9228 bool done_byte
= false;
9232 /* Crop the n-th destination byte. */
9234 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
9235 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
9237 if (!CONST_INT_P (src
)
9238 && !CONST_DOUBLE_P (src
))
9240 static const char* const asm_code
[][2] =
9242 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
9243 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
9244 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
9245 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
9250 xop
[2] = clobber_reg
;
9252 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
9257 /* Crop the n-th source byte. */
9259 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
9260 ival
[n
] = INTVAL (xval
);
9262 /* Look if we can reuse the low word by means of MOVW. */
9268 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
9269 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
9271 if (INTVAL (lo16
) == INTVAL (hi16
))
9273 if (0 != INTVAL (lo16
)
9276 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
9283 /* Don't use CLR so that cc0 is set as expected. */
9288 avr_asm_len (ldreg_p
? "ldi %0,0"
9289 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
9290 : "mov %0,__zero_reg__",
9295 if (clobber_val
== ival
[n
]
9296 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
9301 /* LD_REGS can use LDI to move a constant value */
9307 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
9311 /* Try to reuse value already loaded in some lower byte. */
9313 for (j
= 0; j
< n
; j
++)
9314 if (ival
[j
] == ival
[n
])
9319 avr_asm_len ("mov %0,%1", xop
, len
, 1);
9327 /* Need no clobber reg for -1: Use CLR/DEC */
9332 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9334 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
9337 else if (1 == ival
[n
])
9340 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9342 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
9346 /* Use T flag or INC to manage powers of 2 if we have
9349 if (NULL_RTX
== clobber_reg
9350 && single_one_operand (xval
, QImode
))
9353 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
9355 gcc_assert (constm1_rtx
!= xop
[1]);
9360 avr_asm_len ("set", xop
, len
, 1);
9364 avr_asm_len ("clr %0", xop
, len
, 1);
9366 avr_asm_len ("bld %0,%1", xop
, len
, 1);
9370 /* We actually need the LD_REGS clobber reg. */
9372 gcc_assert (NULL_RTX
!= clobber_reg
);
9376 xop
[2] = clobber_reg
;
9377 clobber_val
= ival
[n
];
9379 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9380 "mov %0,%2", xop
, len
, 2);
9383 /* If we cooked up a clobber reg above, restore it. */
9385 if (cooked_clobber_p
)
9387 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
9392 /* Reload the constant OP[1] into the HI register OP[0].
9393 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9394 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9395 need a clobber reg or have to cook one up.
9397 PLEN == NULL: Output instructions.
9398 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9399 by the insns printed.
9404 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
9406 output_reload_in_const (op
, clobber_reg
, plen
, false);
9411 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9412 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9413 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9414 need a clobber reg or have to cook one up.
9416 LEN == NULL: Output instructions.
9418 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9419 by the insns printed.
9424 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
9427 && !test_hard_reg_class (LD_REGS
, op
[0])
9428 && (CONST_INT_P (op
[1])
9429 || CONST_DOUBLE_P (op
[1])))
9431 int len_clr
, len_noclr
;
9433 /* In some cases it is better to clear the destination beforehand, e.g.
9435 CLR R2 CLR R3 MOVW R4,R2 INC R2
9439 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9441 We find it too tedious to work that out in the print function.
9442 Instead, we call the print function twice to get the lengths of
9443 both methods and use the shortest one. */
9445 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
9446 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
9448 if (len_noclr
- len_clr
== 4)
9450 /* Default needs 4 CLR instructions: clear register beforehand. */
9452 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9453 "mov %B0,__zero_reg__" CR_TAB
9454 "movw %C0,%A0", &op
[0], len
, 3);
9456 output_reload_in_const (op
, clobber_reg
, len
, true);
9465 /* Default: destination not pre-cleared. */
9467 output_reload_in_const (op
, clobber_reg
, len
, false);
9472 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
9474 output_reload_in_const (op
, clobber_reg
, len
, false);
9479 avr_output_bld (rtx operands
[], int bit_nr
)
9481 static char s
[] = "bld %A0,0";
9483 s
[5] = 'A' + (bit_nr
>> 3);
9484 s
[8] = '0' + (bit_nr
& 7);
9485 output_asm_insn (s
, operands
);
9489 avr_output_addr_vec_elt (FILE *stream
, int value
)
9491 if (AVR_HAVE_JMP_CALL
)
9492 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
9494 fprintf (stream
, "\trjmp .L%d\n", value
);
9497 /* Returns true if SCRATCH are safe to be allocated as a scratch
9498 registers (for a define_peephole2) in the current function. */
9501 avr_hard_regno_scratch_ok (unsigned int regno
)
9503 /* Interrupt functions can only use registers that have already been saved
9504 by the prologue, even if they would normally be call-clobbered. */
9506 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9507 && !df_regs_ever_live_p (regno
))
9510 /* Don't allow hard registers that might be part of the frame pointer.
9511 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9512 and don't care for a frame pointer that spans more than one register. */
9514 if ((!reload_completed
|| frame_pointer_needed
)
9515 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
9523 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9526 avr_hard_regno_rename_ok (unsigned int old_reg
,
9527 unsigned int new_reg
)
9529 /* Interrupt functions can only use registers that have already been
9530 saved by the prologue, even if they would normally be
9533 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9534 && !df_regs_ever_live_p (new_reg
))
9537 /* Don't allow hard registers that might be part of the frame pointer.
9538 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9539 and don't care for a frame pointer that spans more than one register. */
9541 if ((!reload_completed
|| frame_pointer_needed
)
9542 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
9543 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
9551 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9552 or memory location in the I/O space (QImode only).
9554 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9555 Operand 1: register operand to test, or CONST_INT memory address.
9556 Operand 2: bit number.
9557 Operand 3: label to jump to if the test is true. */
9560 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
9562 enum rtx_code comp
= GET_CODE (operands
[0]);
9563 bool long_jump
= get_attr_length (insn
) >= 4;
9564 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
9568 else if (comp
== LT
)
9572 comp
= reverse_condition (comp
);
9574 switch (GET_CODE (operands
[1]))
9581 if (low_io_address_operand (operands
[1], QImode
))
9584 output_asm_insn ("sbis %i1,%2", operands
);
9586 output_asm_insn ("sbic %i1,%2", operands
);
9590 output_asm_insn ("in __tmp_reg__,%i1", operands
);
9592 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
9594 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
9597 break; /* CONST_INT */
9601 if (GET_MODE (operands
[1]) == QImode
)
9604 output_asm_insn ("sbrs %1,%2", operands
);
9606 output_asm_insn ("sbrc %1,%2", operands
);
9608 else /* HImode, PSImode or SImode */
9610 static char buf
[] = "sbrc %A1,0";
9611 unsigned int bit_nr
= UINTVAL (operands
[2]);
9613 buf
[3] = (comp
== EQ
) ? 's' : 'c';
9614 buf
[6] = 'A' + (bit_nr
/ 8);
9615 buf
[9] = '0' + (bit_nr
% 8);
9616 output_asm_insn (buf
, operands
);
9623 return ("rjmp .+4" CR_TAB
9632 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9635 avr_asm_out_ctor (rtx symbol
, int priority
)
9637 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
9638 default_ctor_section_asm_out_constructor (symbol
, priority
);
9641 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9644 avr_asm_out_dtor (rtx symbol
, int priority
)
9646 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
9647 default_dtor_section_asm_out_destructor (symbol
, priority
);
9650 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9653 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
9655 if (TYPE_MODE (type
) == BLKmode
)
9657 HOST_WIDE_INT size
= int_size_in_bytes (type
);
9658 return (size
== -1 || size
> 8);
9664 /* Worker function for CASE_VALUES_THRESHOLD. */
9667 avr_case_values_threshold (void)
9669 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
9673 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9675 static enum machine_mode
9676 avr_addr_space_address_mode (addr_space_t as
)
9678 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
9682 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9684 static enum machine_mode
9685 avr_addr_space_pointer_mode (addr_space_t as
)
9687 return avr_addr_space_address_mode (as
);
9691 /* Helper for following function. */
9694 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
9696 gcc_assert (REG_P (reg
));
9700 return REGNO (reg
) == REG_Z
;
9703 /* Avoid combine to propagate hard regs. */
9705 if (can_create_pseudo_p()
9706 && REGNO (reg
) < REG_Z
)
9715 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
/* NOTE(review): garbled extraction -- the switch header, the local
   `ok' declaration, several case labels (REG / POST_INC, presumably),
   the MEMX assertion context and the final `return ok;' are missing
   from this span.  Comments below annotate visible fragments only.  */
9718 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
9719 bool strict
, addr_space_t as
)
/* Generic space defers to the ordinary legitimate-address check.  */
9728 case ADDR_SPACE_GENERIC
:
9729 return avr_legitimate_address_p (mode
, x
, strict
)
;
9731 case ADDR_SPACE_FLASH
:
9732 case ADDR_SPACE_FLASH1
:
9733 case ADDR_SPACE_FLASH2
:
9734 case ADDR_SPACE_FLASH3
:
9735 case ADDR_SPACE_FLASH4
:
9736 case ADDR_SPACE_FLASH5
:
/* Flash spaces: only a plain Z register or (apparently) a
   single-level auto-modify of Z is acceptable.  */
9738 switch (GET_CODE (x
))
9741 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
9745 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
9754 case ADDR_SPACE_MEMX
:
9757 && can_create_pseudo_p());
/* 24-bit __memx addresses appear as LO_SUM (hi8-part, Z).  */
9759 if (LO_SUM
== GET_CODE (x
))
9761 rtx hi
= XEXP (x
, 0);
9762 rtx lo
= XEXP (x
, 1);
9765 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
9767 && REGNO (lo
) == REG_Z
);
/* Diagnostic dump, enabled via -mlog=legitimate_address_p.  */
9773 if (avr_log
.legitimate_address_p
)
9775 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9776 "reload_completed=%d reload_in_progress=%d %s:",
9777 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
9778 reg_renumber
? "(reg_renumber)" : "");
9780 if (GET_CODE (x
) == PLUS
9781 && REG_P (XEXP (x
, 0))
9782 && CONST_INT_P (XEXP (x
, 1))
9783 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
9786 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
9787 true_regnum (XEXP (x
, 0)));
9790 avr_edump ("\n%r\n", x
);
9797 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9800 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
9801 enum machine_mode mode
, addr_space_t as
)
9803 if (ADDR_SPACE_GENERIC_P (as
))
9804 return avr_legitimize_address (x
, old_x
, mode
);
9806 if (avr_log
.legitimize_address
)
9808 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
9815 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
/* NOTE(review): garbled extraction -- the return type, the declaration
   of `sym' and `msb', branch braces, the `emit_insn' wrapper around the
   extend, and the final returns are missing from this span.  Compare
   with upstream avr.c before editing.  */
9818 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
9820 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
9821 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
9823 if (avr_log
.progmem
)
9824 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9825 src
, type_from
, type_to
);
9827 /* Up-casting from 16-bit to 24-bit pointer. */
9829 if (as_from
!= ADDR_SPACE_MEMX
9830 && as_to
== ADDR_SPACE_MEMX
)
9834 rtx reg
= gen_reg_rtx (PSImode
);
/* Strip CONST/PLUS wrappers to find the underlying SYMBOL_REF.  */
9836 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
9837 sym
= XEXP (sym
, 0);
9839 /* Look at symbol flags: avr_encode_section_info set the flags
9840 also if attribute progmem was seen so that we get the right
9841 promotion for, e.g. PSTR-like strings that reside in generic space
9842 but are located in flash. In that case we patch the incoming
9845 if (SYMBOL_REF
== GET_CODE (sym
)
9846 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
9848 as_from
= ADDR_SPACE_FLASH
;
9851 /* Linearize memory: RAM has bit 23 set. */
9853 msb
= ADDR_SPACE_GENERIC_P (as_from
)
9855 : avr_addrspace
[as_from
].segment
% avr_current_arch
->n_segments
;
9857 src
= force_reg (Pmode
, src
);
/* Zero-extend for segment 0, otherwise extend with the constant
   segment number MSB as the high byte.  */
9860 ? gen_zero_extendhipsi2 (reg
, src
)
9861 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
9866 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9868 if (as_from
== ADDR_SPACE_MEMX
9869 && as_to
!= ADDR_SPACE_MEMX
)
9871 rtx new_src
= gen_reg_rtx (Pmode
);
9873 src
= force_reg (PSImode
, src
);
9875 emit_move_insn (new_src
,
9876 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
9884 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9887 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
9888 addr_space_t superset ATTRIBUTE_UNUSED
)
9890 /* Allow any kind of pointer mess. */
9896 /* Worker function for movmemhi expander.
9897 XOP[0] Destination as MEM:BLK
9899 XOP[2] # Bytes to copy
9901 Return TRUE if the expansion is accomplished.
9902 Return FALSE if the operand compination is not supported. */
/* NOTE(review): garbled extraction -- the return type, early `return
   false' statements, the RAMPZ/segment conditional header, `xas'
   setup, the `emit_insn (insn)' and final `return true' are missing
   from this span.  Annotations below cover visible fragments only.  */
9905 avr_emit_movmemhi (rtx
*xop
)
9907 HOST_WIDE_INT count
;
9908 enum machine_mode loop_mode
;
9909 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
9910 rtx loop_reg
, addr0
, addr1
, a_src
, a_dest
, insn
, xas
, reg_x
;
9911 rtx a_hi8
= NULL_RTX
;
/* Only RAM destinations and compile-time constant byte counts are
   supported; other combinations bail out (lines elided here).  */
9913 if (avr_mem_flash_p (xop
[0]))
9916 if (!CONST_INT_P (xop
[2]))
9919 count
= INTVAL (xop
[2]);
9923 a_src
= XEXP (xop
[1], 0);
9924 a_dest
= XEXP (xop
[0], 0);
/* A PSImode source address means the 24-bit __memx space: split it
   into a 16-bit low part and the hh8 segment byte.  */
9926 if (PSImode
== GET_MODE (a_src
))
9928 gcc_assert (as
== ADDR_SPACE_MEMX
);
9930 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
9931 loop_reg
= gen_rtx_REG (loop_mode
, 24);
9932 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
9934 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
9935 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
9939 int segment
= avr_addrspace
[as
].segment
% avr_current_arch
->n_segments
;
9942 && avr_current_arch
->n_segments
> 1)
9944 a_hi8
= GEN_INT (segment
);
9945 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
9947 else if (!ADDR_SPACE_GENERIC_P (as
))
9949 as
= ADDR_SPACE_FLASH
;
9954 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
9955 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
9960 /* FIXME: Register allocator might come up with spill fails if it is left
9961 on its own. Thus, we allocate the pointer registers by hand:
9963 X = destination address */
9965 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
9966 addr1
= lpm_addr_reg_rtx
;
9968 reg_x
= gen_rtx_REG (HImode
, REG_X
);
9969 emit_move_insn (reg_x
, a_dest
);
9972 /* FIXME: Register allocator does a bad job and might spill address
9973 register(s) inside the loop leading to additional move instruction
9974 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9975 load and store as seperate insns. Instead, we perform the copy
9976 by means of one monolithic insn. */
9978 gcc_assert (TMP_REGNO
== LPM_REGNO
);
9980 if (as
!= ADDR_SPACE_MEMX
)
9982 /* Load instruction ([E]LPM or LD) is known at compile time:
9983 Do the copy-loop inline. */
9985 rtx (*fun
) (rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
)
9986 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
9988 insn
= fun (addr0
, addr1
, xas
, loop_reg
,
9989 addr0
, addr1
, tmp_reg_rtx
, loop_reg
);
/* __memx source: the load instruction is selected at run time by the
   hh8 byte in r23; use the movmemx patterns.  */
9993 rtx loop_reg16
= gen_rtx_REG (HImode
, 24);
9994 rtx r23
= gen_rtx_REG (QImode
, 23);
9995 rtx (*fun
) (rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
, rtx
)
9996 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
9998 emit_move_insn (r23
, a_hi8
);
10000 insn
= fun (addr0
, addr1
, xas
, loop_reg
, addr0
, addr1
,
10001 lpm_reg_rtx
, loop_reg16
, r23
, r23
, GEN_INT (avr_addr
.rampz
));
10004 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
10011 /* Print assembler for movmem_qi, movmem_hi insns...
10015 $3, $7 : Loop register
10016 $6 : Scratch register
10018 ...and movmem_qi_elpm, movmem_hi_elpm insns.
10020 $8, $9 : hh8 (& src)
/* NOTE(review): garbled extraction -- the return type, the surrounding
   switch on AS, `default: gcc_unreachable()', RAMPZ setup for the
   FLASH1..5 arms, `break's and if/else braces are missing from this
   span.  Annotations below cover visible fragments only.  */
10025 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*xop
, int *plen
)
10027 addr_space_t as
= (addr_space_t
) INTVAL (xop
[2]);
10028 enum machine_mode loop_mode
= GET_MODE (xop
[3]);
/* SBIW needs the loop counter in r24..r31 (ADDW_REGS).  */
10030 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, xop
[3]);
10032 gcc_assert (REG_X
== REGNO (xop
[0])
10033 && REG_Z
== REGNO (xop
[1]));
/* Local label "0:" is the head of the copy loop.  */
10040 avr_asm_len ("0:", xop
, plen
, 0);
10042 /* Load with post-increment */
10049 case ADDR_SPACE_GENERIC
:
10051 avr_asm_len ("ld %6,%a1+", xop
, plen
, 1);
10054 case ADDR_SPACE_FLASH
:
10057 avr_asm_len ("lpm %6,%a1+", xop
, plen
, 1);
10059 avr_asm_len ("lpm" CR_TAB
10060 "adiw %1,1", xop
, plen
, 2);
10063 case ADDR_SPACE_FLASH1
:
10064 case ADDR_SPACE_FLASH2
:
10065 case ADDR_SPACE_FLASH3
:
10066 case ADDR_SPACE_FLASH4
:
10067 case ADDR_SPACE_FLASH5
:
10069 if (AVR_HAVE_ELPMX
)
10070 avr_asm_len ("elpm %6,%a1+", xop
, plen
, 1);
10072 avr_asm_len ("elpm" CR_TAB
10073 "adiw %1,1", xop
, plen
, 2);
10077 /* Store with post-increment */
10079 avr_asm_len ("st %a0+,%6", xop
, plen
, 1);
10081 /* Decrement loop-counter and set Z-flag */
10083 if (QImode
== loop_mode
)
10085 avr_asm_len ("dec %3", xop
, plen
, 1);
10089 avr_asm_len ("sbiw %3,1", xop
, plen
, 1);
10093 avr_asm_len ("subi %A3,1" CR_TAB
10094 "sbci %B3,0", xop
, plen
, 2);
10097 /* Loop until zero */
10099 return avr_asm_len ("brne 0b", xop
, plen
, 1);
10104 /* Helper for __builtin_avr_delay_cycles */
/* NOTE(review): garbled extraction -- the return type, braces and some
   statements (e.g. the loop_count clamp in the 6..767 branch and the
   decrement in the final NOP loops) are missing from this span.
   The visible structure greedily consumes the requested cycle budget
   with 4-, 3-, 2- and 1-byte-counter delay loops, then pads the
   remainder with 2- and 1-cycle NOP sequences.  */
10107 avr_expand_delay_cycles (rtx operands0
)
10109 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
);
10110 unsigned HOST_WIDE_INT cycles_used
;
10111 unsigned HOST_WIDE_INT loop_count
;
/* SImode counter loop: 6 cycles/iteration + 9 cycles overhead.  */
10113 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
10115 loop_count
= ((cycles
- 9) / 6) + 1;
10116 cycles_used
= ((loop_count
- 1) * 6) + 9;
10117 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
)));
10118 cycles
-= cycles_used
;
/* 24-bit counter loop: 5 cycles/iteration + 7 overhead.  */
10121 if (IN_RANGE (cycles
, 262145, 83886081))
10123 loop_count
= ((cycles
- 7) / 5) + 1;
10124 if (loop_count
> 0xFFFFFF)
10125 loop_count
= 0xFFFFFF;
10126 cycles_used
= ((loop_count
- 1) * 5) + 7;
10127 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
)));
10128 cycles
-= cycles_used
;
/* HImode counter loop: 4 cycles/iteration + 5 overhead.  */
10131 if (IN_RANGE (cycles
, 768, 262144))
10133 loop_count
= ((cycles
- 5) / 4) + 1;
10134 if (loop_count
> 0xFFFF)
10135 loop_count
= 0xFFFF;
10136 cycles_used
= ((loop_count
- 1) * 4) + 5;
10137 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
)));
10138 cycles
-= cycles_used
;
/* QImode counter loop: 3 cycles/iteration.  */
10141 if (IN_RANGE (cycles
, 6, 767))
10143 loop_count
= cycles
/ 3;
10144 if (loop_count
> 255)
10146 cycles_used
= loop_count
* 3;
10147 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
)));
10148 cycles
-= cycles_used
;
/* Pad the residue with explicit NOPs.  */
10151 while (cycles
>= 2)
10153 emit_insn (gen_nopv (GEN_INT(2)));
10159 emit_insn (gen_nopv (GEN_INT(1)));
10165 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
10168 avr_double_int_push_digit (double_int val
, int base
,
10169 unsigned HOST_WIDE_INT digit
)
10172 ? double_int_lshift (val
, 32, 64, false)
10173 : double_int_mul (val
, uhwi_to_double_int (base
));
10175 return double_int_add (val
, uhwi_to_double_int (digit
));
10179 /* Compute the image of x under f, i.e. perform x --> f(x) */
10182 avr_map (double_int f
, int x
)
10184 return 0xf & double_int_to_uhwi (double_int_rshift (f
, 4*x
, 64, false));
10188 /* Return some metrics of map A. */
/* NOTE(review): garbled extraction -- the enum name and several
   enumerators (MAP_FIXED_0_7, MAP_NONFIXED_0_7, MAP_PREIMAGE_0_7),
   the function's return type, braces and some `metric' accumulation
   lines are missing from this span.  */
10192 /* Number of fixed points in { 0 ... 7 } */
10195 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10198 /* Mask representing the fixed points in { 0 ... 7 } */
10199 MAP_MASK_FIXED_0_7
,
10201 /* Size of the preimage of { 0 ... 7 } */
10204 /* Mask that represents the preimage of { f } */
10205 MAP_MASK_PREIMAGE_F
/* Walk the 8 low nibbles of A and accumulate the metric selected by
   MODE: either a count or a bit mask over positions 0..7.  */
10209 avr_map_metric (double_int a
, int mode
)
10211 unsigned i
, metric
= 0;
10213 for (i
= 0; i
< 8; i
++)
10215 unsigned ai
= avr_map (a
, i
);
10217 if (mode
== MAP_FIXED_0_7
)
10219 else if (mode
== MAP_NONFIXED_0_7
)
10220 metric
+= ai
< 8 && ai
!= i
;
10221 else if (mode
== MAP_MASK_FIXED_0_7
)
10222 metric
|= ((unsigned) (ai
== i
)) << i
;
10223 else if (mode
== MAP_PREIMAGE_0_7
)
10225 else if (mode
== MAP_MASK_PREIMAGE_F
)
10226 metric
|= ((unsigned) (ai
== 0xf)) << i
;
10235 /* Return true if IVAL has a 0xf in its hexadecimal representation
10236 and false, otherwise. Only nibbles 0..7 are taken into account.
10237 Used as constraint helper for C0f and Cxf. */
10240 avr_has_nibble_0xf (rtx ival
)
10242 return 0 != avr_map_metric (rtx_to_double_int (ival
), MAP_MASK_PREIMAGE_F
);
10246 /* We have a set of bits that are mapped by a function F.
10247 Try to decompose F by means of a second function G so that
10253 cost (F o G^-1) + cost (G) < cost (F)
10255 Example: Suppose builtin insert_bits supplies us with the map
10256 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10257 nibble of the result, we can just as well rotate the bits before inserting
10258 them and use the map 0x7654ffff which is cheaper than the original map.
10259 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
/* NOTE(review): garbled extraction -- the struct/typedef header of
   avr_map_op_t and the field declarations after several comments
   (arg, ginv, cost, map, str) are missing from this span.  */
10263 /* tree code of binary function G */
10264 enum tree_code code
;
10266 /* The constant second argument of G */
10269 /* G^-1, the inverse of G (*, arg) */
10272 /* The cost of appplying G (*, arg) */
10275 /* The composition F o G^-1 (*, arg) for some function F */
10278 /* For debug purpose only */
/* Candidate operations G: the identity, all 8 single-bit rotations,
   and some shifts, each with its inverse map and an insn-cost.  */
10282 static const avr_map_op_t avr_map_op
[] =
10284 { LROTATE_EXPR
, 0, 0x76543210, 0, { 0, 0 }, "id" },
10285 { LROTATE_EXPR
, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10286 { LROTATE_EXPR
, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10287 { LROTATE_EXPR
, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10288 { LROTATE_EXPR
, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10289 { LROTATE_EXPR
, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10290 { LROTATE_EXPR
, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10291 { LROTATE_EXPR
, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10292 { RSHIFT_EXPR
, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10293 { RSHIFT_EXPR
, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10294 { RSHIFT_EXPR
, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10295 { RSHIFT_EXPR
, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10296 { RSHIFT_EXPR
, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10297 { LSHIFT_EXPR
, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10298 { LSHIFT_EXPR
, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10302 /* Try to decompose F as F = (F o G^-1) o G as described above.
10303 The result is a struct representing F o G^-1 and G.
10304 If result.cost < 0 then such a decomposition does not exist. */
/* NOTE(review): garbled extraction -- the declarations of `i' and
   `xop[]', the body braces, the "no avail" early return and the final
   `return f_ginv' are missing from this span.  */
10306 static avr_map_op_t
10307 avr_map_decompose (double_int f
, const avr_map_op_t
*g
, bool val_const_p
)
10310 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
10311 avr_map_op_t f_ginv
= *g
;
10312 double_int ginv
= uhwi_to_double_int (g
->ginv
);
10316 /* Step 1: Computing F o G^-1 */
/* Build the composed map nibble by nibble from the high end down.  */
10318 for (i
= 7; i
>= 0; i
--)
10320 int x
= avr_map (f
, i
);
10324 x
= avr_map (ginv
, x
);
10326 /* The bit is no element of the image of G: no avail (cost = -1) */
10332 f_ginv
.map
= avr_double_int_push_digit (f_ginv
.map
, 16, x
);
10335 /* Step 2: Compute the cost of the operations.
10336 The overall cost of doing an operation prior to the insertion is
10337 the cost of the insertion plus the cost of the operation. */
10339 /* Step 2a: Compute cost of F o G^-1 */
10341 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
10343 /* The mapping consists only of fixed points and can be folded
10344 to AND/OR logic in the remainder. Reasonable cost is 3. */
10346 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
10352 /* Get the cost of the insn by calling the output worker with some
10353 fake values. Mimic effect of reloading xop[3]: Unused operands
10354 are mapped to 0 and used operands are reloaded to xop[0]. */
10356 xop
[0] = all_regs_rtx
[24];
10357 xop
[1] = gen_int_mode (double_int_to_uhwi (f_ginv
.map
), SImode
);
10358 xop
[2] = all_regs_rtx
[25];
10359 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
/* Output worker in length-computation mode: fills in f_ginv.cost.  */
10361 avr_out_insert_bits (xop
, &f_ginv
.cost
);
10363 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
10366 /* Step 2b: Add cost of G */
10368 f_ginv
.cost
+= g
->cost
;
10370 if (avr_log
.builtin
)
10371 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
10377 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10378 XOP[0] and XOP[1] don't overlap.
10379 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10380 If FIXP_P = false: Just move the bit if its position in the destination
10381 is different to its source position. */
/* NOTE(review): garbled extraction -- the return type, declarations of
   `b' and `bit_dest', braces, and the skip condition's first operand
   (apparently `bit_src >= 8', the "source unused" case) are missing
   from this span.  */
10384 avr_move_bits (rtx
*xop
, double_int map
, bool fixp_p
, int *plen
)
10388 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10389 int t_bit_src
= -1;
10391 /* We order the operations according to the requested source bit b. */
/* Grouping by source bit B lets one BST serve several BLDs.  */
10393 for (b
= 0; b
< 8; b
++)
10394 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
10396 int bit_src
= avr_map (map
, bit_dest
);
10400 /* Same position: No need to copy as requested by FIXP_P. */
10401 || (bit_dest
== bit_src
&& !fixp_p
))
10404 if (t_bit_src
!= bit_src
)
10406 /* Source bit is not yet in T: Store it to T. */
10408 t_bit_src
= bit_src
;
10410 xop
[3] = GEN_INT (bit_src
);
10411 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
10414 /* Load destination bit with T. */
10416 xop
[3] = GEN_INT (bit_dest
);
10417 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
10422 /* PLEN == 0: Print assembler code for `insert_bits'.
10423 PLEN != 0: Compute code length in bytes.
10426 OP[1]: The mapping composed of nibbles. If nibble no. N is
10427 0: Bit N of result is copied from bit OP[2].0
10429 7: Bit N of result is copied from bit OP[2].7
10430 0xf: Bit N of result is copied from bit OP[3].N
10431 OP[2]: Bits to be inserted
10432 OP[3]: Target value */
/* NOTE(review): garbled extraction -- the return type, the local
   `xop[]' copy of OP, `*plen = 0' initialization, several braces and
   the trailing `return ""' are missing from this span.  */
10435 avr_out_insert_bits (rtx
*op
, int *plen
)
10437 double_int map
= rtx_to_double_int (op
[1]);
10438 unsigned mask_fixed
;
10439 bool fixp_p
= true;
10446 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
/* When printing asm, echo the map as a comment for readability.  */
10450 else if (flag_print_asm_name
)
10451 fprintf (asm_out_file
,
10452 ASM_COMMENT_START
"map = 0x%08" HOST_LONG_FORMAT
"x\n",
10453 double_int_to_uhwi (map
) & GET_MODE_MASK (SImode
));
10455 /* If MAP has fixed points it might be better to initialize the result
10456 with the bits to be inserted instead of moving all bits by hand. */
10458 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
10460 if (REGNO (xop
[0]) == REGNO (xop
[1]))
10462 /* Avoid early-clobber conflicts */
10464 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
10465 xop
[1] = tmp_reg_rtx
;
10469 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
10471 /* XOP[2] is used and reloaded to XOP[0] already */
10473 int n_fix
= 0, n_nofix
= 0;
10475 gcc_assert (REG_P (xop
[2]));
10477 /* Get the code size of the bit insertions; once with all bits
10478 moved and once with fixed points omitted. */
10480 avr_move_bits (xop
, map
, true, &n_fix
);
10481 avr_move_bits (xop
, map
, false, &n_nofix
);
/* If skipping the fixed points saves more than 3 words, merge the
   fixed bits with an EOR/ANDI/EOR mask dance instead.  */
10483 if (fixp_p
&& n_fix
- n_nofix
> 3)
10485 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
10487 avr_asm_len ("eor %0,%1" CR_TAB
10488 "andi %0,%3" CR_TAB
10489 "eor %0,%1", xop
, plen
, 3);
10495 /* XOP[2] is unused */
10497 if (fixp_p
&& mask_fixed
)
10499 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
10504 /* Move/insert remaining bits. */
10506 avr_move_bits (xop
, map
, fixp_p
, plen
);
10512 /* IDs for all the AVR builtins. */
/* NOTE(review): garbled extraction -- most enumerators (NOP, SEI, CLI,
   WDR, SLEEP, SWAP, FMUL, FMULS) and the enum's braces are missing
   from this span; only three IDs are visible.  */
10514 enum avr_builtin_id
10522 AVR_BUILTIN_INSERT_BITS
,
10525 AVR_BUILTIN_FMULSU
,
10526 AVR_BUILTIN_DELAY_CYCLES
10530 avr_init_builtin_int24 (void)
10532 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
10533 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
10535 (*lang_hooks
.types
.register_builtin_type
) (int24_type
, "__int24");
10536 (*lang_hooks
.types
.register_builtin_type
) (uint24_type
, "__uint24");
/* Helper macro used by avr_init_builtins below: registers builtin NAME
   with function type TYPE and function code CODE as a machine-specific
   (BUILT_IN_MD) builtin.  NOTE(review): the macro's continuation lines
   are partially missing in this garbled extraction -- presumably a
   do/while(0) wrapper; do not insert anything between the
   backslash-continued lines.  */
10539 #define DEF_BUILTIN(NAME, TYPE, CODE) \
10542 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10543 NULL, NULL_TREE); \
10547 /* Implement `TARGET_INIT_BUILTINS' */
10548 /* Set up all builtin functions for this target. */
/* NOTE(review): garbled extraction -- the return type, braces, the
   NULL_TREE terminators of several build_function_type_list calls,
   the char-typed argument nodes of the fmuls/fmulsu signatures and
   some DEF_BUILTIN id arguments are missing from this span.  */
10551 avr_init_builtins (void)
/* Function-type nodes for each builtin signature.  */
10553 tree void_ftype_void
10554 = build_function_type_list (void_type_node
, NULL_TREE
);
10555 tree uchar_ftype_uchar
10556 = build_function_type_list (unsigned_char_type_node
,
10557 unsigned_char_type_node
,
10559 tree uint_ftype_uchar_uchar
10560 = build_function_type_list (unsigned_type_node
,
10561 unsigned_char_type_node
,
10562 unsigned_char_type_node
,
10564 tree int_ftype_char_char
10565 = build_function_type_list (integer_type_node
,
10569 tree int_ftype_char_uchar
10570 = build_function_type_list (integer_type_node
,
10572 unsigned_char_type_node
,
10574 tree void_ftype_ulong
10575 = build_function_type_list (void_type_node
,
10576 long_unsigned_type_node
,
10579 tree uchar_ftype_ulong_uchar_uchar
10580 = build_function_type_list (unsigned_char_type_node
,
10581 long_unsigned_type_node
,
10582 unsigned_char_type_node
,
10583 unsigned_char_type_node
,
/* Register every AVR builtin with its signature and ID.  */
10586 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void
, AVR_BUILTIN_NOP
);
10587 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void
, AVR_BUILTIN_SEI
);
10588 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void
, AVR_BUILTIN_CLI
);
10589 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void
, AVR_BUILTIN_WDR
);
10590 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void
, AVR_BUILTIN_SLEEP
);
10591 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar
, AVR_BUILTIN_SWAP
);
10592 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong
,
10593 AVR_BUILTIN_DELAY_CYCLES
);
10595 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar
,
10597 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char
,
10598 AVR_BUILTIN_FMULS
);
10599 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar
,
10600 AVR_BUILTIN_FMULSU
);
10602 DEF_BUILTIN ("__builtin_avr_insert_bits", uchar_ftype_ulong_uchar_uchar
,
10603 AVR_BUILTIN_INSERT_BITS
);
/* Also register the 24-bit integer types.  */
10605 avr_init_builtin_int24 ();
/* Descriptor tying an insn code to a builtin's name and ID; the
   bdesc_* tables below drive generic 1-, 2- and 3-operand expansion
   in avr_expand_builtin.  NOTE(review): garbled extraction -- braces
   and the table names (presumably bdesc_1arg, bdesc_2arg, bdesc_3arg)
   are missing from this span.  */
10610 struct avr_builtin_description
10612 const enum insn_code icode
;
10613 const char *const name
;
10614 const enum avr_builtin_id id
;
/* One-operand builtins.  */
10617 static const struct avr_builtin_description
10620 { CODE_FOR_rotlqi3_4
, "__builtin_avr_swap", AVR_BUILTIN_SWAP
}
/* Two-operand builtins.  */
10623 static const struct avr_builtin_description
10626 { CODE_FOR_fmul
, "__builtin_avr_fmul", AVR_BUILTIN_FMUL
},
10627 { CODE_FOR_fmuls
, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS
},
10628 { CODE_FOR_fmulsu
, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU
}
/* Three-operand builtins.  */
10631 static const struct avr_builtin_description
10634 { CODE_FOR_insert_bits
, "__builtin_avr_insert_bits",
10635 AVR_BUILTIN_INSERT_BITS
}
10638 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10641 avr_expand_unop_builtin (enum insn_code icode
, tree exp
,
10645 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10646 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10647 enum machine_mode op0mode
= GET_MODE (op0
);
10648 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10649 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10652 || GET_MODE (target
) != tmode
10653 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10655 target
= gen_reg_rtx (tmode
);
10658 if (op0mode
== SImode
&& mode0
== HImode
)
10661 op0
= gen_lowpart (HImode
, op0
);
10664 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
10666 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10667 op0
= copy_to_mode_reg (mode0
, op0
);
10669 pat
= GEN_FCN (icode
) (target
, op0
);
10679 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10682 avr_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
)
10685 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10686 tree arg1
= CALL_EXPR_ARG (exp
, 1);
10687 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10688 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10689 enum machine_mode op0mode
= GET_MODE (op0
);
10690 enum machine_mode op1mode
= GET_MODE (op1
);
10691 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10692 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10693 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
10696 || GET_MODE (target
) != tmode
10697 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10699 target
= gen_reg_rtx (tmode
);
10702 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
10705 op0
= gen_lowpart (HImode
, op0
);
10708 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
10711 op1
= gen_lowpart (HImode
, op1
);
10714 /* In case the insn wants input operands in modes different from
10715 the result, abort. */
10717 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
10718 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
10720 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10721 op0
= copy_to_mode_reg (mode0
, op0
);
10723 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
10724 op1
= copy_to_mode_reg (mode1
, op1
);
10726 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
10735 /* Subroutine of avr_expand_builtin to take care of 3-operand insns. */
10738 avr_expand_triop_builtin (enum insn_code icode
, tree exp
, rtx target
)
10741 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10742 tree arg1
= CALL_EXPR_ARG (exp
, 1);
10743 tree arg2
= CALL_EXPR_ARG (exp
, 2);
10744 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10745 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10746 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10747 enum machine_mode op0mode
= GET_MODE (op0
);
10748 enum machine_mode op1mode
= GET_MODE (op1
);
10749 enum machine_mode op2mode
= GET_MODE (op2
);
10750 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10751 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10752 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
10753 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
10756 || GET_MODE (target
) != tmode
10757 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10759 target
= gen_reg_rtx (tmode
);
10762 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
10765 op0
= gen_lowpart (HImode
, op0
);
10768 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
10771 op1
= gen_lowpart (HImode
, op1
);
10774 if ((op2mode
== SImode
|| op2mode
== VOIDmode
) && mode2
== HImode
)
10777 op2
= gen_lowpart (HImode
, op2
);
10780 /* In case the insn wants input operands in modes different from
10781 the result, abort. */
10783 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
10784 && (op1mode
== mode1
|| op1mode
== VOIDmode
)
10785 && (op2mode
== mode2
|| op2mode
== VOIDmode
));
10787 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10788 op0
= copy_to_mode_reg (mode0
, op0
);
10790 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
10791 op1
= copy_to_mode_reg (mode1
, op1
);
10793 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
10794 op2
= copy_to_mode_reg (mode2
, op2
);
10796 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
10806 /* Expand an expression EXP that calls a built-in function,
10807 with result going to TARGET if that's convenient
10808 (and in mode MODE if that's convenient).
10809 SUBTARGET may be used as the target for computing one of EXP's operands.
10810 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): garbled extraction -- the return type, declarations of
   `i', `arg0', `op0', the `switch (id)' header, case braces, `break'
   and `return' statements are missing from this span.  */
10813 avr_expand_builtin (tree exp
, rtx target
,
10814 rtx subtarget ATTRIBUTE_UNUSED
,
10815 enum machine_mode mode ATTRIBUTE_UNUSED
,
10816 int ignore ATTRIBUTE_UNUSED
)
10819 const struct avr_builtin_description
*d
;
10820 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
10821 const char* bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
10822 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
/* Simple no-operand builtins expand to a single insn each.  */
10828 case AVR_BUILTIN_NOP
:
10829 emit_insn (gen_nopv (GEN_INT(1)));
10832 case AVR_BUILTIN_SEI
:
10833 emit_insn (gen_enable_interrupt ());
10836 case AVR_BUILTIN_CLI
:
10837 emit_insn (gen_disable_interrupt ());
10840 case AVR_BUILTIN_WDR
:
10841 emit_insn (gen_wdr ());
10844 case AVR_BUILTIN_SLEEP
:
10845 emit_insn (gen_sleep ());
/* delay_cycles requires a compile-time constant argument.  */
10848 case AVR_BUILTIN_DELAY_CYCLES
:
10850 arg0
= CALL_EXPR_ARG (exp
, 0);
10851 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10853 if (! CONST_INT_P (op0
))
10854 error ("%s expects a compile time integer constant", bname
);
10856 avr_expand_delay_cycles (op0
);
/* insert_bits: the map (first argument) must be constant; actual
   expansion falls through to the generic bdesc_3arg table below.  */
10860 case AVR_BUILTIN_INSERT_BITS
:
10862 arg0
= CALL_EXPR_ARG (exp
, 0);
10863 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10865 if (!CONST_INT_P (op0
))
10867 error ("%s expects a compile time long integer constant"
10868 " as first argument", bname
);
/* Table-driven expansion for 1-, 2- and 3-operand builtins.  */
10874 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
10876 return avr_expand_unop_builtin (d
->icode
, exp
, target
);
10878 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
10880 return avr_expand_binop_builtin (d
->icode
, exp
, target
);
10882 for (i
= 0, d
= bdesc_3arg
; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
10884 return avr_expand_triop_builtin (d
->icode
, exp
, target
);
10886 gcc_unreachable ();
10890 /* Implement `TARGET_FOLD_BUILTIN'. */
/* NOTE(review): garbled extraction -- the return type, the `switch
   (fcode)' header, declarations of `tmap', `i' and `g', braces,
   `break'/`return' statements and the `changed' bookkeeping are
   missing from this span.  Only the INSERT_BITS folding logic is
   visible.  */
10893 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
10894 bool ignore ATTRIBUTE_UNUSED
)
10896 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
10897 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
10907 case AVR_BUILTIN_INSERT_BITS
:
10909 tree tbits
= arg
[1];
10910 tree tval
= arg
[2];
10912 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
10913 double_int map
= tree_to_double_int (arg
[0]);
10914 bool changed
= false;
10916 avr_map_op_t best_g
;
10918 tmap
= double_int_to_tree (map_type
, map
);
10920 if (TREE_CODE (tval
) != INTEGER_CST
10921 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
10923 /* There are no F in the map, i.e. 3rd operand is unused.
10924 Replace that argument with some constant to render
10925 respective input unused. */
10927 tval
= build_int_cst (val_type
, 0);
10931 if (TREE_CODE (tbits
) != INTEGER_CST
10932 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
10934 /* Similar for the bits to be inserted. If they are unused,
10935 we can just as well pass 0. */
10937 tbits
= build_int_cst (val_type
, 0);
10940 if (TREE_CODE (tbits
) == INTEGER_CST
)
10942 /* Inserting bits known at compile time is easy and can be
10943 performed by AND and OR with appropriate masks. */
10945 int bits
= TREE_INT_CST_LOW (tbits
);
10946 int mask_ior
= 0, mask_and
= 0xff;
10948 for (i
= 0; i
< 8; i
++)
10950 int mi
= avr_map (map
, i
);
10954 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
10955 else mask_and
&= ~(1 << i
);
10959 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
10960 build_int_cst (val_type
, mask_ior
));
10961 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
10962 build_int_cst (val_type
, mask_and
));
10966 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
10968 /* If bits don't change their position we can use vanilla logic
10969 to merge the two arguments. */
10971 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
10973 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
10974 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
/* (tbits ^ tval) & ~F-mask ^ tval merges the two values bitwise.  */
10976 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
10977 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
10978 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
10981 /* Try to decomposing map to reduce overall cost. */
10983 if (avr_log
.builtin
)
10984 avr_edump ("\n%?: %X\n%?: ROL cost: ", map
);
10986 best_g
= avr_map_op
[0];
10987 best_g
.cost
= 1000;
/* Search the candidate table for the cheapest decomposition.  */
10989 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
10992 = avr_map_decompose (map
, avr_map_op
+ i
,
10993 TREE_CODE (tval
) == INTEGER_CST
);
10995 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
10999 if (avr_log
.builtin
)
11002 if (best_g
.arg
== 0)
11003 /* No optimization found */
11006 /* Apply operation G to the 2nd argument. */
11008 if (avr_log
.builtin
)
11009 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
11010 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
11012 /* Do right-shifts arithmetically: They copy the MSB instead of
11013 shifting in a non-usable value (0) as with logic right-shift. */
11015 tbits
= fold_convert (signed_char_type_node
, tbits
);
11016 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
11017 build_int_cst (val_type
, best_g
.arg
));
11018 tbits
= fold_convert (val_type
, tbits
);
11020 /* Use map o G^-1 instead of original map to undo the effect of G. */
11022 tmap
= double_int_to_tree (map_type
, best_g
.map
);
11024 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
11025 } /* AVR_BUILTIN_INSERT_BITS */
/* The target hook vector: TARGET_INITIALIZER expands to an initializer
   that binds all the avr_* worker functions defined above (via the
   TARGET_* macro definitions earlier in this file) to the middle end.  */
11033 struct gcc_target targetm
= TARGET_INITIALIZER
;
/* Garbage-collector roots for this file, generated by gengtype.  */
11035 #include "gt-avr.h"