1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initializers must be used). */
83 const avr_addrspace_t avr_addrspace
[] =
85 { ADDR_SPACE_RAM
, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix
[6] =
107 /* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
112 /* SREG: The processor status */
115 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
121 /* RAMPZ: The high byte of 24-bit address used with ELPM */
124 /* SP: The stack pointer and its low and high byte */
129 static avr_addr_t avr_addr
;
132 /* Prototypes for local helper functions. */
134 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
135 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
136 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
137 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
138 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
139 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
141 static int avr_naked_function_p (tree
);
142 static int interrupt_function_p (tree
);
143 static int signal_function_p (tree
);
144 static int avr_OS_task_function_p (tree
);
145 static int avr_OS_main_function_p (tree
);
146 static int avr_regs_to_save (HARD_REG_SET
*);
147 static int get_sequence_length (rtx insns
);
148 static int sequent_regs_live (void);
149 static const char *ptrreg_to_str (int);
150 static const char *cond_string (enum rtx_code
);
151 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
152 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
154 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
155 static struct machine_function
* avr_init_machine_status (void);
158 /* Prototypes for hook implementors if needed before their implementation. */
160 static bool avr_rtx_costs (rtx
, int, int, int, int *, bool);
163 /* Allocate registers from r25 to r8 for parameters for function calls. */
164 #define FIRST_CUM_REG 26
166 /* Implicit target register of LPM instruction (R0) */
167 extern GTY(()) rtx lpm_reg_rtx
;
170 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
171 extern GTY(()) rtx lpm_addr_reg_rtx
;
172 rtx lpm_addr_reg_rtx
;
174 /* Temporary register RTX (reg:QI TMP_REGNO) */
175 extern GTY(()) rtx tmp_reg_rtx
;
178 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
179 extern GTY(()) rtx zero_reg_rtx
;
182 /* RTXs for all general purpose registers as QImode */
183 extern GTY(()) rtx all_regs_rtx
[32];
184 rtx all_regs_rtx
[32];
186 /* SREG, the processor status */
187 extern GTY(()) rtx sreg_rtx
;
190 /* RAMP* special function registers */
191 extern GTY(()) rtx rampd_rtx
;
192 extern GTY(()) rtx rampx_rtx
;
193 extern GTY(()) rtx rampy_rtx
;
194 extern GTY(()) rtx rampz_rtx
;
200 /* RTX containing the strings "" and "e", respectively */
201 static GTY(()) rtx xstring_empty
;
202 static GTY(()) rtx xstring_e
;
204 /* Preprocessor macros to define depending on MCU type. */
205 const char *avr_extra_arch_macro
;
207 /* Current architecture. */
208 const struct base_arch_s
*avr_current_arch
;
210 /* Current device. */
211 const struct mcu_type_s
*avr_current_device
;
213 /* Section to put switch tables in. */
214 static GTY(()) section
*progmem_swtable_section
;
216 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
217 or to address space __flash*. */
218 static GTY(()) section
*progmem_section
[6];
220 /* Condition for insns/expanders from avr-dimode.md. */
221 bool avr_have_dimode
= true;
223 /* To track if code will use .bss and/or .data. */
224 bool avr_need_clear_bss_p
= false;
225 bool avr_need_copy_data_p
= false;
229 /* Custom function to count number of set bits. */
232 avr_popcount (unsigned int val
)
246 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
247 Return true if the least significant N_BYTES bytes of XVAL all have a
248 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
249 of integers which contains an integer N iff bit N of POP_MASK is set. */
252 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
256 enum machine_mode mode
= GET_MODE (xval
);
258 if (VOIDmode
== mode
)
261 for (i
= 0; i
< n_bytes
; i
++)
263 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
264 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
266 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
274 avr_option_override (void)
276 flag_delete_null_pointer_checks
= 0;
278 /* caller-save.c looks for call-clobbered hard registers that are assigned
279 to pseudos that cross calls and tries so save-restore them around calls
280 in order to reduce the number of stack slots needed.
282 This might lead to situations where reload is no longer able to cope
283 with the challenge of AVR's very few address registers and fails to
284 perform the requested spills. */
287 flag_caller_saves
= 0;
289 /* Unwind tables currently require a frame pointer for correctness,
290 see toplev.c:process_options(). */
292 if ((flag_unwind_tables
293 || flag_non_call_exceptions
294 || flag_asynchronous_unwind_tables
)
295 && !ACCUMULATE_OUTGOING_ARGS
)
297 flag_omit_frame_pointer
= 0;
300 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
301 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
302 avr_extra_arch_macro
= avr_current_device
->macro
;
304 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
306 /* SREG: Status Register containing flags like I (global IRQ) */
307 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
309 /* RAMPZ: Address' high part when loading via ELPM */
310 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
312 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
313 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
314 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
315 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
317 /* SP: Stack Pointer (SP_H:SP_L) */
318 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
319 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
321 init_machine_status
= avr_init_machine_status
;
323 avr_log_set_avr_log();
326 /* Function to set up the backend function structure. */
328 static struct machine_function
*
329 avr_init_machine_status (void)
331 return ggc_alloc_cleared_machine_function ();
335 /* Implement `INIT_EXPANDERS'. */
336 /* The function works like a singleton. */
339 avr_init_expanders (void)
343 for (regno
= 0; regno
< 32; regno
++)
344 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
346 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
347 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
348 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
350 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
352 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
353 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
354 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
355 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
356 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
358 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
359 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
363 /* Return register class for register R. */
366 avr_regno_reg_class (int r
)
368 static const enum reg_class reg_class_tab
[] =
372 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
373 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
374 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
375 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
377 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
378 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
380 ADDW_REGS
, ADDW_REGS
,
382 POINTER_X_REGS
, POINTER_X_REGS
,
384 POINTER_Y_REGS
, POINTER_Y_REGS
,
386 POINTER_Z_REGS
, POINTER_Z_REGS
,
392 return reg_class_tab
[r
];
399 avr_scalar_mode_supported_p (enum machine_mode mode
)
404 return default_scalar_mode_supported_p (mode
);
408 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
411 avr_decl_flash_p (tree decl
)
413 if (TREE_CODE (decl
) != VAR_DECL
414 || TREE_TYPE (decl
) == error_mark_node
)
419 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
423 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
424 address space and FALSE, otherwise. */
427 avr_decl_memx_p (tree decl
)
429 if (TREE_CODE (decl
) != VAR_DECL
430 || TREE_TYPE (decl
) == error_mark_node
)
435 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
439 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
442 avr_mem_flash_p (rtx x
)
445 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
449 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
450 address space and FALSE, otherwise. */
453 avr_mem_memx_p (rtx x
)
456 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
460 /* A helper for the subsequent function attribute used to dig for
461 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
464 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
466 if (FUNCTION_DECL
== TREE_CODE (func
))
468 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
473 func
= TREE_TYPE (func
);
476 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
477 || TREE_CODE (func
) == METHOD_TYPE
);
479 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
482 /* Return nonzero if FUNC is a naked function. */
485 avr_naked_function_p (tree func
)
487 return avr_lookup_function_attribute1 (func
, "naked");
490 /* Return nonzero if FUNC is an interrupt function as specified
491 by the "interrupt" attribute. */
494 interrupt_function_p (tree func
)
496 return avr_lookup_function_attribute1 (func
, "interrupt");
499 /* Return nonzero if FUNC is a signal function as specified
500 by the "signal" attribute. */
503 signal_function_p (tree func
)
505 return avr_lookup_function_attribute1 (func
, "signal");
508 /* Return nonzero if FUNC is an OS_task function. */
511 avr_OS_task_function_p (tree func
)
513 return avr_lookup_function_attribute1 (func
, "OS_task");
516 /* Return nonzero if FUNC is an OS_main function. */
519 avr_OS_main_function_p (tree func
)
521 return avr_lookup_function_attribute1 (func
, "OS_main");
525 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
528 avr_accumulate_outgoing_args (void)
531 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
533 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
534 what offset is correct. In some cases it is relative to
535 virtual_outgoing_args_rtx and in others it is relative to
536 virtual_stack_vars_rtx. For example code see
537 gcc.c-torture/execute/built-in-setjmp.c
538 gcc.c-torture/execute/builtins/sprintf-chk.c */
540 return (TARGET_ACCUMULATE_OUTGOING_ARGS
541 && !(cfun
->calls_setjmp
542 || cfun
->has_nonlocal_label
));
546 /* Report contribution of accumulated outgoing arguments to stack size. */
549 avr_outgoing_args_size (void)
551 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
555 /* Implement `STARTING_FRAME_OFFSET'. */
556 /* This is the offset from the frame pointer register to the first stack slot
557 that contains a variable living in the frame. */
560 avr_starting_frame_offset (void)
562 return 1 + avr_outgoing_args_size ();
566 /* Return the number of hard registers to push/pop in the prologue/epilogue
567 of the current function, and optionally store these registers in SET. */
570 avr_regs_to_save (HARD_REG_SET
*set
)
573 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
574 || signal_function_p (current_function_decl
));
577 CLEAR_HARD_REG_SET (*set
);
580 /* No need to save any registers if the function never returns or
581 has the "OS_task" or "OS_main" attribute. */
582 if (TREE_THIS_VOLATILE (current_function_decl
)
583 || cfun
->machine
->is_OS_task
584 || cfun
->machine
->is_OS_main
)
587 for (reg
= 0; reg
< 32; reg
++)
589 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
590 any global register variables. */
594 if ((int_or_sig_p
&& !current_function_is_leaf
&& call_used_regs
[reg
])
595 || (df_regs_ever_live_p (reg
)
596 && (int_or_sig_p
|| !call_used_regs
[reg
])
597 /* Don't record frame pointer registers here. They are treated
598 indivitually in prologue. */
599 && !(frame_pointer_needed
600 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
603 SET_HARD_REG_BIT (*set
, reg
);
610 /* Return true if register FROM can be eliminated via register TO. */
613 avr_can_eliminate (const int from
, const int to
)
615 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
616 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
617 || ((from
== FRAME_POINTER_REGNUM
618 || from
== FRAME_POINTER_REGNUM
+ 1)
619 && !frame_pointer_needed
));
622 /* Compute offset between arg_pointer and frame_pointer. */
625 avr_initial_elimination_offset (int from
, int to
)
627 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
631 int offset
= frame_pointer_needed
? 2 : 0;
632 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
634 offset
+= avr_regs_to_save (NULL
);
635 return (get_frame_size () + avr_outgoing_args_size()
636 + avr_pc_size
+ 1 + offset
);
640 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
641 frame pointer by +STARTING_FRAME_OFFSET.
642 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
643 avoids creating add/sub of offset in nonlocal goto and setjmp. */
646 avr_builtin_setjmp_frame_value (void)
648 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
649 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
652 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
653 This is return address of function. */
655 avr_return_addr_rtx (int count
, rtx tem
)
659 /* Can only return this function's return address. Others not supported. */
665 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
666 warning (0, "'builtin_return_address' contains only 2 bytes of address");
669 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
671 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
672 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
673 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
677 /* Return 1 if the function epilogue is just a single "ret". */
680 avr_simple_epilogue (void)
682 return (! frame_pointer_needed
683 && get_frame_size () == 0
684 && avr_outgoing_args_size() == 0
685 && avr_regs_to_save (NULL
) == 0
686 && ! interrupt_function_p (current_function_decl
)
687 && ! signal_function_p (current_function_decl
)
688 && ! avr_naked_function_p (current_function_decl
)
689 && ! TREE_THIS_VOLATILE (current_function_decl
));
692 /* This function checks sequence of live registers. */
695 sequent_regs_live (void)
701 for (reg
= 0; reg
< 18; ++reg
)
705 /* Don't recognize sequences that contain global register
714 if (!call_used_regs
[reg
])
716 if (df_regs_ever_live_p (reg
))
726 if (!frame_pointer_needed
)
728 if (df_regs_ever_live_p (REG_Y
))
736 if (df_regs_ever_live_p (REG_Y
+1))
749 return (cur_seq
== live_seq
) ? live_seq
: 0;
752 /* Obtain the length sequence of insns. */
755 get_sequence_length (rtx insns
)
760 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
761 length
+= get_attr_length (insn
);
766 /* Implement INCOMING_RETURN_ADDR_RTX. */
769 avr_incoming_return_addr_rtx (void)
771 /* The return address is at the top of the stack. Note that the push
772 was via post-decrement, which means the actual address is off by one. */
773 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
776 /* Helper for expand_prologue. Emit a push of a byte register. */
779 emit_push_byte (unsigned regno
, bool frame_related_p
)
783 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
784 mem
= gen_frame_mem (QImode
, mem
);
785 reg
= gen_rtx_REG (QImode
, regno
);
787 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
789 RTX_FRAME_RELATED_P (insn
) = 1;
791 cfun
->machine
->stack_usage
++;
795 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
796 SFR is a MEM representing the memory location of the SFR.
797 If CLR_P then clear the SFR after the push using zero_reg. */
800 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
804 gcc_assert (MEM_P (sfr
));
806 /* IN __tmp_reg__, IO(SFR) */
807 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
809 RTX_FRAME_RELATED_P (insn
) = 1;
811 /* PUSH __tmp_reg__ */
812 emit_push_byte (TMP_REGNO
, frame_related_p
);
816 /* OUT IO(SFR), __zero_reg__ */
817 insn
= emit_move_insn (sfr
, const0_rtx
);
819 RTX_FRAME_RELATED_P (insn
) = 1;
824 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
827 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
828 int live_seq
= sequent_regs_live ();
830 HOST_WIDE_INT size_max
831 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
833 bool minimize
= (TARGET_CALL_PROLOGUES
837 && !cfun
->machine
->is_OS_task
838 && !cfun
->machine
->is_OS_main
);
841 && (frame_pointer_needed
842 || avr_outgoing_args_size() > 8
843 || (AVR_2_BYTE_PC
&& live_seq
> 6)
847 int first_reg
, reg
, offset
;
849 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
850 gen_int_mode (size
, HImode
));
852 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
853 gen_int_mode (live_seq
+size
, HImode
));
854 insn
= emit_insn (pattern
);
855 RTX_FRAME_RELATED_P (insn
) = 1;
857 /* Describe the effect of the unspec_volatile call to prologue_saves.
858 Note that this formulation assumes that add_reg_note pushes the
859 notes to the front. Thus we build them in the reverse order of
860 how we want dwarf2out to process them. */
862 /* The function does always set frame_pointer_rtx, but whether that
863 is going to be permanent in the function is frame_pointer_needed. */
865 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
866 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
868 : stack_pointer_rtx
),
869 plus_constant (Pmode
, stack_pointer_rtx
,
870 -(size
+ live_seq
))));
872 /* Note that live_seq always contains r28+r29, but the other
873 registers to be saved are all below 18. */
875 first_reg
= 18 - (live_seq
- 2);
877 for (reg
= 29, offset
= -live_seq
+ 1;
879 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
883 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
885 r
= gen_rtx_REG (QImode
, reg
);
886 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
889 cfun
->machine
->stack_usage
+= size
+ live_seq
;
895 for (reg
= 0; reg
< 32; ++reg
)
896 if (TEST_HARD_REG_BIT (set
, reg
))
897 emit_push_byte (reg
, true);
899 if (frame_pointer_needed
900 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
902 /* Push frame pointer. Always be consistent about the
903 ordering of pushes -- epilogue_restores expects the
904 register pair to be pushed low byte first. */
906 emit_push_byte (REG_Y
, true);
907 emit_push_byte (REG_Y
+ 1, true);
910 if (frame_pointer_needed
913 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
914 RTX_FRAME_RELATED_P (insn
) = 1;
919 /* Creating a frame can be done by direct manipulation of the
920 stack or via the frame pointer. These two methods are:
927 the optimum method depends on function type, stack and
928 frame size. To avoid a complex logic, both methods are
929 tested and shortest is selected.
931 There is also the case where SIZE != 0 and no frame pointer is
932 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
933 In that case, insn (*) is not needed.
934 We use the X register as scratch. This is safe because in X
936 In an interrupt routine, the case of SIZE != 0 together with
937 !frame_pointer_needed can only occur if the function is not a
938 leaf function and thus X has already been saved. */
941 HOST_WIDE_INT size_cfa
= size
;
942 rtx fp_plus_insns
, fp
, my_fp
;
944 gcc_assert (frame_pointer_needed
946 || !current_function_is_leaf
);
948 fp
= my_fp
= (frame_pointer_needed
950 : gen_rtx_REG (Pmode
, REG_X
));
952 if (AVR_HAVE_8BIT_SP
)
954 /* The high byte (r29) does not change:
955 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
957 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
960 /* Cut down size and avoid size = 0 so that we don't run
961 into ICE like PR52488 in the remainder. */
965 /* Don't error so that insane code from newlib still compiles
966 and does not break building newlib. As PR51345 is implemented
967 now, there are multilib variants with -msp8.
969 If user wants sanity checks he can use -Wstack-usage=
972 For CFA we emit the original, non-saturated size so that
973 the generic machinery is aware of the real stack usage and
974 will print the above diagnostic as expected. */
979 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
981 /************ Method 1: Adjust frame pointer ************/
985 /* Normally, the dwarf2out frame-related-expr interpreter does
986 not expect to have the CFA change once the frame pointer is
987 set up. Thus, we avoid marking the move insn below and
988 instead indicate that the entire operation is complete after
989 the frame pointer subtraction is done. */
991 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
992 if (frame_pointer_needed
)
994 RTX_FRAME_RELATED_P (insn
) = 1;
995 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
996 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
999 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1001 if (frame_pointer_needed
)
1003 RTX_FRAME_RELATED_P (insn
) = 1;
1004 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1005 gen_rtx_SET (VOIDmode
, fp
,
1006 plus_constant (Pmode
, fp
,
1010 /* Copy to stack pointer. Note that since we've already
1011 changed the CFA to the frame pointer this operation
1012 need not be annotated if frame pointer is needed.
1013 Always move through unspec, see PR50063.
1014 For meaning of irq_state see movhi_sp_r insn. */
1016 if (cfun
->machine
->is_interrupt
)
1019 if (TARGET_NO_INTERRUPTS
1020 || cfun
->machine
->is_signal
1021 || cfun
->machine
->is_OS_main
)
1024 if (AVR_HAVE_8BIT_SP
)
1027 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1028 fp
, GEN_INT (irq_state
)));
1029 if (!frame_pointer_needed
)
1031 RTX_FRAME_RELATED_P (insn
) = 1;
1032 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1033 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1034 plus_constant (Pmode
,
1039 fp_plus_insns
= get_insns ();
1042 /************ Method 2: Adjust Stack pointer ************/
1044 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1045 can only handle specific offsets. */
1047 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1053 insn
= emit_move_insn (stack_pointer_rtx
,
1054 plus_constant (Pmode
, stack_pointer_rtx
,
1056 RTX_FRAME_RELATED_P (insn
) = 1;
1057 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1058 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1059 plus_constant (Pmode
,
1062 if (frame_pointer_needed
)
1064 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1065 RTX_FRAME_RELATED_P (insn
) = 1;
1068 sp_plus_insns
= get_insns ();
1071 /************ Use shortest method ************/
1073 emit_insn (get_sequence_length (sp_plus_insns
)
1074 < get_sequence_length (fp_plus_insns
)
1080 emit_insn (fp_plus_insns
);
1083 cfun
->machine
->stack_usage
+= size_cfa
;
1084 } /* !minimize && size != 0 */
1089 /* Output function prologue. */
1092 expand_prologue (void)
1097 size
= get_frame_size() + avr_outgoing_args_size();
1099 /* Init cfun->machine. */
1100 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
1101 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
1102 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
1103 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
1104 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
1105 cfun
->machine
->stack_usage
= 0;
1107 /* Prologue: naked. */
1108 if (cfun
->machine
->is_naked
)
1113 avr_regs_to_save (&set
);
1115 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1117 /* Enable interrupts. */
1118 if (cfun
->machine
->is_interrupt
)
1119 emit_insn (gen_enable_interrupt ());
1121 /* Push zero reg. */
1122 emit_push_byte (ZERO_REGNO
, true);
1125 emit_push_byte (TMP_REGNO
, true);
1128 /* ??? There's no dwarf2 column reserved for SREG. */
1129 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1131 /* Clear zero reg. */
1132 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1134 /* Prevent any attempt to delete the setting of ZERO_REG! */
1135 emit_use (zero_reg_rtx
);
1137 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1138 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1141 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1144 && TEST_HARD_REG_BIT (set
, REG_X
)
1145 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1147 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1151 && (frame_pointer_needed
1152 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1153 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1155 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1159 && TEST_HARD_REG_BIT (set
, REG_Z
)
1160 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1162 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1164 } /* is_interrupt is_signal */
1166 avr_prologue_setup_frame (size
, set
);
1168 if (flag_stack_usage_info
)
1169 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1172 /* Output summary at end of function prologue. */
1175 avr_asm_function_end_prologue (FILE *file
)
1177 if (cfun
->machine
->is_naked
)
1179 fputs ("/* prologue: naked */\n", file
);
1183 if (cfun
->machine
->is_interrupt
)
1185 fputs ("/* prologue: Interrupt */\n", file
);
1187 else if (cfun
->machine
->is_signal
)
1189 fputs ("/* prologue: Signal */\n", file
);
1192 fputs ("/* prologue: function */\n", file
);
1195 if (ACCUMULATE_OUTGOING_ARGS
)
1196 fprintf (file
, "/* outgoing args size = %d */\n",
1197 avr_outgoing_args_size());
1199 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1201 fprintf (file
, "/* stack size = %d */\n",
1202 cfun
->machine
->stack_usage
);
1203 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1204 usage for offset so that SP + .L__stack_offset = return address. */
1205 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1209 /* Implement EPILOGUE_USES. */
1212 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1214 if (reload_completed
1216 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1221 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1224 emit_pop_byte (unsigned regno
)
1228 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1229 mem
= gen_frame_mem (QImode
, mem
);
1230 reg
= gen_rtx_REG (QImode
, regno
);
1232 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1235 /* Output RTL epilogue. */
1238 expand_epilogue (bool sibcall_p
)
1245 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1247 size
= get_frame_size() + avr_outgoing_args_size();
1249 /* epilogue: naked */
1250 if (cfun
->machine
->is_naked
)
1252 gcc_assert (!sibcall_p
);
1254 emit_jump_insn (gen_return ());
1258 avr_regs_to_save (&set
);
1259 live_seq
= sequent_regs_live ();
1261 minimize
= (TARGET_CALL_PROLOGUES
1264 && !cfun
->machine
->is_OS_task
1265 && !cfun
->machine
->is_OS_main
);
1269 || frame_pointer_needed
1272 /* Get rid of frame. */
1274 if (!frame_pointer_needed
)
1276 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1281 emit_move_insn (frame_pointer_rtx
,
1282 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1285 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1291 /* Try two methods to adjust stack and select shortest. */
1296 HOST_WIDE_INT size_max
;
1298 gcc_assert (frame_pointer_needed
1300 || !current_function_is_leaf
);
1302 fp
= my_fp
= (frame_pointer_needed
1304 : gen_rtx_REG (Pmode
, REG_X
));
1306 if (AVR_HAVE_8BIT_SP
)
1308 /* The high byte (r29) does not change:
1309 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1311 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1314 /* For rationale see comment in prologue generation. */
1316 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1317 if (size
> size_max
)
1319 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1321 /********** Method 1: Adjust fp register **********/
1325 if (!frame_pointer_needed
)
1326 emit_move_insn (fp
, stack_pointer_rtx
);
1328 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1330 /* Copy to stack pointer. */
1332 if (TARGET_NO_INTERRUPTS
)
1335 if (AVR_HAVE_8BIT_SP
)
1338 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1339 GEN_INT (irq_state
)));
1341 fp_plus_insns
= get_insns ();
1344 /********** Method 2: Adjust Stack pointer **********/
1346 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1352 emit_move_insn (stack_pointer_rtx
,
1353 plus_constant (Pmode
, stack_pointer_rtx
, size
));
1355 sp_plus_insns
= get_insns ();
1358 /************ Use shortest method ************/
1360 emit_insn (get_sequence_length (sp_plus_insns
)
1361 < get_sequence_length (fp_plus_insns
)
1366 emit_insn (fp_plus_insns
);
1369 if (frame_pointer_needed
1370 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1372 /* Restore previous frame_pointer. See expand_prologue for
1373 rationale for not using pophi. */
1375 emit_pop_byte (REG_Y
+ 1);
1376 emit_pop_byte (REG_Y
);
1379 /* Restore used registers. */
1381 for (reg
= 31; reg
>= 0; --reg
)
1382 if (TEST_HARD_REG_BIT (set
, reg
))
1383 emit_pop_byte (reg
);
1387 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1388 The conditions to restore them must be tha same as in prologue. */
1391 && TEST_HARD_REG_BIT (set
, REG_Z
)
1392 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1394 emit_pop_byte (TMP_REGNO
);
1395 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1399 && (frame_pointer_needed
1400 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1401 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1403 emit_pop_byte (TMP_REGNO
);
1404 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1408 && TEST_HARD_REG_BIT (set
, REG_X
)
1409 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1411 emit_pop_byte (TMP_REGNO
);
1412 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1417 emit_pop_byte (TMP_REGNO
);
1418 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1421 /* Restore SREG using tmp_reg as scratch. */
1423 emit_pop_byte (TMP_REGNO
);
1424 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1426 /* Restore tmp REG. */
1427 emit_pop_byte (TMP_REGNO
);
1429 /* Restore zero REG. */
1430 emit_pop_byte (ZERO_REGNO
);
1434 emit_jump_insn (gen_return ());
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
/* Output summary messages at beginning of function epilogue.
   FILE is the assembler output stream; emits a marker comment so the
   generated assembly shows where the epilogue starts.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1446 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1449 avr_cannot_modify_jumps_p (void)
1452 /* Naked Functions must not have any instructions after
1453 their epilogue, see PR42240 */
1455 if (reload_completed
1457 && cfun
->machine
->is_naked
)
1466 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1468 /* FIXME: PSImode addresses are not mode-dependent in themselves.
1469 This hook just serves to hack around PR rtl-optimization/52543 by
1470 claiming that PSImode addresses (which are used for the 24-bit
1471 address space __memx) were mode-dependent so that lower-subreg.s
1472 will skip these addresses. See also the similar FIXME comment along
1473 with mov<mode> expanders in avr.md. */
1476 avr_mode_dependent_address_p (const_rtx addr
)
1478 return GET_MODE (addr
) != Pmode
;
1482 /* Helper function for `avr_legitimate_address_p'. */
1485 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1486 RTX_CODE outer_code
, bool strict
)
1489 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1490 as
, outer_code
, UNKNOWN
)
1492 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1496 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1497 machine for a memory operand of mode MODE. */
1500 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1502 bool ok
= CONSTANT_ADDRESS_P (x
);
1504 switch (GET_CODE (x
))
1507 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1512 && REG_X
== REGNO (x
))
1520 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1521 GET_CODE (x
), strict
);
1526 rtx reg
= XEXP (x
, 0);
1527 rtx op1
= XEXP (x
, 1);
1530 && CONST_INT_P (op1
)
1531 && INTVAL (op1
) >= 0)
1533 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1538 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1541 if (reg
== frame_pointer_rtx
1542 || reg
== arg_pointer_rtx
)
1547 else if (frame_pointer_needed
1548 && reg
== frame_pointer_rtx
)
1560 if (avr_log
.legitimate_address_p
)
1562 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1563 "reload_completed=%d reload_in_progress=%d %s:",
1564 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1565 reg_renumber
? "(reg_renumber)" : "");
1567 if (GET_CODE (x
) == PLUS
1568 && REG_P (XEXP (x
, 0))
1569 && CONST_INT_P (XEXP (x
, 1))
1570 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1573 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1574 true_regnum (XEXP (x
, 0)));
1577 avr_edump ("\n%r\n", x
);
1584 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1585 now only a helper for avr_addr_space_legitimize_address. */
1586 /* Attempts to replace X with a valid
1587 memory address for an operand of mode MODE */
1590 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1592 bool big_offset_p
= false;
1596 if (GET_CODE (oldx
) == PLUS
1597 && REG_P (XEXP (oldx
, 0)))
1599 if (REG_P (XEXP (oldx
, 1)))
1600 x
= force_reg (GET_MODE (oldx
), oldx
);
1601 else if (CONST_INT_P (XEXP (oldx
, 1)))
1603 int offs
= INTVAL (XEXP (oldx
, 1));
1604 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1605 && offs
> MAX_LD_OFFSET (mode
))
1607 big_offset_p
= true;
1608 x
= force_reg (GET_MODE (oldx
), oldx
);
1613 if (avr_log
.legitimize_address
)
1615 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1618 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1625 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1626 /* This will allow register R26/27 to be used where it is no worse than normal
1627 base pointers R28/29 or R30/31. For example, if base offset is greater
1628 than 63 bytes or for R++ or --R addressing. */
1631 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1632 int opnum
, int type
, int addr_type
,
1633 int ind_levels ATTRIBUTE_UNUSED
,
1634 rtx (*mk_memloc
)(rtx
,int))
1638 if (avr_log
.legitimize_reload_address
)
1639 avr_edump ("\n%?:%m %r\n", mode
, x
);
1641 if (1 && (GET_CODE (x
) == POST_INC
1642 || GET_CODE (x
) == PRE_DEC
))
1644 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1645 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1646 opnum
, RELOAD_OTHER
);
1648 if (avr_log
.legitimize_reload_address
)
1649 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1650 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1655 if (GET_CODE (x
) == PLUS
1656 && REG_P (XEXP (x
, 0))
1657 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1658 && CONST_INT_P (XEXP (x
, 1))
1659 && INTVAL (XEXP (x
, 1)) >= 1)
1661 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1665 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1667 int regno
= REGNO (XEXP (x
, 0));
1668 rtx mem
= mk_memloc (x
, regno
);
1670 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1671 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1674 if (avr_log
.legitimize_reload_address
)
1675 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1676 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1678 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1679 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1682 if (avr_log
.legitimize_reload_address
)
1683 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1684 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1689 else if (! (frame_pointer_needed
1690 && XEXP (x
, 0) == frame_pointer_rtx
))
1692 push_reload (x
, NULL_RTX
, px
, NULL
,
1693 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1696 if (avr_log
.legitimize_reload_address
)
1697 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1698 POINTER_REGS
, x
, NULL_RTX
);
1708 /* Helper function to print assembler resp. track instruction
1709 sequence lengths. Always return "".
1712 Output assembler code from template TPL with operands supplied
1713 by OPERANDS. This is just forwarding to output_asm_insn.
1716 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1717 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1718 Don't output anything.
1722 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1726 output_asm_insn (tpl
, operands
);
1740 /* Return a pointer register name as a string. */
1743 ptrreg_to_str (int regno
)
1747 case REG_X
: return "X";
1748 case REG_Y
: return "Y";
1749 case REG_Z
: return "Z";
1751 output_operand_lossage ("address operand requires constraint for"
1752 " X, Y, or Z register");
1757 /* Return the condition name as a string.
1758 Used in conditional jump constructing */
1761 cond_string (enum rtx_code code
)
1770 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1775 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1791 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1792 /* Output ADDR to FILE as address. */
1795 avr_print_operand_address (FILE *file
, rtx addr
)
1797 switch (GET_CODE (addr
))
1800 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1804 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1808 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1812 if (CONSTANT_ADDRESS_P (addr
)
1813 && text_segment_operand (addr
, VOIDmode
))
1816 if (GET_CODE (x
) == CONST
)
1818 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1820 /* Assembler gs() will implant word address. Make offset
1821 a byte offset inside gs() for assembler. This is
1822 needed because the more logical (constant+gs(sym)) is not
1823 accepted by gas. For 128K and lower devices this is ok.
1824 For large devices it will create a Trampoline to offset
1825 from symbol which may not be what the user really wanted. */
1826 fprintf (file
, "gs(");
1827 output_addr_const (file
, XEXP (x
,0));
1828 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
1829 2 * INTVAL (XEXP (x
, 1)));
1831 if (warning (0, "pointer offset from symbol maybe incorrect"))
1833 output_addr_const (stderr
, addr
);
1834 fprintf(stderr
,"\n");
1839 fprintf (file
, "gs(");
1840 output_addr_const (file
, addr
);
1841 fprintf (file
, ")");
1845 output_addr_const (file
, addr
);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
/* The only punctuation characters accepted in operand codes are
   '~' (rjmp/rcall vs. jmp/call) and '!' (eijmp/eicall).  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  return code == '~' || code == '!';
}
1859 /* Implement `TARGET_PRINT_OPERAND'. */
1860 /* Output X as assembler operand to file FILE.
1861 For a description of supported %-codes, see top of avr.md. */
1864 avr_print_operand (FILE *file
, rtx x
, int code
)
1868 if (code
>= 'A' && code
<= 'D')
1873 if (!AVR_HAVE_JMP_CALL
)
1876 else if (code
== '!')
1878 if (AVR_HAVE_EIJMP_EICALL
)
1881 else if (code
== 't'
1884 static int t_regno
= -1;
1885 static int t_nbits
= -1;
1887 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
1889 t_regno
= REGNO (x
);
1890 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
1892 else if (CONST_INT_P (x
) && t_regno
>= 0
1893 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
1895 int bpos
= INTVAL (x
);
1897 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
1899 fprintf (file
, ",%d", bpos
% 8);
1904 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
1908 if (x
== zero_reg_rtx
)
1909 fprintf (file
, "__zero_reg__");
1911 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1913 else if (CONST_INT_P (x
))
1915 HOST_WIDE_INT ival
= INTVAL (x
);
1918 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
1919 else if (low_io_address_operand (x
, VOIDmode
)
1920 || high_io_address_operand (x
, VOIDmode
))
1922 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
1923 fprintf (file
, "__RAMPZ__");
1924 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
1925 fprintf (file
, "__RAMPY__");
1926 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
1927 fprintf (file
, "__RAMPX__");
1928 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
1929 fprintf (file
, "__RAMPD__");
1930 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
1931 fprintf (file
, "__CCP__");
1932 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
1933 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
1934 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
1937 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
1938 ival
- avr_current_arch
->sfr_offset
);
1942 fatal_insn ("bad address, not an I/O address:", x
);
1946 rtx addr
= XEXP (x
, 0);
1950 if (!CONSTANT_P (addr
))
1951 fatal_insn ("bad address, not a constant:", addr
);
1952 /* Assembler template with m-code is data - not progmem section */
1953 if (text_segment_operand (addr
, VOIDmode
))
1954 if (warning (0, "accessing data memory with"
1955 " program memory address"))
1957 output_addr_const (stderr
, addr
);
1958 fprintf(stderr
,"\n");
1960 output_addr_const (file
, addr
);
1962 else if (code
== 'i')
1964 avr_print_operand (file
, addr
, 'i');
1966 else if (code
== 'o')
1968 if (GET_CODE (addr
) != PLUS
)
1969 fatal_insn ("bad address, not (reg+disp):", addr
);
1971 avr_print_operand (file
, XEXP (addr
, 1), 0);
1973 else if (code
== 'p' || code
== 'r')
1975 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1976 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1979 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1981 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1983 else if (GET_CODE (addr
) == PLUS
)
1985 avr_print_operand_address (file
, XEXP (addr
,0));
1986 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1987 fatal_insn ("internal compiler error. Bad address:"
1990 avr_print_operand (file
, XEXP (addr
,1), code
);
1993 avr_print_operand_address (file
, addr
);
1995 else if (code
== 'i')
1997 fatal_insn ("bad address, not an I/O address:", x
);
1999 else if (code
== 'x')
2001 /* Constant progmem address - like used in jmp or call */
2002 if (0 == text_segment_operand (x
, VOIDmode
))
2003 if (warning (0, "accessing program memory"
2004 " with data memory address"))
2006 output_addr_const (stderr
, x
);
2007 fprintf(stderr
,"\n");
2009 /* Use normal symbol for direct address no linker trampoline needed */
2010 output_addr_const (file
, x
);
2012 else if (GET_CODE (x
) == CONST_DOUBLE
)
2016 if (GET_MODE (x
) != SFmode
)
2017 fatal_insn ("internal compiler error. Unknown mode:", x
);
2018 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2019 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2020 fprintf (file
, "0x%lx", val
);
2022 else if (GET_CODE (x
) == CONST_STRING
)
2023 fputs (XSTR (x
, 0), file
);
2024 else if (code
== 'j')
2025 fputs (cond_string (GET_CODE (x
)), file
);
2026 else if (code
== 'k')
2027 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2029 avr_print_operand_address (file
, x
);
2032 /* Update the condition code in the INSN. */
2035 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2038 enum attr_cc cc
= get_attr_cc (insn
);
2046 case CC_OUT_PLUS_NOCLOBBER
:
2049 rtx
*op
= recog_data
.operand
;
2052 /* Extract insn's operands. */
2053 extract_constrain_insn_cached (insn
);
2061 avr_out_plus (op
, &len_dummy
, &icc
);
2062 cc
= (enum attr_cc
) icc
;
2065 case CC_OUT_PLUS_NOCLOBBER
:
2066 avr_out_plus_noclobber (op
, &len_dummy
, &icc
);
2067 cc
= (enum attr_cc
) icc
;
2072 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2073 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2074 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2076 /* Any other "r,rL" combination does not alter cc0. */
2080 } /* inner switch */
2084 } /* outer swicth */
2089 /* Special values like CC_OUT_PLUS from above have been
2090 mapped to "standard" CC_* values so we never come here. */
2096 /* Insn does not affect CC at all. */
2104 set
= single_set (insn
);
2108 cc_status
.flags
|= CC_NO_OVERFLOW
;
2109 cc_status
.value1
= SET_DEST (set
);
2114 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2115 The V flag may or may not be known but that's ok because
2116 alter_cond will change tests to use EQ/NE. */
2117 set
= single_set (insn
);
2121 cc_status
.value1
= SET_DEST (set
);
2122 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2127 set
= single_set (insn
);
2130 cc_status
.value1
= SET_SRC (set
);
2134 /* Insn doesn't leave CC in a usable state. */
2140 /* Choose mode for jump insn:
2141 1 - relative jump in range -63 <= x <= 62 ;
2142 2 - relative jump in range -2046 <= x <= 2045 ;
2143 3 - absolute jump (only for ATmega[16]03). */
2146 avr_jump_mode (rtx x
, rtx insn
)
2148 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2149 ? XEXP (x
, 0) : x
));
2150 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2151 int jump_distance
= cur_addr
- dest_addr
;
2153 if (-63 <= jump_distance
&& jump_distance
<= 62)
2155 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2157 else if (AVR_HAVE_JMP_CALL
)
2163 /* return an AVR condition jump commands.
2164 X is a comparison RTX.
2165 LEN is a number returned by avr_jump_mode function.
2166 if REVERSE nonzero then condition code in X must be reversed. */
2169 ret_cond_branch (rtx x
, int len
, int reverse
)
2171 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2176 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2177 return (len
== 1 ? ("breq .+2" CR_TAB
2179 len
== 2 ? ("breq .+4" CR_TAB
2187 return (len
== 1 ? ("breq .+2" CR_TAB
2189 len
== 2 ? ("breq .+4" CR_TAB
2196 return (len
== 1 ? ("breq .+2" CR_TAB
2198 len
== 2 ? ("breq .+4" CR_TAB
2205 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2206 return (len
== 1 ? ("breq %0" CR_TAB
2208 len
== 2 ? ("breq .+2" CR_TAB
2215 return (len
== 1 ? ("breq %0" CR_TAB
2217 len
== 2 ? ("breq .+2" CR_TAB
2224 return (len
== 1 ? ("breq %0" CR_TAB
2226 len
== 2 ? ("breq .+2" CR_TAB
2240 return ("br%j1 .+2" CR_TAB
2243 return ("br%j1 .+4" CR_TAB
2254 return ("br%k1 .+2" CR_TAB
2257 return ("br%k1 .+4" CR_TAB
2265 /* Output insn cost for next insn. */
2268 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2269 int num_operands ATTRIBUTE_UNUSED
)
2271 if (avr_log
.rtx_costs
)
2273 rtx set
= single_set (insn
);
2276 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2277 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2279 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2280 rtx_cost (PATTERN (insn
), INSN
, 0,
2281 optimize_insn_for_speed_p()));
2285 /* Return 0 if undefined, 1 if always true or always false. */
2288 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2290 unsigned int max
= (mode
== QImode
? 0xff :
2291 mode
== HImode
? 0xffff :
2292 mode
== PSImode
? 0xffffff :
2293 mode
== SImode
? 0xffffffff : 0);
2294 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
2296 if (unsigned_condition (op
) != op
)
2299 if (max
!= (INTVAL (x
) & max
)
2300 && INTVAL (x
) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR the argument registers are r8 ... r25.  */

int
function_arg_regno_p(int r)
{
  return (r >= 8 && r <= 25);
}
2316 /* Initializing the variable cum for the state at the beginning
2317 of the argument list. */
2320 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2321 tree fndecl ATTRIBUTE_UNUSED
)
2324 cum
->regno
= FIRST_CUM_REG
;
2325 if (!libname
&& stdarg_p (fntype
))
2328 /* Assume the calle may be tail called */
2330 cfun
->machine
->sibcall_fails
= 0;
2333 /* Returns the number of registers to allocate for a function argument. */
2336 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2340 if (mode
== BLKmode
)
2341 size
= int_size_in_bytes (type
);
2343 size
= GET_MODE_SIZE (mode
);
2345 /* Align all function arguments to start in even-numbered registers.
2346 Odd-sized arguments leave holes above them. */
2348 return (size
+ 1) & ~1;
2351 /* Controls whether a function argument is passed
2352 in a register, and which register. */
2355 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2356 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2358 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2359 int bytes
= avr_num_arg_regs (mode
, type
);
2361 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2362 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2367 /* Update the summarizer variable CUM to advance past an argument
2368 in the argument list. */
2371 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2372 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2374 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2375 int bytes
= avr_num_arg_regs (mode
, type
);
2377 cum
->nregs
-= bytes
;
2378 cum
->regno
-= bytes
;
2380 /* A parameter is being passed in a call-saved register. As the original
2381 contents of these regs has to be restored before leaving the function,
2382 a function must not pass arguments in call-saved regs in order to get
2387 && !call_used_regs
[cum
->regno
])
2389 /* FIXME: We ship info on failing tail-call in struct machine_function.
2390 This uses internals of calls.c:expand_call() and the way args_so_far
2391 is used. targetm.function_ok_for_sibcall() needs to be extended to
2392 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2393 dependent so that such an extension is not wanted. */
2395 cfun
->machine
->sibcall_fails
= 1;
2398 /* Test if all registers needed by the ABI are actually available. If the
2399 user has fixed a GPR needed to pass an argument, an (implicit) function
2400 call will clobber that fixed register. See PR45099 for an example. */
2407 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2408 if (fixed_regs
[regno
])
2409 warning (0, "fixed register %s used to pass parameter to function",
2413 if (cum
->nregs
<= 0)
2416 cum
->regno
= FIRST_CUM_REG
;
2420 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2421 /* Decide whether we can make a sibling call to a function. DECL is the
2422 declaration of the function being targeted by the call and EXP is the
2423 CALL_EXPR representing the call. */
2426 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2430 /* Tail-calling must fail if callee-saved regs are used to pass
2431 function args. We must not tail-call when `epilogue_restores'
2432 is used. Unfortunately, we cannot tell at this point if that
2433 actually will happen or not, and we cannot step back from
2434 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2436 if (cfun
->machine
->sibcall_fails
2437 || TARGET_CALL_PROLOGUES
)
2442 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2446 decl_callee
= TREE_TYPE (decl_callee
);
2450 decl_callee
= fntype_callee
;
2452 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2453 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2455 decl_callee
= TREE_TYPE (decl_callee
);
2459 /* Ensure that caller and callee have compatible epilogues */
2461 if (interrupt_function_p (current_function_decl
)
2462 || signal_function_p (current_function_decl
)
2463 || avr_naked_function_p (decl_callee
)
2464 || avr_naked_function_p (current_function_decl
)
2465 /* FIXME: For OS_task and OS_main, we are over-conservative.
2466 This is due to missing documentation of these attributes
2467 and what they actually should do and should not do. */
2468 || (avr_OS_task_function_p (decl_callee
)
2469 != avr_OS_task_function_p (current_function_decl
))
2470 || (avr_OS_main_function_p (decl_callee
)
2471 != avr_OS_main_function_p (current_function_decl
)))
2479 /***********************************************************************
2480 Functions for outputting various mov's for a various modes
2481 ************************************************************************/
2483 /* Return true if a value of mode MODE is read from flash by
2484 __load_* function from libgcc. */
2487 avr_load_libgcc_p (rtx op
)
2489 enum machine_mode mode
= GET_MODE (op
);
2490 int n_bytes
= GET_MODE_SIZE (mode
);
2495 && MEM_ADDR_SPACE (op
) == ADDR_SPACE_FLASH
);
2498 /* Return true if a value of mode MODE is read by __xload_* function. */
2501 avr_xload_libgcc_p (enum machine_mode mode
)
2503 int n_bytes
= GET_MODE_SIZE (mode
);
2506 || avr_current_device
->n_flash
> 1);
2510 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2511 OP[1] in AS1 to register OP[0].
2512 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2516 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2520 rtx src
= SET_SRC (single_set (insn
));
2522 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2524 addr_space_t as
= MEM_ADDR_SPACE (src
);
2531 warning (0, "writing to address space %qs not supported",
2532 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2537 addr
= XEXP (src
, 0);
2538 code
= GET_CODE (addr
);
2540 gcc_assert (REG_P (dest
));
2541 gcc_assert (REG
== code
|| POST_INC
== code
);
2543 /* Only 1-byte moves from __flash are representes as open coded
2544 mov insns. All other loads from flash are not handled here but
2545 by some UNSPEC instead, see respective FIXME in machine description. */
2547 gcc_assert (as
== ADDR_SPACE_FLASH
);
2548 gcc_assert (n_bytes
== 1);
2551 xop
[1] = lpm_addr_reg_rtx
;
2552 xop
[2] = lpm_reg_rtx
;
2561 gcc_assert (REG_Z
== REGNO (addr
));
2563 return AVR_HAVE_LPMX
2564 ? avr_asm_len ("lpm %0,%a1", xop
, plen
, 1)
2565 : avr_asm_len ("lpm" CR_TAB
2566 "mov %0,%2", xop
, plen
, 2);
2570 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0)));
2572 return AVR_HAVE_LPMX
2573 ? avr_asm_len ("lpm %0,%a1+", xop
, plen
, 1)
2574 : avr_asm_len ("lpm" CR_TAB
2576 "mov %0,%2", xop
, plen
, 3);
2583 /* If PLEN == NULL: Ouput instructions to load $0 with a value from
2584 flash address $1:Z. If $1 = 0 we can use LPM to read, otherwise
2586 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2590 avr_load_lpm (rtx insn
, rtx
*op
, int *plen
)
2593 int n
, n_bytes
= GET_MODE_SIZE (GET_MODE (op
[0]));
2594 rtx xsegment
= op
[1];
2595 bool clobber_z
= PARALLEL
== GET_CODE (PATTERN (insn
));
2596 bool r30_in_tmp
= false;
2601 xop
[1] = lpm_addr_reg_rtx
;
2602 xop
[2] = lpm_reg_rtx
;
2603 xop
[3] = xstring_empty
;
2605 /* Set RAMPZ as needed. */
2607 if (REG_P (xsegment
))
2609 avr_asm_len ("out __RAMPZ__,%0", &xsegment
, plen
, 1);
2613 /* Load the individual bytes from LSB to MSB. */
2615 for (n
= 0; n
< n_bytes
; n
++)
2617 xop
[0] = all_regs_rtx
[REGNO (op
[0]) + n
];
2619 if ((CONST_INT_P (xsegment
) && AVR_HAVE_LPMX
)
2620 || (REG_P (xsegment
) && AVR_HAVE_ELPMX
))
2623 avr_asm_len ("%3lpm %0,%a1", xop
, plen
, 1);
2624 else if (REGNO (xop
[0]) == REG_Z
)
2626 avr_asm_len ("%3lpm %2,%a1+", xop
, plen
, 1);
2630 avr_asm_len ("%3lpm %0,%a1+", xop
, plen
, 1);
2634 gcc_assert (clobber_z
);
2636 avr_asm_len ("%3lpm" CR_TAB
2637 "mov %0,%2", xop
, plen
, 2);
2640 avr_asm_len ("adiw %1,1", xop
, plen
, 1);
2645 avr_asm_len ("mov %1,%2", xop
, plen
, 1);
2649 && !reg_unused_after (insn
, lpm_addr_reg_rtx
)
2650 && !reg_overlap_mentioned_p (op
[0], lpm_addr_reg_rtx
))
2652 xop
[2] = GEN_INT (n_bytes
-1);
2653 avr_asm_len ("sbiw %1,%2", xop
, plen
, 1);
2656 if (REG_P (xsegment
) && AVR_HAVE_RAMPD
)
2658 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2660 avr_asm_len ("out __RAMPZ__,__zero_reg__", xop
, plen
, 1);
2667 /* Worker function for xload_8 insn. */
2670 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
2676 xop
[2] = lpm_addr_reg_rtx
;
2677 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
2682 avr_asm_len ("sbrc %1,7" CR_TAB
2684 "sbrs %1,7", xop
, plen
, 3);
2686 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, 1);
2688 if (REGNO (xop
[0]) != REGNO (xop
[3]))
2689 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2696 output_movqi (rtx insn
, rtx operands
[], int *real_l
)
2698 rtx dest
= operands
[0];
2699 rtx src
= operands
[1];
2701 if (avr_mem_flash_p (src
)
2702 || avr_mem_flash_p (dest
))
2704 return avr_out_lpm (insn
, operands
, real_l
);
2710 if (register_operand (dest
, QImode
))
2712 if (register_operand (src
, QImode
)) /* mov r,r */
2714 if (test_hard_reg_class (STACK_REG
, dest
))
2716 else if (test_hard_reg_class (STACK_REG
, src
))
2721 else if (CONSTANT_P (src
))
2723 output_reload_in_const (operands
, NULL_RTX
, real_l
, false);
2726 else if (MEM_P (src
))
2727 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2729 else if (MEM_P (dest
))
2734 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
2736 return out_movqi_mr_r (insn
, xop
, real_l
);
2743 output_movhi (rtx insn
, rtx xop
[], int *plen
)
2748 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
2750 if (avr_mem_flash_p (src
)
2751 || avr_mem_flash_p (dest
))
2753 return avr_out_lpm (insn
, xop
, plen
);
2758 if (REG_P (src
)) /* mov r,r */
2760 if (test_hard_reg_class (STACK_REG
, dest
))
2762 if (AVR_HAVE_8BIT_SP
)
2763 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
2766 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2767 "out __SP_H__,%B1", xop
, plen
, -2);
2769 /* Use simple load of SP if no interrupts are used. */
2771 return TARGET_NO_INTERRUPTS
2772 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2773 "out __SP_L__,%A1", xop
, plen
, -2)
2775 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2777 "out __SP_H__,%B1" CR_TAB
2778 "out __SREG__,__tmp_reg__" CR_TAB
2779 "out __SP_L__,%A1", xop
, plen
, -5);
2781 else if (test_hard_reg_class (STACK_REG
, src
))
2783 return !AVR_HAVE_SPH
2784 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2785 "clr %B0", xop
, plen
, -2)
2787 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2788 "in %B0,__SP_H__", xop
, plen
, -2);
2791 return AVR_HAVE_MOVW
2792 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
2794 : avr_asm_len ("mov %A0,%A1" CR_TAB
2795 "mov %B0,%B1", xop
, plen
, -2);
2797 else if (CONSTANT_P (src
))
2799 return output_reload_inhi (xop
, NULL
, plen
);
2801 else if (MEM_P (src
))
2803 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
2806 else if (MEM_P (dest
))
2811 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
2813 return out_movhi_mr_r (insn
, xop
, plen
);
2816 fatal_insn ("invalid insn:", insn
);
2822 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
2826 rtx x
= XEXP (src
, 0);
2828 if (CONSTANT_ADDRESS_P (x
))
2830 return optimize
> 0 && io_address_operand (x
, QImode
)
2831 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
2832 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
2834 else if (GET_CODE (x
) == PLUS
2835 && REG_P (XEXP (x
, 0))
2836 && CONST_INT_P (XEXP (x
, 1)))
2838 /* memory access by reg+disp */
2840 int disp
= INTVAL (XEXP (x
, 1));
2842 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
2844 if (REGNO (XEXP (x
, 0)) != REG_Y
)
2845 fatal_insn ("incorrect insn:",insn
);
2847 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2848 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2849 "ldd %0,Y+63" CR_TAB
2850 "sbiw r28,%o1-63", op
, plen
, -3);
2852 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2853 "sbci r29,hi8(-%o1)" CR_TAB
2855 "subi r28,lo8(%o1)" CR_TAB
2856 "sbci r29,hi8(%o1)", op
, plen
, -5);
2858 else if (REGNO (XEXP (x
, 0)) == REG_X
)
2860 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2861 it but I have this situation with extremal optimizing options. */
2863 avr_asm_len ("adiw r26,%o1" CR_TAB
2864 "ld %0,X", op
, plen
, -2);
2866 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
2867 && !reg_unused_after (insn
, XEXP (x
,0)))
2869 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
2875 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
2878 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
2882 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
2886 rtx base
= XEXP (src
, 0);
2887 int reg_dest
= true_regnum (dest
);
2888 int reg_base
= true_regnum (base
);
2889 /* "volatile" forces reading low byte first, even if less efficient,
2890 for correct operation with 16-bit I/O registers. */
2891 int mem_volatile_p
= MEM_VOLATILE_P (src
);
2895 if (reg_dest
== reg_base
) /* R = (R) */
2896 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
2898 "mov %A0,__tmp_reg__", op
, plen
, -3);
2900 if (reg_base
!= REG_X
)
2901 return avr_asm_len ("ld %A0,%1" CR_TAB
2902 "ldd %B0,%1+1", op
, plen
, -2);
2904 avr_asm_len ("ld %A0,X+" CR_TAB
2905 "ld %B0,X", op
, plen
, -2);
2907 if (!reg_unused_after (insn
, base
))
2908 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
2912 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2914 int disp
= INTVAL (XEXP (base
, 1));
2915 int reg_base
= true_regnum (XEXP (base
, 0));
2917 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2919 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2920 fatal_insn ("incorrect insn:",insn
);
2922 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
2923 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
2924 "ldd %A0,Y+62" CR_TAB
2925 "ldd %B0,Y+63" CR_TAB
2926 "sbiw r28,%o1-62", op
, plen
, -4)
2928 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2929 "sbci r29,hi8(-%o1)" CR_TAB
2931 "ldd %B0,Y+1" CR_TAB
2932 "subi r28,lo8(%o1)" CR_TAB
2933 "sbci r29,hi8(%o1)", op
, plen
, -6);
2936 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2937 it but I have this situation with extremal
2938 optimization options. */
2940 if (reg_base
== REG_X
)
2941 return reg_base
== reg_dest
2942 ? avr_asm_len ("adiw r26,%o1" CR_TAB
2943 "ld __tmp_reg__,X+" CR_TAB
2945 "mov %A0,__tmp_reg__", op
, plen
, -4)
2947 : avr_asm_len ("adiw r26,%o1" CR_TAB
2950 "sbiw r26,%o1+1", op
, plen
, -4);
2952 return reg_base
== reg_dest
2953 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
2954 "ldd %B0,%B1" CR_TAB
2955 "mov %A0,__tmp_reg__", op
, plen
, -3)
2957 : avr_asm_len ("ldd %A0,%A1" CR_TAB
2958 "ldd %B0,%B1", op
, plen
, -2);
2960 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2962 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2963 fatal_insn ("incorrect insn:", insn
);
2965 if (!mem_volatile_p
)
2966 return avr_asm_len ("ld %B0,%1" CR_TAB
2967 "ld %A0,%1", op
, plen
, -2);
2969 return REGNO (XEXP (base
, 0)) == REG_X
2970 ? avr_asm_len ("sbiw r26,2" CR_TAB
2973 "sbiw r26,1", op
, plen
, -4)
2975 : avr_asm_len ("sbiw %r1,2" CR_TAB
2977 "ldd %B0,%p1+1", op
, plen
, -3);
2979 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2981 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2982 fatal_insn ("incorrect insn:", insn
);
2984 return avr_asm_len ("ld %A0,%1" CR_TAB
2985 "ld %B0,%1", op
, plen
, -2);
2987 else if (CONSTANT_ADDRESS_P (base
))
2989 return optimize
> 0 && io_address_operand (base
, HImode
)
2990 ? avr_asm_len ("in %A0,%i1" CR_TAB
2991 "in %B0,%i1+1", op
, plen
, -2)
2993 : avr_asm_len ("lds %A0,%m1" CR_TAB
2994 "lds %B0,%m1+1", op
, plen
, -4);
2997 fatal_insn ("unknown move insn:",insn
);
3002 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3006 rtx base
= XEXP (src
, 0);
3007 int reg_dest
= true_regnum (dest
);
3008 int reg_base
= true_regnum (base
);
3016 if (reg_base
== REG_X
) /* (R26) */
3018 if (reg_dest
== REG_X
)
3019 /* "ld r26,-X" is undefined */
3020 return *l
=7, ("adiw r26,3" CR_TAB
3023 "ld __tmp_reg__,-X" CR_TAB
3026 "mov r27,__tmp_reg__");
3027 else if (reg_dest
== REG_X
- 2)
3028 return *l
=5, ("ld %A0,X+" CR_TAB
3030 "ld __tmp_reg__,X+" CR_TAB
3032 "mov %C0,__tmp_reg__");
3033 else if (reg_unused_after (insn
, base
))
3034 return *l
=4, ("ld %A0,X+" CR_TAB
3039 return *l
=5, ("ld %A0,X+" CR_TAB
3047 if (reg_dest
== reg_base
)
3048 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3049 "ldd %C0,%1+2" CR_TAB
3050 "ldd __tmp_reg__,%1+1" CR_TAB
3052 "mov %B0,__tmp_reg__");
3053 else if (reg_base
== reg_dest
+ 2)
3054 return *l
=5, ("ld %A0,%1" CR_TAB
3055 "ldd %B0,%1+1" CR_TAB
3056 "ldd __tmp_reg__,%1+2" CR_TAB
3057 "ldd %D0,%1+3" CR_TAB
3058 "mov %C0,__tmp_reg__");
3060 return *l
=4, ("ld %A0,%1" CR_TAB
3061 "ldd %B0,%1+1" CR_TAB
3062 "ldd %C0,%1+2" CR_TAB
3066 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3068 int disp
= INTVAL (XEXP (base
, 1));
3070 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3072 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3073 fatal_insn ("incorrect insn:",insn
);
3075 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3076 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3077 "ldd %A0,Y+60" CR_TAB
3078 "ldd %B0,Y+61" CR_TAB
3079 "ldd %C0,Y+62" CR_TAB
3080 "ldd %D0,Y+63" CR_TAB
3083 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3084 "sbci r29,hi8(-%o1)" CR_TAB
3086 "ldd %B0,Y+1" CR_TAB
3087 "ldd %C0,Y+2" CR_TAB
3088 "ldd %D0,Y+3" CR_TAB
3089 "subi r28,lo8(%o1)" CR_TAB
3090 "sbci r29,hi8(%o1)");
3093 reg_base
= true_regnum (XEXP (base
, 0));
3094 if (reg_base
== REG_X
)
3097 if (reg_dest
== REG_X
)
3100 /* "ld r26,-X" is undefined */
3101 return ("adiw r26,%o1+3" CR_TAB
3104 "ld __tmp_reg__,-X" CR_TAB
3107 "mov r27,__tmp_reg__");
3110 if (reg_dest
== REG_X
- 2)
3111 return ("adiw r26,%o1" CR_TAB
3114 "ld __tmp_reg__,X+" CR_TAB
3116 "mov r26,__tmp_reg__");
3118 return ("adiw r26,%o1" CR_TAB
3125 if (reg_dest
== reg_base
)
3126 return *l
=5, ("ldd %D0,%D1" CR_TAB
3127 "ldd %C0,%C1" CR_TAB
3128 "ldd __tmp_reg__,%B1" CR_TAB
3129 "ldd %A0,%A1" CR_TAB
3130 "mov %B0,__tmp_reg__");
3131 else if (reg_dest
== reg_base
- 2)
3132 return *l
=5, ("ldd %A0,%A1" CR_TAB
3133 "ldd %B0,%B1" CR_TAB
3134 "ldd __tmp_reg__,%C1" CR_TAB
3135 "ldd %D0,%D1" CR_TAB
3136 "mov %C0,__tmp_reg__");
3137 return *l
=4, ("ldd %A0,%A1" CR_TAB
3138 "ldd %B0,%B1" CR_TAB
3139 "ldd %C0,%C1" CR_TAB
3142 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3143 return *l
=4, ("ld %D0,%1" CR_TAB
3147 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3148 return *l
=4, ("ld %A0,%1" CR_TAB
3152 else if (CONSTANT_ADDRESS_P (base
))
3153 return *l
=8, ("lds %A0,%m1" CR_TAB
3154 "lds %B0,%m1+1" CR_TAB
3155 "lds %C0,%m1+2" CR_TAB
3158 fatal_insn ("unknown move insn:",insn
);
3163 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3167 rtx base
= XEXP (dest
, 0);
3168 int reg_base
= true_regnum (base
);
3169 int reg_src
= true_regnum (src
);
3175 if (CONSTANT_ADDRESS_P (base
))
3176 return *l
=8,("sts %m0,%A1" CR_TAB
3177 "sts %m0+1,%B1" CR_TAB
3178 "sts %m0+2,%C1" CR_TAB
3180 if (reg_base
> 0) /* (r) */
3182 if (reg_base
== REG_X
) /* (R26) */
3184 if (reg_src
== REG_X
)
3186 /* "st X+,r26" is undefined */
3187 if (reg_unused_after (insn
, base
))
3188 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3191 "st X+,__tmp_reg__" CR_TAB
3195 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3198 "st X+,__tmp_reg__" CR_TAB
3203 else if (reg_base
== reg_src
+ 2)
3205 if (reg_unused_after (insn
, base
))
3206 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3207 "mov __tmp_reg__,%D1" CR_TAB
3210 "st %0+,__zero_reg__" CR_TAB
3211 "st %0,__tmp_reg__" CR_TAB
3212 "clr __zero_reg__");
3214 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3215 "mov __tmp_reg__,%D1" CR_TAB
3218 "st %0+,__zero_reg__" CR_TAB
3219 "st %0,__tmp_reg__" CR_TAB
3220 "clr __zero_reg__" CR_TAB
3223 return *l
=5, ("st %0+,%A1" CR_TAB
3230 return *l
=4, ("st %0,%A1" CR_TAB
3231 "std %0+1,%B1" CR_TAB
3232 "std %0+2,%C1" CR_TAB
3235 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3237 int disp
= INTVAL (XEXP (base
, 1));
3238 reg_base
= REGNO (XEXP (base
, 0));
3239 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3241 if (reg_base
!= REG_Y
)
3242 fatal_insn ("incorrect insn:",insn
);
3244 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3245 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3246 "std Y+60,%A1" CR_TAB
3247 "std Y+61,%B1" CR_TAB
3248 "std Y+62,%C1" CR_TAB
3249 "std Y+63,%D1" CR_TAB
3252 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3253 "sbci r29,hi8(-%o0)" CR_TAB
3255 "std Y+1,%B1" CR_TAB
3256 "std Y+2,%C1" CR_TAB
3257 "std Y+3,%D1" CR_TAB
3258 "subi r28,lo8(%o0)" CR_TAB
3259 "sbci r29,hi8(%o0)");
3261 if (reg_base
== REG_X
)
3264 if (reg_src
== REG_X
)
3267 return ("mov __tmp_reg__,r26" CR_TAB
3268 "mov __zero_reg__,r27" CR_TAB
3269 "adiw r26,%o0" CR_TAB
3270 "st X+,__tmp_reg__" CR_TAB
3271 "st X+,__zero_reg__" CR_TAB
3274 "clr __zero_reg__" CR_TAB
3277 else if (reg_src
== REG_X
- 2)
3280 return ("mov __tmp_reg__,r26" CR_TAB
3281 "mov __zero_reg__,r27" CR_TAB
3282 "adiw r26,%o0" CR_TAB
3285 "st X+,__tmp_reg__" CR_TAB
3286 "st X,__zero_reg__" CR_TAB
3287 "clr __zero_reg__" CR_TAB
3291 return ("adiw r26,%o0" CR_TAB
3298 return *l
=4, ("std %A0,%A1" CR_TAB
3299 "std %B0,%B1" CR_TAB
3300 "std %C0,%C1" CR_TAB
3303 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3304 return *l
=4, ("st %0,%D1" CR_TAB
3308 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3309 return *l
=4, ("st %0,%A1" CR_TAB
3313 fatal_insn ("unknown move insn:",insn
);
3318 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3321 rtx dest
= operands
[0];
3322 rtx src
= operands
[1];
3325 if (avr_mem_flash_p (src
)
3326 || avr_mem_flash_p (dest
))
3328 return avr_out_lpm (insn
, operands
, real_l
);
3334 if (register_operand (dest
, VOIDmode
))
3336 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3338 if (true_regnum (dest
) > true_regnum (src
))
3343 return ("movw %C0,%C1" CR_TAB
3347 return ("mov %D0,%D1" CR_TAB
3348 "mov %C0,%C1" CR_TAB
3349 "mov %B0,%B1" CR_TAB
3357 return ("movw %A0,%A1" CR_TAB
3361 return ("mov %A0,%A1" CR_TAB
3362 "mov %B0,%B1" CR_TAB
3363 "mov %C0,%C1" CR_TAB
3367 else if (CONSTANT_P (src
))
3369 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3371 else if (GET_CODE (src
) == MEM
)
3372 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3374 else if (GET_CODE (dest
) == MEM
)
3378 if (src
== CONST0_RTX (GET_MODE (dest
)))
3379 operands
[1] = zero_reg_rtx
;
3381 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3384 output_asm_insn (templ
, operands
);
3389 fatal_insn ("invalid insn:", insn
);
3394 /* Handle loads of 24-bit types from memory to register. */
3397 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3401 rtx base
= XEXP (src
, 0);
3402 int reg_dest
= true_regnum (dest
);
3403 int reg_base
= true_regnum (base
);
3407 if (reg_base
== REG_X
) /* (R26) */
3409 if (reg_dest
== REG_X
)
3410 /* "ld r26,-X" is undefined */
3411 return avr_asm_len ("adiw r26,2" CR_TAB
3413 "ld __tmp_reg__,-X" CR_TAB
3416 "mov r27,__tmp_reg__", op
, plen
, -6);
3419 avr_asm_len ("ld %A0,X+" CR_TAB
3421 "ld %C0,X", op
, plen
, -3);
3423 if (reg_dest
!= REG_X
- 2
3424 && !reg_unused_after (insn
, base
))
3426 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3432 else /* reg_base != REG_X */
3434 if (reg_dest
== reg_base
)
3435 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3436 "ldd __tmp_reg__,%1+1" CR_TAB
3438 "mov %B0,__tmp_reg__", op
, plen
, -4);
3440 return avr_asm_len ("ld %A0,%1" CR_TAB
3441 "ldd %B0,%1+1" CR_TAB
3442 "ldd %C0,%1+2", op
, plen
, -3);
3445 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3447 int disp
= INTVAL (XEXP (base
, 1));
3449 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3451 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3452 fatal_insn ("incorrect insn:",insn
);
3454 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3455 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3456 "ldd %A0,Y+61" CR_TAB
3457 "ldd %B0,Y+62" CR_TAB
3458 "ldd %C0,Y+63" CR_TAB
3459 "sbiw r28,%o1-61", op
, plen
, -5);
3461 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3462 "sbci r29,hi8(-%o1)" CR_TAB
3464 "ldd %B0,Y+1" CR_TAB
3465 "ldd %C0,Y+2" CR_TAB
3466 "subi r28,lo8(%o1)" CR_TAB
3467 "sbci r29,hi8(%o1)", op
, plen
, -7);
3470 reg_base
= true_regnum (XEXP (base
, 0));
3471 if (reg_base
== REG_X
)
3474 if (reg_dest
== REG_X
)
3476 /* "ld r26,-X" is undefined */
3477 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3479 "ld __tmp_reg__,-X" CR_TAB
3482 "mov r27,__tmp_reg__", op
, plen
, -6);
3485 avr_asm_len ("adiw r26,%o1" CR_TAB
3488 "ld %C0,X", op
, plen
, -4);
3490 if (reg_dest
!= REG_W
3491 && !reg_unused_after (insn
, XEXP (base
, 0)))
3492 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3497 if (reg_dest
== reg_base
)
3498 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3499 "ldd __tmp_reg__,%B1" CR_TAB
3500 "ldd %A0,%A1" CR_TAB
3501 "mov %B0,__tmp_reg__", op
, plen
, -4);
3503 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3504 "ldd %B0,%B1" CR_TAB
3505 "ldd %C0,%C1", op
, plen
, -3);
3507 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3508 return avr_asm_len ("ld %C0,%1" CR_TAB
3510 "ld %A0,%1", op
, plen
, -3);
3511 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3512 return avr_asm_len ("ld %A0,%1" CR_TAB
3514 "ld %C0,%1", op
, plen
, -3);
3516 else if (CONSTANT_ADDRESS_P (base
))
3517 return avr_asm_len ("lds %A0,%m1" CR_TAB
3518 "lds %B0,%m1+1" CR_TAB
3519 "lds %C0,%m1+2", op
, plen
, -6);
3521 fatal_insn ("unknown move insn:",insn
);
3525 /* Handle store of 24-bit type from register or zero to memory. */
3528 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3532 rtx base
= XEXP (dest
, 0);
3533 int reg_base
= true_regnum (base
);
3535 if (CONSTANT_ADDRESS_P (base
))
3536 return avr_asm_len ("sts %m0,%A1" CR_TAB
3537 "sts %m0+1,%B1" CR_TAB
3538 "sts %m0+2,%C1", op
, plen
, -6);
3540 if (reg_base
> 0) /* (r) */
3542 if (reg_base
== REG_X
) /* (R26) */
3544 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3546 avr_asm_len ("st %0+,%A1" CR_TAB
3548 "st %0,%C1", op
, plen
, -3);
3550 if (!reg_unused_after (insn
, base
))
3551 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3556 return avr_asm_len ("st %0,%A1" CR_TAB
3557 "std %0+1,%B1" CR_TAB
3558 "std %0+2,%C1", op
, plen
, -3);
3560 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3562 int disp
= INTVAL (XEXP (base
, 1));
3563 reg_base
= REGNO (XEXP (base
, 0));
3565 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3567 if (reg_base
!= REG_Y
)
3568 fatal_insn ("incorrect insn:",insn
);
3570 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3571 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3572 "std Y+61,%A1" CR_TAB
3573 "std Y+62,%B1" CR_TAB
3574 "std Y+63,%C1" CR_TAB
3575 "sbiw r28,%o0-60", op
, plen
, -5);
3577 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3578 "sbci r29,hi8(-%o0)" CR_TAB
3580 "std Y+1,%B1" CR_TAB
3581 "std Y+2,%C1" CR_TAB
3582 "subi r28,lo8(%o0)" CR_TAB
3583 "sbci r29,hi8(%o0)", op
, plen
, -7);
3585 if (reg_base
== REG_X
)
3588 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
3590 avr_asm_len ("adiw r26,%o0" CR_TAB
3593 "st X,%C1", op
, plen
, -4);
3595 if (!reg_unused_after (insn
, XEXP (base
, 0)))
3596 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
3601 return avr_asm_len ("std %A0,%A1" CR_TAB
3602 "std %B0,%B1" CR_TAB
3603 "std %C0,%C1", op
, plen
, -3);
3605 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3606 return avr_asm_len ("st %0,%C1" CR_TAB
3608 "st %0,%A1", op
, plen
, -3);
3609 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3610 return avr_asm_len ("st %0,%A1" CR_TAB
3612 "st %0,%C1", op
, plen
, -3);
3614 fatal_insn ("unknown move insn:",insn
);
3619 /* Move around 24-bit stuff. */
3622 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
3627 if (avr_mem_flash_p (src
)
3628 || avr_mem_flash_p (dest
))
3630 return avr_out_lpm (insn
, op
, plen
);
3633 if (register_operand (dest
, VOIDmode
))
3635 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3637 if (true_regnum (dest
) > true_regnum (src
))
3639 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
3642 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
3644 return avr_asm_len ("mov %B0,%B1" CR_TAB
3645 "mov %A0,%A1", op
, plen
, 2);
3650 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
3652 avr_asm_len ("mov %A0,%A1" CR_TAB
3653 "mov %B0,%B1", op
, plen
, -2);
3655 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
3658 else if (CONSTANT_P (src
))
3660 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
3662 else if (MEM_P (src
))
3663 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
3665 else if (MEM_P (dest
))
3670 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3672 return avr_out_store_psi (insn
, xop
, plen
);
3675 fatal_insn ("invalid insn:", insn
);
3681 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
3685 rtx x
= XEXP (dest
, 0);
3687 if (CONSTANT_ADDRESS_P (x
))
3689 return optimize
> 0 && io_address_operand (x
, QImode
)
3690 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
3691 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
3693 else if (GET_CODE (x
) == PLUS
3694 && REG_P (XEXP (x
, 0))
3695 && CONST_INT_P (XEXP (x
, 1)))
3697 /* memory access by reg+disp */
3699 int disp
= INTVAL (XEXP (x
, 1));
3701 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
3703 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3704 fatal_insn ("incorrect insn:",insn
);
3706 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3707 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3708 "std Y+63,%1" CR_TAB
3709 "sbiw r28,%o0-63", op
, plen
, -3);
3711 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3712 "sbci r29,hi8(-%o0)" CR_TAB
3714 "subi r28,lo8(%o0)" CR_TAB
3715 "sbci r29,hi8(%o0)", op
, plen
, -5);
3717 else if (REGNO (XEXP (x
,0)) == REG_X
)
3719 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
3721 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3722 "adiw r26,%o0" CR_TAB
3723 "st X,__tmp_reg__", op
, plen
, -3);
3727 avr_asm_len ("adiw r26,%o0" CR_TAB
3728 "st X,%1", op
, plen
, -2);
3731 if (!reg_unused_after (insn
, XEXP (x
,0)))
3732 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
3737 return avr_asm_len ("std %0,%1", op
, plen
, -1);
3740 return avr_asm_len ("st %0,%1", op
, plen
, -1);
3744 /* Helper for the next function for XMEGA. It does the same
3745 but with low byte first. */
3748 avr_out_movhi_mr_r_xmega (rtx insn
, rtx op
[], int *plen
)
3752 rtx base
= XEXP (dest
, 0);
3753 int reg_base
= true_regnum (base
);
3754 int reg_src
= true_regnum (src
);
3756 /* "volatile" forces writing low byte first, even if less efficient,
3757 for correct operation with 16-bit I/O registers like SP. */
3758 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
3760 if (CONSTANT_ADDRESS_P (base
))
3761 return optimize
> 0 && io_address_operand (base
, HImode
)
3762 ? avr_asm_len ("out %i0,%A1" CR_TAB
3763 "out %i0+1,%B1", op
, plen
, -2)
3765 : avr_asm_len ("sts %m0,%A1" CR_TAB
3766 "sts %m0+1,%B1", op
, plen
, -4);
3770 if (reg_base
!= REG_X
)
3771 return avr_asm_len ("st %0,%A1" CR_TAB
3772 "std %0+1,%B1", op
, plen
, -2);
3774 if (reg_src
== REG_X
)
3775 /* "st X+,r26" and "st -X,r26" are undefined. */
3776 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3779 "st X,__tmp_reg__", op
, plen
, -4);
3781 avr_asm_len ("st X+,%A1" CR_TAB
3782 "st X,%B1", op
, plen
, -2);
3784 return reg_unused_after (insn
, base
)
3786 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3788 else if (GET_CODE (base
) == PLUS
)
3790 int disp
= INTVAL (XEXP (base
, 1));
3791 reg_base
= REGNO (XEXP (base
, 0));
3792 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3794 if (reg_base
!= REG_Y
)
3795 fatal_insn ("incorrect insn:",insn
);
3797 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
3798 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3799 "std Y+62,%A1" CR_TAB
3800 "std Y+63,%B1" CR_TAB
3801 "sbiw r28,%o0-62", op
, plen
, -4)
3803 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3804 "sbci r29,hi8(-%o0)" CR_TAB
3806 "std Y+1,%B1" CR_TAB
3807 "subi r28,lo8(%o0)" CR_TAB
3808 "sbci r29,hi8(%o0)", op
, plen
, -6);
3811 if (reg_base
!= REG_X
)
3812 return avr_asm_len ("std %A0,%A1" CR_TAB
3813 "std %B0,%B1", op
, plen
, -2);
3815 return reg_src
== REG_X
3816 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3817 "mov __zero_reg__,r27" CR_TAB
3818 "adiw r26,%o0" CR_TAB
3819 "st X+,__tmp_reg__" CR_TAB
3820 "st X,__zero_reg__" CR_TAB
3821 "clr __zero_reg__" CR_TAB
3822 "sbiw r26,%o0+1", op
, plen
, -7)
3824 : avr_asm_len ("adiw r26,%o0" CR_TAB
3827 "sbiw r26,%o0+1", op
, plen
, -4);
3829 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3831 if (!mem_volatile_p
)
3832 return avr_asm_len ("st %0,%B1" CR_TAB
3833 "st %0,%A1", op
, plen
, -2);
3835 return REGNO (XEXP (base
, 0)) == REG_X
3836 ? avr_asm_len ("sbiw r26,2" CR_TAB
3839 "sbiw r26,1", op
, plen
, -4)
3841 : avr_asm_len ("sbiw %r0,2" CR_TAB
3843 "std %p0+1,%B1", op
, plen
, -3);
3845 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3847 return avr_asm_len ("st %0,%A1" CR_TAB
3848 "st %0,%B1", op
, plen
, -2);
3851 fatal_insn ("unknown move insn:",insn
);
3857 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
3861 rtx base
= XEXP (dest
, 0);
3862 int reg_base
= true_regnum (base
);
3863 int reg_src
= true_regnum (src
);
3866 /* "volatile" forces writing high-byte first (no-xmega) resp.
3867 low-byte first (xmega) even if less efficient, for correct
3868 operation with 16-bit I/O registers like. */
3871 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
3873 mem_volatile_p
= MEM_VOLATILE_P (dest
);
3875 if (CONSTANT_ADDRESS_P (base
))
3876 return optimize
> 0 && io_address_operand (base
, HImode
)
3877 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3878 "out %i0,%A1", op
, plen
, -2)
3880 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3881 "sts %m0,%A1", op
, plen
, -4);
3885 if (reg_base
!= REG_X
)
3886 return avr_asm_len ("std %0+1,%B1" CR_TAB
3887 "st %0,%A1", op
, plen
, -2);
3889 if (reg_src
== REG_X
)
3890 /* "st X+,r26" and "st -X,r26" are undefined. */
3891 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
3892 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3895 "st X,__tmp_reg__", op
, plen
, -4)
3897 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3899 "st X,__tmp_reg__" CR_TAB
3901 "st X,r26", op
, plen
, -5);
3903 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
3904 ? avr_asm_len ("st X+,%A1" CR_TAB
3905 "st X,%B1", op
, plen
, -2)
3906 : avr_asm_len ("adiw r26,1" CR_TAB
3908 "st -X,%A1", op
, plen
, -3);
3910 else if (GET_CODE (base
) == PLUS
)
3912 int disp
= INTVAL (XEXP (base
, 1));
3913 reg_base
= REGNO (XEXP (base
, 0));
3914 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3916 if (reg_base
!= REG_Y
)
3917 fatal_insn ("incorrect insn:",insn
);
3919 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
3920 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3921 "std Y+63,%B1" CR_TAB
3922 "std Y+62,%A1" CR_TAB
3923 "sbiw r28,%o0-62", op
, plen
, -4)
3925 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3926 "sbci r29,hi8(-%o0)" CR_TAB
3927 "std Y+1,%B1" CR_TAB
3929 "subi r28,lo8(%o0)" CR_TAB
3930 "sbci r29,hi8(%o0)", op
, plen
, -6);
3933 if (reg_base
!= REG_X
)
3934 return avr_asm_len ("std %B0,%B1" CR_TAB
3935 "std %A0,%A1", op
, plen
, -2);
3937 return reg_src
== REG_X
3938 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3939 "mov __zero_reg__,r27" CR_TAB
3940 "adiw r26,%o0+1" CR_TAB
3941 "st X,__zero_reg__" CR_TAB
3942 "st -X,__tmp_reg__" CR_TAB
3943 "clr __zero_reg__" CR_TAB
3944 "sbiw r26,%o0", op
, plen
, -7)
3946 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3949 "sbiw r26,%o0", op
, plen
, -4);
3951 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3953 return avr_asm_len ("st %0,%B1" CR_TAB
3954 "st %0,%A1", op
, plen
, -2);
3956 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3958 if (!mem_volatile_p
)
3959 return avr_asm_len ("st %0,%A1" CR_TAB
3960 "st %0,%B1", op
, plen
, -2);
3962 return REGNO (XEXP (base
, 0)) == REG_X
3963 ? avr_asm_len ("adiw r26,1" CR_TAB
3966 "adiw r26,2", op
, plen
, -4)
3968 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3970 "adiw %r0,2", op
, plen
, -3);
3972 fatal_insn ("unknown move insn:",insn
);
3976 /* Return 1 if frame pointer for current function required. */
3979 avr_frame_pointer_required_p (void)
3981 return (cfun
->calls_alloca
3982 || cfun
->calls_setjmp
3983 || cfun
->has_nonlocal_label
3984 || crtl
->args
.info
.nregs
== 0
3985 || get_frame_size () > 0);
3988 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3991 compare_condition (rtx insn
)
3993 rtx next
= next_real_insn (insn
);
3995 if (next
&& JUMP_P (next
))
3997 rtx pat
= PATTERN (next
);
3998 rtx src
= SET_SRC (pat
);
4000 if (IF_THEN_ELSE
== GET_CODE (src
))
4001 return GET_CODE (XEXP (src
, 0));
4008 /* Returns true iff INSN is a tst insn that only tests the sign. */
4011 compare_sign_p (rtx insn
)
4013 RTX_CODE cond
= compare_condition (insn
);
4014 return (cond
== GE
|| cond
== LT
);
4018 /* Returns true iff the next insn is a JUMP_INSN with a condition
4019 that needs to be swapped (GT, GTU, LE, LEU). */
4022 compare_diff_p (rtx insn
)
4024 RTX_CODE cond
= compare_condition (insn
);
4025 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4028 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4031 compare_eq_p (rtx insn
)
4033 RTX_CODE cond
= compare_condition (insn
);
4034 return (cond
== EQ
|| cond
== NE
);
4038 /* Output compare instruction
4040 compare (XOP[0], XOP[1])
4042 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4043 XOP[2] is an 8-bit scratch register as needed.
4045 PLEN == NULL: Output instructions.
4046 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4047 Don't output anything. */
4050 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4052 /* Register to compare and value to compare against. */
4056 /* MODE of the comparison. */
4057 enum machine_mode mode
= GET_MODE (xreg
);
4059 /* Number of bytes to operate on. */
4060 int i
, n_bytes
= GET_MODE_SIZE (mode
);
4062 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4063 int clobber_val
= -1;
4065 gcc_assert (REG_P (xreg
));
4066 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4067 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4072 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4073 against 0 by ORing the bytes. This is one instruction shorter.
4074 Notice that DImode comparisons are always against reg:DI 18
4075 and therefore don't use this. */
4077 if (!test_hard_reg_class (LD_REGS
, xreg
)
4078 && compare_eq_p (insn
)
4079 && reg_unused_after (insn
, xreg
))
4081 if (xval
== const1_rtx
)
4083 avr_asm_len ("dec %A0" CR_TAB
4084 "or %A0,%B0", xop
, plen
, 2);
4087 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4090 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4094 else if (xval
== constm1_rtx
)
4097 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4100 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4102 return avr_asm_len ("and %A0,%B0" CR_TAB
4103 "com %A0", xop
, plen
, 2);
4107 for (i
= 0; i
< n_bytes
; i
++)
4109 /* We compare byte-wise. */
4110 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4111 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4113 /* 8-bit value to compare with this byte. */
4114 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4116 /* Registers R16..R31 can operate with immediate. */
4117 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4120 xop
[1] = gen_int_mode (val8
, QImode
);
4122 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4125 && test_hard_reg_class (ADDW_REGS
, reg8
))
4127 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4129 if (IN_RANGE (val16
, 0, 63)
4131 || reg_unused_after (insn
, xreg
)))
4133 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4139 && IN_RANGE (val16
, -63, -1)
4140 && compare_eq_p (insn
)
4141 && reg_unused_after (insn
, xreg
))
4143 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4147 /* Comparing against 0 is easy. */
4152 ? "cp %0,__zero_reg__"
4153 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4157 /* Upper registers can compare and subtract-with-carry immediates.
4158 Notice that compare instructions do the same as respective subtract
4159 instruction; the only difference is that comparisons don't write
4160 the result back to the target register. */
4166 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4169 else if (reg_unused_after (insn
, xreg
))
4171 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4176 /* Must load the value into the scratch register. */
4178 gcc_assert (REG_P (xop
[2]));
4180 if (clobber_val
!= (int) val8
)
4181 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4182 clobber_val
= (int) val8
;
4186 : "cpc %0,%2", xop
, plen
, 1);
4193 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4196 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4200 xop
[0] = gen_rtx_REG (DImode
, 18);
4204 return avr_out_compare (insn
, xop
, plen
);
4207 /* Output test instruction for HImode. */
4210 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4212 if (compare_sign_p (insn
))
4214 avr_asm_len ("tst %B0", op
, plen
, -1);
4216 else if (reg_unused_after (insn
, op
[0])
4217 && compare_eq_p (insn
))
4219 /* Faster than sbiw if we can clobber the operand. */
4220 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4224 avr_out_compare (insn
, op
, plen
);
4231 /* Output test instruction for PSImode. */
4234 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4236 if (compare_sign_p (insn
))
4238 avr_asm_len ("tst %C0", op
, plen
, -1);
4240 else if (reg_unused_after (insn
, op
[0])
4241 && compare_eq_p (insn
))
4243 /* Faster than sbiw if we can clobber the operand. */
4244 avr_asm_len ("or %A0,%B0" CR_TAB
4245 "or %A0,%C0", op
, plen
, -2);
4249 avr_out_compare (insn
, op
, plen
);
4256 /* Output test instruction for SImode. */
4259 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4261 if (compare_sign_p (insn
))
4263 avr_asm_len ("tst %D0", op
, plen
, -1);
4265 else if (reg_unused_after (insn
, op
[0])
4266 && compare_eq_p (insn
))
4268 /* Faster than sbiw if we can clobber the operand. */
4269 avr_asm_len ("or %A0,%B0" CR_TAB
4271 "or %A0,%D0", op
, plen
, -3);
4275 avr_out_compare (insn
, op
, plen
);
4282 /* Generate asm equivalent for various shifts. This only handles cases
4283 that are not already carefully hand-optimized in ?sh??i3_out.
4285 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4286 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4287 OPERANDS[3] is a QImode scratch register from LD regs if
4288 available and SCRATCH, otherwise (no scratch available)
4290 TEMPL is an assembler template that shifts by one position.
4291 T_LEN is the length of this template. */
4294 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4295 int *plen
, int t_len
)
4297 bool second_label
= true;
4298 bool saved_in_tmp
= false;
4299 bool use_zero_reg
= false;
4302 op
[0] = operands
[0];
4303 op
[1] = operands
[1];
4304 op
[2] = operands
[2];
4305 op
[3] = operands
[3];
4310 if (CONST_INT_P (operands
[2]))
4312 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4313 && REG_P (operands
[3]));
4314 int count
= INTVAL (operands
[2]);
4315 int max_len
= 10; /* If larger than this, always use a loop. */
4320 if (count
< 8 && !scratch
)
4321 use_zero_reg
= true;
4324 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4326 if (t_len
* count
<= max_len
)
4328 /* Output shifts inline with no loop - faster. */
4331 avr_asm_len (templ
, op
, plen
, t_len
);
4338 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4340 else if (use_zero_reg
)
4342 /* Hack to save one word: use __zero_reg__ as loop counter.
4343 Set one bit, then shift in a loop until it is 0 again. */
4345 op
[3] = zero_reg_rtx
;
4347 avr_asm_len ("set" CR_TAB
4348 "bld %3,%2-1", op
, plen
, 2);
4352 /* No scratch register available, use one from LD_REGS (saved in
4353 __tmp_reg__) that doesn't overlap with registers to shift. */
4355 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4356 op
[4] = tmp_reg_rtx
;
4357 saved_in_tmp
= true;
4359 avr_asm_len ("mov %4,%3" CR_TAB
4360 "ldi %3,%2", op
, plen
, 2);
4363 second_label
= false;
4365 else if (MEM_P (op
[2]))
4369 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4372 out_movqi_r_mr (insn
, op_mov
, plen
);
4374 else if (register_operand (op
[2], QImode
))
4378 if (!reg_unused_after (insn
, op
[2])
4379 || reg_overlap_mentioned_p (op
[0], op
[2]))
4381 op
[3] = tmp_reg_rtx
;
4382 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4386 fatal_insn ("bad shift insn:", insn
);
4389 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4391 avr_asm_len ("1:", op
, plen
, 0);
4392 avr_asm_len (templ
, op
, plen
, t_len
);
4395 avr_asm_len ("2:", op
, plen
, 0);
4397 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4398 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4401 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4405 /* 8bit shift left ((char)x << i) */
4408 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4410 if (GET_CODE (operands
[2]) == CONST_INT
)
4417 switch (INTVAL (operands
[2]))
4420 if (INTVAL (operands
[2]) < 8)
4432 return ("lsl %0" CR_TAB
4437 return ("lsl %0" CR_TAB
4442 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4445 return ("swap %0" CR_TAB
4449 return ("lsl %0" CR_TAB
4455 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4458 return ("swap %0" CR_TAB
4463 return ("lsl %0" CR_TAB
4470 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4473 return ("swap %0" CR_TAB
4479 return ("lsl %0" CR_TAB
4488 return ("ror %0" CR_TAB
4493 else if (CONSTANT_P (operands
[2]))
4494 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4496 out_shift_with_cnt ("lsl %0",
4497 insn
, operands
, len
, 1);
4502 /* 16bit shift left ((short)x << i) */
4505 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4507 if (GET_CODE (operands
[2]) == CONST_INT
)
4509 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4510 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4517 switch (INTVAL (operands
[2]))
4520 if (INTVAL (operands
[2]) < 16)
4524 return ("clr %B0" CR_TAB
4528 if (optimize_size
&& scratch
)
4533 return ("swap %A0" CR_TAB
4535 "andi %B0,0xf0" CR_TAB
4536 "eor %B0,%A0" CR_TAB
4537 "andi %A0,0xf0" CR_TAB
4543 return ("swap %A0" CR_TAB
4545 "ldi %3,0xf0" CR_TAB
4547 "eor %B0,%A0" CR_TAB
4551 break; /* optimize_size ? 6 : 8 */
4555 break; /* scratch ? 5 : 6 */
4559 return ("lsl %A0" CR_TAB
4563 "andi %B0,0xf0" CR_TAB
4564 "eor %B0,%A0" CR_TAB
4565 "andi %A0,0xf0" CR_TAB
4571 return ("lsl %A0" CR_TAB
4575 "ldi %3,0xf0" CR_TAB
4577 "eor %B0,%A0" CR_TAB
4585 break; /* scratch ? 5 : 6 */
4587 return ("clr __tmp_reg__" CR_TAB
4590 "ror __tmp_reg__" CR_TAB
4593 "ror __tmp_reg__" CR_TAB
4594 "mov %B0,%A0" CR_TAB
4595 "mov %A0,__tmp_reg__");
4599 return ("lsr %B0" CR_TAB
4600 "mov %B0,%A0" CR_TAB
4606 return *len
= 2, ("mov %B0,%A1" CR_TAB
4611 return ("mov %B0,%A0" CR_TAB
4617 return ("mov %B0,%A0" CR_TAB
4624 return ("mov %B0,%A0" CR_TAB
4634 return ("mov %B0,%A0" CR_TAB
4642 return ("mov %B0,%A0" CR_TAB
4645 "ldi %3,0xf0" CR_TAB
4649 return ("mov %B0,%A0" CR_TAB
4660 return ("mov %B0,%A0" CR_TAB
4666 if (AVR_HAVE_MUL
&& scratch
)
4669 return ("ldi %3,0x20" CR_TAB
4673 "clr __zero_reg__");
4675 if (optimize_size
&& scratch
)
4680 return ("mov %B0,%A0" CR_TAB
4684 "ldi %3,0xe0" CR_TAB
4690 return ("set" CR_TAB
4695 "clr __zero_reg__");
4698 return ("mov %B0,%A0" CR_TAB
4707 if (AVR_HAVE_MUL
&& ldi_ok
)
4710 return ("ldi %B0,0x40" CR_TAB
4711 "mul %A0,%B0" CR_TAB
4714 "clr __zero_reg__");
4716 if (AVR_HAVE_MUL
&& scratch
)
4719 return ("ldi %3,0x40" CR_TAB
4723 "clr __zero_reg__");
4725 if (optimize_size
&& ldi_ok
)
4728 return ("mov %B0,%A0" CR_TAB
4729 "ldi %A0,6" "\n1:\t"
4734 if (optimize_size
&& scratch
)
4737 return ("clr %B0" CR_TAB
4746 return ("clr %B0" CR_TAB
4753 out_shift_with_cnt ("lsl %A0" CR_TAB
4754 "rol %B0", insn
, operands
, len
, 2);
4759 /* 24-bit shift left */
4762 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
4767 if (CONST_INT_P (op
[2]))
4769 switch (INTVAL (op
[2]))
4772 if (INTVAL (op
[2]) < 24)
4775 return avr_asm_len ("clr %A0" CR_TAB
4777 "clr %C0", op
, plen
, 3);
4781 int reg0
= REGNO (op
[0]);
4782 int reg1
= REGNO (op
[1]);
4785 return avr_asm_len ("mov %C0,%B1" CR_TAB
4786 "mov %B0,%A1" CR_TAB
4787 "clr %A0", op
, plen
, 3);
4789 return avr_asm_len ("clr %A0" CR_TAB
4790 "mov %B0,%A1" CR_TAB
4791 "mov %C0,%B1", op
, plen
, 3);
4796 int reg0
= REGNO (op
[0]);
4797 int reg1
= REGNO (op
[1]);
4799 if (reg0
+ 2 != reg1
)
4800 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
4802 return avr_asm_len ("clr %B0" CR_TAB
4803 "clr %A0", op
, plen
, 2);
4807 return avr_asm_len ("clr %C0" CR_TAB
4811 "clr %A0", op
, plen
, 5);
4815 out_shift_with_cnt ("lsl %A0" CR_TAB
4817 "rol %C0", insn
, op
, plen
, 3);
4822 /* 32bit shift left ((long)x << i) */
4825 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
4827 if (GET_CODE (operands
[2]) == CONST_INT
)
4835 switch (INTVAL (operands
[2]))
4838 if (INTVAL (operands
[2]) < 32)
4842 return *len
= 3, ("clr %D0" CR_TAB
4846 return ("clr %D0" CR_TAB
4853 int reg0
= true_regnum (operands
[0]);
4854 int reg1
= true_regnum (operands
[1]);
4857 return ("mov %D0,%C1" CR_TAB
4858 "mov %C0,%B1" CR_TAB
4859 "mov %B0,%A1" CR_TAB
4862 return ("clr %A0" CR_TAB
4863 "mov %B0,%A1" CR_TAB
4864 "mov %C0,%B1" CR_TAB
4870 int reg0
= true_regnum (operands
[0]);
4871 int reg1
= true_regnum (operands
[1]);
4872 if (reg0
+ 2 == reg1
)
4873 return *len
= 2, ("clr %B0" CR_TAB
4876 return *len
= 3, ("movw %C0,%A1" CR_TAB
4880 return *len
= 4, ("mov %C0,%A1" CR_TAB
4881 "mov %D0,%B1" CR_TAB
4888 return ("mov %D0,%A1" CR_TAB
4895 return ("clr %D0" CR_TAB
4904 out_shift_with_cnt ("lsl %A0" CR_TAB
4907 "rol %D0", insn
, operands
, len
, 4);
4911 /* 8bit arithmetic shift right ((signed char)x >> i) */
4914 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
4916 if (GET_CODE (operands
[2]) == CONST_INT
)
4923 switch (INTVAL (operands
[2]))
4931 return ("asr %0" CR_TAB
4936 return ("asr %0" CR_TAB
4942 return ("asr %0" CR_TAB
4949 return ("asr %0" CR_TAB
4957 return ("bst %0,6" CR_TAB
4963 if (INTVAL (operands
[2]) < 8)
4970 return ("lsl %0" CR_TAB
4974 else if (CONSTANT_P (operands
[2]))
4975 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4977 out_shift_with_cnt ("asr %0",
4978 insn
, operands
, len
, 1);
4983 /* 16bit arithmetic shift right ((signed short)x >> i) */
4986 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
4988 if (GET_CODE (operands
[2]) == CONST_INT
)
4990 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4991 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4998 switch (INTVAL (operands
[2]))
5002 /* XXX try to optimize this too? */
5007 break; /* scratch ? 5 : 6 */
5009 return ("mov __tmp_reg__,%A0" CR_TAB
5010 "mov %A0,%B0" CR_TAB
5011 "lsl __tmp_reg__" CR_TAB
5013 "sbc %B0,%B0" CR_TAB
5014 "lsl __tmp_reg__" CR_TAB
5020 return ("lsl %A0" CR_TAB
5021 "mov %A0,%B0" CR_TAB
5027 int reg0
= true_regnum (operands
[0]);
5028 int reg1
= true_regnum (operands
[1]);
5031 return *len
= 3, ("mov %A0,%B0" CR_TAB
5035 return *len
= 4, ("mov %A0,%B1" CR_TAB
5043 return ("mov %A0,%B0" CR_TAB
5045 "sbc %B0,%B0" CR_TAB
5050 return ("mov %A0,%B0" CR_TAB
5052 "sbc %B0,%B0" CR_TAB
5057 if (AVR_HAVE_MUL
&& ldi_ok
)
5060 return ("ldi %A0,0x20" CR_TAB
5061 "muls %B0,%A0" CR_TAB
5063 "sbc %B0,%B0" CR_TAB
5064 "clr __zero_reg__");
5066 if (optimize_size
&& scratch
)
5069 return ("mov %A0,%B0" CR_TAB
5071 "sbc %B0,%B0" CR_TAB
5077 if (AVR_HAVE_MUL
&& ldi_ok
)
5080 return ("ldi %A0,0x10" CR_TAB
5081 "muls %B0,%A0" CR_TAB
5083 "sbc %B0,%B0" CR_TAB
5084 "clr __zero_reg__");
5086 if (optimize_size
&& scratch
)
5089 return ("mov %A0,%B0" CR_TAB
5091 "sbc %B0,%B0" CR_TAB
5098 if (AVR_HAVE_MUL
&& ldi_ok
)
5101 return ("ldi %A0,0x08" CR_TAB
5102 "muls %B0,%A0" CR_TAB
5104 "sbc %B0,%B0" CR_TAB
5105 "clr __zero_reg__");
5108 break; /* scratch ? 5 : 7 */
5110 return ("mov %A0,%B0" CR_TAB
5112 "sbc %B0,%B0" CR_TAB
5121 return ("lsl %B0" CR_TAB
5122 "sbc %A0,%A0" CR_TAB
5124 "mov %B0,%A0" CR_TAB
5128 if (INTVAL (operands
[2]) < 16)
5134 return *len
= 3, ("lsl %B0" CR_TAB
5135 "sbc %A0,%A0" CR_TAB
5140 out_shift_with_cnt ("asr %B0" CR_TAB
5141 "ror %A0", insn
, operands
, len
, 2);
5146 /* 24-bit arithmetic shift right */
5149 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5151 int dest
= REGNO (op
[0]);
5152 int src
= REGNO (op
[1]);
5154 if (CONST_INT_P (op
[2]))
5159 switch (INTVAL (op
[2]))
5163 return avr_asm_len ("mov %A0,%B1" CR_TAB
5164 "mov %B0,%C1" CR_TAB
5167 "dec %C0", op
, plen
, 5);
5169 return avr_asm_len ("clr %C0" CR_TAB
5172 "mov %B0,%C1" CR_TAB
5173 "mov %A0,%B1", op
, plen
, 5);
5176 if (dest
!= src
+ 2)
5177 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5179 return avr_asm_len ("clr %B0" CR_TAB
5182 "mov %C0,%B0", op
, plen
, 4);
5185 if (INTVAL (op
[2]) < 24)
5191 return avr_asm_len ("lsl %C0" CR_TAB
5192 "sbc %A0,%A0" CR_TAB
5193 "mov %B0,%A0" CR_TAB
5194 "mov %C0,%A0", op
, plen
, 4);
5198 out_shift_with_cnt ("asr %C0" CR_TAB
5200 "ror %A0", insn
, op
, plen
, 3);
5205 /* 32bit arithmetic shift right ((signed long)x >> i) */
5208 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5210 if (GET_CODE (operands
[2]) == CONST_INT
)
5218 switch (INTVAL (operands
[2]))
5222 int reg0
= true_regnum (operands
[0]);
5223 int reg1
= true_regnum (operands
[1]);
5226 return ("mov %A0,%B1" CR_TAB
5227 "mov %B0,%C1" CR_TAB
5228 "mov %C0,%D1" CR_TAB
5233 return ("clr %D0" CR_TAB
5236 "mov %C0,%D1" CR_TAB
5237 "mov %B0,%C1" CR_TAB
5243 int reg0
= true_regnum (operands
[0]);
5244 int reg1
= true_regnum (operands
[1]);
5246 if (reg0
== reg1
+ 2)
5247 return *len
= 4, ("clr %D0" CR_TAB
5252 return *len
= 5, ("movw %A0,%C1" CR_TAB
5258 return *len
= 6, ("mov %B0,%D1" CR_TAB
5259 "mov %A0,%C1" CR_TAB
5267 return *len
= 6, ("mov %A0,%D1" CR_TAB
5271 "mov %B0,%D0" CR_TAB
5275 if (INTVAL (operands
[2]) < 32)
5282 return *len
= 4, ("lsl %D0" CR_TAB
5283 "sbc %A0,%A0" CR_TAB
5284 "mov %B0,%A0" CR_TAB
5287 return *len
= 5, ("lsl %D0" CR_TAB
5288 "sbc %A0,%A0" CR_TAB
5289 "mov %B0,%A0" CR_TAB
5290 "mov %C0,%A0" CR_TAB
5295 out_shift_with_cnt ("asr %D0" CR_TAB
5298 "ror %A0", insn
, operands
, len
, 4);
5302 /* 8bit logic shift right ((unsigned char)x >> i) */
5305 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5307 if (GET_CODE (operands
[2]) == CONST_INT
)
5314 switch (INTVAL (operands
[2]))
5317 if (INTVAL (operands
[2]) < 8)
5329 return ("lsr %0" CR_TAB
5333 return ("lsr %0" CR_TAB
5338 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5341 return ("swap %0" CR_TAB
5345 return ("lsr %0" CR_TAB
5351 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5354 return ("swap %0" CR_TAB
5359 return ("lsr %0" CR_TAB
5366 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5369 return ("swap %0" CR_TAB
5375 return ("lsr %0" CR_TAB
5384 return ("rol %0" CR_TAB
5389 else if (CONSTANT_P (operands
[2]))
5390 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5392 out_shift_with_cnt ("lsr %0",
5393 insn
, operands
, len
, 1);
5397 /* 16bit logic shift right ((unsigned short)x >> i) */
5400 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5402 if (GET_CODE (operands
[2]) == CONST_INT
)
5404 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5405 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5412 switch (INTVAL (operands
[2]))
5415 if (INTVAL (operands
[2]) < 16)
5419 return ("clr %B0" CR_TAB
5423 if (optimize_size
&& scratch
)
5428 return ("swap %B0" CR_TAB
5430 "andi %A0,0x0f" CR_TAB
5431 "eor %A0,%B0" CR_TAB
5432 "andi %B0,0x0f" CR_TAB
5438 return ("swap %B0" CR_TAB
5440 "ldi %3,0x0f" CR_TAB
5442 "eor %A0,%B0" CR_TAB
5446 break; /* optimize_size ? 6 : 8 */
5450 break; /* scratch ? 5 : 6 */
5454 return ("lsr %B0" CR_TAB
5458 "andi %A0,0x0f" CR_TAB
5459 "eor %A0,%B0" CR_TAB
5460 "andi %B0,0x0f" CR_TAB
5466 return ("lsr %B0" CR_TAB
5470 "ldi %3,0x0f" CR_TAB
5472 "eor %A0,%B0" CR_TAB
5480 break; /* scratch ? 5 : 6 */
5482 return ("clr __tmp_reg__" CR_TAB
5485 "rol __tmp_reg__" CR_TAB
5488 "rol __tmp_reg__" CR_TAB
5489 "mov %A0,%B0" CR_TAB
5490 "mov %B0,__tmp_reg__");
5494 return ("lsl %A0" CR_TAB
5495 "mov %A0,%B0" CR_TAB
5497 "sbc %B0,%B0" CR_TAB
5501 return *len
= 2, ("mov %A0,%B1" CR_TAB
5506 return ("mov %A0,%B0" CR_TAB
5512 return ("mov %A0,%B0" CR_TAB
5519 return ("mov %A0,%B0" CR_TAB
5529 return ("mov %A0,%B0" CR_TAB
5537 return ("mov %A0,%B0" CR_TAB
5540 "ldi %3,0x0f" CR_TAB
5544 return ("mov %A0,%B0" CR_TAB
5555 return ("mov %A0,%B0" CR_TAB
5561 if (AVR_HAVE_MUL
&& scratch
)
5564 return ("ldi %3,0x08" CR_TAB
5568 "clr __zero_reg__");
5570 if (optimize_size
&& scratch
)
5575 return ("mov %A0,%B0" CR_TAB
5579 "ldi %3,0x07" CR_TAB
5585 return ("set" CR_TAB
5590 "clr __zero_reg__");
5593 return ("mov %A0,%B0" CR_TAB
5602 if (AVR_HAVE_MUL
&& ldi_ok
)
5605 return ("ldi %A0,0x04" CR_TAB
5606 "mul %B0,%A0" CR_TAB
5609 "clr __zero_reg__");
5611 if (AVR_HAVE_MUL
&& scratch
)
5614 return ("ldi %3,0x04" CR_TAB
5618 "clr __zero_reg__");
5620 if (optimize_size
&& ldi_ok
)
5623 return ("mov %A0,%B0" CR_TAB
5624 "ldi %B0,6" "\n1:\t"
5629 if (optimize_size
&& scratch
)
5632 return ("clr %A0" CR_TAB
5641 return ("clr %A0" CR_TAB
5648 out_shift_with_cnt ("lsr %B0" CR_TAB
5649 "ror %A0", insn
, operands
, len
, 2);
5654 /* 24-bit logic shift right */
5657 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5659 int dest
= REGNO (op
[0]);
5660 int src
= REGNO (op
[1]);
5662 if (CONST_INT_P (op
[2]))
5667 switch (INTVAL (op
[2]))
5671 return avr_asm_len ("mov %A0,%B1" CR_TAB
5672 "mov %B0,%C1" CR_TAB
5673 "clr %C0", op
, plen
, 3);
5675 return avr_asm_len ("clr %C0" CR_TAB
5676 "mov %B0,%C1" CR_TAB
5677 "mov %A0,%B1", op
, plen
, 3);
5680 if (dest
!= src
+ 2)
5681 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5683 return avr_asm_len ("clr %B0" CR_TAB
5684 "clr %C0", op
, plen
, 2);
5687 if (INTVAL (op
[2]) < 24)
5693 return avr_asm_len ("clr %A0" CR_TAB
5697 "clr %C0", op
, plen
, 5);
5701 out_shift_with_cnt ("lsr %C0" CR_TAB
5703 "ror %A0", insn
, op
, plen
, 3);
5708 /* 32bit logic shift right ((unsigned int)x >> i) */
5711 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
5713 if (GET_CODE (operands
[2]) == CONST_INT
)
5721 switch (INTVAL (operands
[2]))
5724 if (INTVAL (operands
[2]) < 32)
5728 return *len
= 3, ("clr %D0" CR_TAB
5732 return ("clr %D0" CR_TAB
5739 int reg0
= true_regnum (operands
[0]);
5740 int reg1
= true_regnum (operands
[1]);
5743 return ("mov %A0,%B1" CR_TAB
5744 "mov %B0,%C1" CR_TAB
5745 "mov %C0,%D1" CR_TAB
5748 return ("clr %D0" CR_TAB
5749 "mov %C0,%D1" CR_TAB
5750 "mov %B0,%C1" CR_TAB
5756 int reg0
= true_regnum (operands
[0]);
5757 int reg1
= true_regnum (operands
[1]);
5759 if (reg0
== reg1
+ 2)
5760 return *len
= 2, ("clr %C0" CR_TAB
5763 return *len
= 3, ("movw %A0,%C1" CR_TAB
5767 return *len
= 4, ("mov %B0,%D1" CR_TAB
5768 "mov %A0,%C1" CR_TAB
5774 return *len
= 4, ("mov %A0,%D1" CR_TAB
5781 return ("clr %A0" CR_TAB
5790 out_shift_with_cnt ("lsr %D0" CR_TAB
5793 "ror %A0", insn
, operands
, len
, 4);
5798 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5800 XOP[0] = XOP[0] + XOP[2]
5802 and return "". If PLEN == NULL, print assembler instructions to perform the
5803 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5804 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5805 CODE == PLUS: perform addition by using ADD instructions.
5806 CODE == MINUS: perform addition by using SUB instructions.
5807 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5810 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
)
5812 /* MODE of the operation. */
5813 enum machine_mode mode
= GET_MODE (xop
[0]);
5815 /* Number of bytes to operate on. */
5816 int i
, n_bytes
= GET_MODE_SIZE (mode
);
5818 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5819 int clobber_val
= -1;
5821 /* op[0]: 8-bit destination register
5822 op[1]: 8-bit const int
5823 op[2]: 8-bit scratch register */
5826 /* Started the operation? Before starting the operation we may skip
5827 adding 0. This is no more true after the operation started because
5828 carry must be taken into account. */
5829 bool started
= false;
5831 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5834 /* Except in the case of ADIW with 16-bit register (see below)
5835 addition does not set cc0 in a usable way. */
5837 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
5840 xval
= simplify_unary_operation (NEG
, mode
, xval
, mode
);
5847 for (i
= 0; i
< n_bytes
; i
++)
5849 /* We operate byte-wise on the destination. */
5850 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
5851 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
5853 /* 8-bit value to operate with this byte. */
5854 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
5856 /* Registers R16..R31 can operate with immediate. */
5857 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
5860 op
[1] = gen_int_mode (val8
, QImode
);
5862 /* To get usable cc0 no low-bytes must have been skipped. */
5870 && test_hard_reg_class (ADDW_REGS
, reg8
))
5872 rtx xval16
= simplify_gen_subreg (HImode
, xval
, mode
, i
);
5873 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
5875 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5876 i.e. operate word-wise. */
5883 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
5886 if (n_bytes
== 2 && PLUS
== code
)
5898 avr_asm_len (code
== PLUS
5899 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5903 else if ((val8
== 1 || val8
== 0xff)
5905 && i
== n_bytes
- 1)
5907 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
5916 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
5918 if (clobber_val
!= (int) val8
)
5919 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
5920 clobber_val
= (int) val8
;
5922 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
5929 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
5932 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
5934 if (clobber_val
!= (int) val8
)
5935 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
5936 clobber_val
= (int) val8
;
5938 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
5950 } /* for all sub-bytes */
5952 /* No output doesn't change cc0. */
5954 if (plen
&& *plen
== 0)
5959 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5961 XOP[0] = XOP[0] + XOP[2]
5963 and return "". If PLEN == NULL, print assembler instructions to perform the
5964 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5965 words) printed with PLEN == NULL.
5966 If PCC != 0 then set *PCC to the instruction sequence's effect on the
5967 condition code (with respect to XOP[0]). */
5970 avr_out_plus (rtx
*xop
, int *plen
, int *pcc
)
5972 int len_plus
, len_minus
;
5973 int cc_plus
, cc_minus
, cc_dummy
;
5978 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5980 avr_out_plus_1 (xop
, &len_plus
, PLUS
, &cc_plus
);
5981 avr_out_plus_1 (xop
, &len_minus
, MINUS
, &cc_minus
);
5983 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5987 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
5988 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
5990 else if (len_minus
<= len_plus
)
5991 avr_out_plus_1 (xop
, NULL
, MINUS
, pcc
);
5993 avr_out_plus_1 (xop
, NULL
, PLUS
, pcc
);
5999 /* Same as above but XOP has just 3 entries.
6000 Supply a dummy 4th operand. */
6003 avr_out_plus_noclobber (rtx
*xop
, int *plen
, int *pcc
)
6012 return avr_out_plus (op
, plen
, pcc
);
6016 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6019 avr_out_plus64 (rtx addend
, int *plen
)
6024 op
[0] = gen_rtx_REG (DImode
, 18);
6029 avr_out_plus_1 (op
, plen
, MINUS
, &cc_dummy
);
6034 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6035 time constant XOP[2]:
6037 XOP[0] = XOP[0] <op> XOP[2]
6039 and return "". If PLEN == NULL, print assembler instructions to perform the
6040 operation; otherwise, set *PLEN to the length of the instruction sequence
6041 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6042 register or SCRATCH if no clobber register is needed for the operation. */
6045 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6047 /* CODE and MODE of the operation. */
6048 enum rtx_code code
= GET_CODE (SET_SRC (single_set (insn
)));
6049 enum machine_mode mode
= GET_MODE (xop
[0]);
6051 /* Number of bytes to operate on. */
6052 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6054 /* Value of T-flag (0 or 1) or -1 if unknown. */
6057 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6058 int clobber_val
= -1;
6060 /* op[0]: 8-bit destination register
6061 op[1]: 8-bit const int
6062 op[2]: 8-bit clobber register or SCRATCH
6063 op[3]: 8-bit register containing 0xff or NULL_RTX */
6072 for (i
= 0; i
< n_bytes
; i
++)
6074 /* We operate byte-wise on the destination. */
6075 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6076 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6078 /* 8-bit value to operate with this byte. */
6079 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6081 /* Number of bits set in the current byte of the constant. */
6082 int pop8
= avr_popcount (val8
);
6084 /* Registers R16..R31 can operate with immediate. */
6085 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6088 op
[1] = GEN_INT (val8
);
6097 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6101 avr_asm_len ("set", op
, plen
, 1);
6104 op
[1] = GEN_INT (exact_log2 (val8
));
6105 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6109 if (op
[3] != NULL_RTX
)
6110 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6112 avr_asm_len ("clr %0" CR_TAB
6113 "dec %0", op
, plen
, 2);
6119 if (clobber_val
!= (int) val8
)
6120 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6121 clobber_val
= (int) val8
;
6123 avr_asm_len ("or %0,%2", op
, plen
, 1);
6133 avr_asm_len ("clr %0", op
, plen
, 1);
6135 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6139 avr_asm_len ("clt", op
, plen
, 1);
6142 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6143 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6147 if (clobber_val
!= (int) val8
)
6148 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6149 clobber_val
= (int) val8
;
6151 avr_asm_len ("and %0,%2", op
, plen
, 1);
6161 avr_asm_len ("com %0", op
, plen
, 1);
6162 else if (ld_reg_p
&& val8
== (1 << 7))
6163 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6166 if (clobber_val
!= (int) val8
)
6167 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6168 clobber_val
= (int) val8
;
6170 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6176 /* Unknown rtx_code */
6179 } /* for all sub-bytes */
6185 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6186 PLEN != NULL: Set *PLEN to the length of that sequence.
6190 avr_out_addto_sp (rtx
*op
, int *plen
)
6192 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6193 int addend
= INTVAL (op
[0]);
6200 if (flag_verbose_asm
|| flag_print_asm_name
)
6201 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
6203 while (addend
<= -pc_len
)
6206 avr_asm_len ("rcall .", op
, plen
, 1);
6209 while (addend
++ < 0)
6210 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
6212 else if (addend
> 0)
6214 if (flag_verbose_asm
|| flag_print_asm_name
)
6215 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
6217 while (addend
-- > 0)
6218 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
6225 /* Create RTL split patterns for byte sized rotate expressions. This
6226 produces a series of move instructions and considers overlap situations.
6227 Overlapping non-HImode operands need a scratch register. */
6230 avr_rotate_bytes (rtx operands
[])
6233 enum machine_mode mode
= GET_MODE (operands
[0]);
6234 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
6235 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
6236 int num
= INTVAL (operands
[2]);
6237 rtx scratch
= operands
[3];
6238 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6239 Word move if no scratch is needed, otherwise use size of scratch. */
6240 enum machine_mode move_mode
= QImode
;
6241 int move_size
, offset
, size
;
6245 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
6248 move_mode
= GET_MODE (scratch
);
6250 /* Force DI rotate to use QI moves since other DI moves are currently split
6251 into QI moves so forward propagation works better. */
6254 /* Make scratch smaller if needed. */
6255 if (SCRATCH
!= GET_CODE (scratch
)
6256 && HImode
== GET_MODE (scratch
)
6257 && QImode
== move_mode
)
6258 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
6260 move_size
= GET_MODE_SIZE (move_mode
);
6261 /* Number of bytes/words to rotate. */
6262 offset
= (num
>> 3) / move_size
;
6263 /* Number of moves needed. */
6264 size
= GET_MODE_SIZE (mode
) / move_size
;
6265 /* Himode byte swap is special case to avoid a scratch register. */
6266 if (mode
== HImode
&& same_reg
)
6268 /* HImode byte swap, using xor. This is as quick as using scratch. */
6270 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
6271 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
6272 if (!rtx_equal_p (dst
, src
))
6274 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6275 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
6276 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6281 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6282 /* Create linked list of moves to determine move order. */
6286 } move
[MAX_SIZE
+ 8];
6289 gcc_assert (size
<= MAX_SIZE
);
6290 /* Generate list of subreg moves. */
6291 for (i
= 0; i
< size
; i
++)
6294 int to
= (from
+ offset
) % size
;
6295 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
6296 mode
, from
* move_size
);
6297 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
6298 mode
, to
* move_size
);
6301 /* Mark dependence where a dst of one move is the src of another move.
6302 The first move is a conflict as it must wait until second is
6303 performed. We ignore moves to self - we catch this later. */
6305 for (i
= 0; i
< size
; i
++)
6306 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
6307 for (j
= 0; j
< size
; j
++)
6308 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
6310 /* The dst of move i is the src of move j. */
6317 /* Go through move list and perform non-conflicting moves. As each
6318 non-overlapping move is made, it may remove other conflicts
6319 so the process is repeated until no conflicts remain. */
6324 /* Emit move where dst is not also a src or we have used that
6326 for (i
= 0; i
< size
; i
++)
6327 if (move
[i
].src
!= NULL_RTX
)
6329 if (move
[i
].links
== -1
6330 || move
[move
[i
].links
].src
== NULL_RTX
)
6333 /* Ignore NOP moves to self. */
6334 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
6335 emit_move_insn (move
[i
].dst
, move
[i
].src
);
6337 /* Remove conflict from list. */
6338 move
[i
].src
= NULL_RTX
;
6344 /* Check for deadlock. This is when no moves occurred and we have
6345 at least one blocked move. */
6346 if (moves
== 0 && blocked
!= -1)
6348 /* Need to use scratch register to break deadlock.
6349 Add move to put dst of blocked move into scratch.
6350 When this move occurs, it will break chain deadlock.
6351 The scratch register is substituted for real move. */
6353 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
6355 move
[size
].src
= move
[blocked
].dst
;
6356 move
[size
].dst
= scratch
;
6357 /* Scratch move is never blocked. */
6358 move
[size
].links
= -1;
6359 /* Make sure we have valid link. */
6360 gcc_assert (move
[blocked
].links
!= -1);
6361 /* Replace src of blocking move with scratch reg. */
6362 move
[move
[blocked
].links
].src
= scratch
;
6363 /* Make dependent on scratch move occurring. */
6364 move
[blocked
].links
= size
;
6368 while (blocked
!= -1);
6373 /* Modifies the length assigned to instruction INSN
6374 LEN is the initially computed length of the insn. */
6377 adjust_insn_length (rtx insn
, int len
)
6379 rtx
*op
= recog_data
.operand
;
6380 enum attr_adjust_len adjust_len
;
6382 /* Some complex insns don't need length adjustment and therefore
6383 the length need not/must not be adjusted for these insns.
6384 It is easier to state this in an insn attribute "adjust_len" than
6385 to clutter up code here... */
6387 if (-1 == recog_memoized (insn
))
6392 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6394 adjust_len
= get_attr_adjust_len (insn
);
6396 if (adjust_len
== ADJUST_LEN_NO
)
6398 /* Nothing to adjust: The length from attribute "length" is fine.
6399 This is the default. */
6404 /* Extract insn's operands. */
6406 extract_constrain_insn_cached (insn
);
6408 /* Dispatch to right function. */
6412 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
6413 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
6414 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
6416 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
6418 case ADJUST_LEN_OUT_PLUS
: avr_out_plus (op
, &len
, NULL
); break;
6419 case ADJUST_LEN_PLUS64
: avr_out_plus64 (op
[0], &len
); break;
6420 case ADJUST_LEN_OUT_PLUS_NOCLOBBER
:
6421 avr_out_plus_noclobber (op
, &len
, NULL
); break;
6423 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
6425 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
6426 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
6427 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
6428 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
6429 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
6430 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
6431 case ADJUST_LEN_LOAD_LPM
: avr_load_lpm (insn
, op
, &len
); break;
6433 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
6434 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
6435 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
6436 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
6437 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
6439 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
6440 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
6441 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
6443 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
6444 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
6445 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
6447 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
6448 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
6449 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
6451 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
6452 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
6453 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
6455 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
6457 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
6466 /* Return nonzero if register REG dead after INSN. */
6469 reg_unused_after (rtx insn
, rtx reg
)
6471 return (dead_or_set_p (insn
, reg
)
6472 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
6475 /* Return nonzero if REG is not used after INSN.
6476 We assume REG is a reload reg, and therefore does
6477 not live past labels. It may live past calls or jumps though. */
6480 _reg_unused_after (rtx insn
, rtx reg
)
6485 /* If the reg is set by this instruction, then it is safe for our
6486 case. Disregard the case where this is a store to memory, since
6487 we are checking a register used in the store address. */
6488 set
= single_set (insn
);
6489 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
6490 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6493 while ((insn
= NEXT_INSN (insn
)))
6496 code
= GET_CODE (insn
);
6499 /* If this is a label that existed before reload, then the register
6500 is dead here. However, if this is a label added by reorg, then
6501 the register may still be live here. We can't tell the difference,
6502 so we just ignore labels completely. */
6503 if (code
== CODE_LABEL
)
6511 if (code
== JUMP_INSN
)
6514 /* If this is a sequence, we must handle them all at once.
6515 We could have for instance a call that sets the target register,
6516 and an insn in a delay slot that uses the register. In this case,
6517 we must return 0. */
6518 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
6523 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
6525 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
6526 rtx set
= single_set (this_insn
);
6528 if (GET_CODE (this_insn
) == CALL_INSN
)
6530 else if (GET_CODE (this_insn
) == JUMP_INSN
)
6532 if (INSN_ANNULLED_BRANCH_P (this_insn
))
6537 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6539 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6541 if (GET_CODE (SET_DEST (set
)) != MEM
)
6547 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
6552 else if (code
== JUMP_INSN
)
6556 if (code
== CALL_INSN
)
6559 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
6560 if (GET_CODE (XEXP (tem
, 0)) == USE
6561 && REG_P (XEXP (XEXP (tem
, 0), 0))
6562 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
6564 if (call_used_regs
[REGNO (reg
)])
6568 set
= single_set (insn
);
6570 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6572 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6573 return GET_CODE (SET_DEST (set
)) != MEM
;
6574 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
6581 /* Return RTX that represents the lower 16 bits of a constant address.
6582 Unfortunately, simplify_gen_subreg does not handle this case. */
6585 avr_const_address_lo16 (rtx x
)
6589 switch (GET_CODE (x
))
6595 if (PLUS
== GET_CODE (XEXP (x
, 0))
6596 && SYMBOL_REF
== GET_CODE (XEXP (XEXP (x
, 0), 0))
6597 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
6599 HOST_WIDE_INT offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
6600 const char *name
= XSTR (XEXP (XEXP (x
, 0), 0), 0);
6602 lo16
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6603 lo16
= gen_rtx_CONST (Pmode
, plus_constant (Pmode
, lo16
, offset
));
6612 const char *name
= XSTR (x
, 0);
6614 return gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6618 avr_edump ("\n%?: %r\n", x
);
6623 /* Target hook for assembling integer objects. The AVR version needs
6624 special handling for references to certain labels. */
6627 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
6629 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
6630 && text_segment_operand (x
, VOIDmode
) )
6632 fputs ("\t.word\tgs(", asm_out_file
);
6633 output_addr_const (asm_out_file
, x
);
6634 fputs (")\n", asm_out_file
);
6638 else if (GET_MODE (x
) == PSImode
)
6640 default_assemble_integer (avr_const_address_lo16 (x
),
6641 GET_MODE_SIZE (HImode
), aligned_p
);
6643 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6644 " extension for hh8(", asm_out_file
);
6645 output_addr_const (asm_out_file
, x
);
6646 fputs (")\"\n", asm_out_file
);
6648 fputs ("\t.byte\t0\t" ASM_COMMENT_START
" hh8(", asm_out_file
);
6649 output_addr_const (asm_out_file
, x
);
6650 fputs (")\n", asm_out_file
);
6655 return default_assemble_integer (x
, size
, aligned_p
);
6659 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6662 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
6665 /* If the function has the 'signal' or 'interrupt' attribute, test to
6666 make sure that the name of the function is "__vector_NN" so as to
6667 catch when the user misspells the interrupt vector name. */
6669 if (cfun
->machine
->is_interrupt
)
6671 if (!STR_PREFIX_P (name
, "__vector"))
6673 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6674 "%qs appears to be a misspelled interrupt handler",
6678 else if (cfun
->machine
->is_signal
)
6680 if (!STR_PREFIX_P (name
, "__vector"))
6682 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6683 "%qs appears to be a misspelled signal handler",
6688 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
6689 ASM_OUTPUT_LABEL (file
, name
);
6693 /* Return value is nonzero if pseudos that have been
6694 assigned to registers of class CLASS would likely be spilled
6695 because registers of CLASS are needed for spill registers. */
6698 avr_class_likely_spilled_p (reg_class_t c
)
6700 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
6703 /* Valid attributes:
6704 progmem - put data to program memory;
6705 signal - make a function to be hardware interrupt. After function
6706 prologue interrupts are disabled;
6707 interrupt - make a function to be hardware interrupt. After function
6708 prologue interrupts are enabled;
6709 naked - don't generate function prologue/epilogue and `ret' command.
6711 Only `progmem' attribute valid for type. */
6713 /* Handle a "progmem" attribute; arguments as in
6714 struct attribute_spec.handler. */
6716 avr_handle_progmem_attribute (tree
*node
, tree name
,
6717 tree args ATTRIBUTE_UNUSED
,
6718 int flags ATTRIBUTE_UNUSED
,
6723 if (TREE_CODE (*node
) == TYPE_DECL
)
6725 /* This is really a decl attribute, not a type attribute,
6726 but try to handle it for GCC 3.0 backwards compatibility. */
6728 tree type
= TREE_TYPE (*node
);
6729 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
6730 tree newtype
= build_type_attribute_variant (type
, attr
);
6732 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
6733 TREE_TYPE (*node
) = newtype
;
6734 *no_add_attrs
= true;
6736 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
6738 *no_add_attrs
= false;
6742 warning (OPT_Wattributes
, "%qE attribute ignored",
6744 *no_add_attrs
= true;
6751 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6752 struct attribute_spec.handler. */
6755 avr_handle_fndecl_attribute (tree
*node
, tree name
,
6756 tree args ATTRIBUTE_UNUSED
,
6757 int flags ATTRIBUTE_UNUSED
,
6760 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6762 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
6764 *no_add_attrs
= true;
6771 avr_handle_fntype_attribute (tree
*node
, tree name
,
6772 tree args ATTRIBUTE_UNUSED
,
6773 int flags ATTRIBUTE_UNUSED
,
6776 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
6778 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
6780 *no_add_attrs
= true;
6787 /* AVR attributes. */
6788 static const struct attribute_spec
6789 avr_attribute_table
[] =
6791 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6792 affects_type_identity } */
6793 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
6795 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
6797 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
6799 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
6801 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
6803 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
6805 { NULL
, 0, 0, false, false, false, NULL
, false }
6809 /* Look if DECL shall be placed in program memory space by
6810 means of attribute `progmem' or some address-space qualifier.
6811 Return non-zero if DECL is data that must end up in Flash and
6812 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6814 Return 2 if DECL is located in 24-bit flash address-space
6815 Return 1 if DECL is located in 16-bit flash address-space
6816 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6817 Return 0 otherwise */
6820 avr_progmem_p (tree decl
, tree attributes
)
6824 if (TREE_CODE (decl
) != VAR_DECL
)
6827 if (avr_decl_memx_p (decl
))
6830 if (avr_decl_flash_p (decl
))
6834 != lookup_attribute ("progmem", attributes
))
6841 while (TREE_CODE (a
) == ARRAY_TYPE
);
6843 if (a
== error_mark_node
)
6846 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
6853 /* Scan type TYP for pointer references to address space ASn.
6854 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6855 the AS are also declared to be CONST.
6856 Otherwise, return the respective addres space, i.e. a value != 0. */
6859 avr_nonconst_pointer_addrspace (tree typ
)
6861 while (ARRAY_TYPE
== TREE_CODE (typ
))
6862 typ
= TREE_TYPE (typ
);
6864 if (POINTER_TYPE_P (typ
))
6867 tree target
= TREE_TYPE (typ
);
6869 /* Pointer to function: Test the function's return type. */
6871 if (FUNCTION_TYPE
== TREE_CODE (target
))
6872 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
6874 /* "Ordinary" pointers... */
6876 while (TREE_CODE (target
) == ARRAY_TYPE
)
6877 target
= TREE_TYPE (target
);
6879 /* Pointers to non-generic address space must be const.
6880 Refuse address spaces outside the device's flash. */
6882 as
= TYPE_ADDR_SPACE (target
);
6884 if (!ADDR_SPACE_GENERIC_P (as
)
6885 && (!TYPE_READONLY (target
)
6886 || avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
))
6891 /* Scan pointer's target type. */
6893 return avr_nonconst_pointer_addrspace (target
);
6896 return ADDR_SPACE_GENERIC
;
6900 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
6901 go along with CONST qualifier. Writing to these address spaces should
6902 be detected and complained about as early as possible. */
6905 avr_pgm_check_var_decl (tree node
)
6907 const char *reason
= NULL
;
6909 addr_space_t as
= ADDR_SPACE_GENERIC
;
6911 gcc_assert (as
== 0);
6913 if (avr_log
.progmem
)
6914 avr_edump ("%?: %t\n", node
);
6916 switch (TREE_CODE (node
))
6922 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
6923 reason
= "variable";
6927 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
6928 reason
= "function parameter";
6932 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
6933 reason
= "structure field";
6937 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
6939 reason
= "return type of function";
6943 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
6950 avr_edump ("%?: %s, %d, %d\n",
6951 avr_addrspace
[as
].name
,
6952 avr_addrspace
[as
].segment
, avr_current_device
->n_flash
);
6953 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
6956 error ("%qT uses address space %qs beyond flash of %qs",
6957 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
6959 error ("%s %q+D uses address space %qs beyond flash of %qs",
6960 reason
, node
, avr_addrspace
[as
].name
,
6961 avr_current_device
->name
);
6966 error ("pointer targeting address space %qs must be const in %qT",
6967 avr_addrspace
[as
].name
, node
);
6969 error ("pointer targeting address space %qs must be const"
6971 avr_addrspace
[as
].name
, reason
, node
);
6975 return reason
== NULL
;
6979 /* Add the section attribute if the variable is in progmem. */
6982 avr_insert_attributes (tree node
, tree
*attributes
)
6984 avr_pgm_check_var_decl (node
);
6986 if (TREE_CODE (node
) == VAR_DECL
6987 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
6988 && avr_progmem_p (node
, *attributes
))
6993 /* For C++, we have to peel arrays in order to get correct
6994 determination of readonlyness. */
6997 node0
= TREE_TYPE (node0
);
6998 while (TREE_CODE (node0
) == ARRAY_TYPE
);
7000 if (error_mark_node
== node0
)
7003 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
7005 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7007 error ("variable %q+D located in address space %qs"
7008 " beyond flash of %qs",
7009 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7012 if (!TYPE_READONLY (node0
)
7013 && !TREE_READONLY (node
))
7015 const char *reason
= "__attribute__((progmem))";
7017 if (!ADDR_SPACE_GENERIC_P (as
))
7018 reason
= avr_addrspace
[as
].name
;
7020 if (avr_log
.progmem
)
7021 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
7023 error ("variable %q+D must be const in order to be put into"
7024 " read-only section by means of %qs", node
, reason
);
7030 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7031 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7032 /* Track need of __do_clear_bss. */
7035 avr_asm_output_aligned_decl_common (FILE * stream
,
7036 const_tree decl ATTRIBUTE_UNUSED
,
7038 unsigned HOST_WIDE_INT size
,
7039 unsigned int align
, bool local_p
)
7041 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7042 There is no need to trigger __do_clear_bss code for them. */
7044 if (!STR_PREFIX_P (name
, "__gnu_lto"))
7045 avr_need_clear_bss_p
= true;
7048 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
7050 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
7054 /* Unnamed section callback for data_section
7055 to track need of __do_copy_data. */
7058 avr_output_data_section_asm_op (const void *data
)
7060 avr_need_copy_data_p
= true;
7062 /* Dispatch to default. */
7063 output_section_asm_op (data
);
7067 /* Unnamed section callback for bss_section
7068 to track need of __do_clear_bss. */
7071 avr_output_bss_section_asm_op (const void *data
)
7073 avr_need_clear_bss_p
= true;
7075 /* Dispatch to default. */
7076 output_section_asm_op (data
);
7080 /* Unnamed section callback for progmem*.data sections. */
7083 avr_output_progmem_section_asm_op (const void *data
)
7085 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
7086 (const char*) data
);
7090 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7093 avr_asm_init_sections (void)
7097 /* Set up a section for jump tables. Alignment is handled by
7098 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7100 if (AVR_HAVE_JMP_CALL
)
7102 progmem_swtable_section
7103 = get_unnamed_section (0, output_section_asm_op
,
7104 "\t.section\t.progmem.gcc_sw_table"
7105 ",\"a\",@progbits");
7109 progmem_swtable_section
7110 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7111 "\t.section\t.progmem.gcc_sw_table"
7112 ",\"ax\",@progbits");
7115 for (n
= 0; n
< sizeof (progmem_section
) / sizeof (*progmem_section
); n
++)
7118 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
7119 progmem_section_prefix
[n
]);
7122 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7123 resp. `avr_need_copy_data_p'. */
7125 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7126 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7127 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
7131 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7134 avr_asm_function_rodata_section (tree decl
)
7136 /* If a function is unused and optimized out by -ffunction-sections
7137 and --gc-sections, ensure that the same will happen for its jump
7138 tables by putting them into individual sections. */
7143 /* Get the frodata section from the default function in varasm.c
7144 but treat function-associated data-like jump tables as code
7145 rather than as user defined data. AVR has no constant pools. */
7147 int fdata
= flag_data_sections
;
7149 flag_data_sections
= flag_function_sections
;
7150 frodata
= default_function_rodata_section (decl
);
7151 flag_data_sections
= fdata
;
7152 flags
= frodata
->common
.flags
;
7155 if (frodata
!= readonly_data_section
7156 && flags
& SECTION_NAMED
)
7158 /* Adjust section flags and replace section name prefix. */
7162 static const char* const prefix
[] =
7164 ".rodata", ".progmem.gcc_sw_table",
7165 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7168 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
7170 const char * old_prefix
= prefix
[i
];
7171 const char * new_prefix
= prefix
[i
+1];
7172 const char * name
= frodata
->named
.name
;
7174 if (STR_PREFIX_P (name
, old_prefix
))
7176 const char *rname
= ACONCAT ((new_prefix
,
7177 name
+ strlen (old_prefix
), NULL
));
7178 flags
&= ~SECTION_CODE
;
7179 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
7181 return get_section (rname
, flags
, frodata
->named
.decl
);
7186 return progmem_swtable_section
;
7190 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7191 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7194 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
7196 if (flags
& AVR_SECTION_PROGMEM
)
7198 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
7199 int segment
= avr_addrspace
[as
].segment
;
7200 const char *old_prefix
= ".rodata";
7201 const char *new_prefix
= progmem_section_prefix
[segment
];
7203 if (STR_PREFIX_P (name
, old_prefix
))
7205 const char *sname
= ACONCAT ((new_prefix
,
7206 name
+ strlen (old_prefix
), NULL
));
7207 default_elf_asm_named_section (sname
, flags
, decl
);
7211 default_elf_asm_named_section (new_prefix
, flags
, decl
);
7215 if (!avr_need_copy_data_p
)
7216 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
7217 || STR_PREFIX_P (name
, ".rodata")
7218 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
7220 if (!avr_need_clear_bss_p
)
7221 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
7223 default_elf_asm_named_section (name
, flags
, decl
);
7227 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
7229 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
7231 if (STR_PREFIX_P (name
, ".noinit"))
7233 if (decl
&& TREE_CODE (decl
) == VAR_DECL
7234 && DECL_INITIAL (decl
) == NULL_TREE
)
7235 flags
|= SECTION_BSS
; /* @nobits */
7237 warning (0, "only uninitialized variables can be placed in the "
7241 if (decl
&& DECL_P (decl
)
7242 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7244 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7246 /* Attribute progmem puts data in generic address space.
7247 Set section flags as if it was in __flash to get the right
7248 section prefix in the remainder. */
7250 if (ADDR_SPACE_GENERIC_P (as
))
7251 as
= ADDR_SPACE_FLASH
;
7253 flags
|= as
* SECTION_MACH_DEP
;
7254 flags
&= ~SECTION_WRITE
;
7255 flags
&= ~SECTION_BSS
;
7262 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7265 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
7267 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7268 readily available, see PR34734. So we postpone the warning
7269 about uninitialized data in program memory section until here. */
7272 && decl
&& DECL_P (decl
)
7273 && NULL_TREE
== DECL_INITIAL (decl
)
7274 && !DECL_EXTERNAL (decl
)
7275 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7277 warning (OPT_Wuninitialized
,
7278 "uninitialized variable %q+D put into "
7279 "program memory area", decl
);
7282 default_encode_section_info (decl
, rtl
, new_decl_p
);
7284 if (decl
&& DECL_P (decl
)
7285 && TREE_CODE (decl
) != FUNCTION_DECL
7287 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
7289 rtx sym
= XEXP (rtl
, 0);
7290 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7292 /* PSTR strings are in generic space but located in flash:
7293 patch address space. */
7295 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7296 as
= ADDR_SPACE_FLASH
;
7298 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
7303 /* Implement `TARGET_ASM_SELECT_SECTION' */
7306 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
7308 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
7310 if (decl
&& DECL_P (decl
)
7311 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7313 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7314 int segment
= avr_addrspace
[as
].segment
;
7316 if (sect
->common
.flags
& SECTION_NAMED
)
7318 const char * name
= sect
->named
.name
;
7319 const char * old_prefix
= ".rodata";
7320 const char * new_prefix
= progmem_section_prefix
[segment
];
7322 if (STR_PREFIX_P (name
, old_prefix
))
7324 const char *sname
= ACONCAT ((new_prefix
,
7325 name
+ strlen (old_prefix
), NULL
));
7326 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
7330 return progmem_section
[segment
];
7336 /* Implement `TARGET_ASM_FILE_START'. */
7337 /* Outputs some text at the start of each assembler file. */
7340 avr_file_start (void)
7342 int sfr_offset
= avr_current_arch
->sfr_offset
;
7344 if (avr_current_arch
->asm_only
)
7345 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
7347 default_file_start ();
7349 /* Print I/O addresses of some SFRs used with IN and OUT. */
7352 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
7354 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
7355 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
7357 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
7359 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
7361 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
7363 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
7365 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
7366 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
7367 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
7371 /* Implement `TARGET_ASM_FILE_END'. */
7372 /* Outputs to the stdio stream FILE some
7373 appropriate text to go at the end of an assembler file. */
7378 /* Output these only if there is anything in the
7379 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7380 input section(s) - some code size can be saved by not
7381 linking in the initialization code from libgcc if resp.
7382 sections are empty. */
7384 if (avr_need_copy_data_p
)
7385 fputs (".global __do_copy_data\n", asm_out_file
);
7387 if (avr_need_clear_bss_p
)
7388 fputs (".global __do_clear_bss\n", asm_out_file
);
7391 /* Choose the order in which to allocate hard registers for
7392 pseudo-registers local to a basic block.
7394 Store the desired register order in the array `reg_alloc_order'.
7395 Element 0 should be the register to allocate first; element 1, the
7396 next register; and so on. */
7399 order_regs_for_local_alloc (void)
7402 static const int order_0
[] = {
7410 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7414 static const int order_1
[] = {
7422 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7426 static const int order_2
[] = {
7435 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7440 const int *order
= (TARGET_ORDER_1
? order_1
:
7441 TARGET_ORDER_2
? order_2
:
7443 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
7444 reg_alloc_order
[i
] = order
[i
];
7448 /* Implement `TARGET_REGISTER_MOVE_COST' */
7451 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
7452 reg_class_t from
, reg_class_t to
)
7454 return (from
== STACK_REG
? 6
7455 : to
== STACK_REG
? 12
7460 /* Implement `TARGET_MEMORY_MOVE_COST' */
7463 avr_memory_move_cost (enum machine_mode mode
,
7464 reg_class_t rclass ATTRIBUTE_UNUSED
,
7465 bool in ATTRIBUTE_UNUSED
)
7467 return (mode
== QImode
? 2
7468 : mode
== HImode
? 4
7469 : mode
== SImode
? 8
7470 : mode
== SFmode
? 8
7475 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7476 cost of an RTX operand given its context. X is the rtx of the
7477 operand, MODE is its mode, and OUTER is the rtx_code of this
7478 operand's parent operator. */
7481 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
7482 int opno
, bool speed
)
7484 enum rtx_code code
= GET_CODE (x
);
7495 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7502 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
7506 /* Worker function for AVR backend's rtx_cost function.
7507 X is rtx expression whose cost is to be calculated.
7508 Return true if the complete cost has been computed.
7509 Return false if subexpressions should be scanned.
7510 In either case, *TOTAL contains the cost result. */
7513 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
7514 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
7516 enum rtx_code code
= (enum rtx_code
) codearg
;
7517 enum machine_mode mode
= GET_MODE (x
);
7527 /* Immediate constants are as cheap as registers. */
7532 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7540 *total
= COSTS_N_INSNS (1);
7546 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
7552 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7560 *total
= COSTS_N_INSNS (1);
7566 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7570 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7571 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7575 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
7576 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7577 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7581 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
7582 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7583 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7591 && MULT
== GET_CODE (XEXP (x
, 0))
7592 && register_operand (XEXP (x
, 1), QImode
))
7595 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7596 /* multiply-add with constant: will be split and load constant. */
7597 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7598 *total
= COSTS_N_INSNS (1) + *total
;
7601 *total
= COSTS_N_INSNS (1);
7602 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7603 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7608 && (MULT
== GET_CODE (XEXP (x
, 0))
7609 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
7610 && register_operand (XEXP (x
, 1), HImode
)
7611 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
7612 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
7615 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7616 /* multiply-add with constant: will be split and load constant. */
7617 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7618 *total
= COSTS_N_INSNS (1) + *total
;
7621 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7623 *total
= COSTS_N_INSNS (2);
7624 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7627 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7628 *total
= COSTS_N_INSNS (1);
7630 *total
= COSTS_N_INSNS (2);
7634 if (!CONST_INT_P (XEXP (x
, 1)))
7636 *total
= COSTS_N_INSNS (3);
7637 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7640 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7641 *total
= COSTS_N_INSNS (2);
7643 *total
= COSTS_N_INSNS (3);
7647 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7649 *total
= COSTS_N_INSNS (4);
7650 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7653 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7654 *total
= COSTS_N_INSNS (1);
7656 *total
= COSTS_N_INSNS (4);
7662 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7668 && register_operand (XEXP (x
, 0), QImode
)
7669 && MULT
== GET_CODE (XEXP (x
, 1)))
7672 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7673 /* multiply-sub with constant: will be split and load constant. */
7674 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7675 *total
= COSTS_N_INSNS (1) + *total
;
7680 && register_operand (XEXP (x
, 0), HImode
)
7681 && (MULT
== GET_CODE (XEXP (x
, 1))
7682 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
7683 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
7684 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
7687 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7688 /* multiply-sub with constant: will be split and load constant. */
7689 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7690 *total
= COSTS_N_INSNS (1) + *total
;
7696 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7697 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7698 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7699 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7703 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7704 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7705 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7713 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
7715 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7723 rtx op0
= XEXP (x
, 0);
7724 rtx op1
= XEXP (x
, 1);
7725 enum rtx_code code0
= GET_CODE (op0
);
7726 enum rtx_code code1
= GET_CODE (op1
);
7727 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
7728 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
7731 && (u8_operand (op1
, HImode
)
7732 || s8_operand (op1
, HImode
)))
7734 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
7738 && register_operand (op1
, HImode
))
7740 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
7743 else if (ex0
|| ex1
)
7745 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
7748 else if (register_operand (op0
, HImode
)
7749 && (u8_operand (op1
, HImode
)
7750 || s8_operand (op1
, HImode
)))
7752 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
7756 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
7759 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7766 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7776 /* Add some additional costs besides CALL like moves etc. */
7778 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
7782 /* Just a rough estimate. Even with -O2 we don't want bulky
7783 code expanded inline. */
7785 *total
= COSTS_N_INSNS (25);
7791 *total
= COSTS_N_INSNS (300);
7793 /* Add some additional costs besides CALL like moves etc. */
7794 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
7802 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7803 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7811 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7813 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
7814 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7815 /* For div/mod with const-int divisor we have at least the cost of
7816 loading the divisor. */
7817 if (CONST_INT_P (XEXP (x
, 1)))
7818 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7819 /* Add some overall penaly for clobbering and moving around registers */
7820 *total
+= COSTS_N_INSNS (2);
7827 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
7828 *total
= COSTS_N_INSNS (1);
7833 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
7834 *total
= COSTS_N_INSNS (3);
7839 if (CONST_INT_P (XEXP (x
, 1)))
7840 switch (INTVAL (XEXP (x
, 1)))
7844 *total
= COSTS_N_INSNS (5);
7847 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
7855 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7862 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7864 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
7865 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7870 val
= INTVAL (XEXP (x
, 1));
7872 *total
= COSTS_N_INSNS (3);
7873 else if (val
>= 0 && val
<= 7)
7874 *total
= COSTS_N_INSNS (val
);
7876 *total
= COSTS_N_INSNS (1);
7883 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
7884 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
7885 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
7887 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
7892 if (const1_rtx
== (XEXP (x
, 1))
7893 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
7895 *total
= COSTS_N_INSNS (2);
7899 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7901 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
7902 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7906 switch (INTVAL (XEXP (x
, 1)))
7913 *total
= COSTS_N_INSNS (2);
7916 *total
= COSTS_N_INSNS (3);
7922 *total
= COSTS_N_INSNS (4);
7927 *total
= COSTS_N_INSNS (5);
7930 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
7933 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
7936 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
7939 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
7940 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7946 if (!CONST_INT_P (XEXP (x
, 1)))
7948 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
7951 switch (INTVAL (XEXP (x
, 1)))
7959 *total
= COSTS_N_INSNS (3);
7962 *total
= COSTS_N_INSNS (5);
7965 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
7971 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7973 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
7974 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7978 switch (INTVAL (XEXP (x
, 1)))
7984 *total
= COSTS_N_INSNS (3);
7989 *total
= COSTS_N_INSNS (4);
7992 *total
= COSTS_N_INSNS (6);
7995 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
7998 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
7999 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8007 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8014 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8016 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8017 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8022 val
= INTVAL (XEXP (x
, 1));
8024 *total
= COSTS_N_INSNS (4);
8026 *total
= COSTS_N_INSNS (2);
8027 else if (val
>= 0 && val
<= 7)
8028 *total
= COSTS_N_INSNS (val
);
8030 *total
= COSTS_N_INSNS (1);
8035 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8037 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8038 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8042 switch (INTVAL (XEXP (x
, 1)))
8048 *total
= COSTS_N_INSNS (2);
8051 *total
= COSTS_N_INSNS (3);
8057 *total
= COSTS_N_INSNS (4);
8061 *total
= COSTS_N_INSNS (5);
8064 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8067 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8071 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8074 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8075 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8081 if (!CONST_INT_P (XEXP (x
, 1)))
8083 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8086 switch (INTVAL (XEXP (x
, 1)))
8092 *total
= COSTS_N_INSNS (3);
8096 *total
= COSTS_N_INSNS (5);
8099 *total
= COSTS_N_INSNS (4);
8102 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8108 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8110 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8111 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8115 switch (INTVAL (XEXP (x
, 1)))
8121 *total
= COSTS_N_INSNS (4);
8126 *total
= COSTS_N_INSNS (6);
8129 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8132 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
8135 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8136 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8144 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8151 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8153 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8154 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8159 val
= INTVAL (XEXP (x
, 1));
8161 *total
= COSTS_N_INSNS (3);
8162 else if (val
>= 0 && val
<= 7)
8163 *total
= COSTS_N_INSNS (val
);
8165 *total
= COSTS_N_INSNS (1);
8170 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8172 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8173 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8177 switch (INTVAL (XEXP (x
, 1)))
8184 *total
= COSTS_N_INSNS (2);
8187 *total
= COSTS_N_INSNS (3);
8192 *total
= COSTS_N_INSNS (4);
8196 *total
= COSTS_N_INSNS (5);
8202 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8205 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8209 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8212 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8213 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8219 if (!CONST_INT_P (XEXP (x
, 1)))
8221 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8224 switch (INTVAL (XEXP (x
, 1)))
8232 *total
= COSTS_N_INSNS (3);
8235 *total
= COSTS_N_INSNS (5);
8238 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8244 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8246 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8247 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8251 switch (INTVAL (XEXP (x
, 1)))
8257 *total
= COSTS_N_INSNS (4);
8260 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8265 *total
= COSTS_N_INSNS (4);
8268 *total
= COSTS_N_INSNS (6);
8271 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8272 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8280 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8284 switch (GET_MODE (XEXP (x
, 0)))
8287 *total
= COSTS_N_INSNS (1);
8288 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8289 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8293 *total
= COSTS_N_INSNS (2);
8294 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8295 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8296 else if (INTVAL (XEXP (x
, 1)) != 0)
8297 *total
+= COSTS_N_INSNS (1);
8301 *total
= COSTS_N_INSNS (3);
8302 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
8303 *total
+= COSTS_N_INSNS (2);
8307 *total
= COSTS_N_INSNS (4);
8308 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8309 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8310 else if (INTVAL (XEXP (x
, 1)) != 0)
8311 *total
+= COSTS_N_INSNS (3);
8317 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8322 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
8323 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8324 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8326 if (QImode
== mode
|| HImode
== mode
)
8328 *total
= COSTS_N_INSNS (2);
8341 /* Implement `TARGET_RTX_COSTS'. */
8344 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
8345 int opno
, int *total
, bool speed
)
8347 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
8348 opno
, total
, speed
);
8350 if (avr_log
.rtx_costs
)
8352 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8353 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
8360 /* Implement `TARGET_ADDRESS_COST'. */
8363 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
8367 if (GET_CODE (x
) == PLUS
8368 && CONST_INT_P (XEXP (x
, 1))
8369 && (REG_P (XEXP (x
, 0))
8370 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
8372 if (INTVAL (XEXP (x
, 1)) >= 61)
8375 else if (CONSTANT_ADDRESS_P (x
))
8378 && io_address_operand (x
, QImode
))
8382 if (avr_log
.address_cost
)
8383 avr_edump ("\n%?: %d = %r\n", cost
, x
);
8388 /* Test for extra memory constraint 'Q'.
8389 It's a memory address based on Y or Z pointer with valid displacement. */
8392 extra_constraint_Q (rtx x
)
8396 if (GET_CODE (XEXP (x
,0)) == PLUS
8397 && REG_P (XEXP (XEXP (x
,0), 0))
8398 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
8399 && (INTVAL (XEXP (XEXP (x
,0), 1))
8400 <= MAX_LD_OFFSET (GET_MODE (x
))))
8402 rtx xx
= XEXP (XEXP (x
,0), 0);
8403 int regno
= REGNO (xx
);
8405 ok
= (/* allocate pseudos */
8406 regno
>= FIRST_PSEUDO_REGISTER
8407 /* strictly check */
8408 || regno
== REG_Z
|| regno
== REG_Y
8409 /* XXX frame & arg pointer checks */
8410 || xx
== frame_pointer_rtx
8411 || xx
== arg_pointer_rtx
);
8413 if (avr_log
.constraints
)
8414 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8415 ok
, reload_completed
, reload_in_progress
, x
);
8421 /* Convert condition code CONDITION to the valid AVR condition code. */
8424 avr_normalize_condition (RTX_CODE condition
)
8441 /* Helper function for `avr_reorg'. */
8444 avr_compare_pattern (rtx insn
)
8446 rtx pattern
= single_set (insn
);
8449 && NONJUMP_INSN_P (insn
)
8450 && SET_DEST (pattern
) == cc0_rtx
8451 && GET_CODE (SET_SRC (pattern
)) == COMPARE
8452 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 0))
8453 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 1)))
8461 /* Helper function for `avr_reorg'. */
8463 /* Expansion of switch/case decision trees leads to code like
8465 cc0 = compare (Reg, Num)
8469 cc0 = compare (Reg, Num)
8473 The second comparison is superfluous and can be deleted.
8474 The second jump condition can be transformed from a
8475 "difficult" one to a "simple" one because "cc0 > 0" and
8476 "cc0 >= 0" will have the same effect here.
8478 This function relies on the way switch/case is being expaned
8479 as binary decision tree. For example code see PR 49903.
8481 Return TRUE if optimization performed.
8482 Return FALSE if nothing changed.
8484 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8486 We don't want to do this in text peephole because it is
8487 tedious to work out jump offsets there and the second comparison
8488 might have been transormed by `avr_reorg'.
8490 RTL peephole won't do because peephole2 does not scan across
8494 avr_reorg_remove_redundant_compare (rtx insn1
)
8496 rtx comp1
, ifelse1
, xcond1
, branch1
;
8497 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
8499 rtx jump
, target
, cond
;
8501 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8503 branch1
= next_nonnote_nondebug_insn (insn1
);
8504 if (!branch1
|| !JUMP_P (branch1
))
8507 insn2
= next_nonnote_nondebug_insn (branch1
);
8508 if (!insn2
|| !avr_compare_pattern (insn2
))
8511 branch2
= next_nonnote_nondebug_insn (insn2
);
8512 if (!branch2
|| !JUMP_P (branch2
))
8515 comp1
= avr_compare_pattern (insn1
);
8516 comp2
= avr_compare_pattern (insn2
);
8517 xcond1
= single_set (branch1
);
8518 xcond2
= single_set (branch2
);
8520 if (!comp1
|| !comp2
8521 || !rtx_equal_p (comp1
, comp2
)
8522 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
8523 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
8524 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
8525 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
8530 comp1
= SET_SRC (comp1
);
8531 ifelse1
= SET_SRC (xcond1
);
8532 ifelse2
= SET_SRC (xcond2
);
8534 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8536 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
8537 || !REG_P (XEXP (comp1
, 0))
8538 || !CONST_INT_P (XEXP (comp1
, 1))
8539 || XEXP (ifelse1
, 2) != pc_rtx
8540 || XEXP (ifelse2
, 2) != pc_rtx
8541 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
8542 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
8543 || !COMPARISON_P (XEXP (ifelse2
, 0))
8544 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
8545 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
8546 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
8547 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
8552 /* We filtered the insn sequence to look like
8558 (if_then_else (eq (cc0)
8567 (if_then_else (CODE (cc0)
8573 code
= GET_CODE (XEXP (ifelse2
, 0));
8575 /* Map GT/GTU to GE/GEU which is easier for AVR.
8576 The first two instructions compare/branch on EQ
8577 so we may replace the difficult
8579 if (x == VAL) goto L1;
8580 if (x > VAL) goto L2;
8584 if (x == VAL) goto L1;
8585 if (x >= VAL) goto L2;
8587 Similarly, replace LE/LEU by LT/LTU. */
8598 code
= avr_normalize_condition (code
);
8605 /* Wrap the branches into UNSPECs so they won't be changed or
8606 optimized in the remainder. */
8608 target
= XEXP (XEXP (ifelse1
, 1), 0);
8609 cond
= XEXP (ifelse1
, 0);
8610 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
8612 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
8614 target
= XEXP (XEXP (ifelse2
, 1), 0);
8615 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
8616 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
8618 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
8620 /* The comparisons in insn1 and insn2 are exactly the same;
8621 insn2 is superfluous so delete it. */
8623 delete_insn (insn2
);
8624 delete_insn (branch1
);
8625 delete_insn (branch2
);
8631 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8632 /* Optimize conditional jumps. */
8637 rtx insn
= get_insns();
8639 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
8641 rtx pattern
= avr_compare_pattern (insn
);
8647 && avr_reorg_remove_redundant_compare (insn
))
8652 if (compare_diff_p (insn
))
8654 /* Now we work under compare insn with difficult branch. */
8656 rtx next
= next_real_insn (insn
);
8657 rtx pat
= PATTERN (next
);
8659 pattern
= SET_SRC (pattern
);
8661 if (true_regnum (XEXP (pattern
, 0)) >= 0
8662 && true_regnum (XEXP (pattern
, 1)) >= 0)
8664 rtx x
= XEXP (pattern
, 0);
8665 rtx src
= SET_SRC (pat
);
8666 rtx t
= XEXP (src
,0);
8667 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8668 XEXP (pattern
, 0) = XEXP (pattern
, 1);
8669 XEXP (pattern
, 1) = x
;
8670 INSN_CODE (next
) = -1;
8672 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8673 && XEXP (pattern
, 1) == const0_rtx
)
8675 /* This is a tst insn, we can reverse it. */
8676 rtx src
= SET_SRC (pat
);
8677 rtx t
= XEXP (src
,0);
8679 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8680 XEXP (pattern
, 1) = XEXP (pattern
, 0);
8681 XEXP (pattern
, 0) = const0_rtx
;
8682 INSN_CODE (next
) = -1;
8683 INSN_CODE (insn
) = -1;
8685 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8686 && CONST_INT_P (XEXP (pattern
, 1)))
8688 rtx x
= XEXP (pattern
, 1);
8689 rtx src
= SET_SRC (pat
);
8690 rtx t
= XEXP (src
,0);
8691 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
8693 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
8695 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
8696 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
8697 INSN_CODE (next
) = -1;
8698 INSN_CODE (insn
) = -1;
8705 /* Returns register number for function return value.*/
8707 static inline unsigned int
8708 avr_ret_register (void)
8713 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8716 avr_function_value_regno_p (const unsigned int regno
)
8718 return (regno
== avr_ret_register ());
8721 /* Create an RTX representing the place where a
8722 library function returns a value of mode MODE. */
8725 avr_libcall_value (enum machine_mode mode
,
8726 const_rtx func ATTRIBUTE_UNUSED
)
8728 int offs
= GET_MODE_SIZE (mode
);
8731 offs
= (offs
+ 1) & ~1;
8733 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
8736 /* Create an RTX representing the place where a
8737 function returns a value of data type VALTYPE. */
8740 avr_function_value (const_tree type
,
8741 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
8742 bool outgoing ATTRIBUTE_UNUSED
)
8746 if (TYPE_MODE (type
) != BLKmode
)
8747 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
8749 offs
= int_size_in_bytes (type
);
8752 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
8753 offs
= GET_MODE_SIZE (SImode
);
8754 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
8755 offs
= GET_MODE_SIZE (DImode
);
8757 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
8761 test_hard_reg_class (enum reg_class rclass
, rtx x
)
8763 int regno
= true_regnum (x
);
8767 if (TEST_HARD_REG_CLASS (rclass
, regno
))
8774 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8775 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8778 avr_2word_insn_p (rtx insn
)
8780 if (avr_current_device
->errata_skip
8782 || 2 != get_attr_length (insn
))
8787 switch (INSN_CODE (insn
))
8792 case CODE_FOR_movqi_insn
:
8794 rtx set
= single_set (insn
);
8795 rtx src
= SET_SRC (set
);
8796 rtx dest
= SET_DEST (set
);
8798 /* Factor out LDS and STS from movqi_insn. */
8801 && (REG_P (src
) || src
== const0_rtx
))
8803 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
8805 else if (REG_P (dest
)
8808 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
8814 case CODE_FOR_call_insn
:
8815 case CODE_FOR_call_value_insn
:
8822 jump_over_one_insn_p (rtx insn
, rtx dest
)
8824 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
8827 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
8828 int dest_addr
= INSN_ADDRESSES (uid
);
8829 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
8831 return (jump_offset
== 1
8832 || (jump_offset
== 2
8833 && avr_2word_insn_p (next_active_insn (insn
))));
8836 /* Returns 1 if a value of mode MODE can be stored starting with hard
8837 register number REGNO. On the enhanced core, anything larger than
8838 1 byte must start in even numbered register for "movw" to work
8839 (this way we don't have to check for odd registers everywhere). */
8842 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
8844 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8845 Disallowing QI et al. in these regs might lead to code like
8846 (set (subreg:QI (reg:HI 28) n) ...)
8847 which will result in wrong code because reload does not
8848 handle SUBREGs of hard regsisters like this.
8849 This could be fixed in reload. However, it appears
8850 that fixing reload is not wanted by reload people. */
8852 /* Any GENERAL_REGS register can hold 8-bit values. */
8854 if (GET_MODE_SIZE (mode
) == 1)
8857 /* FIXME: Ideally, the following test is not needed.
8858 However, it turned out that it can reduce the number
8859 of spill fails. AVR and it's poor endowment with
8860 address registers is extreme stress test for reload. */
8862 if (GET_MODE_SIZE (mode
) >= 4
8866 /* All modes larger than 8 bits should start in an even register. */
8868 return !(regno
& 1);
8872 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8875 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
8876 addr_space_t as
, RTX_CODE outer_code
,
8877 RTX_CODE index_code ATTRIBUTE_UNUSED
)
8879 if (!ADDR_SPACE_GENERIC_P (as
))
8881 return POINTER_Z_REGS
;
8885 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
8887 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
8891 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8894 avr_regno_mode_code_ok_for_base_p (int regno
,
8895 enum machine_mode mode ATTRIBUTE_UNUSED
,
8896 addr_space_t as ATTRIBUTE_UNUSED
,
8897 RTX_CODE outer_code
,
8898 RTX_CODE index_code ATTRIBUTE_UNUSED
)
8902 if (!ADDR_SPACE_GENERIC_P (as
))
8904 if (regno
< FIRST_PSEUDO_REGISTER
8912 regno
= reg_renumber
[regno
];
8923 if (regno
< FIRST_PSEUDO_REGISTER
8927 || regno
== ARG_POINTER_REGNUM
))
8931 else if (reg_renumber
)
8933 regno
= reg_renumber
[regno
];
8938 || regno
== ARG_POINTER_REGNUM
)
8945 && PLUS
== outer_code
8955 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8956 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8957 CLOBBER_REG is a QI clobber register or NULL_RTX.
8958 LEN == NULL: output instructions.
8959 LEN != NULL: set *LEN to the length of the instruction sequence
8960 (in words) printed with LEN = NULL.
8961 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8962 If CLEAR_P is false, nothing is known about OP[0].
8964 The effect on cc0 is as follows:
8966 Load 0 to any register except ZERO_REG : NONE
8967 Load ld register with any value : NONE
8968 Anything else: : CLOBBER */
8971 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
8977 int clobber_val
= 1234;
8978 bool cooked_clobber_p
= false;
8980 enum machine_mode mode
= GET_MODE (dest
);
8981 int n
, n_bytes
= GET_MODE_SIZE (mode
);
8983 gcc_assert (REG_P (dest
)
8984 && CONSTANT_P (src
));
8989 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8990 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8992 if (REGNO (dest
) < 16
8993 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
8995 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
8998 /* We might need a clobber reg but don't have one. Look at the value to
8999 be loaded more closely. A clobber is only needed if it is a symbol
9000 or contains a byte that is neither 0, -1 or a power of 2. */
9002 if (NULL_RTX
== clobber_reg
9003 && !test_hard_reg_class (LD_REGS
, dest
)
9004 && (! (CONST_INT_P (src
) || CONST_DOUBLE_P (src
))
9005 || !avr_popcount_each_byte (src
, n_bytes
,
9006 (1 << 0) | (1 << 1) | (1 << 8))))
9008 /* We have no clobber register but need one. Cook one up.
9009 That's cheaper than loading from constant pool. */
9011 cooked_clobber_p
= true;
9012 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
9013 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
9016 /* Now start filling DEST from LSB to MSB. */
9018 for (n
= 0; n
< n_bytes
; n
++)
9021 bool done_byte
= false;
9025 /* Crop the n-th destination byte. */
9027 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
9028 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
9030 if (!CONST_INT_P (src
)
9031 && !CONST_DOUBLE_P (src
))
9033 static const char* const asm_code
[][2] =
9035 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
9036 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
9037 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
9038 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
9043 xop
[2] = clobber_reg
;
9045 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
9050 /* Crop the n-th source byte. */
9052 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
9053 ival
[n
] = INTVAL (xval
);
9055 /* Look if we can reuse the low word by means of MOVW. */
9061 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
9062 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
9064 if (INTVAL (lo16
) == INTVAL (hi16
))
9066 if (0 != INTVAL (lo16
)
9069 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
9076 /* Don't use CLR so that cc0 is set as expected. */
9081 avr_asm_len (ldreg_p
? "ldi %0,0"
9082 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
9083 : "mov %0,__zero_reg__",
9088 if (clobber_val
== ival
[n
]
9089 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
9094 /* LD_REGS can use LDI to move a constant value */
9100 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
9104 /* Try to reuse value already loaded in some lower byte. */
9106 for (j
= 0; j
< n
; j
++)
9107 if (ival
[j
] == ival
[n
])
9112 avr_asm_len ("mov %0,%1", xop
, len
, 1);
9120 /* Need no clobber reg for -1: Use CLR/DEC */
9125 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9127 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
9130 else if (1 == ival
[n
])
9133 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9135 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
9139 /* Use T flag or INC to manage powers of 2 if we have
9142 if (NULL_RTX
== clobber_reg
9143 && single_one_operand (xval
, QImode
))
9146 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
9148 gcc_assert (constm1_rtx
!= xop
[1]);
9153 avr_asm_len ("set", xop
, len
, 1);
9157 avr_asm_len ("clr %0", xop
, len
, 1);
9159 avr_asm_len ("bld %0,%1", xop
, len
, 1);
9163 /* We actually need the LD_REGS clobber reg. */
9165 gcc_assert (NULL_RTX
!= clobber_reg
);
9169 xop
[2] = clobber_reg
;
9170 clobber_val
= ival
[n
];
9172 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9173 "mov %0,%2", xop
, len
, 2);
9176 /* If we cooked up a clobber reg above, restore it. */
9178 if (cooked_clobber_p
)
9180 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
9185 /* Reload the constant OP[1] into the HI register OP[0].
9186 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9187 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9188 need a clobber reg or have to cook one up.
9190 PLEN == NULL: Output instructions.
9191 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9192 by the insns printed.
9197 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
9199 output_reload_in_const (op
, clobber_reg
, plen
, false);
9204 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9205 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9206 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9207 need a clobber reg or have to cook one up.
9209 LEN == NULL: Output instructions.
9211 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9212 by the insns printed.
9217 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
9220 && !test_hard_reg_class (LD_REGS
, op
[0])
9221 && (CONST_INT_P (op
[1])
9222 || CONST_DOUBLE_P (op
[1])))
9224 int len_clr
, len_noclr
;
9226 /* In some cases it is better to clear the destination beforehand, e.g.
9228 CLR R2 CLR R3 MOVW R4,R2 INC R2
9232 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9234 We find it too tedious to work that out in the print function.
9235 Instead, we call the print function twice to get the lengths of
9236 both methods and use the shortest one. */
9238 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
9239 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
9241 if (len_noclr
- len_clr
== 4)
9243 /* Default needs 4 CLR instructions: clear register beforehand. */
9245 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9246 "mov %B0,__zero_reg__" CR_TAB
9247 "movw %C0,%A0", &op
[0], len
, 3);
9249 output_reload_in_const (op
, clobber_reg
, len
, true);
9258 /* Default: destination not pre-cleared. */
9260 output_reload_in_const (op
, clobber_reg
, len
, false);
9265 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
9267 output_reload_in_const (op
, clobber_reg
, len
, false);
9273 avr_output_addr_vec_elt (FILE *stream
, int value
)
9275 if (AVR_HAVE_JMP_CALL
)
9276 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
9278 fprintf (stream
, "\trjmp .L%d\n", value
);
9281 /* Returns true if SCRATCH are safe to be allocated as a scratch
9282 registers (for a define_peephole2) in the current function. */
9285 avr_hard_regno_scratch_ok (unsigned int regno
)
9287 /* Interrupt functions can only use registers that have already been saved
9288 by the prologue, even if they would normally be call-clobbered. */
9290 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9291 && !df_regs_ever_live_p (regno
))
9294 /* Don't allow hard registers that might be part of the frame pointer.
9295 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9296 and don't care for a frame pointer that spans more than one register. */
9298 if ((!reload_completed
|| frame_pointer_needed
)
9299 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
9307 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9310 avr_hard_regno_rename_ok (unsigned int old_reg
,
9311 unsigned int new_reg
)
9313 /* Interrupt functions can only use registers that have already been
9314 saved by the prologue, even if they would normally be
9317 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9318 && !df_regs_ever_live_p (new_reg
))
9321 /* Don't allow hard registers that might be part of the frame pointer.
9322 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9323 and don't care for a frame pointer that spans more than one register. */
9325 if ((!reload_completed
|| frame_pointer_needed
)
9326 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
9327 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
9335 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9336 or memory location in the I/O space (QImode only).
9338 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9339 Operand 1: register operand to test, or CONST_INT memory address.
9340 Operand 2: bit number.
9341 Operand 3: label to jump to if the test is true. */
9344 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
9346 enum rtx_code comp
= GET_CODE (operands
[0]);
9347 bool long_jump
= get_attr_length (insn
) >= 4;
9348 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
9352 else if (comp
== LT
)
9356 comp
= reverse_condition (comp
);
9358 switch (GET_CODE (operands
[1]))
9365 if (low_io_address_operand (operands
[1], QImode
))
9368 output_asm_insn ("sbis %i1,%2", operands
);
9370 output_asm_insn ("sbic %i1,%2", operands
);
9374 output_asm_insn ("in __tmp_reg__,%i1", operands
);
9376 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
9378 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
9381 break; /* CONST_INT */
9386 output_asm_insn ("sbrs %T1%T2", operands
);
9388 output_asm_insn ("sbrc %T1%T2", operands
);
9394 return ("rjmp .+4" CR_TAB
9403 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9406 avr_asm_out_ctor (rtx symbol
, int priority
)
9408 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
9409 default_ctor_section_asm_out_constructor (symbol
, priority
);
9412 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9415 avr_asm_out_dtor (rtx symbol
, int priority
)
9417 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
9418 default_dtor_section_asm_out_destructor (symbol
, priority
);
9421 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9424 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
9426 if (TYPE_MODE (type
) == BLKmode
)
9428 HOST_WIDE_INT size
= int_size_in_bytes (type
);
9429 return (size
== -1 || size
> 8);
9435 /* Worker function for CASE_VALUES_THRESHOLD. */
9438 avr_case_values_threshold (void)
9440 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
9444 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9446 static enum machine_mode
9447 avr_addr_space_address_mode (addr_space_t as
)
9449 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
9453 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9455 static enum machine_mode
9456 avr_addr_space_pointer_mode (addr_space_t as
)
9458 return avr_addr_space_address_mode (as
);
9462 /* Helper for following function. */
9465 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
9472 return REGNO (reg
) == REG_Z
;
9475 /* Avoid combine to propagate hard regs. */
9477 if (can_create_pseudo_p()
9478 && REGNO (reg
) < REG_Z
)
9487 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9490 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
9491 bool strict
, addr_space_t as
)
9500 case ADDR_SPACE_GENERIC
:
9501 return avr_legitimate_address_p (mode
, x
, strict
);
9503 case ADDR_SPACE_FLASH
:
9504 case ADDR_SPACE_FLASH1
:
9505 case ADDR_SPACE_FLASH2
:
9506 case ADDR_SPACE_FLASH3
:
9507 case ADDR_SPACE_FLASH4
:
9508 case ADDR_SPACE_FLASH5
:
9510 switch (GET_CODE (x
))
9513 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
9517 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
9526 case ADDR_SPACE_MEMX
:
9529 && can_create_pseudo_p());
9531 if (LO_SUM
== GET_CODE (x
))
9533 rtx hi
= XEXP (x
, 0);
9534 rtx lo
= XEXP (x
, 1);
9537 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
9539 && REGNO (lo
) == REG_Z
);
9545 if (avr_log
.legitimate_address_p
)
9547 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9548 "reload_completed=%d reload_in_progress=%d %s:",
9549 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
9550 reg_renumber
? "(reg_renumber)" : "");
9552 if (GET_CODE (x
) == PLUS
9553 && REG_P (XEXP (x
, 0))
9554 && CONST_INT_P (XEXP (x
, 1))
9555 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
9558 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
9559 true_regnum (XEXP (x
, 0)));
9562 avr_edump ("\n%r\n", x
);
9569 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9572 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
9573 enum machine_mode mode
, addr_space_t as
)
9575 if (ADDR_SPACE_GENERIC_P (as
))
9576 return avr_legitimize_address (x
, old_x
, mode
);
9578 if (avr_log
.legitimize_address
)
9580 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
9587 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9590 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
9592 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
9593 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
9595 if (avr_log
.progmem
)
9596 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9597 src
, type_from
, type_to
);
9599 /* Up-casting from 16-bit to 24-bit pointer. */
9601 if (as_from
!= ADDR_SPACE_MEMX
9602 && as_to
== ADDR_SPACE_MEMX
)
9606 rtx reg
= gen_reg_rtx (PSImode
);
9608 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
9609 sym
= XEXP (sym
, 0);
9611 /* Look at symbol flags: avr_encode_section_info set the flags
9612 also if attribute progmem was seen so that we get the right
9613 promotion for, e.g. PSTR-like strings that reside in generic space
9614 but are located in flash. In that case we patch the incoming
9617 if (SYMBOL_REF
== GET_CODE (sym
)
9618 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
9620 as_from
= ADDR_SPACE_FLASH
;
9623 /* Linearize memory: RAM has bit 23 set. */
9625 msb
= ADDR_SPACE_GENERIC_P (as_from
)
9627 : avr_addrspace
[as_from
].segment
;
9629 src
= force_reg (Pmode
, src
);
9632 ? gen_zero_extendhipsi2 (reg
, src
)
9633 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
9638 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9640 if (as_from
== ADDR_SPACE_MEMX
9641 && as_to
!= ADDR_SPACE_MEMX
)
9643 rtx new_src
= gen_reg_rtx (Pmode
);
9645 src
= force_reg (PSImode
, src
);
9647 emit_move_insn (new_src
,
9648 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
9656 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9659 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
9660 addr_space_t superset ATTRIBUTE_UNUSED
)
9662 /* Allow any kind of pointer mess. */
9668 /* Worker function for movmemhi expander.
9669 XOP[0] Destination as MEM:BLK
9671 XOP[2] # Bytes to copy
9673 Return TRUE if the expansion is accomplished.
9674 Return FALSE if the operand compination is not supported. */
9677 avr_emit_movmemhi (rtx
*xop
)
9679 HOST_WIDE_INT count
;
9680 enum machine_mode loop_mode
;
9681 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
9682 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
9683 rtx a_hi8
= NULL_RTX
;
9685 if (avr_mem_flash_p (xop
[0]))
9688 if (!CONST_INT_P (xop
[2]))
9691 count
= INTVAL (xop
[2]);
9695 a_src
= XEXP (xop
[1], 0);
9696 a_dest
= XEXP (xop
[0], 0);
9698 if (PSImode
== GET_MODE (a_src
))
9700 gcc_assert (as
== ADDR_SPACE_MEMX
);
9702 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
9703 loop_reg
= gen_rtx_REG (loop_mode
, 24);
9704 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
9706 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
9707 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
9711 int segment
= avr_addrspace
[as
].segment
;
9714 && avr_current_device
->n_flash
> 1)
9716 a_hi8
= GEN_INT (segment
);
9717 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
9719 else if (!ADDR_SPACE_GENERIC_P (as
))
9721 as
= ADDR_SPACE_FLASH
;
9726 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
9727 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
9732 /* FIXME: Register allocator might come up with spill fails if it is left
9733 on its own. Thus, we allocate the pointer registers by hand:
9735 X = destination address */
9737 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
9738 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
9740 /* FIXME: Register allocator does a bad job and might spill address
9741 register(s) inside the loop leading to additional move instruction
9742 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9743 load and store as seperate insns. Instead, we perform the copy
9744 by means of one monolithic insn. */
9746 gcc_assert (TMP_REGNO
== LPM_REGNO
);
9748 if (as
!= ADDR_SPACE_MEMX
)
9750 /* Load instruction ([E]LPM or LD) is known at compile time:
9751 Do the copy-loop inline. */
9753 rtx (*fun
) (rtx
, rtx
, rtx
)
9754 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
9756 insn
= fun (xas
, loop_reg
, loop_reg
);
9760 rtx (*fun
) (rtx
, rtx
)
9761 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
9763 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
9765 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
9768 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
9775 /* Print assembler for movmem_qi, movmem_hi insns...
9777 $1, $2 : Loop register
9779 X : Destination address
9783 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
9785 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
9786 enum machine_mode loop_mode
= GET_MODE (op
[1]);
9787 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
9795 xop
[2] = tmp_reg_rtx
;
9799 avr_asm_len ("0:", xop
, plen
, 0);
9801 /* Load with post-increment */
9808 case ADDR_SPACE_GENERIC
:
9810 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
9813 case ADDR_SPACE_FLASH
:
9816 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
9818 avr_asm_len ("lpm" CR_TAB
9819 "adiw r30,1", xop
, plen
, 2);
9822 case ADDR_SPACE_FLASH1
:
9823 case ADDR_SPACE_FLASH2
:
9824 case ADDR_SPACE_FLASH3
:
9825 case ADDR_SPACE_FLASH4
:
9826 case ADDR_SPACE_FLASH5
:
9829 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
9831 avr_asm_len ("elpm" CR_TAB
9832 "adiw r30,1", xop
, plen
, 2);
9836 /* Store with post-increment */
9838 avr_asm_len ("st X+,%2", xop
, plen
, 1);
9840 /* Decrement loop-counter and set Z-flag */
9842 if (QImode
== loop_mode
)
9844 avr_asm_len ("dec %1", xop
, plen
, 1);
9848 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
9852 avr_asm_len ("subi %A1,1" CR_TAB
9853 "sbci %B1,0", xop
, plen
, 2);
9856 /* Loop until zero */
9858 return avr_asm_len ("brne 0b", xop
, plen
, 1);
9863 /* Helper for __builtin_avr_delay_cycles */
9866 avr_mem_clobber (void)
9868 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
9869 MEM_VOLATILE_P (mem
) = 1;
9874 avr_expand_delay_cycles (rtx operands0
)
9876 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
);
9877 unsigned HOST_WIDE_INT cycles_used
;
9878 unsigned HOST_WIDE_INT loop_count
;
9880 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
9882 loop_count
= ((cycles
- 9) / 6) + 1;
9883 cycles_used
= ((loop_count
- 1) * 6) + 9;
9884 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
9885 avr_mem_clobber()));
9886 cycles
-= cycles_used
;
9889 if (IN_RANGE (cycles
, 262145, 83886081))
9891 loop_count
= ((cycles
- 7) / 5) + 1;
9892 if (loop_count
> 0xFFFFFF)
9893 loop_count
= 0xFFFFFF;
9894 cycles_used
= ((loop_count
- 1) * 5) + 7;
9895 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
9896 avr_mem_clobber()));
9897 cycles
-= cycles_used
;
9900 if (IN_RANGE (cycles
, 768, 262144))
9902 loop_count
= ((cycles
- 5) / 4) + 1;
9903 if (loop_count
> 0xFFFF)
9904 loop_count
= 0xFFFF;
9905 cycles_used
= ((loop_count
- 1) * 4) + 5;
9906 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
9907 avr_mem_clobber()));
9908 cycles
-= cycles_used
;
9911 if (IN_RANGE (cycles
, 6, 767))
9913 loop_count
= cycles
/ 3;
9914 if (loop_count
> 255)
9916 cycles_used
= loop_count
* 3;
9917 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
9918 avr_mem_clobber()));
9919 cycles
-= cycles_used
;
9924 emit_insn (gen_nopv (GEN_INT(2)));
9930 emit_insn (gen_nopv (GEN_INT(1)));
9936 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9939 avr_double_int_push_digit (double_int val
, int base
,
9940 unsigned HOST_WIDE_INT digit
)
9943 ? double_int_lshift (val
, 32, 64, false)
9944 : double_int_mul (val
, uhwi_to_double_int (base
));
9946 return double_int_add (val
, uhwi_to_double_int (digit
));
9950 /* Compute the image of x under f, i.e. perform x --> f(x) */
9953 avr_map (double_int f
, int x
)
9955 return 0xf & double_int_to_uhwi (double_int_rshift (f
, 4*x
, 64, false));
9959 /* Return some metrics of map A. */
9963 /* Number of fixed points in { 0 ... 7 } */
9966 /* Size of preimage of non-fixed points in { 0 ... 7 } */
9969 /* Mask representing the fixed points in { 0 ... 7 } */
9972 /* Size of the preimage of { 0 ... 7 } */
9975 /* Mask that represents the preimage of { f } */
9980 avr_map_metric (double_int a
, int mode
)
9982 unsigned i
, metric
= 0;
9984 for (i
= 0; i
< 8; i
++)
9986 unsigned ai
= avr_map (a
, i
);
9988 if (mode
== MAP_FIXED_0_7
)
9990 else if (mode
== MAP_NONFIXED_0_7
)
9991 metric
+= ai
< 8 && ai
!= i
;
9992 else if (mode
== MAP_MASK_FIXED_0_7
)
9993 metric
|= ((unsigned) (ai
== i
)) << i
;
9994 else if (mode
== MAP_PREIMAGE_0_7
)
9996 else if (mode
== MAP_MASK_PREIMAGE_F
)
9997 metric
|= ((unsigned) (ai
== 0xf)) << i
;
10006 /* Return true if IVAL has a 0xf in its hexadecimal representation
10007 and false, otherwise. Only nibbles 0..7 are taken into account.
10008 Used as constraint helper for C0f and Cxf. */
10011 avr_has_nibble_0xf (rtx ival
)
10013 return 0 != avr_map_metric (rtx_to_double_int (ival
), MAP_MASK_PREIMAGE_F
);
10017 /* We have a set of bits that are mapped by a function F.
10018 Try to decompose F by means of a second function G so that
10024 cost (F o G^-1) + cost (G) < cost (F)
10026 Example: Suppose builtin insert_bits supplies us with the map
10027 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10028 nibble of the result, we can just as well rotate the bits before inserting
10029 them and use the map 0x7654ffff which is cheaper than the original map.
10030 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
10034 /* tree code of binary function G */
10035 enum tree_code code
;
10037 /* The constant second argument of G */
10040 /* G^-1, the inverse of G (*, arg) */
10043 /* The cost of appplying G (*, arg) */
10046 /* The composition F o G^-1 (*, arg) for some function F */
10049 /* For debug purpose only */
10053 static const avr_map_op_t avr_map_op
[] =
10055 { LROTATE_EXPR
, 0, 0x76543210, 0, { 0, 0 }, "id" },
10056 { LROTATE_EXPR
, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10057 { LROTATE_EXPR
, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10058 { LROTATE_EXPR
, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10059 { LROTATE_EXPR
, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10060 { LROTATE_EXPR
, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10061 { LROTATE_EXPR
, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10062 { LROTATE_EXPR
, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10063 { RSHIFT_EXPR
, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10064 { RSHIFT_EXPR
, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10065 { RSHIFT_EXPR
, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10066 { RSHIFT_EXPR
, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10067 { RSHIFT_EXPR
, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10068 { LSHIFT_EXPR
, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10069 { LSHIFT_EXPR
, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10073 /* Try to decompose F as F = (F o G^-1) o G as described above.
10074 The result is a struct representing F o G^-1 and G.
10075 If result.cost < 0 then such a decomposition does not exist. */
10077 static avr_map_op_t
10078 avr_map_decompose (double_int f
, const avr_map_op_t
*g
, bool val_const_p
)
10081 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
10082 avr_map_op_t f_ginv
= *g
;
10083 double_int ginv
= uhwi_to_double_int (g
->ginv
);
10087 /* Step 1: Computing F o G^-1 */
10089 for (i
= 7; i
>= 0; i
--)
10091 int x
= avr_map (f
, i
);
10095 x
= avr_map (ginv
, x
);
10097 /* The bit is no element of the image of G: no avail (cost = -1) */
10103 f_ginv
.map
= avr_double_int_push_digit (f_ginv
.map
, 16, x
);
10106 /* Step 2: Compute the cost of the operations.
10107 The overall cost of doing an operation prior to the insertion is
10108 the cost of the insertion plus the cost of the operation. */
10110 /* Step 2a: Compute cost of F o G^-1 */
10112 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
10114 /* The mapping consists only of fixed points and can be folded
10115 to AND/OR logic in the remainder. Reasonable cost is 3. */
10117 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
10123 /* Get the cost of the insn by calling the output worker with some
10124 fake values. Mimic effect of reloading xop[3]: Unused operands
10125 are mapped to 0 and used operands are reloaded to xop[0]. */
10127 xop
[0] = all_regs_rtx
[24];
10128 xop
[1] = gen_int_mode (double_int_to_uhwi (f_ginv
.map
), SImode
);
10129 xop
[2] = all_regs_rtx
[25];
10130 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
10132 avr_out_insert_bits (xop
, &f_ginv
.cost
);
10134 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
10137 /* Step 2b: Add cost of G */
10139 f_ginv
.cost
+= g
->cost
;
10141 if (avr_log
.builtin
)
10142 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
10148 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10149 XOP[0] and XOP[1] don't overlap.
10150 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10151 If FIXP_P = false: Just move the bit if its position in the destination
10152 is different to its source position. */
10155 avr_move_bits (rtx
*xop
, double_int map
, bool fixp_p
, int *plen
)
10159 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10160 int t_bit_src
= -1;
10162 /* We order the operations according to the requested source bit b. */
10164 for (b
= 0; b
< 8; b
++)
10165 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
10167 int bit_src
= avr_map (map
, bit_dest
);
10171 /* Same position: No need to copy as requested by FIXP_P. */
10172 || (bit_dest
== bit_src
&& !fixp_p
))
10175 if (t_bit_src
!= bit_src
)
10177 /* Source bit is not yet in T: Store it to T. */
10179 t_bit_src
= bit_src
;
10181 xop
[3] = GEN_INT (bit_src
);
10182 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
10185 /* Load destination bit with T. */
10187 xop
[3] = GEN_INT (bit_dest
);
10188 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
10193 /* PLEN == 0: Print assembler code for `insert_bits'.
10194 PLEN != 0: Compute code length in bytes.
10197 OP[1]: The mapping composed of nibbles. If nibble no. N is
10198 0: Bit N of result is copied from bit OP[2].0
10200 7: Bit N of result is copied from bit OP[2].7
10201 0xf: Bit N of result is copied from bit OP[3].N
10202 OP[2]: Bits to be inserted
10203 OP[3]: Target value */
10206 avr_out_insert_bits (rtx
*op
, int *plen
)
10208 double_int map
= rtx_to_double_int (op
[1]);
10209 unsigned mask_fixed
;
10210 bool fixp_p
= true;
10217 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
10221 else if (flag_print_asm_name
)
10222 fprintf (asm_out_file
,
10223 ASM_COMMENT_START
"map = 0x%08" HOST_LONG_FORMAT
"x\n",
10224 double_int_to_uhwi (map
) & GET_MODE_MASK (SImode
));
10226 /* If MAP has fixed points it might be better to initialize the result
10227 with the bits to be inserted instead of moving all bits by hand. */
10229 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
10231 if (REGNO (xop
[0]) == REGNO (xop
[1]))
10233 /* Avoid early-clobber conflicts */
10235 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
10236 xop
[1] = tmp_reg_rtx
;
10240 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
10242 /* XOP[2] is used and reloaded to XOP[0] already */
10244 int n_fix
= 0, n_nofix
= 0;
10246 gcc_assert (REG_P (xop
[2]));
10248 /* Get the code size of the bit insertions; once with all bits
10249 moved and once with fixed points omitted. */
10251 avr_move_bits (xop
, map
, true, &n_fix
);
10252 avr_move_bits (xop
, map
, false, &n_nofix
);
10254 if (fixp_p
&& n_fix
- n_nofix
> 3)
10256 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
10258 avr_asm_len ("eor %0,%1" CR_TAB
10259 "andi %0,%3" CR_TAB
10260 "eor %0,%1", xop
, plen
, 3);
10266 /* XOP[2] is unused */
10268 if (fixp_p
&& mask_fixed
)
10270 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
10275 /* Move/insert remaining bits. */
10277 avr_move_bits (xop
, map
, fixp_p
, plen
);
10283 /* IDs for all the AVR builtins. */
10285 enum avr_builtin_id
10288 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
10289 #include "builtins.def"
10295 struct GTY(()) avr_builtin_description
10297 enum insn_code icode
;
10304 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
10305 that a built-in's ID can be used to access the built-in by means of
10308 static GTY(()) struct avr_builtin_description
10309 avr_bdesc
[AVR_BUILTIN_COUNT
] =
10312 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE) \
10313 { ICODE, NAME, N_ARGS, NULL_TREE },
10314 #include "builtins.def"
10319 /* Implement `TARGET_BUILTIN_DECL'. */
10322 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
10324 if (id
< AVR_BUILTIN_COUNT
)
10325 return avr_bdesc
[id
].fndecl
;
10327 return error_mark_node
;
10332 avr_init_builtin_int24 (void)
10334 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
10335 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
10337 (*lang_hooks
.types
.register_builtin_type
) (int24_type
, "__int24");
10338 (*lang_hooks
.types
.register_builtin_type
) (uint24_type
, "__uint24");
10342 /* Implement `TARGET_INIT_BUILTINS' */
10343 /* Set up all builtin functions for this target. */
10346 avr_init_builtins (void)
10348 tree void_ftype_void
10349 = build_function_type_list (void_type_node
, NULL_TREE
);
10350 tree uchar_ftype_uchar
10351 = build_function_type_list (unsigned_char_type_node
,
10352 unsigned_char_type_node
,
10354 tree uint_ftype_uchar_uchar
10355 = build_function_type_list (unsigned_type_node
,
10356 unsigned_char_type_node
,
10357 unsigned_char_type_node
,
10359 tree int_ftype_char_char
10360 = build_function_type_list (integer_type_node
,
10364 tree int_ftype_char_uchar
10365 = build_function_type_list (integer_type_node
,
10367 unsigned_char_type_node
,
10369 tree void_ftype_ulong
10370 = build_function_type_list (void_type_node
,
10371 long_unsigned_type_node
,
10374 tree uchar_ftype_ulong_uchar_uchar
10375 = build_function_type_list (unsigned_char_type_node
,
10376 long_unsigned_type_node
,
10377 unsigned_char_type_node
,
10378 unsigned_char_type_node
,
10381 tree const_memx_void_node
10382 = build_qualified_type (void_type_node
,
10384 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
10386 tree const_memx_ptr_type_node
10387 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
10389 tree char_ftype_const_memx_ptr
10390 = build_function_type_list (char_type_node
,
10391 const_memx_ptr_type_node
,
10394 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) \
10395 gcc_assert (ID < AVR_BUILTIN_COUNT); \
10396 avr_bdesc[ID].fndecl \
10397 = add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
10398 #include "builtins.def"
10401 avr_init_builtin_int24 ();
10405 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10408 avr_expand_unop_builtin (enum insn_code icode
, tree exp
,
10412 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10413 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10414 enum machine_mode op0mode
= GET_MODE (op0
);
10415 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10416 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10419 || GET_MODE (target
) != tmode
10420 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10422 target
= gen_reg_rtx (tmode
);
10425 if (op0mode
== SImode
&& mode0
== HImode
)
10428 op0
= gen_lowpart (HImode
, op0
);
10431 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
10433 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10434 op0
= copy_to_mode_reg (mode0
, op0
);
10436 pat
= GEN_FCN (icode
) (target
, op0
);
10446 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10449 avr_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
)
10452 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10453 tree arg1
= CALL_EXPR_ARG (exp
, 1);
10454 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10455 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10456 enum machine_mode op0mode
= GET_MODE (op0
);
10457 enum machine_mode op1mode
= GET_MODE (op1
);
10458 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10459 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10460 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
10463 || GET_MODE (target
) != tmode
10464 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10466 target
= gen_reg_rtx (tmode
);
10469 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
10472 op0
= gen_lowpart (HImode
, op0
);
10475 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
10478 op1
= gen_lowpart (HImode
, op1
);
10481 /* In case the insn wants input operands in modes different from
10482 the result, abort. */
10484 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
10485 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
10487 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10488 op0
= copy_to_mode_reg (mode0
, op0
);
10490 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
10491 op1
= copy_to_mode_reg (mode1
, op1
);
10493 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
10502 /* Subroutine of avr_expand_builtin to take care of 3-operand insns. */
10505 avr_expand_triop_builtin (enum insn_code icode
, tree exp
, rtx target
)
10508 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10509 tree arg1
= CALL_EXPR_ARG (exp
, 1);
10510 tree arg2
= CALL_EXPR_ARG (exp
, 2);
10511 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10512 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10513 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10514 enum machine_mode op0mode
= GET_MODE (op0
);
10515 enum machine_mode op1mode
= GET_MODE (op1
);
10516 enum machine_mode op2mode
= GET_MODE (op2
);
10517 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10518 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10519 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
10520 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
10523 || GET_MODE (target
) != tmode
10524 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10526 target
= gen_reg_rtx (tmode
);
10529 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
10532 op0
= gen_lowpart (HImode
, op0
);
10535 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
10538 op1
= gen_lowpart (HImode
, op1
);
10541 if ((op2mode
== SImode
|| op2mode
== VOIDmode
) && mode2
== HImode
)
10544 op2
= gen_lowpart (HImode
, op2
);
10547 /* In case the insn wants input operands in modes different from
10548 the result, abort. */
10550 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
10551 && (op1mode
== mode1
|| op1mode
== VOIDmode
)
10552 && (op2mode
== mode2
|| op2mode
== VOIDmode
));
10554 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10555 op0
= copy_to_mode_reg (mode0
, op0
);
10557 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
10558 op1
= copy_to_mode_reg (mode1
, op1
);
10560 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
10561 op2
= copy_to_mode_reg (mode2
, op2
);
10563 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
10573 /* Implement `TARGET_EXPAND_BUILTIN'. */
10574 /* Expand an expression EXP that calls a built-in function,
10575 with result going to TARGET if that's convenient
10576 (and in mode MODE if that's convenient).
10577 SUBTARGET may be used as the target for computing one of EXP's operands.
10578 IGNORE is nonzero if the value is to be ignored. */
10581 avr_expand_builtin (tree exp
, rtx target
,
10582 rtx subtarget ATTRIBUTE_UNUSED
,
10583 enum machine_mode mode ATTRIBUTE_UNUSED
,
10584 int ignore ATTRIBUTE_UNUSED
)
10586 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
10587 const char* bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
10588 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
10589 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
10593 gcc_assert (id
< AVR_BUILTIN_COUNT
);
10597 case AVR_BUILTIN_NOP
:
10598 emit_insn (gen_nopv (GEN_INT(1)));
10601 case AVR_BUILTIN_DELAY_CYCLES
:
10603 arg0
= CALL_EXPR_ARG (exp
, 0);
10604 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10606 if (!CONST_INT_P (op0
))
10607 error ("%s expects a compile time integer constant", bname
);
10609 avr_expand_delay_cycles (op0
);
10614 case AVR_BUILTIN_INSERT_BITS
:
10616 arg0
= CALL_EXPR_ARG (exp
, 0);
10617 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10619 if (!CONST_INT_P (op0
))
10621 error ("%s expects a compile time long integer constant"
10622 " as first argument", bname
);
10628 /* No special treatment needed: vanilla expand. */
10633 emit_insn ((GEN_FCN (d
->icode
)) (target
));
10637 return avr_expand_unop_builtin (d
->icode
, exp
, target
);
10640 return avr_expand_binop_builtin (d
->icode
, exp
, target
);
10643 return avr_expand_triop_builtin (d
->icode
, exp
, target
);
10646 gcc_unreachable ();
10650 /* Implement `TARGET_FOLD_BUILTIN'. */
10653 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
10654 bool ignore ATTRIBUTE_UNUSED
)
10656 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
10657 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
10667 case AVR_BUILTIN_SWAP
:
10669 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
10670 build_int_cst (val_type
, 4));
10673 case AVR_BUILTIN_INSERT_BITS
:
10675 tree tbits
= arg
[1];
10676 tree tval
= arg
[2];
10678 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
10680 bool changed
= false;
10682 avr_map_op_t best_g
;
10684 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
10686 /* No constant as first argument: Don't fold this and run into
10687 error in avr_expand_builtin. */
10692 map
= tree_to_double_int (arg
[0]);
10693 tmap
= double_int_to_tree (map_type
, map
);
10695 if (TREE_CODE (tval
) != INTEGER_CST
10696 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
10698 /* There are no F in the map, i.e. 3rd operand is unused.
10699 Replace that argument with some constant to render
10700 respective input unused. */
10702 tval
= build_int_cst (val_type
, 0);
10706 if (TREE_CODE (tbits
) != INTEGER_CST
10707 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
10709 /* Similar for the bits to be inserted. If they are unused,
10710 we can just as well pass 0. */
10712 tbits
= build_int_cst (val_type
, 0);
10715 if (TREE_CODE (tbits
) == INTEGER_CST
)
10717 /* Inserting bits known at compile time is easy and can be
10718 performed by AND and OR with appropriate masks. */
10720 int bits
= TREE_INT_CST_LOW (tbits
);
10721 int mask_ior
= 0, mask_and
= 0xff;
10723 for (i
= 0; i
< 8; i
++)
10725 int mi
= avr_map (map
, i
);
10729 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
10730 else mask_and
&= ~(1 << i
);
10734 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
10735 build_int_cst (val_type
, mask_ior
));
10736 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
10737 build_int_cst (val_type
, mask_and
));
10741 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
10743 /* If bits don't change their position we can use vanilla logic
10744 to merge the two arguments. */
10746 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
10748 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
10749 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
10751 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
10752 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
10753 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
10756 /* Try to decomposing map to reduce overall cost. */
10758 if (avr_log
.builtin
)
10759 avr_edump ("\n%?: %X\n%?: ROL cost: ", map
);
10761 best_g
= avr_map_op
[0];
10762 best_g
.cost
= 1000;
10764 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
10767 = avr_map_decompose (map
, avr_map_op
+ i
,
10768 TREE_CODE (tval
) == INTEGER_CST
);
10770 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
10774 if (avr_log
.builtin
)
10777 if (best_g
.arg
== 0)
10778 /* No optimization found */
10781 /* Apply operation G to the 2nd argument. */
10783 if (avr_log
.builtin
)
10784 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
10785 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
10787 /* Do right-shifts arithmetically: They copy the MSB instead of
10788 shifting in a non-usable value (0) as with logic right-shift. */
10790 tbits
= fold_convert (signed_char_type_node
, tbits
);
10791 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
10792 build_int_cst (val_type
, best_g
.arg
));
10793 tbits
= fold_convert (val_type
, tbits
);
10795 /* Use map o G^-1 instead of original map to undo the effect of G. */
10797 tmap
= double_int_to_tree (map_type
, best_g
.map
);
10799 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
10800 } /* AVR_BUILTIN_INSERT_BITS */
10808 /* Initialize the GCC target structure. */
10810 #undef TARGET_ASM_ALIGNED_HI_OP
10811 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
10812 #undef TARGET_ASM_ALIGNED_SI_OP
10813 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
10814 #undef TARGET_ASM_UNALIGNED_HI_OP
10815 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
10816 #undef TARGET_ASM_UNALIGNED_SI_OP
10817 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
10818 #undef TARGET_ASM_INTEGER
10819 #define TARGET_ASM_INTEGER avr_assemble_integer
10820 #undef TARGET_ASM_FILE_START
10821 #define TARGET_ASM_FILE_START avr_file_start
10822 #undef TARGET_ASM_FILE_END
10823 #define TARGET_ASM_FILE_END avr_file_end
10825 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
10826 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
10827 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
10828 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
10830 #undef TARGET_FUNCTION_VALUE
10831 #define TARGET_FUNCTION_VALUE avr_function_value
10832 #undef TARGET_LIBCALL_VALUE
10833 #define TARGET_LIBCALL_VALUE avr_libcall_value
10834 #undef TARGET_FUNCTION_VALUE_REGNO_P
10835 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
10837 #undef TARGET_ATTRIBUTE_TABLE
10838 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
10839 #undef TARGET_INSERT_ATTRIBUTES
10840 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
10841 #undef TARGET_SECTION_TYPE_FLAGS
10842 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
10844 #undef TARGET_ASM_NAMED_SECTION
10845 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
10846 #undef TARGET_ASM_INIT_SECTIONS
10847 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
10848 #undef TARGET_ENCODE_SECTION_INFO
10849 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
10850 #undef TARGET_ASM_SELECT_SECTION
10851 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
10853 #undef TARGET_REGISTER_MOVE_COST
10854 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
10855 #undef TARGET_MEMORY_MOVE_COST
10856 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
10857 #undef TARGET_RTX_COSTS
10858 #define TARGET_RTX_COSTS avr_rtx_costs
10859 #undef TARGET_ADDRESS_COST
10860 #define TARGET_ADDRESS_COST avr_address_cost
10861 #undef TARGET_MACHINE_DEPENDENT_REORG
10862 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
10863 #undef TARGET_FUNCTION_ARG
10864 #define TARGET_FUNCTION_ARG avr_function_arg
10865 #undef TARGET_FUNCTION_ARG_ADVANCE
10866 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
10868 #undef TARGET_RETURN_IN_MEMORY
10869 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
10871 #undef TARGET_STRICT_ARGUMENT_NAMING
10872 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
10874 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
10875 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
10877 #undef TARGET_HARD_REGNO_SCRATCH_OK
10878 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
10879 #undef TARGET_CASE_VALUES_THRESHOLD
10880 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
10882 #undef TARGET_FRAME_POINTER_REQUIRED
10883 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
10884 #undef TARGET_CAN_ELIMINATE
10885 #define TARGET_CAN_ELIMINATE avr_can_eliminate
10887 #undef TARGET_CLASS_LIKELY_SPILLED_P
10888 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
10890 #undef TARGET_OPTION_OVERRIDE
10891 #define TARGET_OPTION_OVERRIDE avr_option_override
10893 #undef TARGET_CANNOT_MODIFY_JUMPS_P
10894 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
10896 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
10897 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
10899 #undef TARGET_INIT_BUILTINS
10900 #define TARGET_INIT_BUILTINS avr_init_builtins
10902 #undef TARGET_BUILTIN_DECL
10903 #define TARGET_BUILTIN_DECL avr_builtin_decl
10905 #undef TARGET_EXPAND_BUILTIN
10906 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
10908 #undef TARGET_FOLD_BUILTIN
10909 #define TARGET_FOLD_BUILTIN avr_fold_builtin
10911 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
10912 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
10914 #undef TARGET_SCALAR_MODE_SUPPORTED_P
10915 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
10917 #undef TARGET_ADDR_SPACE_SUBSET_P
10918 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
10920 #undef TARGET_ADDR_SPACE_CONVERT
10921 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
10923 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
10924 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
10926 #undef TARGET_ADDR_SPACE_POINTER_MODE
10927 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
10929 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
10930 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
10931 avr_addr_space_legitimate_address_p
10933 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
10934 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
10936 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
10937 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
10939 #undef TARGET_PRINT_OPERAND
10940 #define TARGET_PRINT_OPERAND avr_print_operand
10941 #undef TARGET_PRINT_OPERAND_ADDRESS
10942 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
10943 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
10944 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
10946 struct gcc_target targetm
= TARGET_INITIALIZER
;
10949 #include "gt-avr.h"