1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace
[] =
85 { ADDR_SPACE_RAM
, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix
[6] =
107 /* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
112 /* SREG: The processor status */
115 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
121 /* RAMPZ: The high byte of 24-bit address used with ELPM */
124 /* SP: The stack pointer and its low and high byte */
129 static avr_addr_t avr_addr
;
132 /* Prototypes for local helper functions. */
134 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
135 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
136 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
137 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
138 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
139 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
141 static int avr_naked_function_p (tree
);
142 static int interrupt_function_p (tree
);
143 static int signal_function_p (tree
);
144 static int avr_OS_task_function_p (tree
);
145 static int avr_OS_main_function_p (tree
);
146 static int avr_regs_to_save (HARD_REG_SET
*);
147 static int get_sequence_length (rtx insns
);
148 static int sequent_regs_live (void);
149 static const char *ptrreg_to_str (int);
150 static const char *cond_string (enum rtx_code
);
151 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
152 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
154 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
155 static struct machine_function
* avr_init_machine_status (void);
158 /* Prototypes for hook implementors if needed before their implementation. */
160 static bool avr_rtx_costs (rtx
, int, int, int, int *, bool);
163 /* Allocate registers from r25 to r8 for parameters for function calls. */
164 #define FIRST_CUM_REG 26
166 /* Implicit target register of LPM instruction (R0) */
167 extern GTY(()) rtx lpm_reg_rtx
;
170 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
171 extern GTY(()) rtx lpm_addr_reg_rtx
;
172 rtx lpm_addr_reg_rtx
;
174 /* Temporary register RTX (reg:QI TMP_REGNO) */
175 extern GTY(()) rtx tmp_reg_rtx
;
178 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
179 extern GTY(()) rtx zero_reg_rtx
;
182 /* RTXs for all general purpose registers as QImode */
183 extern GTY(()) rtx all_regs_rtx
[32];
184 rtx all_regs_rtx
[32];
186 /* SREG, the processor status */
187 extern GTY(()) rtx sreg_rtx
;
190 /* RAMP* special function registers */
191 extern GTY(()) rtx rampd_rtx
;
192 extern GTY(()) rtx rampx_rtx
;
193 extern GTY(()) rtx rampy_rtx
;
194 extern GTY(()) rtx rampz_rtx
;
200 /* RTX containing the strings "" and "e", respectively */
201 static GTY(()) rtx xstring_empty
;
202 static GTY(()) rtx xstring_e
;
204 /* Preprocessor macros to define depending on MCU type. */
205 const char *avr_extra_arch_macro
;
207 /* Current architecture. */
208 const struct base_arch_s
*avr_current_arch
;
210 /* Current device. */
211 const struct mcu_type_s
*avr_current_device
;
213 /* Section to put switch tables in. */
214 static GTY(()) section
*progmem_swtable_section
;
216 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
217 or to address space __flash*. */
218 static GTY(()) section
*progmem_section
[6];
220 /* Condition for insns/expanders from avr-dimode.md. */
221 bool avr_have_dimode
= true;
223 /* To track if code will use .bss and/or .data. */
224 bool avr_need_clear_bss_p
= false;
225 bool avr_need_copy_data_p
= false;
/* Custom function to count number of set bits in VAL.
   Uses Kernighan's trick: each iteration clears the lowest set bit,
   so the loop runs once per set bit.
   NOTE(review): body reconstructed — the original statements were lost in
   the damaged source; confirm against upstream GCC avr.c.  */

int
avr_popcount (unsigned int val)
{
  int pop = 0;

  while (val)
    {
      val &= val - 1;   /* clear lowest set bit */
      pop++;
    }

  return pop;
}
246 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
247 Return true if the least significant N_BYTES bytes of XVAL all have a
248 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
249 of integers which contains an integer N iff bit N of POP_MASK is set. */
252 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
256 enum machine_mode mode
= GET_MODE (xval
);
258 if (VOIDmode
== mode
)
261 for (i
= 0; i
< n_bytes
; i
++)
263 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
264 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
266 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
274 avr_option_override (void)
276 flag_delete_null_pointer_checks
= 0;
278 /* caller-save.c looks for call-clobbered hard registers that are assigned
279 to pseudos that cross calls and tries so save-restore them around calls
280 in order to reduce the number of stack slots needed.
282 This might leads to situations where reload is no more able to cope
283 with the challenge of AVR's very few address registers and fails to
284 perform the requested spills. */
287 flag_caller_saves
= 0;
289 /* Unwind tables currently require a frame pointer for correctness,
290 see toplev.c:process_options(). */
292 if ((flag_unwind_tables
293 || flag_non_call_exceptions
294 || flag_asynchronous_unwind_tables
)
295 && !ACCUMULATE_OUTGOING_ARGS
)
297 flag_omit_frame_pointer
= 0;
300 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
301 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
302 avr_extra_arch_macro
= avr_current_device
->macro
;
304 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
306 /* SREG: Status Register containing flags like I (global IRQ) */
307 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
309 /* RAMPZ: Address' high part when loading via ELPM */
310 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
312 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
313 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
314 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
315 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
317 /* SP: Stack Pointer (SP_H:SP_L) */
318 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
319 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
321 init_machine_status
= avr_init_machine_status
;
323 avr_log_set_avr_log();
/* Function to set up the backend function structure.
   Returns a zero-initialized, GC-allocated machine_function.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
335 /* Implement `INIT_EXPANDERS'. */
336 /* The function works like a singleton. */
339 avr_init_expanders (void)
343 for (regno
= 0; regno
< 32; regno
++)
344 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
346 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
347 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
348 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
350 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
352 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
353 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
354 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
355 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
356 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
358 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
359 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
363 /* Return register class for register R. */
366 avr_regno_reg_class (int r
)
368 static const enum reg_class reg_class_tab
[] =
372 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
373 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
374 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
375 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
377 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
378 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
380 ADDW_REGS
, ADDW_REGS
,
382 POINTER_X_REGS
, POINTER_X_REGS
,
384 POINTER_Y_REGS
, POINTER_Y_REGS
,
386 POINTER_Z_REGS
, POINTER_Z_REGS
,
392 return reg_class_tab
[r
];
399 avr_scalar_mode_supported_p (enum machine_mode mode
)
404 return default_scalar_mode_supported_p (mode
);
408 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
411 avr_decl_flash_p (tree decl
)
413 if (TREE_CODE (decl
) != VAR_DECL
414 || TREE_TYPE (decl
) == error_mark_node
)
419 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
423 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
424 address space and FALSE, otherwise. */
427 avr_decl_memx_p (tree decl
)
429 if (TREE_CODE (decl
) != VAR_DECL
430 || TREE_TYPE (decl
) == error_mark_node
)
435 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
439 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
442 avr_mem_flash_p (rtx x
)
445 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
449 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
450 address space and FALSE, otherwise. */
453 avr_mem_memx_p (rtx x
)
456 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
460 /* A helper for the subsequent function attribute used to dig for
461 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
464 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
466 if (FUNCTION_DECL
== TREE_CODE (func
))
468 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
473 func
= TREE_TYPE (func
);
476 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
477 || TREE_CODE (func
) == METHOD_TYPE
);
479 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
482 /* Return nonzero if FUNC is a naked function. */
485 avr_naked_function_p (tree func
)
487 return avr_lookup_function_attribute1 (func
, "naked");
490 /* Return nonzero if FUNC is an interrupt function as specified
491 by the "interrupt" attribute. */
494 interrupt_function_p (tree func
)
496 return avr_lookup_function_attribute1 (func
, "interrupt");
499 /* Return nonzero if FUNC is a signal function as specified
500 by the "signal" attribute. */
503 signal_function_p (tree func
)
505 return avr_lookup_function_attribute1 (func
, "signal");
508 /* Return nonzero if FUNC is an OS_task function. */
511 avr_OS_task_function_p (tree func
)
513 return avr_lookup_function_attribute1 (func
, "OS_task");
516 /* Return nonzero if FUNC is an OS_main function. */
519 avr_OS_main_function_p (tree func
)
521 return avr_lookup_function_attribute1 (func
, "OS_main");
525 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
528 avr_accumulate_outgoing_args (void)
531 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
533 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
534 what offset is correct. In some cases it is relative to
535 virtual_outgoing_args_rtx and in others it is relative to
536 virtual_stack_vars_rtx. For example code see
537 gcc.c-torture/execute/built-in-setjmp.c
538 gcc.c-torture/execute/builtins/sprintf-chk.c */
540 return (TARGET_ACCUMULATE_OUTGOING_ARGS
541 && !(cfun
->calls_setjmp
542 || cfun
->has_nonlocal_label
));
546 /* Report contribution of accumulated outgoing arguments to stack size. */
549 avr_outgoing_args_size (void)
551 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
566 /* Return the number of hard registers to push/pop in the prologue/epilogue
567 of the current function, and optionally store these registers in SET. */
570 avr_regs_to_save (HARD_REG_SET
*set
)
573 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
574 || signal_function_p (current_function_decl
));
577 CLEAR_HARD_REG_SET (*set
);
580 /* No need to save any registers if the function never returns or
581 has the "OS_task" or "OS_main" attribute. */
582 if (TREE_THIS_VOLATILE (current_function_decl
)
583 || cfun
->machine
->is_OS_task
584 || cfun
->machine
->is_OS_main
)
587 for (reg
= 0; reg
< 32; reg
++)
589 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
590 any global register variables. */
594 if ((int_or_sig_p
&& !current_function_is_leaf
&& call_used_regs
[reg
])
595 || (df_regs_ever_live_p (reg
)
596 && (int_or_sig_p
|| !call_used_regs
[reg
])
597 /* Don't record frame pointer registers here. They are treated
598 indivitually in prologue. */
599 && !(frame_pointer_needed
600 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
603 SET_HARD_REG_BIT (*set
, reg
);
610 /* Return true if register FROM can be eliminated via register TO. */
613 avr_can_eliminate (const int from
, const int to
)
615 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
616 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
617 || ((from
== FRAME_POINTER_REGNUM
618 || from
== FRAME_POINTER_REGNUM
+ 1)
619 && !frame_pointer_needed
));
622 /* Compute offset between arg_pointer and frame_pointer. */
625 avr_initial_elimination_offset (int from
, int to
)
627 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
631 int offset
= frame_pointer_needed
? 2 : 0;
632 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
634 offset
+= avr_regs_to_save (NULL
);
635 return (get_frame_size () + avr_outgoing_args_size()
636 + avr_pc_size
+ 1 + offset
);
640 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
641 frame pointer by +STARTING_FRAME_OFFSET.
642 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
643 avoids creating add/sub of offset in nonlocal goto and setjmp. */
646 avr_builtin_setjmp_frame_value (void)
648 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
649 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
652 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
653 This is return address of function. */
655 avr_return_addr_rtx (int count
, rtx tem
)
659 /* Can only return this function's return address. Others not supported. */
665 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
666 warning (0, "'builtin_return_address' contains only 2 bytes of address");
669 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
671 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
672 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
673 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
677 /* Return 1 if the function epilogue is just a single "ret". */
680 avr_simple_epilogue (void)
682 return (! frame_pointer_needed
683 && get_frame_size () == 0
684 && avr_outgoing_args_size() == 0
685 && avr_regs_to_save (NULL
) == 0
686 && ! interrupt_function_p (current_function_decl
)
687 && ! signal_function_p (current_function_decl
)
688 && ! avr_naked_function_p (current_function_decl
)
689 && ! TREE_THIS_VOLATILE (current_function_decl
));
692 /* This function checks sequence of live registers. */
695 sequent_regs_live (void)
701 for (reg
= 0; reg
< 18; ++reg
)
705 /* Don't recognize sequences that contain global register
714 if (!call_used_regs
[reg
])
716 if (df_regs_ever_live_p (reg
))
726 if (!frame_pointer_needed
)
728 if (df_regs_ever_live_p (REG_Y
))
736 if (df_regs_ever_live_p (REG_Y
+1))
749 return (cur_seq
== live_seq
) ? live_seq
: 0;
752 /* Obtain the length sequence of insns. */
755 get_sequence_length (rtx insns
)
760 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
761 length
+= get_attr_length (insn
);
766 /* Implement INCOMING_RETURN_ADDR_RTX. */
769 avr_incoming_return_addr_rtx (void)
771 /* The return address is at the top of the stack. Note that the push
772 was via post-decrement, which means the actual address is off by one. */
773 return gen_frame_mem (HImode
, plus_constant (stack_pointer_rtx
, 1));
776 /* Helper for expand_prologue. Emit a push of a byte register. */
779 emit_push_byte (unsigned regno
, bool frame_related_p
)
783 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
784 mem
= gen_frame_mem (QImode
, mem
);
785 reg
= gen_rtx_REG (QImode
, regno
);
787 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
789 RTX_FRAME_RELATED_P (insn
) = 1;
791 cfun
->machine
->stack_usage
++;
795 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
796 SFR is a MEM representing the memory location of the SFR.
797 If CLR_P then clear the SFR after the push using zero_reg. */
800 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
804 gcc_assert (MEM_P (sfr
));
806 /* IN __tmp_reg__, IO(SFR) */
807 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
809 RTX_FRAME_RELATED_P (insn
) = 1;
811 /* PUSH __tmp_reg__ */
812 emit_push_byte (TMP_REGNO
, frame_related_p
);
816 /* OUT IO(SFR), __zero_reg__ */
817 insn
= emit_move_insn (sfr
, const0_rtx
);
819 RTX_FRAME_RELATED_P (insn
) = 1;
824 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
827 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
828 int live_seq
= sequent_regs_live ();
830 HOST_WIDE_INT size_max
831 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
833 bool minimize
= (TARGET_CALL_PROLOGUES
837 && !cfun
->machine
->is_OS_task
838 && !cfun
->machine
->is_OS_main
);
841 && (frame_pointer_needed
842 || avr_outgoing_args_size() > 8
843 || (AVR_2_BYTE_PC
&& live_seq
> 6)
847 int first_reg
, reg
, offset
;
849 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
850 gen_int_mode (size
, HImode
));
852 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
853 gen_int_mode (live_seq
+size
, HImode
));
854 insn
= emit_insn (pattern
);
855 RTX_FRAME_RELATED_P (insn
) = 1;
857 /* Describe the effect of the unspec_volatile call to prologue_saves.
858 Note that this formulation assumes that add_reg_note pushes the
859 notes to the front. Thus we build them in the reverse order of
860 how we want dwarf2out to process them. */
862 /* The function does always set frame_pointer_rtx, but whether that
863 is going to be permanent in the function is frame_pointer_needed. */
865 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
866 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
868 : stack_pointer_rtx
),
869 plus_constant (stack_pointer_rtx
,
870 -(size
+ live_seq
))));
872 /* Note that live_seq always contains r28+r29, but the other
873 registers to be saved are all below 18. */
875 first_reg
= 18 - (live_seq
- 2);
877 for (reg
= 29, offset
= -live_seq
+ 1;
879 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
883 m
= gen_rtx_MEM (QImode
, plus_constant (stack_pointer_rtx
, offset
));
884 r
= gen_rtx_REG (QImode
, reg
);
885 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
888 cfun
->machine
->stack_usage
+= size
+ live_seq
;
894 for (reg
= 0; reg
< 32; ++reg
)
895 if (TEST_HARD_REG_BIT (set
, reg
))
896 emit_push_byte (reg
, true);
898 if (frame_pointer_needed
899 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
901 /* Push frame pointer. Always be consistent about the
902 ordering of pushes -- epilogue_restores expects the
903 register pair to be pushed low byte first. */
905 emit_push_byte (REG_Y
, true);
906 emit_push_byte (REG_Y
+ 1, true);
909 if (frame_pointer_needed
912 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
913 RTX_FRAME_RELATED_P (insn
) = 1;
918 /* Creating a frame can be done by direct manipulation of the
919 stack or via the frame pointer. These two methods are:
926 the optimum method depends on function type, stack and
927 frame size. To avoid a complex logic, both methods are
928 tested and shortest is selected.
930 There is also the case where SIZE != 0 and no frame pointer is
931 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
932 In that case, insn (*) is not needed in that case.
933 We use the X register as scratch. This is safe because in X
935 In an interrupt routine, the case of SIZE != 0 together with
936 !frame_pointer_needed can only occur if the function is not a
937 leaf function and thus X has already been saved. */
940 HOST_WIDE_INT size_cfa
= size
;
941 rtx fp_plus_insns
, fp
, my_fp
;
943 gcc_assert (frame_pointer_needed
945 || !current_function_is_leaf
);
947 fp
= my_fp
= (frame_pointer_needed
949 : gen_rtx_REG (Pmode
, REG_X
));
951 if (AVR_HAVE_8BIT_SP
)
953 /* The high byte (r29) does not change:
954 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
956 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
959 /* Cut down size and avoid size = 0 so that we don't run
960 into ICE like PR52488 in the remainder. */
964 /* Don't error so that insane code from newlib still compiles
965 and does not break building newlib. As PR51345 is implemented
966 now, there are multilib variants with -msp8.
968 If user wants sanity checks he can use -Wstack-usage=
971 For CFA we emit the original, non-saturated size so that
972 the generic machinery is aware of the real stack usage and
973 will print the above diagnostic as expected. */
978 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
980 /************ Method 1: Adjust frame pointer ************/
984 /* Normally, the dwarf2out frame-related-expr interpreter does
985 not expect to have the CFA change once the frame pointer is
986 set up. Thus, we avoid marking the move insn below and
987 instead indicate that the entire operation is complete after
988 the frame pointer subtraction is done. */
990 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
991 if (frame_pointer_needed
)
993 RTX_FRAME_RELATED_P (insn
) = 1;
994 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
995 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
998 insn
= emit_move_insn (my_fp
, plus_constant (my_fp
, -size
));
999 if (frame_pointer_needed
)
1001 RTX_FRAME_RELATED_P (insn
) = 1;
1002 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1003 gen_rtx_SET (VOIDmode
, fp
,
1004 plus_constant (fp
, -size_cfa
)));
1007 /* Copy to stack pointer. Note that since we've already
1008 changed the CFA to the frame pointer this operation
1009 need not be annotated if frame pointer is needed.
1010 Always move through unspec, see PR50063.
1011 For meaning of irq_state see movhi_sp_r insn. */
1013 if (cfun
->machine
->is_interrupt
)
1016 if (TARGET_NO_INTERRUPTS
1017 || cfun
->machine
->is_signal
1018 || cfun
->machine
->is_OS_main
)
1021 if (AVR_HAVE_8BIT_SP
)
1024 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1025 fp
, GEN_INT (irq_state
)));
1026 if (!frame_pointer_needed
)
1028 RTX_FRAME_RELATED_P (insn
) = 1;
1029 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1030 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1031 plus_constant (stack_pointer_rtx
,
1035 fp_plus_insns
= get_insns ();
1038 /************ Method 2: Adjust Stack pointer ************/
1040 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1041 can only handle specific offsets. */
1043 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1049 insn
= emit_move_insn (stack_pointer_rtx
,
1050 plus_constant (stack_pointer_rtx
, -size
));
1051 RTX_FRAME_RELATED_P (insn
) = 1;
1052 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1053 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1054 plus_constant (stack_pointer_rtx
,
1056 if (frame_pointer_needed
)
1058 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1059 RTX_FRAME_RELATED_P (insn
) = 1;
1062 sp_plus_insns
= get_insns ();
1065 /************ Use shortest method ************/
1067 emit_insn (get_sequence_length (sp_plus_insns
)
1068 < get_sequence_length (fp_plus_insns
)
1074 emit_insn (fp_plus_insns
);
1077 cfun
->machine
->stack_usage
+= size_cfa
;
1078 } /* !minimize && size != 0 */
1083 /* Output function prologue. */
1086 expand_prologue (void)
1091 size
= get_frame_size() + avr_outgoing_args_size();
1093 /* Init cfun->machine. */
1094 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
1095 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
1096 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
1097 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
1098 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
1099 cfun
->machine
->stack_usage
= 0;
1101 /* Prologue: naked. */
1102 if (cfun
->machine
->is_naked
)
1107 avr_regs_to_save (&set
);
1109 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1111 /* Enable interrupts. */
1112 if (cfun
->machine
->is_interrupt
)
1113 emit_insn (gen_enable_interrupt ());
1115 /* Push zero reg. */
1116 emit_push_byte (ZERO_REGNO
, true);
1119 emit_push_byte (TMP_REGNO
, true);
1122 /* ??? There's no dwarf2 column reserved for SREG. */
1123 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1125 /* Clear zero reg. */
1126 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1128 /* Prevent any attempt to delete the setting of ZERO_REG! */
1129 emit_use (zero_reg_rtx
);
1131 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1132 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1135 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1138 && TEST_HARD_REG_BIT (set
, REG_X
)
1139 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1141 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1145 && (frame_pointer_needed
1146 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1147 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1149 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1153 && TEST_HARD_REG_BIT (set
, REG_Z
)
1154 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1156 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1158 } /* is_interrupt is_signal */
1160 avr_prologue_setup_frame (size
, set
);
1162 if (flag_stack_usage_info
)
1163 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1166 /* Output summary at end of function prologue. */
1169 avr_asm_function_end_prologue (FILE *file
)
1171 if (cfun
->machine
->is_naked
)
1173 fputs ("/* prologue: naked */\n", file
);
1177 if (cfun
->machine
->is_interrupt
)
1179 fputs ("/* prologue: Interrupt */\n", file
);
1181 else if (cfun
->machine
->is_signal
)
1183 fputs ("/* prologue: Signal */\n", file
);
1186 fputs ("/* prologue: function */\n", file
);
1189 if (ACCUMULATE_OUTGOING_ARGS
)
1190 fprintf (file
, "/* outgoing args size = %d */\n",
1191 avr_outgoing_args_size());
1193 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1195 fprintf (file
, "/* stack size = %d */\n",
1196 cfun
->machine
->stack_usage
);
1197 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1198 usage for offset so that SP + .L__stack_offset = return address. */
1199 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1203 /* Implement EPILOGUE_USES. */
1206 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1208 if (reload_completed
1210 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1215 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1218 emit_pop_byte (unsigned regno
)
1222 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1223 mem
= gen_frame_mem (QImode
, mem
);
1224 reg
= gen_rtx_REG (QImode
, regno
);
1226 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1229 /* Output RTL epilogue. */
1232 expand_epilogue (bool sibcall_p
)
1239 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1241 size
= get_frame_size() + avr_outgoing_args_size();
1243 /* epilogue: naked */
1244 if (cfun
->machine
->is_naked
)
1246 gcc_assert (!sibcall_p
);
1248 emit_jump_insn (gen_return ());
1252 avr_regs_to_save (&set
);
1253 live_seq
= sequent_regs_live ();
1255 minimize
= (TARGET_CALL_PROLOGUES
1258 && !cfun
->machine
->is_OS_task
1259 && !cfun
->machine
->is_OS_main
);
1263 || frame_pointer_needed
1266 /* Get rid of frame. */
1268 if (!frame_pointer_needed
)
1270 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1275 emit_move_insn (frame_pointer_rtx
,
1276 plus_constant (frame_pointer_rtx
, size
));
1279 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1285 /* Try two methods to adjust stack and select shortest. */
1290 HOST_WIDE_INT size_max
;
1292 gcc_assert (frame_pointer_needed
1294 || !current_function_is_leaf
);
1296 fp
= my_fp
= (frame_pointer_needed
1298 : gen_rtx_REG (Pmode
, REG_X
));
1300 if (AVR_HAVE_8BIT_SP
)
1302 /* The high byte (r29) does not change:
1303 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1305 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1308 /* For rationale see comment in prologue generation. */
1310 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1311 if (size
> size_max
)
1313 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1315 /********** Method 1: Adjust fp register **********/
1319 if (!frame_pointer_needed
)
1320 emit_move_insn (fp
, stack_pointer_rtx
);
1322 emit_move_insn (my_fp
, plus_constant (my_fp
, size
));
1324 /* Copy to stack pointer. */
1326 if (TARGET_NO_INTERRUPTS
)
1329 if (AVR_HAVE_8BIT_SP
)
1332 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1333 GEN_INT (irq_state
)));
1335 fp_plus_insns
= get_insns ();
1338 /********** Method 2: Adjust Stack pointer **********/
1340 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1346 emit_move_insn (stack_pointer_rtx
,
1347 plus_constant (stack_pointer_rtx
, size
));
1349 sp_plus_insns
= get_insns ();
1352 /************ Use shortest method ************/
1354 emit_insn (get_sequence_length (sp_plus_insns
)
1355 < get_sequence_length (fp_plus_insns
)
1360 emit_insn (fp_plus_insns
);
1363 if (frame_pointer_needed
1364 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1366 /* Restore previous frame_pointer. See expand_prologue for
1367 rationale for not using pophi. */
1369 emit_pop_byte (REG_Y
+ 1);
1370 emit_pop_byte (REG_Y
);
1373 /* Restore used registers. */
1375 for (reg
= 31; reg
>= 0; --reg
)
1376 if (TEST_HARD_REG_BIT (set
, reg
))
1377 emit_pop_byte (reg
);
1381 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1382 The conditions to restore them must be tha same as in prologue. */
1385 && TEST_HARD_REG_BIT (set
, REG_Z
)
1386 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1388 emit_pop_byte (TMP_REGNO
);
1389 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1393 && (frame_pointer_needed
1394 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1395 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1397 emit_pop_byte (TMP_REGNO
);
1398 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1402 && TEST_HARD_REG_BIT (set
, REG_X
)
1403 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1405 emit_pop_byte (TMP_REGNO
);
1406 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1411 emit_pop_byte (TMP_REGNO
);
1412 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1415 /* Restore SREG using tmp_reg as scratch. */
1417 emit_pop_byte (TMP_REGNO
);
1418 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1420 /* Restore tmp REG. */
1421 emit_pop_byte (TMP_REGNO
);
1423 /* Restore zero REG. */
1424 emit_pop_byte (ZERO_REGNO
);
1428 emit_jump_insn (gen_return ());
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
/* Output a summary comment at the beginning of the function epilogue
   so the generated assembly is easier to read.  FILE is the assembler
   output stream.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1440 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1443 avr_cannot_modify_jumps_p (void)
1446 /* Naked Functions must not have any instructions after
1447 their epilogue, see PR42240 */
1449 if (reload_completed
1451 && cfun
->machine
->is_naked
)
1460 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1462 /* FIXME: PSImode addresses are not mode-dependent in themselves.
1463 This hook just serves to hack around PR rtl-optimization/52543 by
1464 claiming that PSImode addresses (which are used for the 24-bit
1465 address space __memx) were mode-dependent so that lower-subreg.s
1466 will skip these addresses. See also the similar FIXME comment along
1467 with mov<mode> expanders in avr.md. */
1470 avr_mode_dependent_address_p (const_rtx addr
)
1472 return GET_MODE (addr
) != Pmode
;
1476 /* Helper function for `avr_legitimate_address_p'. */
1479 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1480 RTX_CODE outer_code
, bool strict
)
1483 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1484 as
, outer_code
, UNKNOWN
)
1486 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1490 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1491 machine for a memory operand of mode MODE. */
1494 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1496 bool ok
= CONSTANT_ADDRESS_P (x
);
1498 switch (GET_CODE (x
))
1501 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1506 && REG_X
== REGNO (x
))
1514 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1515 GET_CODE (x
), strict
);
1520 rtx reg
= XEXP (x
, 0);
1521 rtx op1
= XEXP (x
, 1);
1524 && CONST_INT_P (op1
)
1525 && INTVAL (op1
) >= 0)
1527 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1532 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1535 if (reg
== frame_pointer_rtx
1536 || reg
== arg_pointer_rtx
)
1541 else if (frame_pointer_needed
1542 && reg
== frame_pointer_rtx
)
1554 if (avr_log
.legitimate_address_p
)
1556 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1557 "reload_completed=%d reload_in_progress=%d %s:",
1558 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1559 reg_renumber
? "(reg_renumber)" : "");
1561 if (GET_CODE (x
) == PLUS
1562 && REG_P (XEXP (x
, 0))
1563 && CONST_INT_P (XEXP (x
, 1))
1564 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1567 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1568 true_regnum (XEXP (x
, 0)));
1571 avr_edump ("\n%r\n", x
);
1578 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1579 now only a helper for avr_addr_space_legitimize_address. */
1580 /* Attempts to replace X with a valid
1581 memory address for an operand of mode MODE */
1584 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1586 bool big_offset_p
= false;
1590 if (GET_CODE (oldx
) == PLUS
1591 && REG_P (XEXP (oldx
, 0)))
1593 if (REG_P (XEXP (oldx
, 1)))
1594 x
= force_reg (GET_MODE (oldx
), oldx
);
1595 else if (CONST_INT_P (XEXP (oldx
, 1)))
1597 int offs
= INTVAL (XEXP (oldx
, 1));
1598 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1599 && offs
> MAX_LD_OFFSET (mode
))
1601 big_offset_p
= true;
1602 x
= force_reg (GET_MODE (oldx
), oldx
);
1607 if (avr_log
.legitimize_address
)
1609 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1612 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1619 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1620 /* This will allow register R26/27 to be used where it is no worse than normal
1621 base pointers R28/29 or R30/31. For example, if base offset is greater
1622 than 63 bytes or for R++ or --R addressing. */
1625 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1626 int opnum
, int type
, int addr_type
,
1627 int ind_levels ATTRIBUTE_UNUSED
,
1628 rtx (*mk_memloc
)(rtx
,int))
1632 if (avr_log
.legitimize_reload_address
)
1633 avr_edump ("\n%?:%m %r\n", mode
, x
);
1635 if (1 && (GET_CODE (x
) == POST_INC
1636 || GET_CODE (x
) == PRE_DEC
))
1638 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1639 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1640 opnum
, RELOAD_OTHER
);
1642 if (avr_log
.legitimize_reload_address
)
1643 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1644 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1649 if (GET_CODE (x
) == PLUS
1650 && REG_P (XEXP (x
, 0))
1651 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1652 && CONST_INT_P (XEXP (x
, 1))
1653 && INTVAL (XEXP (x
, 1)) >= 1)
1655 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1659 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1661 int regno
= REGNO (XEXP (x
, 0));
1662 rtx mem
= mk_memloc (x
, regno
);
1664 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1665 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1668 if (avr_log
.legitimize_reload_address
)
1669 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1670 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1672 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1673 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1676 if (avr_log
.legitimize_reload_address
)
1677 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1678 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1683 else if (! (frame_pointer_needed
1684 && XEXP (x
, 0) == frame_pointer_rtx
))
1686 push_reload (x
, NULL_RTX
, px
, NULL
,
1687 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1690 if (avr_log
.legitimize_reload_address
)
1691 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1692 POINTER_REGS
, x
, NULL_RTX
);
1702 /* Helper function to print assembler resp. track instruction
1703 sequence lengths. Always return "".
1706 Output assembler code from template TPL with operands supplied
1707 by OPERANDS. This is just forwarding to output_asm_insn.
1710 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1711 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1712 Don't output anything.
1716 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1720 output_asm_insn (tpl
, operands
);
1734 /* Return a pointer register name as a string. */
1737 ptrreg_to_str (int regno
)
1741 case REG_X
: return "X";
1742 case REG_Y
: return "Y";
1743 case REG_Z
: return "Z";
1745 output_operand_lossage ("address operand requires constraint for"
1746 " X, Y, or Z register");
1751 /* Return the condition name as a string.
1752 Used in conditional jump constructing */
1755 cond_string (enum rtx_code code
)
1764 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1769 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1785 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1786 /* Output ADDR to FILE as address. */
1789 avr_print_operand_address (FILE *file
, rtx addr
)
1791 switch (GET_CODE (addr
))
1794 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1798 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1802 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1806 if (CONSTANT_ADDRESS_P (addr
)
1807 && text_segment_operand (addr
, VOIDmode
))
1810 if (GET_CODE (x
) == CONST
)
1812 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1814 /* Assembler gs() will implant word address. Make offset
1815 a byte offset inside gs() for assembler. This is
1816 needed because the more logical (constant+gs(sym)) is not
1817 accepted by gas. For 128K and lower devices this is ok.
1818 For large devices it will create a Trampoline to offset
1819 from symbol which may not be what the user really wanted. */
1820 fprintf (file
, "gs(");
1821 output_addr_const (file
, XEXP (x
,0));
1822 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
1823 2 * INTVAL (XEXP (x
, 1)));
1825 if (warning (0, "pointer offset from symbol maybe incorrect"))
1827 output_addr_const (stderr
, addr
);
1828 fprintf(stderr
,"\n");
1833 fprintf (file
, "gs(");
1834 output_addr_const (file
, addr
);
1835 fprintf (file
, ")");
1839 output_addr_const (file
, addr
);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
/* Return true iff CODE is a punctuation character the AVR backend
   accepts in operand output templates ('~' and '!', see avr.md).  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  return code == '~' || code == '!';
}
1853 /* Implement `TARGET_PRINT_OPERAND'. */
1854 /* Output X as assembler operand to file FILE.
1855 For a description of supported %-codes, see top of avr.md. */
1858 avr_print_operand (FILE *file
, rtx x
, int code
)
1862 if (code
>= 'A' && code
<= 'D')
1867 if (!AVR_HAVE_JMP_CALL
)
1870 else if (code
== '!')
1872 if (AVR_HAVE_EIJMP_EICALL
)
1875 else if (code
== 't'
1878 static int t_regno
= -1;
1879 static int t_nbits
= -1;
1881 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
1883 t_regno
= REGNO (x
);
1884 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
1886 else if (CONST_INT_P (x
) && t_regno
>= 0
1887 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
1889 int bpos
= INTVAL (x
);
1891 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
1893 fprintf (file
, ",%d", bpos
% 8);
1898 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
1902 if (x
== zero_reg_rtx
)
1903 fprintf (file
, "__zero_reg__");
1905 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1907 else if (CONST_INT_P (x
))
1909 HOST_WIDE_INT ival
= INTVAL (x
);
1912 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
1913 else if (low_io_address_operand (x
, VOIDmode
)
1914 || high_io_address_operand (x
, VOIDmode
))
1916 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
1917 fprintf (file
, "__RAMPZ__");
1918 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
1919 fprintf (file
, "__RAMPY__");
1920 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
1921 fprintf (file
, "__RAMPX__");
1922 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
1923 fprintf (file
, "__RAMPD__");
1924 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
1925 fprintf (file
, "__CCP__");
1926 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
1927 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
1928 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
1931 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
1932 ival
- avr_current_arch
->sfr_offset
);
1936 fatal_insn ("bad address, not an I/O address:", x
);
1940 rtx addr
= XEXP (x
, 0);
1944 if (!CONSTANT_P (addr
))
1945 fatal_insn ("bad address, not a constant:", addr
);
1946 /* Assembler template with m-code is data - not progmem section */
1947 if (text_segment_operand (addr
, VOIDmode
))
1948 if (warning (0, "accessing data memory with"
1949 " program memory address"))
1951 output_addr_const (stderr
, addr
);
1952 fprintf(stderr
,"\n");
1954 output_addr_const (file
, addr
);
1956 else if (code
== 'i')
1958 avr_print_operand (file
, addr
, 'i');
1960 else if (code
== 'o')
1962 if (GET_CODE (addr
) != PLUS
)
1963 fatal_insn ("bad address, not (reg+disp):", addr
);
1965 avr_print_operand (file
, XEXP (addr
, 1), 0);
1967 else if (code
== 'p' || code
== 'r')
1969 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1970 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1973 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1975 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1977 else if (GET_CODE (addr
) == PLUS
)
1979 avr_print_operand_address (file
, XEXP (addr
,0));
1980 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1981 fatal_insn ("internal compiler error. Bad address:"
1984 avr_print_operand (file
, XEXP (addr
,1), code
);
1987 avr_print_operand_address (file
, addr
);
1989 else if (code
== 'i')
1991 fatal_insn ("bad address, not an I/O address:", x
);
1993 else if (code
== 'x')
1995 /* Constant progmem address - like used in jmp or call */
1996 if (0 == text_segment_operand (x
, VOIDmode
))
1997 if (warning (0, "accessing program memory"
1998 " with data memory address"))
2000 output_addr_const (stderr
, x
);
2001 fprintf(stderr
,"\n");
2003 /* Use normal symbol for direct address no linker trampoline needed */
2004 output_addr_const (file
, x
);
2006 else if (GET_CODE (x
) == CONST_DOUBLE
)
2010 if (GET_MODE (x
) != SFmode
)
2011 fatal_insn ("internal compiler error. Unknown mode:", x
);
2012 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2013 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2014 fprintf (file
, "0x%lx", val
);
2016 else if (GET_CODE (x
) == CONST_STRING
)
2017 fputs (XSTR (x
, 0), file
);
2018 else if (code
== 'j')
2019 fputs (cond_string (GET_CODE (x
)), file
);
2020 else if (code
== 'k')
2021 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2023 avr_print_operand_address (file
, x
);
2026 /* Update the condition code in the INSN. */
2029 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2032 enum attr_cc cc
= get_attr_cc (insn
);
2040 case CC_OUT_PLUS_NOCLOBBER
:
2043 rtx
*op
= recog_data
.operand
;
2046 /* Extract insn's operands. */
2047 extract_constrain_insn_cached (insn
);
2055 avr_out_plus (op
, &len_dummy
, &icc
);
2056 cc
= (enum attr_cc
) icc
;
2059 case CC_OUT_PLUS_NOCLOBBER
:
2060 avr_out_plus_noclobber (op
, &len_dummy
, &icc
);
2061 cc
= (enum attr_cc
) icc
;
2066 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2067 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2068 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2070 /* Any other "r,rL" combination does not alter cc0. */
2074 } /* inner switch */
2078 } /* outer swicth */
2083 /* Special values like CC_OUT_PLUS from above have been
2084 mapped to "standard" CC_* values so we never come here. */
2090 /* Insn does not affect CC at all. */
2098 set
= single_set (insn
);
2102 cc_status
.flags
|= CC_NO_OVERFLOW
;
2103 cc_status
.value1
= SET_DEST (set
);
2108 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2109 The V flag may or may not be known but that's ok because
2110 alter_cond will change tests to use EQ/NE. */
2111 set
= single_set (insn
);
2115 cc_status
.value1
= SET_DEST (set
);
2116 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2121 set
= single_set (insn
);
2124 cc_status
.value1
= SET_SRC (set
);
2128 /* Insn doesn't leave CC in a usable state. */
2134 /* Choose mode for jump insn:
2135 1 - relative jump in range -63 <= x <= 62 ;
2136 2 - relative jump in range -2046 <= x <= 2045 ;
2137 3 - absolute jump (only for ATmega[16]03). */
2140 avr_jump_mode (rtx x
, rtx insn
)
2142 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2143 ? XEXP (x
, 0) : x
));
2144 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2145 int jump_distance
= cur_addr
- dest_addr
;
2147 if (-63 <= jump_distance
&& jump_distance
<= 62)
2149 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2151 else if (AVR_HAVE_JMP_CALL
)
2157 /* return an AVR condition jump commands.
2158 X is a comparison RTX.
2159 LEN is a number returned by avr_jump_mode function.
2160 if REVERSE nonzero then condition code in X must be reversed. */
2163 ret_cond_branch (rtx x
, int len
, int reverse
)
2165 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2170 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2171 return (len
== 1 ? ("breq .+2" CR_TAB
2173 len
== 2 ? ("breq .+4" CR_TAB
2181 return (len
== 1 ? ("breq .+2" CR_TAB
2183 len
== 2 ? ("breq .+4" CR_TAB
2190 return (len
== 1 ? ("breq .+2" CR_TAB
2192 len
== 2 ? ("breq .+4" CR_TAB
2199 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2200 return (len
== 1 ? ("breq %0" CR_TAB
2202 len
== 2 ? ("breq .+2" CR_TAB
2209 return (len
== 1 ? ("breq %0" CR_TAB
2211 len
== 2 ? ("breq .+2" CR_TAB
2218 return (len
== 1 ? ("breq %0" CR_TAB
2220 len
== 2 ? ("breq .+2" CR_TAB
2234 return ("br%j1 .+2" CR_TAB
2237 return ("br%j1 .+4" CR_TAB
2248 return ("br%k1 .+2" CR_TAB
2251 return ("br%k1 .+4" CR_TAB
2259 /* Output insn cost for next insn. */
2262 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2263 int num_operands ATTRIBUTE_UNUSED
)
2265 if (avr_log
.rtx_costs
)
2267 rtx set
= single_set (insn
);
2270 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2271 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2273 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2274 rtx_cost (PATTERN (insn
), INSN
, 0,
2275 optimize_insn_for_speed_p()));
2279 /* Return 0 if undefined, 1 if always true or always false. */
2282 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2284 unsigned int max
= (mode
== QImode
? 0xff :
2285 mode
== HImode
? 0xffff :
2286 mode
== PSImode
? 0xffffff :
2287 mode
== SImode
? 0xffffffff : 0);
2288 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
2290 if (unsigned_condition (op
) != op
)
2293 if (max
!= (INTVAL (x
) & max
)
2294 && INTVAL (x
) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR these are r8..r25.  */

int
function_arg_regno_p (int r)
{
  return (r >= 8 && r <= 25);
}
2310 /* Initializing the variable cum for the state at the beginning
2311 of the argument list. */
2314 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2315 tree fndecl ATTRIBUTE_UNUSED
)
2318 cum
->regno
= FIRST_CUM_REG
;
2319 if (!libname
&& stdarg_p (fntype
))
2322 /* Assume the calle may be tail called */
2324 cfun
->machine
->sibcall_fails
= 0;
2327 /* Returns the number of registers to allocate for a function argument. */
2330 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2334 if (mode
== BLKmode
)
2335 size
= int_size_in_bytes (type
);
2337 size
= GET_MODE_SIZE (mode
);
2339 /* Align all function arguments to start in even-numbered registers.
2340 Odd-sized arguments leave holes above them. */
2342 return (size
+ 1) & ~1;
2345 /* Controls whether a function argument is passed
2346 in a register, and which register. */
2349 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2350 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2352 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2353 int bytes
= avr_num_arg_regs (mode
, type
);
2355 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2356 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2361 /* Update the summarizer variable CUM to advance past an argument
2362 in the argument list. */
2365 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2366 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2368 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2369 int bytes
= avr_num_arg_regs (mode
, type
);
2371 cum
->nregs
-= bytes
;
2372 cum
->regno
-= bytes
;
2374 /* A parameter is being passed in a call-saved register. As the original
2375 contents of these regs has to be restored before leaving the function,
2376 a function must not pass arguments in call-saved regs in order to get
2381 && !call_used_regs
[cum
->regno
])
2383 /* FIXME: We ship info on failing tail-call in struct machine_function.
2384 This uses internals of calls.c:expand_call() and the way args_so_far
2385 is used. targetm.function_ok_for_sibcall() needs to be extended to
2386 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2387 dependent so that such an extension is not wanted. */
2389 cfun
->machine
->sibcall_fails
= 1;
2392 /* Test if all registers needed by the ABI are actually available. If the
2393 user has fixed a GPR needed to pass an argument, an (implicit) function
2394 call will clobber that fixed register. See PR45099 for an example. */
2401 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2402 if (fixed_regs
[regno
])
2403 warning (0, "fixed register %s used to pass parameter to function",
2407 if (cum
->nregs
<= 0)
2410 cum
->regno
= FIRST_CUM_REG
;
2414 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2415 /* Decide whether we can make a sibling call to a function. DECL is the
2416 declaration of the function being targeted by the call and EXP is the
2417 CALL_EXPR representing the call. */
2420 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2424 /* Tail-calling must fail if callee-saved regs are used to pass
2425 function args. We must not tail-call when `epilogue_restores'
2426 is used. Unfortunately, we cannot tell at this point if that
2427 actually will happen or not, and we cannot step back from
2428 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2430 if (cfun
->machine
->sibcall_fails
2431 || TARGET_CALL_PROLOGUES
)
2436 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2440 decl_callee
= TREE_TYPE (decl_callee
);
2444 decl_callee
= fntype_callee
;
2446 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2447 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2449 decl_callee
= TREE_TYPE (decl_callee
);
2453 /* Ensure that caller and callee have compatible epilogues */
2455 if (interrupt_function_p (current_function_decl
)
2456 || signal_function_p (current_function_decl
)
2457 || avr_naked_function_p (decl_callee
)
2458 || avr_naked_function_p (current_function_decl
)
2459 /* FIXME: For OS_task and OS_main, we are over-conservative.
2460 This is due to missing documentation of these attributes
2461 and what they actually should do and should not do. */
2462 || (avr_OS_task_function_p (decl_callee
)
2463 != avr_OS_task_function_p (current_function_decl
))
2464 || (avr_OS_main_function_p (decl_callee
)
2465 != avr_OS_main_function_p (current_function_decl
)))
2473 /***********************************************************************
2474 Functions for outputting various mov's for a various modes
2475 ************************************************************************/
2477 /* Return true if a value of mode MODE is read from flash by
2478 __load_* function from libgcc. */
2481 avr_load_libgcc_p (rtx op
)
2483 enum machine_mode mode
= GET_MODE (op
);
2484 int n_bytes
= GET_MODE_SIZE (mode
);
2489 && MEM_ADDR_SPACE (op
) == ADDR_SPACE_FLASH
);
2492 /* Return true if a value of mode MODE is read by __xload_* function. */
2495 avr_xload_libgcc_p (enum machine_mode mode
)
2497 int n_bytes
= GET_MODE_SIZE (mode
);
2500 || avr_current_device
->n_flash
> 1);
2504 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2505 OP[1] in AS1 to register OP[0].
2506 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2510 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2514 rtx src
= SET_SRC (single_set (insn
));
2516 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2518 addr_space_t as
= MEM_ADDR_SPACE (src
);
2525 warning (0, "writing to address space %qs not supported",
2526 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2531 addr
= XEXP (src
, 0);
2532 code
= GET_CODE (addr
);
2534 gcc_assert (REG_P (dest
));
2535 gcc_assert (REG
== code
|| POST_INC
== code
);
2537 /* Only 1-byte moves from __flash are representes as open coded
2538 mov insns. All other loads from flash are not handled here but
2539 by some UNSPEC instead, see respective FIXME in machine description. */
2541 gcc_assert (as
== ADDR_SPACE_FLASH
);
2542 gcc_assert (n_bytes
== 1);
2545 xop
[1] = lpm_addr_reg_rtx
;
2546 xop
[2] = lpm_reg_rtx
;
2555 gcc_assert (REG_Z
== REGNO (addr
));
2557 return AVR_HAVE_LPMX
2558 ? avr_asm_len ("lpm %0,%a1", xop
, plen
, 1)
2559 : avr_asm_len ("lpm" CR_TAB
2560 "mov %0,%2", xop
, plen
, 2);
2564 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0)));
2566 return AVR_HAVE_LPMX
2567 ? avr_asm_len ("lpm %0,%a1+", xop
, plen
, 1)
2568 : avr_asm_len ("lpm" CR_TAB
2570 "mov %0,%2", xop
, plen
, 3);
2577 /* If PLEN == NULL: Ouput instructions to load $0 with a value from
2578 flash address $1:Z. If $1 = 0 we can use LPM to read, otherwise
2580 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2584 avr_load_lpm (rtx insn
, rtx
*op
, int *plen
)
2587 int n
, n_bytes
= GET_MODE_SIZE (GET_MODE (op
[0]));
2588 rtx xsegment
= op
[1];
2589 bool clobber_z
= PARALLEL
== GET_CODE (PATTERN (insn
));
2590 bool r30_in_tmp
= false;
2595 xop
[1] = lpm_addr_reg_rtx
;
2596 xop
[2] = lpm_reg_rtx
;
2597 xop
[3] = xstring_empty
;
2599 /* Set RAMPZ as needed. */
2601 if (REG_P (xsegment
))
2603 avr_asm_len ("out __RAMPZ__,%0", &xsegment
, plen
, 1);
2607 /* Load the individual bytes from LSB to MSB. */
2609 for (n
= 0; n
< n_bytes
; n
++)
2611 xop
[0] = all_regs_rtx
[REGNO (op
[0]) + n
];
2613 if ((CONST_INT_P (xsegment
) && AVR_HAVE_LPMX
)
2614 || (REG_P (xsegment
) && AVR_HAVE_ELPMX
))
2617 avr_asm_len ("%3lpm %0,%a1", xop
, plen
, 1);
2618 else if (REGNO (xop
[0]) == REG_Z
)
2620 avr_asm_len ("%3lpm %2,%a1+", xop
, plen
, 1);
2624 avr_asm_len ("%3lpm %0,%a1+", xop
, plen
, 1);
2628 gcc_assert (clobber_z
);
2630 avr_asm_len ("%3lpm" CR_TAB
2631 "mov %0,%2", xop
, plen
, 2);
2634 avr_asm_len ("adiw %1,1", xop
, plen
, 1);
2639 avr_asm_len ("mov %1,%2", xop
, plen
, 1);
2643 && !reg_unused_after (insn
, lpm_addr_reg_rtx
)
2644 && !reg_overlap_mentioned_p (op
[0], lpm_addr_reg_rtx
))
2646 xop
[2] = GEN_INT (n_bytes
-1);
2647 avr_asm_len ("sbiw %1,%2", xop
, plen
, 1);
2650 if (REG_P (xsegment
) && AVR_HAVE_RAMPD
)
2652 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2654 avr_asm_len ("out __RAMPZ__,__zero_reg__", xop
, plen
, 1);
2661 /* Worker function for xload_8 insn. */
2664 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
2670 xop
[2] = lpm_addr_reg_rtx
;
2671 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
2676 avr_asm_len ("sbrc %1,7" CR_TAB
2678 "sbrs %1,7", xop
, plen
, 3);
2680 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, 1);
2682 if (REGNO (xop
[0]) != REGNO (xop
[3]))
2683 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2690 output_movqi (rtx insn
, rtx operands
[], int *real_l
)
2692 rtx dest
= operands
[0];
2693 rtx src
= operands
[1];
2695 if (avr_mem_flash_p (src
)
2696 || avr_mem_flash_p (dest
))
2698 return avr_out_lpm (insn
, operands
, real_l
);
2704 if (register_operand (dest
, QImode
))
2706 if (register_operand (src
, QImode
)) /* mov r,r */
2708 if (test_hard_reg_class (STACK_REG
, dest
))
2710 else if (test_hard_reg_class (STACK_REG
, src
))
2715 else if (CONSTANT_P (src
))
2717 output_reload_in_const (operands
, NULL_RTX
, real_l
, false);
2720 else if (MEM_P (src
))
2721 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2723 else if (MEM_P (dest
))
2728 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
2730 return out_movqi_mr_r (insn
, xop
, real_l
);
2737 output_movhi (rtx insn
, rtx xop
[], int *plen
)
2742 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
2744 if (avr_mem_flash_p (src
)
2745 || avr_mem_flash_p (dest
))
2747 return avr_out_lpm (insn
, xop
, plen
);
2752 if (REG_P (src
)) /* mov r,r */
2754 if (test_hard_reg_class (STACK_REG
, dest
))
2756 if (AVR_HAVE_8BIT_SP
)
2757 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
2760 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2761 "out __SP_H__,%B1", xop
, plen
, -2);
2763 /* Use simple load of SP if no interrupts are used. */
2765 return TARGET_NO_INTERRUPTS
2766 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2767 "out __SP_L__,%A1", xop
, plen
, -2)
2769 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2771 "out __SP_H__,%B1" CR_TAB
2772 "out __SREG__,__tmp_reg__" CR_TAB
2773 "out __SP_L__,%A1", xop
, plen
, -5);
2775 else if (test_hard_reg_class (STACK_REG
, src
))
2777 return !AVR_HAVE_SPH
2778 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2779 "clr %B0", xop
, plen
, -2)
2781 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2782 "in %B0,__SP_H__", xop
, plen
, -2);
2785 return AVR_HAVE_MOVW
2786 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
2788 : avr_asm_len ("mov %A0,%A1" CR_TAB
2789 "mov %B0,%B1", xop
, plen
, -2);
2791 else if (CONSTANT_P (src
))
2793 return output_reload_inhi (xop
, NULL
, plen
);
2795 else if (MEM_P (src
))
2797 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
2800 else if (MEM_P (dest
))
2805 xop
[1] = src
== const0_rtx
? zero_reg_rtx
: src
;
2807 return out_movhi_mr_r (insn
, xop
, plen
);
2810 fatal_insn ("invalid insn:", insn
);
2816 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
2820 rtx x
= XEXP (src
, 0);
2822 if (CONSTANT_ADDRESS_P (x
))
2824 return optimize
> 0 && io_address_operand (x
, QImode
)
2825 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
2826 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
2828 else if (GET_CODE (x
) == PLUS
2829 && REG_P (XEXP (x
, 0))
2830 && CONST_INT_P (XEXP (x
, 1)))
2832 /* memory access by reg+disp */
2834 int disp
= INTVAL (XEXP (x
, 1));
2836 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
2838 if (REGNO (XEXP (x
, 0)) != REG_Y
)
2839 fatal_insn ("incorrect insn:",insn
);
2841 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2842 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2843 "ldd %0,Y+63" CR_TAB
2844 "sbiw r28,%o1-63", op
, plen
, -3);
2846 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2847 "sbci r29,hi8(-%o1)" CR_TAB
2849 "subi r28,lo8(%o1)" CR_TAB
2850 "sbci r29,hi8(%o1)", op
, plen
, -5);
2852 else if (REGNO (XEXP (x
, 0)) == REG_X
)
2854 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2855 it but I have this situation with extremal optimizing options. */
2857 avr_asm_len ("adiw r26,%o1" CR_TAB
2858 "ld %0,X", op
, plen
, -2);
2860 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
2861 && !reg_unused_after (insn
, XEXP (x
,0)))
2863 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
2869 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
2872 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
2876 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
2880 rtx base
= XEXP (src
, 0);
2881 int reg_dest
= true_regnum (dest
);
2882 int reg_base
= true_regnum (base
);
2883 /* "volatile" forces reading low byte first, even if less efficient,
2884 for correct operation with 16-bit I/O registers. */
2885 int mem_volatile_p
= MEM_VOLATILE_P (src
);
2889 if (reg_dest
== reg_base
) /* R = (R) */
2890 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
2892 "mov %A0,__tmp_reg__", op
, plen
, -3);
2894 if (reg_base
!= REG_X
)
2895 return avr_asm_len ("ld %A0,%1" CR_TAB
2896 "ldd %B0,%1+1", op
, plen
, -2);
2898 avr_asm_len ("ld %A0,X+" CR_TAB
2899 "ld %B0,X", op
, plen
, -2);
2901 if (!reg_unused_after (insn
, base
))
2902 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
2906 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2908 int disp
= INTVAL (XEXP (base
, 1));
2909 int reg_base
= true_regnum (XEXP (base
, 0));
2911 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2913 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2914 fatal_insn ("incorrect insn:",insn
);
2916 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
2917 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
2918 "ldd %A0,Y+62" CR_TAB
2919 "ldd %B0,Y+63" CR_TAB
2920 "sbiw r28,%o1-62", op
, plen
, -4)
2922 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2923 "sbci r29,hi8(-%o1)" CR_TAB
2925 "ldd %B0,Y+1" CR_TAB
2926 "subi r28,lo8(%o1)" CR_TAB
2927 "sbci r29,hi8(%o1)", op
, plen
, -6);
2930 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2931 it but I have this situation with extremal
2932 optimization options. */
2934 if (reg_base
== REG_X
)
2935 return reg_base
== reg_dest
2936 ? avr_asm_len ("adiw r26,%o1" CR_TAB
2937 "ld __tmp_reg__,X+" CR_TAB
2939 "mov %A0,__tmp_reg__", op
, plen
, -4)
2941 : avr_asm_len ("adiw r26,%o1" CR_TAB
2944 "sbiw r26,%o1+1", op
, plen
, -4);
2946 return reg_base
== reg_dest
2947 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
2948 "ldd %B0,%B1" CR_TAB
2949 "mov %A0,__tmp_reg__", op
, plen
, -3)
2951 : avr_asm_len ("ldd %A0,%A1" CR_TAB
2952 "ldd %B0,%B1", op
, plen
, -2);
2954 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2956 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2957 fatal_insn ("incorrect insn:", insn
);
2959 if (!mem_volatile_p
)
2960 return avr_asm_len ("ld %B0,%1" CR_TAB
2961 "ld %A0,%1", op
, plen
, -2);
2963 return REGNO (XEXP (base
, 0)) == REG_X
2964 ? avr_asm_len ("sbiw r26,2" CR_TAB
2967 "sbiw r26,1", op
, plen
, -4)
2969 : avr_asm_len ("sbiw %r1,2" CR_TAB
2971 "ldd %B0,%p1+1", op
, plen
, -3);
2973 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2975 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2976 fatal_insn ("incorrect insn:", insn
);
2978 return avr_asm_len ("ld %A0,%1" CR_TAB
2979 "ld %B0,%1", op
, plen
, -2);
2981 else if (CONSTANT_ADDRESS_P (base
))
2983 return optimize
> 0 && io_address_operand (base
, HImode
)
2984 ? avr_asm_len ("in %A0,%i1" CR_TAB
2985 "in %B0,%i1+1", op
, plen
, -2)
2987 : avr_asm_len ("lds %A0,%m1" CR_TAB
2988 "lds %B0,%m1+1", op
, plen
, -4);
2991 fatal_insn ("unknown move insn:",insn
);
2996 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3000 rtx base
= XEXP (src
, 0);
3001 int reg_dest
= true_regnum (dest
);
3002 int reg_base
= true_regnum (base
);
3010 if (reg_base
== REG_X
) /* (R26) */
3012 if (reg_dest
== REG_X
)
3013 /* "ld r26,-X" is undefined */
3014 return *l
=7, ("adiw r26,3" CR_TAB
3017 "ld __tmp_reg__,-X" CR_TAB
3020 "mov r27,__tmp_reg__");
3021 else if (reg_dest
== REG_X
- 2)
3022 return *l
=5, ("ld %A0,X+" CR_TAB
3024 "ld __tmp_reg__,X+" CR_TAB
3026 "mov %C0,__tmp_reg__");
3027 else if (reg_unused_after (insn
, base
))
3028 return *l
=4, ("ld %A0,X+" CR_TAB
3033 return *l
=5, ("ld %A0,X+" CR_TAB
3041 if (reg_dest
== reg_base
)
3042 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3043 "ldd %C0,%1+2" CR_TAB
3044 "ldd __tmp_reg__,%1+1" CR_TAB
3046 "mov %B0,__tmp_reg__");
3047 else if (reg_base
== reg_dest
+ 2)
3048 return *l
=5, ("ld %A0,%1" CR_TAB
3049 "ldd %B0,%1+1" CR_TAB
3050 "ldd __tmp_reg__,%1+2" CR_TAB
3051 "ldd %D0,%1+3" CR_TAB
3052 "mov %C0,__tmp_reg__");
3054 return *l
=4, ("ld %A0,%1" CR_TAB
3055 "ldd %B0,%1+1" CR_TAB
3056 "ldd %C0,%1+2" CR_TAB
3060 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3062 int disp
= INTVAL (XEXP (base
, 1));
3064 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3066 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3067 fatal_insn ("incorrect insn:",insn
);
3069 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3070 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3071 "ldd %A0,Y+60" CR_TAB
3072 "ldd %B0,Y+61" CR_TAB
3073 "ldd %C0,Y+62" CR_TAB
3074 "ldd %D0,Y+63" CR_TAB
3077 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3078 "sbci r29,hi8(-%o1)" CR_TAB
3080 "ldd %B0,Y+1" CR_TAB
3081 "ldd %C0,Y+2" CR_TAB
3082 "ldd %D0,Y+3" CR_TAB
3083 "subi r28,lo8(%o1)" CR_TAB
3084 "sbci r29,hi8(%o1)");
3087 reg_base
= true_regnum (XEXP (base
, 0));
3088 if (reg_base
== REG_X
)
3091 if (reg_dest
== REG_X
)
3094 /* "ld r26,-X" is undefined */
3095 return ("adiw r26,%o1+3" CR_TAB
3098 "ld __tmp_reg__,-X" CR_TAB
3101 "mov r27,__tmp_reg__");
3104 if (reg_dest
== REG_X
- 2)
3105 return ("adiw r26,%o1" CR_TAB
3108 "ld __tmp_reg__,X+" CR_TAB
3110 "mov r26,__tmp_reg__");
3112 return ("adiw r26,%o1" CR_TAB
3119 if (reg_dest
== reg_base
)
3120 return *l
=5, ("ldd %D0,%D1" CR_TAB
3121 "ldd %C0,%C1" CR_TAB
3122 "ldd __tmp_reg__,%B1" CR_TAB
3123 "ldd %A0,%A1" CR_TAB
3124 "mov %B0,__tmp_reg__");
3125 else if (reg_dest
== reg_base
- 2)
3126 return *l
=5, ("ldd %A0,%A1" CR_TAB
3127 "ldd %B0,%B1" CR_TAB
3128 "ldd __tmp_reg__,%C1" CR_TAB
3129 "ldd %D0,%D1" CR_TAB
3130 "mov %C0,__tmp_reg__");
3131 return *l
=4, ("ldd %A0,%A1" CR_TAB
3132 "ldd %B0,%B1" CR_TAB
3133 "ldd %C0,%C1" CR_TAB
3136 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3137 return *l
=4, ("ld %D0,%1" CR_TAB
3141 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3142 return *l
=4, ("ld %A0,%1" CR_TAB
3146 else if (CONSTANT_ADDRESS_P (base
))
3147 return *l
=8, ("lds %A0,%m1" CR_TAB
3148 "lds %B0,%m1+1" CR_TAB
3149 "lds %C0,%m1+2" CR_TAB
3152 fatal_insn ("unknown move insn:",insn
);
3157 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3161 rtx base
= XEXP (dest
, 0);
3162 int reg_base
= true_regnum (base
);
3163 int reg_src
= true_regnum (src
);
3169 if (CONSTANT_ADDRESS_P (base
))
3170 return *l
=8,("sts %m0,%A1" CR_TAB
3171 "sts %m0+1,%B1" CR_TAB
3172 "sts %m0+2,%C1" CR_TAB
3174 if (reg_base
> 0) /* (r) */
3176 if (reg_base
== REG_X
) /* (R26) */
3178 if (reg_src
== REG_X
)
3180 /* "st X+,r26" is undefined */
3181 if (reg_unused_after (insn
, base
))
3182 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3185 "st X+,__tmp_reg__" CR_TAB
3189 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3192 "st X+,__tmp_reg__" CR_TAB
3197 else if (reg_base
== reg_src
+ 2)
3199 if (reg_unused_after (insn
, base
))
3200 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3201 "mov __tmp_reg__,%D1" CR_TAB
3204 "st %0+,__zero_reg__" CR_TAB
3205 "st %0,__tmp_reg__" CR_TAB
3206 "clr __zero_reg__");
3208 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3209 "mov __tmp_reg__,%D1" CR_TAB
3212 "st %0+,__zero_reg__" CR_TAB
3213 "st %0,__tmp_reg__" CR_TAB
3214 "clr __zero_reg__" CR_TAB
3217 return *l
=5, ("st %0+,%A1" CR_TAB
3224 return *l
=4, ("st %0,%A1" CR_TAB
3225 "std %0+1,%B1" CR_TAB
3226 "std %0+2,%C1" CR_TAB
3229 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3231 int disp
= INTVAL (XEXP (base
, 1));
3232 reg_base
= REGNO (XEXP (base
, 0));
3233 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3235 if (reg_base
!= REG_Y
)
3236 fatal_insn ("incorrect insn:",insn
);
3238 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3239 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3240 "std Y+60,%A1" CR_TAB
3241 "std Y+61,%B1" CR_TAB
3242 "std Y+62,%C1" CR_TAB
3243 "std Y+63,%D1" CR_TAB
3246 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3247 "sbci r29,hi8(-%o0)" CR_TAB
3249 "std Y+1,%B1" CR_TAB
3250 "std Y+2,%C1" CR_TAB
3251 "std Y+3,%D1" CR_TAB
3252 "subi r28,lo8(%o0)" CR_TAB
3253 "sbci r29,hi8(%o0)");
3255 if (reg_base
== REG_X
)
3258 if (reg_src
== REG_X
)
3261 return ("mov __tmp_reg__,r26" CR_TAB
3262 "mov __zero_reg__,r27" CR_TAB
3263 "adiw r26,%o0" CR_TAB
3264 "st X+,__tmp_reg__" CR_TAB
3265 "st X+,__zero_reg__" CR_TAB
3268 "clr __zero_reg__" CR_TAB
3271 else if (reg_src
== REG_X
- 2)
3274 return ("mov __tmp_reg__,r26" CR_TAB
3275 "mov __zero_reg__,r27" CR_TAB
3276 "adiw r26,%o0" CR_TAB
3279 "st X+,__tmp_reg__" CR_TAB
3280 "st X,__zero_reg__" CR_TAB
3281 "clr __zero_reg__" CR_TAB
3285 return ("adiw r26,%o0" CR_TAB
3292 return *l
=4, ("std %A0,%A1" CR_TAB
3293 "std %B0,%B1" CR_TAB
3294 "std %C0,%C1" CR_TAB
3297 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3298 return *l
=4, ("st %0,%D1" CR_TAB
3302 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3303 return *l
=4, ("st %0,%A1" CR_TAB
3307 fatal_insn ("unknown move insn:",insn
);
3312 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3315 rtx dest
= operands
[0];
3316 rtx src
= operands
[1];
3319 if (avr_mem_flash_p (src
)
3320 || avr_mem_flash_p (dest
))
3322 return avr_out_lpm (insn
, operands
, real_l
);
3328 if (register_operand (dest
, VOIDmode
))
3330 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3332 if (true_regnum (dest
) > true_regnum (src
))
3337 return ("movw %C0,%C1" CR_TAB
3341 return ("mov %D0,%D1" CR_TAB
3342 "mov %C0,%C1" CR_TAB
3343 "mov %B0,%B1" CR_TAB
3351 return ("movw %A0,%A1" CR_TAB
3355 return ("mov %A0,%A1" CR_TAB
3356 "mov %B0,%B1" CR_TAB
3357 "mov %C0,%C1" CR_TAB
3361 else if (CONSTANT_P (src
))
3363 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3365 else if (GET_CODE (src
) == MEM
)
3366 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3368 else if (GET_CODE (dest
) == MEM
)
3372 if (src
== CONST0_RTX (GET_MODE (dest
)))
3373 operands
[1] = zero_reg_rtx
;
3375 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3378 output_asm_insn (templ
, operands
);
3383 fatal_insn ("invalid insn:", insn
);
3388 /* Handle loads of 24-bit types from memory to register. */
3391 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3395 rtx base
= XEXP (src
, 0);
3396 int reg_dest
= true_regnum (dest
);
3397 int reg_base
= true_regnum (base
);
3401 if (reg_base
== REG_X
) /* (R26) */
3403 if (reg_dest
== REG_X
)
3404 /* "ld r26,-X" is undefined */
3405 return avr_asm_len ("adiw r26,2" CR_TAB
3407 "ld __tmp_reg__,-X" CR_TAB
3410 "mov r27,__tmp_reg__", op
, plen
, -6);
3413 avr_asm_len ("ld %A0,X+" CR_TAB
3415 "ld %C0,X", op
, plen
, -3);
3417 if (reg_dest
!= REG_X
- 2
3418 && !reg_unused_after (insn
, base
))
3420 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3426 else /* reg_base != REG_X */
3428 if (reg_dest
== reg_base
)
3429 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3430 "ldd __tmp_reg__,%1+1" CR_TAB
3432 "mov %B0,__tmp_reg__", op
, plen
, -4);
3434 return avr_asm_len ("ld %A0,%1" CR_TAB
3435 "ldd %B0,%1+1" CR_TAB
3436 "ldd %C0,%1+2", op
, plen
, -3);
3439 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3441 int disp
= INTVAL (XEXP (base
, 1));
3443 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3445 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3446 fatal_insn ("incorrect insn:",insn
);
3448 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3449 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3450 "ldd %A0,Y+61" CR_TAB
3451 "ldd %B0,Y+62" CR_TAB
3452 "ldd %C0,Y+63" CR_TAB
3453 "sbiw r28,%o1-61", op
, plen
, -5);
3455 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3456 "sbci r29,hi8(-%o1)" CR_TAB
3458 "ldd %B0,Y+1" CR_TAB
3459 "ldd %C0,Y+2" CR_TAB
3460 "subi r28,lo8(%o1)" CR_TAB
3461 "sbci r29,hi8(%o1)", op
, plen
, -7);
3464 reg_base
= true_regnum (XEXP (base
, 0));
3465 if (reg_base
== REG_X
)
3468 if (reg_dest
== REG_X
)
3470 /* "ld r26,-X" is undefined */
3471 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3473 "ld __tmp_reg__,-X" CR_TAB
3476 "mov r27,__tmp_reg__", op
, plen
, -6);
3479 avr_asm_len ("adiw r26,%o1" CR_TAB
3482 "ld %C0,X", op
, plen
, -4);
3484 if (reg_dest
!= REG_W
3485 && !reg_unused_after (insn
, XEXP (base
, 0)))
3486 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3491 if (reg_dest
== reg_base
)
3492 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3493 "ldd __tmp_reg__,%B1" CR_TAB
3494 "ldd %A0,%A1" CR_TAB
3495 "mov %B0,__tmp_reg__", op
, plen
, -4);
3497 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3498 "ldd %B0,%B1" CR_TAB
3499 "ldd %C0,%C1", op
, plen
, -3);
3501 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3502 return avr_asm_len ("ld %C0,%1" CR_TAB
3504 "ld %A0,%1", op
, plen
, -3);
3505 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3506 return avr_asm_len ("ld %A0,%1" CR_TAB
3508 "ld %C0,%1", op
, plen
, -3);
3510 else if (CONSTANT_ADDRESS_P (base
))
3511 return avr_asm_len ("lds %A0,%m1" CR_TAB
3512 "lds %B0,%m1+1" CR_TAB
3513 "lds %C0,%m1+2", op
, plen
, -6);
3515 fatal_insn ("unknown move insn:",insn
);
3519 /* Handle store of 24-bit type from register or zero to memory. */
3522 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3526 rtx base
= XEXP (dest
, 0);
3527 int reg_base
= true_regnum (base
);
3529 if (CONSTANT_ADDRESS_P (base
))
3530 return avr_asm_len ("sts %m0,%A1" CR_TAB
3531 "sts %m0+1,%B1" CR_TAB
3532 "sts %m0+2,%C1", op
, plen
, -6);
3534 if (reg_base
> 0) /* (r) */
3536 if (reg_base
== REG_X
) /* (R26) */
3538 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3540 avr_asm_len ("st %0+,%A1" CR_TAB
3542 "st %0,%C1", op
, plen
, -3);
3544 if (!reg_unused_after (insn
, base
))
3545 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3550 return avr_asm_len ("st %0,%A1" CR_TAB
3551 "std %0+1,%B1" CR_TAB
3552 "std %0+2,%C1", op
, plen
, -3);
3554 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3556 int disp
= INTVAL (XEXP (base
, 1));
3557 reg_base
= REGNO (XEXP (base
, 0));
3559 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3561 if (reg_base
!= REG_Y
)
3562 fatal_insn ("incorrect insn:",insn
);
3564 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3565 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3566 "std Y+61,%A1" CR_TAB
3567 "std Y+62,%B1" CR_TAB
3568 "std Y+63,%C1" CR_TAB
3569 "sbiw r28,%o0-60", op
, plen
, -5);
3571 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3572 "sbci r29,hi8(-%o0)" CR_TAB
3574 "std Y+1,%B1" CR_TAB
3575 "std Y+2,%C1" CR_TAB
3576 "subi r28,lo8(%o0)" CR_TAB
3577 "sbci r29,hi8(%o0)", op
, plen
, -7);
3579 if (reg_base
== REG_X
)
3582 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
3584 avr_asm_len ("adiw r26,%o0" CR_TAB
3587 "st X,%C1", op
, plen
, -4);
3589 if (!reg_unused_after (insn
, XEXP (base
, 0)))
3590 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
3595 return avr_asm_len ("std %A0,%A1" CR_TAB
3596 "std %B0,%B1" CR_TAB
3597 "std %C0,%C1", op
, plen
, -3);
3599 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3600 return avr_asm_len ("st %0,%C1" CR_TAB
3602 "st %0,%A1", op
, plen
, -3);
3603 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3604 return avr_asm_len ("st %0,%A1" CR_TAB
3606 "st %0,%C1", op
, plen
, -3);
3608 fatal_insn ("unknown move insn:",insn
);
3613 /* Move around 24-bit stuff. */
3616 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
3621 if (avr_mem_flash_p (src
)
3622 || avr_mem_flash_p (dest
))
3624 return avr_out_lpm (insn
, op
, plen
);
3627 if (register_operand (dest
, VOIDmode
))
3629 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3631 if (true_regnum (dest
) > true_regnum (src
))
3633 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
3636 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
3638 return avr_asm_len ("mov %B0,%B1" CR_TAB
3639 "mov %A0,%A1", op
, plen
, 2);
3644 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
3646 avr_asm_len ("mov %A0,%A1" CR_TAB
3647 "mov %B0,%B1", op
, plen
, -2);
3649 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
3652 else if (CONSTANT_P (src
))
3654 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
3656 else if (MEM_P (src
))
3657 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
3659 else if (MEM_P (dest
))
3664 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3666 return avr_out_store_psi (insn
, xop
, plen
);
3669 fatal_insn ("invalid insn:", insn
);
3675 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
3679 rtx x
= XEXP (dest
, 0);
3681 if (CONSTANT_ADDRESS_P (x
))
3683 return optimize
> 0 && io_address_operand (x
, QImode
)
3684 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
3685 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
3687 else if (GET_CODE (x
) == PLUS
3688 && REG_P (XEXP (x
, 0))
3689 && CONST_INT_P (XEXP (x
, 1)))
3691 /* memory access by reg+disp */
3693 int disp
= INTVAL (XEXP (x
, 1));
3695 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
3697 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3698 fatal_insn ("incorrect insn:",insn
);
3700 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3701 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3702 "std Y+63,%1" CR_TAB
3703 "sbiw r28,%o0-63", op
, plen
, -3);
3705 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3706 "sbci r29,hi8(-%o0)" CR_TAB
3708 "subi r28,lo8(%o0)" CR_TAB
3709 "sbci r29,hi8(%o0)", op
, plen
, -5);
3711 else if (REGNO (XEXP (x
,0)) == REG_X
)
3713 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
3715 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3716 "adiw r26,%o0" CR_TAB
3717 "st X,__tmp_reg__", op
, plen
, -3);
3721 avr_asm_len ("adiw r26,%o0" CR_TAB
3722 "st X,%1", op
, plen
, -2);
3725 if (!reg_unused_after (insn
, XEXP (x
,0)))
3726 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
3731 return avr_asm_len ("std %0,%1", op
, plen
, -1);
3734 return avr_asm_len ("st %0,%1", op
, plen
, -1);
3738 /* Helper for the next function for XMEGA. It does the same
3739 but with low byte first. */
3742 avr_out_movhi_mr_r_xmega (rtx insn
, rtx op
[], int *plen
)
3746 rtx base
= XEXP (dest
, 0);
3747 int reg_base
= true_regnum (base
);
3748 int reg_src
= true_regnum (src
);
3750 /* "volatile" forces writing low byte first, even if less efficient,
3751 for correct operation with 16-bit I/O registers like SP. */
3752 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
3754 if (CONSTANT_ADDRESS_P (base
))
3755 return optimize
> 0 && io_address_operand (base
, HImode
)
3756 ? avr_asm_len ("out %i0,%A1" CR_TAB
3757 "out %i0+1,%B1", op
, plen
, -2)
3759 : avr_asm_len ("sts %m0,%A1" CR_TAB
3760 "sts %m0+1,%B1", op
, plen
, -4);
3764 if (reg_base
!= REG_X
)
3765 return avr_asm_len ("st %0,%A1" CR_TAB
3766 "std %0+1,%B1", op
, plen
, -2);
3768 if (reg_src
== REG_X
)
3769 /* "st X+,r26" and "st -X,r26" are undefined. */
3770 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3773 "st X,__tmp_reg__", op
, plen
, -4);
3775 avr_asm_len ("st X+,%A1" CR_TAB
3776 "st X,%B1", op
, plen
, -2);
3778 return reg_unused_after (insn
, base
)
3780 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3782 else if (GET_CODE (base
) == PLUS
)
3784 int disp
= INTVAL (XEXP (base
, 1));
3785 reg_base
= REGNO (XEXP (base
, 0));
3786 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3788 if (reg_base
!= REG_Y
)
3789 fatal_insn ("incorrect insn:",insn
);
3791 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
3792 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3793 "std Y+62,%A1" CR_TAB
3794 "std Y+63,%B1" CR_TAB
3795 "sbiw r28,%o0-62", op
, plen
, -4)
3797 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3798 "sbci r29,hi8(-%o0)" CR_TAB
3800 "std Y+1,%B1" CR_TAB
3801 "subi r28,lo8(%o0)" CR_TAB
3802 "sbci r29,hi8(%o0)", op
, plen
, -6);
3805 if (reg_base
!= REG_X
)
3806 return avr_asm_len ("std %A0,%A1" CR_TAB
3807 "std %B0,%B1", op
, plen
, -2);
3809 return reg_src
== REG_X
3810 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3811 "mov __zero_reg__,r27" CR_TAB
3812 "adiw r26,%o0" CR_TAB
3813 "st X+,__tmp_reg__" CR_TAB
3814 "st X,__zero_reg__" CR_TAB
3815 "clr __zero_reg__" CR_TAB
3816 "sbiw r26,%o0+1", op
, plen
, -7)
3818 : avr_asm_len ("adiw r26,%o0" CR_TAB
3821 "sbiw r26,%o0+1", op
, plen
, -4);
3823 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3825 if (!mem_volatile_p
)
3826 return avr_asm_len ("st %0,%B1" CR_TAB
3827 "st %0,%A1", op
, plen
, -2);
3829 return REGNO (XEXP (base
, 0)) == REG_X
3830 ? avr_asm_len ("sbiw r26,2" CR_TAB
3833 "sbiw r26,1", op
, plen
, -4)
3835 : avr_asm_len ("sbiw %r0,2" CR_TAB
3837 "std %p0+1,%B1", op
, plen
, -3);
3839 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3841 return avr_asm_len ("st %0,%A1" CR_TAB
3842 "st %0,%B1", op
, plen
, -2);
3845 fatal_insn ("unknown move insn:",insn
);
3851 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
3855 rtx base
= XEXP (dest
, 0);
3856 int reg_base
= true_regnum (base
);
3857 int reg_src
= true_regnum (src
);
3860 /* "volatile" forces writing high-byte first (no-xmega) resp.
3861 low-byte first (xmega) even if less efficient, for correct
3862 operation with 16-bit I/O registers like. */
3865 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
3867 mem_volatile_p
= MEM_VOLATILE_P (dest
);
3869 if (CONSTANT_ADDRESS_P (base
))
3870 return optimize
> 0 && io_address_operand (base
, HImode
)
3871 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3872 "out %i0,%A1", op
, plen
, -2)
3874 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3875 "sts %m0,%A1", op
, plen
, -4);
3879 if (reg_base
!= REG_X
)
3880 return avr_asm_len ("std %0+1,%B1" CR_TAB
3881 "st %0,%A1", op
, plen
, -2);
3883 if (reg_src
== REG_X
)
3884 /* "st X+,r26" and "st -X,r26" are undefined. */
3885 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
3886 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3889 "st X,__tmp_reg__", op
, plen
, -4)
3891 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3893 "st X,__tmp_reg__" CR_TAB
3895 "st X,r26", op
, plen
, -5);
3897 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
3898 ? avr_asm_len ("st X+,%A1" CR_TAB
3899 "st X,%B1", op
, plen
, -2)
3900 : avr_asm_len ("adiw r26,1" CR_TAB
3902 "st -X,%A1", op
, plen
, -3);
3904 else if (GET_CODE (base
) == PLUS
)
3906 int disp
= INTVAL (XEXP (base
, 1));
3907 reg_base
= REGNO (XEXP (base
, 0));
3908 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3910 if (reg_base
!= REG_Y
)
3911 fatal_insn ("incorrect insn:",insn
);
3913 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
3914 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3915 "std Y+63,%B1" CR_TAB
3916 "std Y+62,%A1" CR_TAB
3917 "sbiw r28,%o0-62", op
, plen
, -4)
3919 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3920 "sbci r29,hi8(-%o0)" CR_TAB
3921 "std Y+1,%B1" CR_TAB
3923 "subi r28,lo8(%o0)" CR_TAB
3924 "sbci r29,hi8(%o0)", op
, plen
, -6);
3927 if (reg_base
!= REG_X
)
3928 return avr_asm_len ("std %B0,%B1" CR_TAB
3929 "std %A0,%A1", op
, plen
, -2);
3931 return reg_src
== REG_X
3932 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3933 "mov __zero_reg__,r27" CR_TAB
3934 "adiw r26,%o0+1" CR_TAB
3935 "st X,__zero_reg__" CR_TAB
3936 "st -X,__tmp_reg__" CR_TAB
3937 "clr __zero_reg__" CR_TAB
3938 "sbiw r26,%o0", op
, plen
, -7)
3940 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3943 "sbiw r26,%o0", op
, plen
, -4);
3945 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3947 return avr_asm_len ("st %0,%B1" CR_TAB
3948 "st %0,%A1", op
, plen
, -2);
3950 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3952 if (!mem_volatile_p
)
3953 return avr_asm_len ("st %0,%A1" CR_TAB
3954 "st %0,%B1", op
, plen
, -2);
3956 return REGNO (XEXP (base
, 0)) == REG_X
3957 ? avr_asm_len ("adiw r26,1" CR_TAB
3960 "adiw r26,2", op
, plen
, -4)
3962 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3964 "adiw %r0,2", op
, plen
, -3);
3966 fatal_insn ("unknown move insn:",insn
);
3970 /* Return 1 if frame pointer for current function required. */
3973 avr_frame_pointer_required_p (void)
3975 return (cfun
->calls_alloca
3976 || cfun
->calls_setjmp
3977 || cfun
->has_nonlocal_label
3978 || crtl
->args
.info
.nregs
== 0
3979 || get_frame_size () > 0);
3982 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3985 compare_condition (rtx insn
)
3987 rtx next
= next_real_insn (insn
);
3989 if (next
&& JUMP_P (next
))
3991 rtx pat
= PATTERN (next
);
3992 rtx src
= SET_SRC (pat
);
3994 if (IF_THEN_ELSE
== GET_CODE (src
))
3995 return GET_CODE (XEXP (src
, 0));
4002 /* Returns true iff INSN is a tst insn that only tests the sign. */
4005 compare_sign_p (rtx insn
)
4007 RTX_CODE cond
= compare_condition (insn
);
4008 return (cond
== GE
|| cond
== LT
);
4012 /* Returns true iff the next insn is a JUMP_INSN with a condition
4013 that needs to be swapped (GT, GTU, LE, LEU). */
4016 compare_diff_p (rtx insn
)
4018 RTX_CODE cond
= compare_condition (insn
);
4019 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4022 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4025 compare_eq_p (rtx insn
)
4027 RTX_CODE cond
= compare_condition (insn
);
4028 return (cond
== EQ
|| cond
== NE
);
4032 /* Output compare instruction
4034 compare (XOP[0], XOP[1])
4036 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4037 XOP[2] is an 8-bit scratch register as needed.
4039 PLEN == NULL: Output instructions.
4040 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4041 Don't output anything. */
4044 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4046 /* Register to compare and value to compare against. */
4050 /* MODE of the comparison. */
4051 enum machine_mode mode
= GET_MODE (xreg
);
4053 /* Number of bytes to operate on. */
4054 int i
, n_bytes
= GET_MODE_SIZE (mode
);
4056 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4057 int clobber_val
= -1;
4059 gcc_assert (REG_P (xreg
));
4060 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4061 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4066 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4067 against 0 by ORing the bytes. This is one instruction shorter.
4068 Notice that DImode comparisons are always against reg:DI 18
4069 and therefore don't use this. */
4071 if (!test_hard_reg_class (LD_REGS
, xreg
)
4072 && compare_eq_p (insn
)
4073 && reg_unused_after (insn
, xreg
))
4075 if (xval
== const1_rtx
)
4077 avr_asm_len ("dec %A0" CR_TAB
4078 "or %A0,%B0", xop
, plen
, 2);
4081 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4084 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4088 else if (xval
== constm1_rtx
)
4091 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4094 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4096 return avr_asm_len ("and %A0,%B0" CR_TAB
4097 "com %A0", xop
, plen
, 2);
4101 for (i
= 0; i
< n_bytes
; i
++)
4103 /* We compare byte-wise. */
4104 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4105 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4107 /* 8-bit value to compare with this byte. */
4108 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4110 /* Registers R16..R31 can operate with immediate. */
4111 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4114 xop
[1] = gen_int_mode (val8
, QImode
);
4116 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4119 && test_hard_reg_class (ADDW_REGS
, reg8
))
4121 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4123 if (IN_RANGE (val16
, 0, 63)
4125 || reg_unused_after (insn
, xreg
)))
4127 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4133 && IN_RANGE (val16
, -63, -1)
4134 && compare_eq_p (insn
)
4135 && reg_unused_after (insn
, xreg
))
4137 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4141 /* Comparing against 0 is easy. */
4146 ? "cp %0,__zero_reg__"
4147 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4151 /* Upper registers can compare and subtract-with-carry immediates.
4152 Notice that compare instructions do the same as respective subtract
4153 instruction; the only difference is that comparisons don't write
4154 the result back to the target register. */
4160 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4163 else if (reg_unused_after (insn
, xreg
))
4165 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4170 /* Must load the value into the scratch register. */
4172 gcc_assert (REG_P (xop
[2]));
4174 if (clobber_val
!= (int) val8
)
4175 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4176 clobber_val
= (int) val8
;
4180 : "cpc %0,%2", xop
, plen
, 1);
4187 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4190 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4194 xop
[0] = gen_rtx_REG (DImode
, 18);
4198 return avr_out_compare (insn
, xop
, plen
);
4201 /* Output test instruction for HImode. */
4204 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4206 if (compare_sign_p (insn
))
4208 avr_asm_len ("tst %B0", op
, plen
, -1);
4210 else if (reg_unused_after (insn
, op
[0])
4211 && compare_eq_p (insn
))
4213 /* Faster than sbiw if we can clobber the operand. */
4214 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4218 avr_out_compare (insn
, op
, plen
);
4225 /* Output test instruction for PSImode. */
4228 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4230 if (compare_sign_p (insn
))
4232 avr_asm_len ("tst %C0", op
, plen
, -1);
4234 else if (reg_unused_after (insn
, op
[0])
4235 && compare_eq_p (insn
))
4237 /* Faster than sbiw if we can clobber the operand. */
4238 avr_asm_len ("or %A0,%B0" CR_TAB
4239 "or %A0,%C0", op
, plen
, -2);
4243 avr_out_compare (insn
, op
, plen
);
4250 /* Output test instruction for SImode. */
4253 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4255 if (compare_sign_p (insn
))
4257 avr_asm_len ("tst %D0", op
, plen
, -1);
4259 else if (reg_unused_after (insn
, op
[0])
4260 && compare_eq_p (insn
))
4262 /* Faster than sbiw if we can clobber the operand. */
4263 avr_asm_len ("or %A0,%B0" CR_TAB
4265 "or %A0,%D0", op
, plen
, -3);
4269 avr_out_compare (insn
, op
, plen
);
4276 /* Generate asm equivalent for various shifts. This only handles cases
4277 that are not already carefully hand-optimized in ?sh??i3_out.
4279 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4280 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4281 OPERANDS[3] is a QImode scratch register from LD regs if
4282 available and SCRATCH, otherwise (no scratch available)
4284 TEMPL is an assembler template that shifts by one position.
4285 T_LEN is the length of this template. */
4288 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4289 int *plen
, int t_len
)
4291 bool second_label
= true;
4292 bool saved_in_tmp
= false;
4293 bool use_zero_reg
= false;
4296 op
[0] = operands
[0];
4297 op
[1] = operands
[1];
4298 op
[2] = operands
[2];
4299 op
[3] = operands
[3];
4304 if (CONST_INT_P (operands
[2]))
4306 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4307 && REG_P (operands
[3]));
4308 int count
= INTVAL (operands
[2]);
4309 int max_len
= 10; /* If larger than this, always use a loop. */
4314 if (count
< 8 && !scratch
)
4315 use_zero_reg
= true;
4318 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4320 if (t_len
* count
<= max_len
)
4322 /* Output shifts inline with no loop - faster. */
4325 avr_asm_len (templ
, op
, plen
, t_len
);
4332 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4334 else if (use_zero_reg
)
4336 /* Hack to save one word: use __zero_reg__ as loop counter.
4337 Set one bit, then shift in a loop until it is 0 again. */
4339 op
[3] = zero_reg_rtx
;
4341 avr_asm_len ("set" CR_TAB
4342 "bld %3,%2-1", op
, plen
, 2);
4346 /* No scratch register available, use one from LD_REGS (saved in
4347 __tmp_reg__) that doesn't overlap with registers to shift. */
4349 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4350 op
[4] = tmp_reg_rtx
;
4351 saved_in_tmp
= true;
4353 avr_asm_len ("mov %4,%3" CR_TAB
4354 "ldi %3,%2", op
, plen
, 2);
4357 second_label
= false;
4359 else if (MEM_P (op
[2]))
4363 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4366 out_movqi_r_mr (insn
, op_mov
, plen
);
4368 else if (register_operand (op
[2], QImode
))
4372 if (!reg_unused_after (insn
, op
[2])
4373 || reg_overlap_mentioned_p (op
[0], op
[2]))
4375 op
[3] = tmp_reg_rtx
;
4376 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4380 fatal_insn ("bad shift insn:", insn
);
4383 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4385 avr_asm_len ("1:", op
, plen
, 0);
4386 avr_asm_len (templ
, op
, plen
, t_len
);
4389 avr_asm_len ("2:", op
, plen
, 0);
4391 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4392 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4395 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4399 /* 8bit shift left ((char)x << i) */
4402 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4404 if (GET_CODE (operands
[2]) == CONST_INT
)
4411 switch (INTVAL (operands
[2]))
4414 if (INTVAL (operands
[2]) < 8)
4426 return ("lsl %0" CR_TAB
4431 return ("lsl %0" CR_TAB
4436 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4439 return ("swap %0" CR_TAB
4443 return ("lsl %0" CR_TAB
4449 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4452 return ("swap %0" CR_TAB
4457 return ("lsl %0" CR_TAB
4464 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4467 return ("swap %0" CR_TAB
4473 return ("lsl %0" CR_TAB
4482 return ("ror %0" CR_TAB
4487 else if (CONSTANT_P (operands
[2]))
4488 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4490 out_shift_with_cnt ("lsl %0",
4491 insn
, operands
, len
, 1);
4496 /* 16bit shift left ((short)x << i) */
4499 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4501 if (GET_CODE (operands
[2]) == CONST_INT
)
4503 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4504 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4511 switch (INTVAL (operands
[2]))
4514 if (INTVAL (operands
[2]) < 16)
4518 return ("clr %B0" CR_TAB
4522 if (optimize_size
&& scratch
)
4527 return ("swap %A0" CR_TAB
4529 "andi %B0,0xf0" CR_TAB
4530 "eor %B0,%A0" CR_TAB
4531 "andi %A0,0xf0" CR_TAB
4537 return ("swap %A0" CR_TAB
4539 "ldi %3,0xf0" CR_TAB
4541 "eor %B0,%A0" CR_TAB
4545 break; /* optimize_size ? 6 : 8 */
4549 break; /* scratch ? 5 : 6 */
4553 return ("lsl %A0" CR_TAB
4557 "andi %B0,0xf0" CR_TAB
4558 "eor %B0,%A0" CR_TAB
4559 "andi %A0,0xf0" CR_TAB
4565 return ("lsl %A0" CR_TAB
4569 "ldi %3,0xf0" CR_TAB
4571 "eor %B0,%A0" CR_TAB
4579 break; /* scratch ? 5 : 6 */
4581 return ("clr __tmp_reg__" CR_TAB
4584 "ror __tmp_reg__" CR_TAB
4587 "ror __tmp_reg__" CR_TAB
4588 "mov %B0,%A0" CR_TAB
4589 "mov %A0,__tmp_reg__");
4593 return ("lsr %B0" CR_TAB
4594 "mov %B0,%A0" CR_TAB
4600 return *len
= 2, ("mov %B0,%A1" CR_TAB
4605 return ("mov %B0,%A0" CR_TAB
4611 return ("mov %B0,%A0" CR_TAB
4618 return ("mov %B0,%A0" CR_TAB
4628 return ("mov %B0,%A0" CR_TAB
4636 return ("mov %B0,%A0" CR_TAB
4639 "ldi %3,0xf0" CR_TAB
4643 return ("mov %B0,%A0" CR_TAB
4654 return ("mov %B0,%A0" CR_TAB
4660 if (AVR_HAVE_MUL
&& scratch
)
4663 return ("ldi %3,0x20" CR_TAB
4667 "clr __zero_reg__");
4669 if (optimize_size
&& scratch
)
4674 return ("mov %B0,%A0" CR_TAB
4678 "ldi %3,0xe0" CR_TAB
4684 return ("set" CR_TAB
4689 "clr __zero_reg__");
4692 return ("mov %B0,%A0" CR_TAB
4701 if (AVR_HAVE_MUL
&& ldi_ok
)
4704 return ("ldi %B0,0x40" CR_TAB
4705 "mul %A0,%B0" CR_TAB
4708 "clr __zero_reg__");
4710 if (AVR_HAVE_MUL
&& scratch
)
4713 return ("ldi %3,0x40" CR_TAB
4717 "clr __zero_reg__");
4719 if (optimize_size
&& ldi_ok
)
4722 return ("mov %B0,%A0" CR_TAB
4723 "ldi %A0,6" "\n1:\t"
4728 if (optimize_size
&& scratch
)
4731 return ("clr %B0" CR_TAB
4740 return ("clr %B0" CR_TAB
4747 out_shift_with_cnt ("lsl %A0" CR_TAB
4748 "rol %B0", insn
, operands
, len
, 2);
4753 /* 24-bit shift left */
4756 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
4761 if (CONST_INT_P (op
[2]))
4763 switch (INTVAL (op
[2]))
4766 if (INTVAL (op
[2]) < 24)
4769 return avr_asm_len ("clr %A0" CR_TAB
4771 "clr %C0", op
, plen
, 3);
4775 int reg0
= REGNO (op
[0]);
4776 int reg1
= REGNO (op
[1]);
4779 return avr_asm_len ("mov %C0,%B1" CR_TAB
4780 "mov %B0,%A1" CR_TAB
4781 "clr %A0", op
, plen
, 3);
4783 return avr_asm_len ("clr %A0" CR_TAB
4784 "mov %B0,%A1" CR_TAB
4785 "mov %C0,%B1", op
, plen
, 3);
4790 int reg0
= REGNO (op
[0]);
4791 int reg1
= REGNO (op
[1]);
4793 if (reg0
+ 2 != reg1
)
4794 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
4796 return avr_asm_len ("clr %B0" CR_TAB
4797 "clr %A0", op
, plen
, 2);
4801 return avr_asm_len ("clr %C0" CR_TAB
4805 "clr %A0", op
, plen
, 5);
4809 out_shift_with_cnt ("lsl %A0" CR_TAB
4811 "rol %C0", insn
, op
, plen
, 3);
4816 /* 32bit shift left ((long)x << i) */
4819 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
4821 if (GET_CODE (operands
[2]) == CONST_INT
)
4829 switch (INTVAL (operands
[2]))
4832 if (INTVAL (operands
[2]) < 32)
4836 return *len
= 3, ("clr %D0" CR_TAB
4840 return ("clr %D0" CR_TAB
4847 int reg0
= true_regnum (operands
[0]);
4848 int reg1
= true_regnum (operands
[1]);
4851 return ("mov %D0,%C1" CR_TAB
4852 "mov %C0,%B1" CR_TAB
4853 "mov %B0,%A1" CR_TAB
4856 return ("clr %A0" CR_TAB
4857 "mov %B0,%A1" CR_TAB
4858 "mov %C0,%B1" CR_TAB
4864 int reg0
= true_regnum (operands
[0]);
4865 int reg1
= true_regnum (operands
[1]);
4866 if (reg0
+ 2 == reg1
)
4867 return *len
= 2, ("clr %B0" CR_TAB
4870 return *len
= 3, ("movw %C0,%A1" CR_TAB
4874 return *len
= 4, ("mov %C0,%A1" CR_TAB
4875 "mov %D0,%B1" CR_TAB
4882 return ("mov %D0,%A1" CR_TAB
4889 return ("clr %D0" CR_TAB
4898 out_shift_with_cnt ("lsl %A0" CR_TAB
4901 "rol %D0", insn
, operands
, len
, 4);
4905 /* 8bit arithmetic shift right ((signed char)x >> i) */
4908 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
4910 if (GET_CODE (operands
[2]) == CONST_INT
)
4917 switch (INTVAL (operands
[2]))
4925 return ("asr %0" CR_TAB
4930 return ("asr %0" CR_TAB
4936 return ("asr %0" CR_TAB
4943 return ("asr %0" CR_TAB
4951 return ("bst %0,6" CR_TAB
4957 if (INTVAL (operands
[2]) < 8)
4964 return ("lsl %0" CR_TAB
4968 else if (CONSTANT_P (operands
[2]))
4969 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4971 out_shift_with_cnt ("asr %0",
4972 insn
, operands
, len
, 1);
4977 /* 16bit arithmetic shift right ((signed short)x >> i) */
4980 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
4982 if (GET_CODE (operands
[2]) == CONST_INT
)
4984 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4985 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4992 switch (INTVAL (operands
[2]))
4996 /* XXX try to optimize this too? */
5001 break; /* scratch ? 5 : 6 */
5003 return ("mov __tmp_reg__,%A0" CR_TAB
5004 "mov %A0,%B0" CR_TAB
5005 "lsl __tmp_reg__" CR_TAB
5007 "sbc %B0,%B0" CR_TAB
5008 "lsl __tmp_reg__" CR_TAB
5014 return ("lsl %A0" CR_TAB
5015 "mov %A0,%B0" CR_TAB
5021 int reg0
= true_regnum (operands
[0]);
5022 int reg1
= true_regnum (operands
[1]);
5025 return *len
= 3, ("mov %A0,%B0" CR_TAB
5029 return *len
= 4, ("mov %A0,%B1" CR_TAB
5037 return ("mov %A0,%B0" CR_TAB
5039 "sbc %B0,%B0" CR_TAB
5044 return ("mov %A0,%B0" CR_TAB
5046 "sbc %B0,%B0" CR_TAB
5051 if (AVR_HAVE_MUL
&& ldi_ok
)
5054 return ("ldi %A0,0x20" CR_TAB
5055 "muls %B0,%A0" CR_TAB
5057 "sbc %B0,%B0" CR_TAB
5058 "clr __zero_reg__");
5060 if (optimize_size
&& scratch
)
5063 return ("mov %A0,%B0" CR_TAB
5065 "sbc %B0,%B0" CR_TAB
5071 if (AVR_HAVE_MUL
&& ldi_ok
)
5074 return ("ldi %A0,0x10" CR_TAB
5075 "muls %B0,%A0" CR_TAB
5077 "sbc %B0,%B0" CR_TAB
5078 "clr __zero_reg__");
5080 if (optimize_size
&& scratch
)
5083 return ("mov %A0,%B0" CR_TAB
5085 "sbc %B0,%B0" CR_TAB
5092 if (AVR_HAVE_MUL
&& ldi_ok
)
5095 return ("ldi %A0,0x08" CR_TAB
5096 "muls %B0,%A0" CR_TAB
5098 "sbc %B0,%B0" CR_TAB
5099 "clr __zero_reg__");
5102 break; /* scratch ? 5 : 7 */
5104 return ("mov %A0,%B0" CR_TAB
5106 "sbc %B0,%B0" CR_TAB
5115 return ("lsl %B0" CR_TAB
5116 "sbc %A0,%A0" CR_TAB
5118 "mov %B0,%A0" CR_TAB
5122 if (INTVAL (operands
[2]) < 16)
5128 return *len
= 3, ("lsl %B0" CR_TAB
5129 "sbc %A0,%A0" CR_TAB
5134 out_shift_with_cnt ("asr %B0" CR_TAB
5135 "ror %A0", insn
, operands
, len
, 2);
5140 /* 24-bit arithmetic shift right */
5143 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5145 int dest
= REGNO (op
[0]);
5146 int src
= REGNO (op
[1]);
5148 if (CONST_INT_P (op
[2]))
5153 switch (INTVAL (op
[2]))
5157 return avr_asm_len ("mov %A0,%B1" CR_TAB
5158 "mov %B0,%C1" CR_TAB
5161 "dec %C0", op
, plen
, 5);
5163 return avr_asm_len ("clr %C0" CR_TAB
5166 "mov %B0,%C1" CR_TAB
5167 "mov %A0,%B1", op
, plen
, 5);
5170 if (dest
!= src
+ 2)
5171 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5173 return avr_asm_len ("clr %B0" CR_TAB
5176 "mov %C0,%B0", op
, plen
, 4);
5179 if (INTVAL (op
[2]) < 24)
5185 return avr_asm_len ("lsl %C0" CR_TAB
5186 "sbc %A0,%A0" CR_TAB
5187 "mov %B0,%A0" CR_TAB
5188 "mov %C0,%A0", op
, plen
, 4);
5192 out_shift_with_cnt ("asr %C0" CR_TAB
5194 "ror %A0", insn
, op
, plen
, 3);
5199 /* 32bit arithmetic shift right ((signed long)x >> i) */
5202 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5204 if (GET_CODE (operands
[2]) == CONST_INT
)
5212 switch (INTVAL (operands
[2]))
5216 int reg0
= true_regnum (operands
[0]);
5217 int reg1
= true_regnum (operands
[1]);
5220 return ("mov %A0,%B1" CR_TAB
5221 "mov %B0,%C1" CR_TAB
5222 "mov %C0,%D1" CR_TAB
5227 return ("clr %D0" CR_TAB
5230 "mov %C0,%D1" CR_TAB
5231 "mov %B0,%C1" CR_TAB
5237 int reg0
= true_regnum (operands
[0]);
5238 int reg1
= true_regnum (operands
[1]);
5240 if (reg0
== reg1
+ 2)
5241 return *len
= 4, ("clr %D0" CR_TAB
5246 return *len
= 5, ("movw %A0,%C1" CR_TAB
5252 return *len
= 6, ("mov %B0,%D1" CR_TAB
5253 "mov %A0,%C1" CR_TAB
5261 return *len
= 6, ("mov %A0,%D1" CR_TAB
5265 "mov %B0,%D0" CR_TAB
5269 if (INTVAL (operands
[2]) < 32)
5276 return *len
= 4, ("lsl %D0" CR_TAB
5277 "sbc %A0,%A0" CR_TAB
5278 "mov %B0,%A0" CR_TAB
5281 return *len
= 5, ("lsl %D0" CR_TAB
5282 "sbc %A0,%A0" CR_TAB
5283 "mov %B0,%A0" CR_TAB
5284 "mov %C0,%A0" CR_TAB
5289 out_shift_with_cnt ("asr %D0" CR_TAB
5292 "ror %A0", insn
, operands
, len
, 4);
5296 /* 8bit logic shift right ((unsigned char)x >> i) */
5299 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5301 if (GET_CODE (operands
[2]) == CONST_INT
)
5308 switch (INTVAL (operands
[2]))
5311 if (INTVAL (operands
[2]) < 8)
5323 return ("lsr %0" CR_TAB
5327 return ("lsr %0" CR_TAB
5332 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5335 return ("swap %0" CR_TAB
5339 return ("lsr %0" CR_TAB
5345 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5348 return ("swap %0" CR_TAB
5353 return ("lsr %0" CR_TAB
5360 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5363 return ("swap %0" CR_TAB
5369 return ("lsr %0" CR_TAB
5378 return ("rol %0" CR_TAB
5383 else if (CONSTANT_P (operands
[2]))
5384 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5386 out_shift_with_cnt ("lsr %0",
5387 insn
, operands
, len
, 1);
5391 /* 16bit logic shift right ((unsigned short)x >> i) */
5394 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5396 if (GET_CODE (operands
[2]) == CONST_INT
)
5398 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5399 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5406 switch (INTVAL (operands
[2]))
5409 if (INTVAL (operands
[2]) < 16)
5413 return ("clr %B0" CR_TAB
5417 if (optimize_size
&& scratch
)
5422 return ("swap %B0" CR_TAB
5424 "andi %A0,0x0f" CR_TAB
5425 "eor %A0,%B0" CR_TAB
5426 "andi %B0,0x0f" CR_TAB
5432 return ("swap %B0" CR_TAB
5434 "ldi %3,0x0f" CR_TAB
5436 "eor %A0,%B0" CR_TAB
5440 break; /* optimize_size ? 6 : 8 */
5444 break; /* scratch ? 5 : 6 */
5448 return ("lsr %B0" CR_TAB
5452 "andi %A0,0x0f" CR_TAB
5453 "eor %A0,%B0" CR_TAB
5454 "andi %B0,0x0f" CR_TAB
5460 return ("lsr %B0" CR_TAB
5464 "ldi %3,0x0f" CR_TAB
5466 "eor %A0,%B0" CR_TAB
5474 break; /* scratch ? 5 : 6 */
5476 return ("clr __tmp_reg__" CR_TAB
5479 "rol __tmp_reg__" CR_TAB
5482 "rol __tmp_reg__" CR_TAB
5483 "mov %A0,%B0" CR_TAB
5484 "mov %B0,__tmp_reg__");
5488 return ("lsl %A0" CR_TAB
5489 "mov %A0,%B0" CR_TAB
5491 "sbc %B0,%B0" CR_TAB
5495 return *len
= 2, ("mov %A0,%B1" CR_TAB
5500 return ("mov %A0,%B0" CR_TAB
5506 return ("mov %A0,%B0" CR_TAB
5513 return ("mov %A0,%B0" CR_TAB
5523 return ("mov %A0,%B0" CR_TAB
5531 return ("mov %A0,%B0" CR_TAB
5534 "ldi %3,0x0f" CR_TAB
5538 return ("mov %A0,%B0" CR_TAB
5549 return ("mov %A0,%B0" CR_TAB
5555 if (AVR_HAVE_MUL
&& scratch
)
5558 return ("ldi %3,0x08" CR_TAB
5562 "clr __zero_reg__");
5564 if (optimize_size
&& scratch
)
5569 return ("mov %A0,%B0" CR_TAB
5573 "ldi %3,0x07" CR_TAB
5579 return ("set" CR_TAB
5584 "clr __zero_reg__");
5587 return ("mov %A0,%B0" CR_TAB
5596 if (AVR_HAVE_MUL
&& ldi_ok
)
5599 return ("ldi %A0,0x04" CR_TAB
5600 "mul %B0,%A0" CR_TAB
5603 "clr __zero_reg__");
5605 if (AVR_HAVE_MUL
&& scratch
)
5608 return ("ldi %3,0x04" CR_TAB
5612 "clr __zero_reg__");
5614 if (optimize_size
&& ldi_ok
)
5617 return ("mov %A0,%B0" CR_TAB
5618 "ldi %B0,6" "\n1:\t"
5623 if (optimize_size
&& scratch
)
5626 return ("clr %A0" CR_TAB
5635 return ("clr %A0" CR_TAB
5642 out_shift_with_cnt ("lsr %B0" CR_TAB
5643 "ror %A0", insn
, operands
, len
, 2);
5648 /* 24-bit logic shift right */
5651 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5653 int dest
= REGNO (op
[0]);
5654 int src
= REGNO (op
[1]);
5656 if (CONST_INT_P (op
[2]))
5661 switch (INTVAL (op
[2]))
5665 return avr_asm_len ("mov %A0,%B1" CR_TAB
5666 "mov %B0,%C1" CR_TAB
5667 "clr %C0", op
, plen
, 3);
5669 return avr_asm_len ("clr %C0" CR_TAB
5670 "mov %B0,%C1" CR_TAB
5671 "mov %A0,%B1", op
, plen
, 3);
5674 if (dest
!= src
+ 2)
5675 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5677 return avr_asm_len ("clr %B0" CR_TAB
5678 "clr %C0", op
, plen
, 2);
5681 if (INTVAL (op
[2]) < 24)
5687 return avr_asm_len ("clr %A0" CR_TAB
5691 "clr %C0", op
, plen
, 5);
5695 out_shift_with_cnt ("lsr %C0" CR_TAB
5697 "ror %A0", insn
, op
, plen
, 3);
5702 /* 32bit logic shift right ((unsigned int)x >> i) */
5705 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
5707 if (GET_CODE (operands
[2]) == CONST_INT
)
5715 switch (INTVAL (operands
[2]))
5718 if (INTVAL (operands
[2]) < 32)
5722 return *len
= 3, ("clr %D0" CR_TAB
5726 return ("clr %D0" CR_TAB
5733 int reg0
= true_regnum (operands
[0]);
5734 int reg1
= true_regnum (operands
[1]);
5737 return ("mov %A0,%B1" CR_TAB
5738 "mov %B0,%C1" CR_TAB
5739 "mov %C0,%D1" CR_TAB
5742 return ("clr %D0" CR_TAB
5743 "mov %C0,%D1" CR_TAB
5744 "mov %B0,%C1" CR_TAB
5750 int reg0
= true_regnum (operands
[0]);
5751 int reg1
= true_regnum (operands
[1]);
5753 if (reg0
== reg1
+ 2)
5754 return *len
= 2, ("clr %C0" CR_TAB
5757 return *len
= 3, ("movw %A0,%C1" CR_TAB
5761 return *len
= 4, ("mov %B0,%D1" CR_TAB
5762 "mov %A0,%C1" CR_TAB
5768 return *len
= 4, ("mov %A0,%D1" CR_TAB
5775 return ("clr %A0" CR_TAB
5784 out_shift_with_cnt ("lsr %D0" CR_TAB
5787 "ror %A0", insn
, operands
, len
, 4);
5792 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5794 XOP[0] = XOP[0] + XOP[2]
5796 and return "". If PLEN == NULL, print assembler instructions to perform the
5797 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5798 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5799 CODE == PLUS: perform addition by using ADD instructions.
5800 CODE == MINUS: perform addition by using SUB instructions.
5801 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5804 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
)
5806 /* MODE of the operation. */
5807 enum machine_mode mode
= GET_MODE (xop
[0]);
5809 /* Number of bytes to operate on. */
5810 int i
, n_bytes
= GET_MODE_SIZE (mode
);
5812 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5813 int clobber_val
= -1;
5815 /* op[0]: 8-bit destination register
5816 op[1]: 8-bit const int
5817 op[2]: 8-bit scratch register */
5820 /* Started the operation? Before starting the operation we may skip
5821 adding 0. This is no more true after the operation started because
5822 carry must be taken into account. */
5823 bool started
= false;
5825 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5828 /* Except in the case of ADIW with 16-bit register (see below)
5829 addition does not set cc0 in a usable way. */
5831 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
5834 xval
= simplify_unary_operation (NEG
, mode
, xval
, mode
);
5841 for (i
= 0; i
< n_bytes
; i
++)
5843 /* We operate byte-wise on the destination. */
5844 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
5845 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
5847 /* 8-bit value to operate with this byte. */
5848 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
5850 /* Registers R16..R31 can operate with immediate. */
5851 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
5854 op
[1] = gen_int_mode (val8
, QImode
);
5856 /* To get usable cc0 no low-bytes must have been skipped. */
5864 && test_hard_reg_class (ADDW_REGS
, reg8
))
5866 rtx xval16
= simplify_gen_subreg (HImode
, xval
, mode
, i
);
5867 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
5869 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5870 i.e. operate word-wise. */
5877 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
5880 if (n_bytes
== 2 && PLUS
== code
)
5892 avr_asm_len (code
== PLUS
5893 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5897 else if ((val8
== 1 || val8
== 0xff)
5899 && i
== n_bytes
- 1)
5901 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
5910 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
5912 if (clobber_val
!= (int) val8
)
5913 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
5914 clobber_val
= (int) val8
;
5916 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
5923 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
5926 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
5928 if (clobber_val
!= (int) val8
)
5929 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
5930 clobber_val
= (int) val8
;
5932 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
5944 } /* for all sub-bytes */
5946 /* No output doesn't change cc0. */
5948 if (plen
&& *plen
== 0)
5953 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5955 XOP[0] = XOP[0] + XOP[2]
5957 and return "". If PLEN == NULL, print assembler instructions to perform the
5958 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5959 words) printed with PLEN == NULL.
5960 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5961 condition code (with respect to XOP[0]). */
5964 avr_out_plus (rtx
*xop
, int *plen
, int *pcc
)
5966 int len_plus
, len_minus
;
5967 int cc_plus
, cc_minus
, cc_dummy
;
5972 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5974 avr_out_plus_1 (xop
, &len_plus
, PLUS
, &cc_plus
);
5975 avr_out_plus_1 (xop
, &len_minus
, MINUS
, &cc_minus
);
5977 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5981 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
5982 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
5984 else if (len_minus
<= len_plus
)
5985 avr_out_plus_1 (xop
, NULL
, MINUS
, pcc
);
5987 avr_out_plus_1 (xop
, NULL
, PLUS
, pcc
);
5993 /* Same as above but XOP has just 3 entries.
5994 Supply a dummy 4th operand. */
5997 avr_out_plus_noclobber (rtx
*xop
, int *plen
, int *pcc
)
6006 return avr_out_plus (op
, plen
, pcc
);
6010 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6013 avr_out_plus64 (rtx addend
, int *plen
)
6018 op
[0] = gen_rtx_REG (DImode
, 18);
6023 avr_out_plus_1 (op
, plen
, MINUS
, &cc_dummy
);
6028 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6029 time constant XOP[2]:
6031 XOP[0] = XOP[0] <op> XOP[2]
6033 and return "". If PLEN == NULL, print assembler instructions to perform the
6034 operation; otherwise, set *PLEN to the length of the instruction sequence
6035 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6036 register or SCRATCH if no clobber register is needed for the operation. */
6039 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6041 /* CODE and MODE of the operation. */
6042 enum rtx_code code
= GET_CODE (SET_SRC (single_set (insn
)));
6043 enum machine_mode mode
= GET_MODE (xop
[0]);
6045 /* Number of bytes to operate on. */
6046 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6048 /* Value of T-flag (0 or 1) or -1 if unknow. */
6051 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6052 int clobber_val
= -1;
6054 /* op[0]: 8-bit destination register
6055 op[1]: 8-bit const int
6056 op[2]: 8-bit clobber register or SCRATCH
6057 op[3]: 8-bit register containing 0xff or NULL_RTX */
6066 for (i
= 0; i
< n_bytes
; i
++)
6068 /* We operate byte-wise on the destination. */
6069 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6070 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6072 /* 8-bit value to operate with this byte. */
6073 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6075 /* Number of bits set in the current byte of the constant. */
6076 int pop8
= avr_popcount (val8
);
6078 /* Registers R16..R31 can operate with immediate. */
6079 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6082 op
[1] = GEN_INT (val8
);
6091 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6095 avr_asm_len ("set", op
, plen
, 1);
6098 op
[1] = GEN_INT (exact_log2 (val8
));
6099 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6103 if (op
[3] != NULL_RTX
)
6104 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6106 avr_asm_len ("clr %0" CR_TAB
6107 "dec %0", op
, plen
, 2);
6113 if (clobber_val
!= (int) val8
)
6114 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6115 clobber_val
= (int) val8
;
6117 avr_asm_len ("or %0,%2", op
, plen
, 1);
6127 avr_asm_len ("clr %0", op
, plen
, 1);
6129 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6133 avr_asm_len ("clt", op
, plen
, 1);
6136 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6137 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6141 if (clobber_val
!= (int) val8
)
6142 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6143 clobber_val
= (int) val8
;
6145 avr_asm_len ("and %0,%2", op
, plen
, 1);
6155 avr_asm_len ("com %0", op
, plen
, 1);
6156 else if (ld_reg_p
&& val8
== (1 << 7))
6157 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6160 if (clobber_val
!= (int) val8
)
6161 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6162 clobber_val
= (int) val8
;
6164 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6170 /* Unknown rtx_code */
6173 } /* for all sub-bytes */
6179 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6180 PLEN != NULL: Set *PLEN to the length of that sequence.
6184 avr_out_addto_sp (rtx
*op
, int *plen
)
6186 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6187 int addend
= INTVAL (op
[0]);
6194 if (flag_verbose_asm
|| flag_print_asm_name
)
6195 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
6197 while (addend
<= -pc_len
)
6200 avr_asm_len ("rcall .", op
, plen
, 1);
6203 while (addend
++ < 0)
6204 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
6206 else if (addend
> 0)
6208 if (flag_verbose_asm
|| flag_print_asm_name
)
6209 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
6211 while (addend
-- > 0)
6212 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
6219 /* Create RTL split patterns for byte sized rotate expressions. This
6220 produces a series of move instructions and considers overlap situations.
6221 Overlapping non-HImode operands need a scratch register. */
6224 avr_rotate_bytes (rtx operands
[])
6227 enum machine_mode mode
= GET_MODE (operands
[0]);
6228 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
6229 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
6230 int num
= INTVAL (operands
[2]);
6231 rtx scratch
= operands
[3];
6232 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6233 Word move if no scratch is needed, otherwise use size of scratch. */
6234 enum machine_mode move_mode
= QImode
;
6235 int move_size
, offset
, size
;
6239 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
6242 move_mode
= GET_MODE (scratch
);
6244 /* Force DI rotate to use QI moves since other DI moves are currently split
6245 into QI moves so forward propagation works better. */
6248 /* Make scratch smaller if needed. */
6249 if (SCRATCH
!= GET_CODE (scratch
)
6250 && HImode
== GET_MODE (scratch
)
6251 && QImode
== move_mode
)
6252 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
6254 move_size
= GET_MODE_SIZE (move_mode
);
6255 /* Number of bytes/words to rotate. */
6256 offset
= (num
>> 3) / move_size
;
6257 /* Number of moves needed. */
6258 size
= GET_MODE_SIZE (mode
) / move_size
;
6259 /* Himode byte swap is special case to avoid a scratch register. */
6260 if (mode
== HImode
&& same_reg
)
6262 /* HImode byte swap, using xor. This is as quick as using scratch. */
6264 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
6265 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
6266 if (!rtx_equal_p (dst
, src
))
6268 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6269 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
6270 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6275 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6276 /* Create linked list of moves to determine move order. */
6280 } move
[MAX_SIZE
+ 8];
6283 gcc_assert (size
<= MAX_SIZE
);
6284 /* Generate list of subreg moves. */
6285 for (i
= 0; i
< size
; i
++)
6288 int to
= (from
+ offset
) % size
;
6289 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
6290 mode
, from
* move_size
);
6291 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
6292 mode
, to
* move_size
);
6295 /* Mark dependence where a dst of one move is the src of another move.
6296 The first move is a conflict as it must wait until second is
6297 performed. We ignore moves to self - we catch this later. */
6299 for (i
= 0; i
< size
; i
++)
6300 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
6301 for (j
= 0; j
< size
; j
++)
6302 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
6304 /* The dst of move i is the src of move j. */
6311 /* Go through move list and perform non-conflicting moves. As each
6312 non-overlapping move is made, it may remove other conflicts
6313 so the process is repeated until no conflicts remain. */
6318 /* Emit move where dst is not also a src or we have used that
6320 for (i
= 0; i
< size
; i
++)
6321 if (move
[i
].src
!= NULL_RTX
)
6323 if (move
[i
].links
== -1
6324 || move
[move
[i
].links
].src
== NULL_RTX
)
6327 /* Ignore NOP moves to self. */
6328 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
6329 emit_move_insn (move
[i
].dst
, move
[i
].src
);
6331 /* Remove conflict from list. */
6332 move
[i
].src
= NULL_RTX
;
6338 /* Check for deadlock. This is when no moves occurred and we have
6339 at least one blocked move. */
6340 if (moves
== 0 && blocked
!= -1)
6342 /* Need to use scratch register to break deadlock.
6343 Add move to put dst of blocked move into scratch.
6344 When this move occurs, it will break chain deadlock.
6345 The scratch register is substituted for real move. */
6347 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
6349 move
[size
].src
= move
[blocked
].dst
;
6350 move
[size
].dst
= scratch
;
6351 /* Scratch move is never blocked. */
6352 move
[size
].links
= -1;
6353 /* Make sure we have valid link. */
6354 gcc_assert (move
[blocked
].links
!= -1);
6355 /* Replace src of blocking move with scratch reg. */
6356 move
[move
[blocked
].links
].src
= scratch
;
6357 /* Make dependent on scratch move occuring. */
6358 move
[blocked
].links
= size
;
6362 while (blocked
!= -1);
6367 /* Modifies the length assigned to instruction INSN
6368 LEN is the initially computed length of the insn. */
6371 adjust_insn_length (rtx insn
, int len
)
6373 rtx
*op
= recog_data
.operand
;
6374 enum attr_adjust_len adjust_len
;
6376 /* Some complex insns don't need length adjustment and therefore
6377 the length need not/must not be adjusted for these insns.
6378 It is easier to state this in an insn attribute "adjust_len" than
6379 to clutter up code here... */
6381 if (-1 == recog_memoized (insn
))
6386 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6388 adjust_len
= get_attr_adjust_len (insn
);
6390 if (adjust_len
== ADJUST_LEN_NO
)
6392 /* Nothing to adjust: The length from attribute "length" is fine.
6393 This is the default. */
6398 /* Extract insn's operands. */
6400 extract_constrain_insn_cached (insn
);
6402 /* Dispatch to right function. */
6406 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
6407 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
6408 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
6410 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
6412 case ADJUST_LEN_OUT_PLUS
: avr_out_plus (op
, &len
, NULL
); break;
6413 case ADJUST_LEN_PLUS64
: avr_out_plus64 (op
[0], &len
); break;
6414 case ADJUST_LEN_OUT_PLUS_NOCLOBBER
:
6415 avr_out_plus_noclobber (op
, &len
, NULL
); break;
6417 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
6419 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
6420 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
6421 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
6422 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
6423 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
6424 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
6425 case ADJUST_LEN_LOAD_LPM
: avr_load_lpm (insn
, op
, &len
); break;
6427 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
6428 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
6429 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
6430 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
6431 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
6433 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
6434 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
6435 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
6437 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
6438 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
6439 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
6441 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
6442 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
6443 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
6445 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
6446 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
6447 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
6449 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
6451 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
6460 /* Return nonzero if register REG dead after INSN. */
6463 reg_unused_after (rtx insn
, rtx reg
)
6465 return (dead_or_set_p (insn
, reg
)
6466 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
6469 /* Return nonzero if REG is not used after INSN.
6470 We assume REG is a reload reg, and therefore does
6471 not live past labels. It may live past calls or jumps though. */
6474 _reg_unused_after (rtx insn
, rtx reg
)
6479 /* If the reg is set by this instruction, then it is safe for our
6480 case. Disregard the case where this is a store to memory, since
6481 we are checking a register used in the store address. */
6482 set
= single_set (insn
);
6483 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
6484 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6487 while ((insn
= NEXT_INSN (insn
)))
6490 code
= GET_CODE (insn
);
6493 /* If this is a label that existed before reload, then the register
6494 if dead here. However, if this is a label added by reorg, then
6495 the register may still be live here. We can't tell the difference,
6496 so we just ignore labels completely. */
6497 if (code
== CODE_LABEL
)
6505 if (code
== JUMP_INSN
)
6508 /* If this is a sequence, we must handle them all at once.
6509 We could have for instance a call that sets the target register,
6510 and an insn in a delay slot that uses the register. In this case,
6511 we must return 0. */
6512 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
6517 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
6519 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
6520 rtx set
= single_set (this_insn
);
6522 if (GET_CODE (this_insn
) == CALL_INSN
)
6524 else if (GET_CODE (this_insn
) == JUMP_INSN
)
6526 if (INSN_ANNULLED_BRANCH_P (this_insn
))
6531 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6533 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6535 if (GET_CODE (SET_DEST (set
)) != MEM
)
6541 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
6546 else if (code
== JUMP_INSN
)
6550 if (code
== CALL_INSN
)
6553 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
6554 if (GET_CODE (XEXP (tem
, 0)) == USE
6555 && REG_P (XEXP (XEXP (tem
, 0), 0))
6556 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
6558 if (call_used_regs
[REGNO (reg
)])
6562 set
= single_set (insn
);
6564 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
6566 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
6567 return GET_CODE (SET_DEST (set
)) != MEM
;
6568 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
6575 /* Return RTX that represents the lower 16 bits of a constant address.
6576 Unfortunately, simplify_gen_subreg does not handle this case. */
6579 avr_const_address_lo16 (rtx x
)
6583 switch (GET_CODE (x
))
6589 if (PLUS
== GET_CODE (XEXP (x
, 0))
6590 && SYMBOL_REF
== GET_CODE (XEXP (XEXP (x
, 0), 0))
6591 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
6593 HOST_WIDE_INT offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
6594 const char *name
= XSTR (XEXP (XEXP (x
, 0), 0), 0);
6596 lo16
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6597 lo16
= gen_rtx_CONST (Pmode
, plus_constant (lo16
, offset
));
6606 const char *name
= XSTR (x
, 0);
6608 return gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (name
));
6612 avr_edump ("\n%?: %r\n", x
);
6617 /* Target hook for assembling integer objects. The AVR version needs
6618 special handling for references to certain labels. */
6621 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
6623 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
6624 && text_segment_operand (x
, VOIDmode
) )
6626 fputs ("\t.word\tgs(", asm_out_file
);
6627 output_addr_const (asm_out_file
, x
);
6628 fputs (")\n", asm_out_file
);
6632 else if (GET_MODE (x
) == PSImode
)
6634 default_assemble_integer (avr_const_address_lo16 (x
),
6635 GET_MODE_SIZE (HImode
), aligned_p
);
6637 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6638 " extension for hh8(", asm_out_file
);
6639 output_addr_const (asm_out_file
, x
);
6640 fputs (")\"\n", asm_out_file
);
6642 fputs ("\t.byte\t0\t" ASM_COMMENT_START
" hh8(", asm_out_file
);
6643 output_addr_const (asm_out_file
, x
);
6644 fputs (")\n", asm_out_file
);
6649 return default_assemble_integer (x
, size
, aligned_p
);
6653 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6656 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
6659 /* If the function has the 'signal' or 'interrupt' attribute, test to
6660 make sure that the name of the function is "__vector_NN" so as to
6661 catch when the user misspells the interrupt vector name. */
6663 if (cfun
->machine
->is_interrupt
)
6665 if (!STR_PREFIX_P (name
, "__vector"))
6667 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6668 "%qs appears to be a misspelled interrupt handler",
6672 else if (cfun
->machine
->is_signal
)
6674 if (!STR_PREFIX_P (name
, "__vector"))
6676 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
6677 "%qs appears to be a misspelled signal handler",
6682 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
6683 ASM_OUTPUT_LABEL (file
, name
);
6687 /* Return value is nonzero if pseudos that have been
6688 assigned to registers of class CLASS would likely be spilled
6689 because registers of CLASS are needed for spill registers. */
6692 avr_class_likely_spilled_p (reg_class_t c
)
6694 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
6697 /* Valid attributes:
6698 progmem - put data to program memory;
6699 signal - make a function to be hardware interrupt. After function
6700 prologue interrupts are disabled;
6701 interrupt - make a function to be hardware interrupt. After function
6702 prologue interrupts are enabled;
6703 naked - don't generate function prologue/epilogue and `ret' command.
6705 Only `progmem' attribute valid for type. */
6707 /* Handle a "progmem" attribute; arguments as in
6708 struct attribute_spec.handler. */
6710 avr_handle_progmem_attribute (tree
*node
, tree name
,
6711 tree args ATTRIBUTE_UNUSED
,
6712 int flags ATTRIBUTE_UNUSED
,
6717 if (TREE_CODE (*node
) == TYPE_DECL
)
6719 /* This is really a decl attribute, not a type attribute,
6720 but try to handle it for GCC 3.0 backwards compatibility. */
6722 tree type
= TREE_TYPE (*node
);
6723 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
6724 tree newtype
= build_type_attribute_variant (type
, attr
);
6726 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
6727 TREE_TYPE (*node
) = newtype
;
6728 *no_add_attrs
= true;
6730 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
6732 *no_add_attrs
= false;
6736 warning (OPT_Wattributes
, "%qE attribute ignored",
6738 *no_add_attrs
= true;
6745 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6746 struct attribute_spec.handler. */
6749 avr_handle_fndecl_attribute (tree
*node
, tree name
,
6750 tree args ATTRIBUTE_UNUSED
,
6751 int flags ATTRIBUTE_UNUSED
,
6754 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6756 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
6758 *no_add_attrs
= true;
6765 avr_handle_fntype_attribute (tree
*node
, tree name
,
6766 tree args ATTRIBUTE_UNUSED
,
6767 int flags ATTRIBUTE_UNUSED
,
6770 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
6772 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
6774 *no_add_attrs
= true;
6781 /* AVR attributes. */
6782 static const struct attribute_spec
6783 avr_attribute_table
[] =
6785 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6786 affects_type_identity } */
6787 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
6789 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
6791 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
6793 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
6795 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
6797 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
6799 { NULL
, 0, 0, false, false, false, NULL
, false }
6803 /* Look if DECL shall be placed in program memory space by
6804 means of attribute `progmem' or some address-space qualifier.
6805 Return non-zero if DECL is data that must end up in Flash and
6806 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6808 Return 2 if DECL is located in 24-bit flash address-space
6809 Return 1 if DECL is located in 16-bit flash address-space
6810 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6811 Return 0 otherwise */
6814 avr_progmem_p (tree decl
, tree attributes
)
6818 if (TREE_CODE (decl
) != VAR_DECL
)
6821 if (avr_decl_memx_p (decl
))
6824 if (avr_decl_flash_p (decl
))
6828 != lookup_attribute ("progmem", attributes
))
6835 while (TREE_CODE (a
) == ARRAY_TYPE
);
6837 if (a
== error_mark_node
)
6840 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
6847 /* Scan type TYP for pointer references to address space ASn.
6848 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6849 the AS are also declared to be CONST.
6850 Otherwise, return the respective addres space, i.e. a value != 0. */
6853 avr_nonconst_pointer_addrspace (tree typ
)
6855 while (ARRAY_TYPE
== TREE_CODE (typ
))
6856 typ
= TREE_TYPE (typ
);
6858 if (POINTER_TYPE_P (typ
))
6861 tree target
= TREE_TYPE (typ
);
6863 /* Pointer to function: Test the function's return type. */
6865 if (FUNCTION_TYPE
== TREE_CODE (target
))
6866 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
6868 /* "Ordinary" pointers... */
6870 while (TREE_CODE (target
) == ARRAY_TYPE
)
6871 target
= TREE_TYPE (target
);
6873 /* Pointers to non-generic address space must be const.
6874 Refuse address spaces outside the device's flash. */
6876 as
= TYPE_ADDR_SPACE (target
);
6878 if (!ADDR_SPACE_GENERIC_P (as
)
6879 && (!TYPE_READONLY (target
)
6880 || avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
))
6885 /* Scan pointer's target type. */
6887 return avr_nonconst_pointer_addrspace (target
);
6890 return ADDR_SPACE_GENERIC
;
6894 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
6895 go along with CONST qualifier. Writing to these address spaces should
6896 be detected and complained about as early as possible. */
6899 avr_pgm_check_var_decl (tree node
)
6901 const char *reason
= NULL
;
6903 addr_space_t as
= ADDR_SPACE_GENERIC
;
6905 gcc_assert (as
== 0);
6907 if (avr_log
.progmem
)
6908 avr_edump ("%?: %t\n", node
);
6910 switch (TREE_CODE (node
))
6916 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
6917 reason
= "variable";
6921 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
6922 reason
= "function parameter";
6926 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
6927 reason
= "structure field";
6931 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
6933 reason
= "return type of function";
6937 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
6944 avr_edump ("%?: %s, %d, %d\n",
6945 avr_addrspace
[as
].name
,
6946 avr_addrspace
[as
].segment
, avr_current_device
->n_flash
);
6947 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
6950 error ("%qT uses address space %qs beyond flash of %qs",
6951 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
6953 error ("%s %q+D uses address space %qs beyond flash of %qs",
6954 reason
, node
, avr_addrspace
[as
].name
,
6955 avr_current_device
->name
);
6960 error ("pointer targeting address space %qs must be const in %qT",
6961 avr_addrspace
[as
].name
, node
);
6963 error ("pointer targeting address space %qs must be const"
6965 avr_addrspace
[as
].name
, reason
, node
);
6969 return reason
== NULL
;
6973 /* Add the section attribute if the variable is in progmem. */
6976 avr_insert_attributes (tree node
, tree
*attributes
)
6978 avr_pgm_check_var_decl (node
);
6980 if (TREE_CODE (node
) == VAR_DECL
6981 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
6982 && avr_progmem_p (node
, *attributes
))
6987 /* For C++, we have to peel arrays in order to get correct
6988 determination of readonlyness. */
6991 node0
= TREE_TYPE (node0
);
6992 while (TREE_CODE (node0
) == ARRAY_TYPE
);
6994 if (error_mark_node
== node0
)
6997 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
6999 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7001 error ("variable %q+D located in address space %qs"
7002 " beyond flash of %qs",
7003 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7006 if (!TYPE_READONLY (node0
)
7007 && !TREE_READONLY (node
))
7009 const char *reason
= "__attribute__((progmem))";
7011 if (!ADDR_SPACE_GENERIC_P (as
))
7012 reason
= avr_addrspace
[as
].name
;
7014 if (avr_log
.progmem
)
7015 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
7017 error ("variable %q+D must be const in order to be put into"
7018 " read-only section by means of %qs", node
, reason
);
7024 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7025 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7026 /* Track need of __do_clear_bss. */
7029 avr_asm_output_aligned_decl_common (FILE * stream
,
7030 const_tree decl ATTRIBUTE_UNUSED
,
7032 unsigned HOST_WIDE_INT size
,
7033 unsigned int align
, bool local_p
)
7035 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7036 There is no need to trigger __do_clear_bss code for them. */
7038 if (!STR_PREFIX_P (name
, "__gnu_lto"))
7039 avr_need_clear_bss_p
= true;
7042 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
7044 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
7048 /* Unnamed section callback for data_section
7049 to track need of __do_copy_data. */
7052 avr_output_data_section_asm_op (const void *data
)
7054 avr_need_copy_data_p
= true;
7056 /* Dispatch to default. */
7057 output_section_asm_op (data
);
7061 /* Unnamed section callback for bss_section
7062 to track need of __do_clear_bss. */
7065 avr_output_bss_section_asm_op (const void *data
)
7067 avr_need_clear_bss_p
= true;
7069 /* Dispatch to default. */
7070 output_section_asm_op (data
);
7074 /* Unnamed section callback for progmem*.data sections. */
7077 avr_output_progmem_section_asm_op (const void *data
)
7079 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
7080 (const char*) data
);
7084 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7087 avr_asm_init_sections (void)
7091 /* Set up a section for jump tables. Alignment is handled by
7092 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7094 if (AVR_HAVE_JMP_CALL
)
7096 progmem_swtable_section
7097 = get_unnamed_section (0, output_section_asm_op
,
7098 "\t.section\t.progmem.gcc_sw_table"
7099 ",\"a\",@progbits");
7103 progmem_swtable_section
7104 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7105 "\t.section\t.progmem.gcc_sw_table"
7106 ",\"ax\",@progbits");
7109 for (n
= 0; n
< sizeof (progmem_section
) / sizeof (*progmem_section
); n
++)
7112 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
7113 progmem_section_prefix
[n
]);
7116 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7117 resp. `avr_need_copy_data_p'. */
7119 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7120 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7121 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
7125 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7128 avr_asm_function_rodata_section (tree decl
)
7130 /* If a function is unused and optimized out by -ffunction-sections
7131 and --gc-sections, ensure that the same will happen for its jump
7132 tables by putting them into individual sections. */
7137 /* Get the frodata section from the default function in varasm.c
7138 but treat function-associated data-like jump tables as code
7139 rather than as user defined data. AVR has no constant pools. */
7141 int fdata
= flag_data_sections
;
7143 flag_data_sections
= flag_function_sections
;
7144 frodata
= default_function_rodata_section (decl
);
7145 flag_data_sections
= fdata
;
7146 flags
= frodata
->common
.flags
;
7149 if (frodata
!= readonly_data_section
7150 && flags
& SECTION_NAMED
)
7152 /* Adjust section flags and replace section name prefix. */
7156 static const char* const prefix
[] =
7158 ".rodata", ".progmem.gcc_sw_table",
7159 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7162 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
7164 const char * old_prefix
= prefix
[i
];
7165 const char * new_prefix
= prefix
[i
+1];
7166 const char * name
= frodata
->named
.name
;
7168 if (STR_PREFIX_P (name
, old_prefix
))
7170 const char *rname
= ACONCAT ((new_prefix
,
7171 name
+ strlen (old_prefix
), NULL
));
7172 flags
&= ~SECTION_CODE
;
7173 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
7175 return get_section (rname
, flags
, frodata
->named
.decl
);
7180 return progmem_swtable_section
;
7184 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7185 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7188 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
7190 if (flags
& AVR_SECTION_PROGMEM
)
7192 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
7193 int segment
= avr_addrspace
[as
].segment
;
7194 const char *old_prefix
= ".rodata";
7195 const char *new_prefix
= progmem_section_prefix
[segment
];
7197 if (STR_PREFIX_P (name
, old_prefix
))
7199 const char *sname
= ACONCAT ((new_prefix
,
7200 name
+ strlen (old_prefix
), NULL
));
7201 default_elf_asm_named_section (sname
, flags
, decl
);
7205 default_elf_asm_named_section (new_prefix
, flags
, decl
);
7209 if (!avr_need_copy_data_p
)
7210 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
7211 || STR_PREFIX_P (name
, ".rodata")
7212 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
7214 if (!avr_need_clear_bss_p
)
7215 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
7217 default_elf_asm_named_section (name
, flags
, decl
);
7221 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
7223 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
7225 if (STR_PREFIX_P (name
, ".noinit"))
7227 if (decl
&& TREE_CODE (decl
) == VAR_DECL
7228 && DECL_INITIAL (decl
) == NULL_TREE
)
7229 flags
|= SECTION_BSS
; /* @nobits */
7231 warning (0, "only uninitialized variables can be placed in the "
7235 if (decl
&& DECL_P (decl
)
7236 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7238 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7240 /* Attribute progmem puts data in generic address space.
7241 Set section flags as if it was in __flash to get the right
7242 section prefix in the remainder. */
7244 if (ADDR_SPACE_GENERIC_P (as
))
7245 as
= ADDR_SPACE_FLASH
;
7247 flags
|= as
* SECTION_MACH_DEP
;
7248 flags
&= ~SECTION_WRITE
;
7249 flags
&= ~SECTION_BSS
;
7256 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7259 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
7261 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7262 readily available, see PR34734. So we postpone the warning
7263 about uninitialized data in program memory section until here. */
7266 && decl
&& DECL_P (decl
)
7267 && NULL_TREE
== DECL_INITIAL (decl
)
7268 && !DECL_EXTERNAL (decl
)
7269 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7271 warning (OPT_Wuninitialized
,
7272 "uninitialized variable %q+D put into "
7273 "program memory area", decl
);
7276 default_encode_section_info (decl
, rtl
, new_decl_p
);
7278 if (decl
&& DECL_P (decl
)
7279 && TREE_CODE (decl
) != FUNCTION_DECL
7281 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
7283 rtx sym
= XEXP (rtl
, 0);
7284 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7286 /* PSTR strings are in generic space but located in flash:
7287 patch address space. */
7289 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7290 as
= ADDR_SPACE_FLASH
;
7292 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
7297 /* Implement `TARGET_ASM_SELECT_SECTION' */
7300 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
7302 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
7304 if (decl
&& DECL_P (decl
)
7305 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7307 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7308 int segment
= avr_addrspace
[as
].segment
;
7310 if (sect
->common
.flags
& SECTION_NAMED
)
7312 const char * name
= sect
->named
.name
;
7313 const char * old_prefix
= ".rodata";
7314 const char * new_prefix
= progmem_section_prefix
[segment
];
7316 if (STR_PREFIX_P (name
, old_prefix
))
7318 const char *sname
= ACONCAT ((new_prefix
,
7319 name
+ strlen (old_prefix
), NULL
));
7320 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
7324 return progmem_section
[segment
];
7330 /* Implement `TARGET_ASM_FILE_START'. */
7331 /* Outputs some text at the start of each assembler file. */
7334 avr_file_start (void)
7336 int sfr_offset
= avr_current_arch
->sfr_offset
;
7338 if (avr_current_arch
->asm_only
)
7339 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
7341 default_file_start ();
7343 /* Print I/O addresses of some SFRs used with IN and OUT. */
7346 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
7348 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
7349 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
7351 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
7353 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
7355 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
7357 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
7359 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
7360 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
7361 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
7365 /* Implement `TARGET_ASM_FILE_END'. */
7366 /* Outputs to the stdio stream FILE some
7367 appropriate text to go at the end of an assembler file. */
7372 /* Output these only if there is anything in the
7373 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7374 input section(s) - some code size can be saved by not
7375 linking in the initialization code from libgcc if resp.
7376 sections are empty. */
7378 if (avr_need_copy_data_p
)
7379 fputs (".global __do_copy_data\n", asm_out_file
);
7381 if (avr_need_clear_bss_p
)
7382 fputs (".global __do_clear_bss\n", asm_out_file
);
7385 /* Choose the order in which to allocate hard registers for
7386 pseudo-registers local to a basic block.
7388 Store the desired register order in the array `reg_alloc_order'.
7389 Element 0 should be the register to allocate first; element 1, the
7390 next register; and so on. */
7393 order_regs_for_local_alloc (void)
7396 static const int order_0
[] = {
7404 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7408 static const int order_1
[] = {
7416 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7420 static const int order_2
[] = {
7429 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7434 const int *order
= (TARGET_ORDER_1
? order_1
:
7435 TARGET_ORDER_2
? order_2
:
7437 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
7438 reg_alloc_order
[i
] = order
[i
];
7442 /* Implement `TARGET_REGISTER_MOVE_COST' */
7445 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
7446 reg_class_t from
, reg_class_t to
)
7448 return (from
== STACK_REG
? 6
7449 : to
== STACK_REG
? 12
7454 /* Implement `TARGET_MEMORY_MOVE_COST' */
7457 avr_memory_move_cost (enum machine_mode mode
,
7458 reg_class_t rclass ATTRIBUTE_UNUSED
,
7459 bool in ATTRIBUTE_UNUSED
)
7461 return (mode
== QImode
? 2
7462 : mode
== HImode
? 4
7463 : mode
== SImode
? 8
7464 : mode
== SFmode
? 8
7469 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7470 cost of an RTX operand given its context. X is the rtx of the
7471 operand, MODE is its mode, and OUTER is the rtx_code of this
7472 operand's parent operator. */
7475 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
7476 int opno
, bool speed
)
7478 enum rtx_code code
= GET_CODE (x
);
7489 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7496 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
7500 /* Worker function for AVR backend's rtx_cost function.
7501 X is rtx expression whose cost is to be calculated.
7502 Return true if the complete cost has been computed.
7503 Return false if subexpressions should be scanned.
7504 In either case, *TOTAL contains the cost result. */
7507 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
7508 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
7510 enum rtx_code code
= (enum rtx_code
) codearg
;
7511 enum machine_mode mode
= GET_MODE (x
);
7521 /* Immediate constants are as cheap as registers. */
7526 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7534 *total
= COSTS_N_INSNS (1);
7540 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
7546 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7554 *total
= COSTS_N_INSNS (1);
7560 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7564 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7565 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7569 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
7570 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7571 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7575 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
7576 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
7577 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7585 && MULT
== GET_CODE (XEXP (x
, 0))
7586 && register_operand (XEXP (x
, 1), QImode
))
7589 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7590 /* multiply-add with constant: will be split and load constant. */
7591 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7592 *total
= COSTS_N_INSNS (1) + *total
;
7595 *total
= COSTS_N_INSNS (1);
7596 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7597 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7602 && (MULT
== GET_CODE (XEXP (x
, 0))
7603 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
7604 && register_operand (XEXP (x
, 1), HImode
)
7605 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
7606 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
7609 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7610 /* multiply-add with constant: will be split and load constant. */
7611 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
7612 *total
= COSTS_N_INSNS (1) + *total
;
7615 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7617 *total
= COSTS_N_INSNS (2);
7618 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7621 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7622 *total
= COSTS_N_INSNS (1);
7624 *total
= COSTS_N_INSNS (2);
7628 if (!CONST_INT_P (XEXP (x
, 1)))
7630 *total
= COSTS_N_INSNS (3);
7631 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7634 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7635 *total
= COSTS_N_INSNS (2);
7637 *total
= COSTS_N_INSNS (3);
7641 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7643 *total
= COSTS_N_INSNS (4);
7644 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7647 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
7648 *total
= COSTS_N_INSNS (1);
7650 *total
= COSTS_N_INSNS (4);
7656 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7662 && register_operand (XEXP (x
, 0), QImode
)
7663 && MULT
== GET_CODE (XEXP (x
, 1)))
7666 *total
= COSTS_N_INSNS (speed
? 4 : 3);
7667 /* multiply-sub with constant: will be split and load constant. */
7668 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7669 *total
= COSTS_N_INSNS (1) + *total
;
7674 && register_operand (XEXP (x
, 0), HImode
)
7675 && (MULT
== GET_CODE (XEXP (x
, 1))
7676 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
7677 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
7678 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
7681 *total
= COSTS_N_INSNS (speed
? 5 : 4);
7682 /* multiply-sub with constant: will be split and load constant. */
7683 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
7684 *total
= COSTS_N_INSNS (1) + *total
;
7690 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7691 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7692 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7693 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7697 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7698 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7699 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7707 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
7709 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7717 rtx op0
= XEXP (x
, 0);
7718 rtx op1
= XEXP (x
, 1);
7719 enum rtx_code code0
= GET_CODE (op0
);
7720 enum rtx_code code1
= GET_CODE (op1
);
7721 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
7722 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
7725 && (u8_operand (op1
, HImode
)
7726 || s8_operand (op1
, HImode
)))
7728 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
7732 && register_operand (op1
, HImode
))
7734 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
7737 else if (ex0
|| ex1
)
7739 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
7742 else if (register_operand (op0
, HImode
)
7743 && (u8_operand (op1
, HImode
)
7744 || s8_operand (op1
, HImode
)))
7746 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
7750 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
7753 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7760 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7770 /* Add some additional costs besides CALL like moves etc. */
7772 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
7776 /* Just a rough estimate. Even with -O2 we don't want bulky
7777 code expanded inline. */
7779 *total
= COSTS_N_INSNS (25);
7785 *total
= COSTS_N_INSNS (300);
7787 /* Add some additional costs besides CALL like moves etc. */
7788 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
7796 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7797 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
7805 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
7807 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
7808 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7809 /* For div/mod with const-int divisor we have at least the cost of
7810 loading the divisor. */
7811 if (CONST_INT_P (XEXP (x
, 1)))
7812 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
7813 /* Add some overall penaly for clobbering and moving around registers */
7814 *total
+= COSTS_N_INSNS (2);
7821 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
7822 *total
= COSTS_N_INSNS (1);
7827 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
7828 *total
= COSTS_N_INSNS (3);
7833 if (CONST_INT_P (XEXP (x
, 1)))
7834 switch (INTVAL (XEXP (x
, 1)))
7838 *total
= COSTS_N_INSNS (5);
7841 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
7849 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
7856 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7858 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
7859 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7864 val
= INTVAL (XEXP (x
, 1));
7866 *total
= COSTS_N_INSNS (3);
7867 else if (val
>= 0 && val
<= 7)
7868 *total
= COSTS_N_INSNS (val
);
7870 *total
= COSTS_N_INSNS (1);
7877 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
7878 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
7879 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
7881 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
7886 if (const1_rtx
== (XEXP (x
, 1))
7887 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
7889 *total
= COSTS_N_INSNS (2);
7893 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7895 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
7896 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7900 switch (INTVAL (XEXP (x
, 1)))
7907 *total
= COSTS_N_INSNS (2);
7910 *total
= COSTS_N_INSNS (3);
7916 *total
= COSTS_N_INSNS (4);
7921 *total
= COSTS_N_INSNS (5);
7924 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
7927 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
7930 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
7933 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
7934 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7940 if (!CONST_INT_P (XEXP (x
, 1)))
7942 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
7945 switch (INTVAL (XEXP (x
, 1)))
7953 *total
= COSTS_N_INSNS (3);
7956 *total
= COSTS_N_INSNS (5);
7959 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
7965 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
7967 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
7968 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
7972 switch (INTVAL (XEXP (x
, 1)))
7978 *total
= COSTS_N_INSNS (3);
7983 *total
= COSTS_N_INSNS (4);
7986 *total
= COSTS_N_INSNS (6);
7989 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
7992 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
7993 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8001 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8008 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8010 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8011 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8016 val
= INTVAL (XEXP (x
, 1));
8018 *total
= COSTS_N_INSNS (4);
8020 *total
= COSTS_N_INSNS (2);
8021 else if (val
>= 0 && val
<= 7)
8022 *total
= COSTS_N_INSNS (val
);
8024 *total
= COSTS_N_INSNS (1);
8029 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8031 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8032 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8036 switch (INTVAL (XEXP (x
, 1)))
8042 *total
= COSTS_N_INSNS (2);
8045 *total
= COSTS_N_INSNS (3);
8051 *total
= COSTS_N_INSNS (4);
8055 *total
= COSTS_N_INSNS (5);
8058 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8061 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8065 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8068 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8069 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8075 if (!CONST_INT_P (XEXP (x
, 1)))
8077 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8080 switch (INTVAL (XEXP (x
, 1)))
8086 *total
= COSTS_N_INSNS (3);
8090 *total
= COSTS_N_INSNS (5);
8093 *total
= COSTS_N_INSNS (4);
8096 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8102 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8104 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8105 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8109 switch (INTVAL (XEXP (x
, 1)))
8115 *total
= COSTS_N_INSNS (4);
8120 *total
= COSTS_N_INSNS (6);
8123 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8126 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
8129 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8130 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8138 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8145 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8147 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8148 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8153 val
= INTVAL (XEXP (x
, 1));
8155 *total
= COSTS_N_INSNS (3);
8156 else if (val
>= 0 && val
<= 7)
8157 *total
= COSTS_N_INSNS (val
);
8159 *total
= COSTS_N_INSNS (1);
8164 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8166 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8167 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8171 switch (INTVAL (XEXP (x
, 1)))
8178 *total
= COSTS_N_INSNS (2);
8181 *total
= COSTS_N_INSNS (3);
8186 *total
= COSTS_N_INSNS (4);
8190 *total
= COSTS_N_INSNS (5);
8196 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8199 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8203 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8206 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8207 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8213 if (!CONST_INT_P (XEXP (x
, 1)))
8215 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8218 switch (INTVAL (XEXP (x
, 1)))
8226 *total
= COSTS_N_INSNS (3);
8229 *total
= COSTS_N_INSNS (5);
8232 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8238 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8240 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8241 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8245 switch (INTVAL (XEXP (x
, 1)))
8251 *total
= COSTS_N_INSNS (4);
8254 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8259 *total
= COSTS_N_INSNS (4);
8262 *total
= COSTS_N_INSNS (6);
8265 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8266 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8274 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8278 switch (GET_MODE (XEXP (x
, 0)))
8281 *total
= COSTS_N_INSNS (1);
8282 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8283 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8287 *total
= COSTS_N_INSNS (2);
8288 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8289 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8290 else if (INTVAL (XEXP (x
, 1)) != 0)
8291 *total
+= COSTS_N_INSNS (1);
8295 *total
= COSTS_N_INSNS (3);
8296 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
8297 *total
+= COSTS_N_INSNS (2);
8301 *total
= COSTS_N_INSNS (4);
8302 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8303 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8304 else if (INTVAL (XEXP (x
, 1)) != 0)
8305 *total
+= COSTS_N_INSNS (3);
8311 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8316 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
8317 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8318 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8320 if (QImode
== mode
|| HImode
== mode
)
8322 *total
= COSTS_N_INSNS (2);
8335 /* Implement `TARGET_RTX_COSTS'. */
8338 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
8339 int opno
, int *total
, bool speed
)
8341 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
8342 opno
, total
, speed
);
8344 if (avr_log
.rtx_costs
)
8346 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8347 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
8354 /* Implement `TARGET_ADDRESS_COST'. */
8357 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
8361 if (GET_CODE (x
) == PLUS
8362 && CONST_INT_P (XEXP (x
, 1))
8363 && (REG_P (XEXP (x
, 0))
8364 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
8366 if (INTVAL (XEXP (x
, 1)) >= 61)
8369 else if (CONSTANT_ADDRESS_P (x
))
8372 && io_address_operand (x
, QImode
))
8376 if (avr_log
.address_cost
)
8377 avr_edump ("\n%?: %d = %r\n", cost
, x
);
8382 /* Test for extra memory constraint 'Q'.
8383 It's a memory address based on Y or Z pointer with valid displacement. */
8386 extra_constraint_Q (rtx x
)
8390 if (GET_CODE (XEXP (x
,0)) == PLUS
8391 && REG_P (XEXP (XEXP (x
,0), 0))
8392 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
8393 && (INTVAL (XEXP (XEXP (x
,0), 1))
8394 <= MAX_LD_OFFSET (GET_MODE (x
))))
8396 rtx xx
= XEXP (XEXP (x
,0), 0);
8397 int regno
= REGNO (xx
);
8399 ok
= (/* allocate pseudos */
8400 regno
>= FIRST_PSEUDO_REGISTER
8401 /* strictly check */
8402 || regno
== REG_Z
|| regno
== REG_Y
8403 /* XXX frame & arg pointer checks */
8404 || xx
== frame_pointer_rtx
8405 || xx
== arg_pointer_rtx
);
8407 if (avr_log
.constraints
)
8408 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8409 ok
, reload_completed
, reload_in_progress
, x
);
8415 /* Convert condition code CONDITION to the valid AVR condition code. */
8418 avr_normalize_condition (RTX_CODE condition
)
8435 /* Helper function for `avr_reorg'. */
8438 avr_compare_pattern (rtx insn
)
8440 rtx pattern
= single_set (insn
);
8443 && NONJUMP_INSN_P (insn
)
8444 && SET_DEST (pattern
) == cc0_rtx
8445 && GET_CODE (SET_SRC (pattern
)) == COMPARE
8446 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 0))
8447 && DImode
!= GET_MODE (XEXP (SET_SRC (pattern
), 1)))
8455 /* Helper function for `avr_reorg'. */
8457 /* Expansion of switch/case decision trees leads to code like
8459 cc0 = compare (Reg, Num)
8463 cc0 = compare (Reg, Num)
8467 The second comparison is superfluous and can be deleted.
8468 The second jump condition can be transformed from a
8469 "difficult" one to a "simple" one because "cc0 > 0" and
8470 "cc0 >= 0" will have the same effect here.
8472 This function relies on the way switch/case is being expaned
8473 as binary decision tree. For example code see PR 49903.
8475 Return TRUE if optimization performed.
8476 Return FALSE if nothing changed.
8478 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8480 We don't want to do this in text peephole because it is
8481 tedious to work out jump offsets there and the second comparison
8482 might have been transormed by `avr_reorg'.
8484 RTL peephole won't do because peephole2 does not scan across
8488 avr_reorg_remove_redundant_compare (rtx insn1
)
8490 rtx comp1
, ifelse1
, xcond1
, branch1
;
8491 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
8493 rtx jump
, target
, cond
;
8495 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8497 branch1
= next_nonnote_nondebug_insn (insn1
);
8498 if (!branch1
|| !JUMP_P (branch1
))
8501 insn2
= next_nonnote_nondebug_insn (branch1
);
8502 if (!insn2
|| !avr_compare_pattern (insn2
))
8505 branch2
= next_nonnote_nondebug_insn (insn2
);
8506 if (!branch2
|| !JUMP_P (branch2
))
8509 comp1
= avr_compare_pattern (insn1
);
8510 comp2
= avr_compare_pattern (insn2
);
8511 xcond1
= single_set (branch1
);
8512 xcond2
= single_set (branch2
);
8514 if (!comp1
|| !comp2
8515 || !rtx_equal_p (comp1
, comp2
)
8516 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
8517 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
8518 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
8519 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
8524 comp1
= SET_SRC (comp1
);
8525 ifelse1
= SET_SRC (xcond1
);
8526 ifelse2
= SET_SRC (xcond2
);
8528 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8530 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
8531 || !REG_P (XEXP (comp1
, 0))
8532 || !CONST_INT_P (XEXP (comp1
, 1))
8533 || XEXP (ifelse1
, 2) != pc_rtx
8534 || XEXP (ifelse2
, 2) != pc_rtx
8535 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
8536 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
8537 || !COMPARISON_P (XEXP (ifelse2
, 0))
8538 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
8539 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
8540 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
8541 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
8546 /* We filtered the insn sequence to look like
8552 (if_then_else (eq (cc0)
8561 (if_then_else (CODE (cc0)
8567 code
= GET_CODE (XEXP (ifelse2
, 0));
8569 /* Map GT/GTU to GE/GEU which is easier for AVR.
8570 The first two instructions compare/branch on EQ
8571 so we may replace the difficult
8573 if (x == VAL) goto L1;
8574 if (x > VAL) goto L2;
8578 if (x == VAL) goto L1;
8579 if (x >= VAL) goto L2;
8581 Similarly, replace LE/LEU by LT/LTU. */
8592 code
= avr_normalize_condition (code
);
8599 /* Wrap the branches into UNSPECs so they won't be changed or
8600 optimized in the remainder. */
8602 target
= XEXP (XEXP (ifelse1
, 1), 0);
8603 cond
= XEXP (ifelse1
, 0);
8604 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
8606 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
8608 target
= XEXP (XEXP (ifelse2
, 1), 0);
8609 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
8610 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
8612 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
8614 /* The comparisons in insn1 and insn2 are exactly the same;
8615 insn2 is superfluous so delete it. */
8617 delete_insn (insn2
);
8618 delete_insn (branch1
);
8619 delete_insn (branch2
);
8625 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8626 /* Optimize conditional jumps. */
8631 rtx insn
= get_insns();
8633 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
8635 rtx pattern
= avr_compare_pattern (insn
);
8641 && avr_reorg_remove_redundant_compare (insn
))
8646 if (compare_diff_p (insn
))
8648 /* Now we work under compare insn with difficult branch. */
8650 rtx next
= next_real_insn (insn
);
8651 rtx pat
= PATTERN (next
);
8653 pattern
= SET_SRC (pattern
);
8655 if (true_regnum (XEXP (pattern
, 0)) >= 0
8656 && true_regnum (XEXP (pattern
, 1)) >= 0)
8658 rtx x
= XEXP (pattern
, 0);
8659 rtx src
= SET_SRC (pat
);
8660 rtx t
= XEXP (src
,0);
8661 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8662 XEXP (pattern
, 0) = XEXP (pattern
, 1);
8663 XEXP (pattern
, 1) = x
;
8664 INSN_CODE (next
) = -1;
8666 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8667 && XEXP (pattern
, 1) == const0_rtx
)
8669 /* This is a tst insn, we can reverse it. */
8670 rtx src
= SET_SRC (pat
);
8671 rtx t
= XEXP (src
,0);
8673 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
8674 XEXP (pattern
, 1) = XEXP (pattern
, 0);
8675 XEXP (pattern
, 0) = const0_rtx
;
8676 INSN_CODE (next
) = -1;
8677 INSN_CODE (insn
) = -1;
8679 else if (true_regnum (XEXP (pattern
, 0)) >= 0
8680 && CONST_INT_P (XEXP (pattern
, 1)))
8682 rtx x
= XEXP (pattern
, 1);
8683 rtx src
= SET_SRC (pat
);
8684 rtx t
= XEXP (src
,0);
8685 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
8687 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
8689 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
8690 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
8691 INSN_CODE (next
) = -1;
8692 INSN_CODE (insn
) = -1;
8699 /* Returns register number for function return value.*/
8701 static inline unsigned int
8702 avr_ret_register (void)
8707 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8710 avr_function_value_regno_p (const unsigned int regno
)
8712 return (regno
== avr_ret_register ());
8715 /* Create an RTX representing the place where a
8716 library function returns a value of mode MODE. */
8719 avr_libcall_value (enum machine_mode mode
,
8720 const_rtx func ATTRIBUTE_UNUSED
)
8722 int offs
= GET_MODE_SIZE (mode
);
8725 offs
= (offs
+ 1) & ~1;
8727 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
8730 /* Create an RTX representing the place where a
8731 function returns a value of data type VALTYPE. */
8734 avr_function_value (const_tree type
,
8735 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
8736 bool outgoing ATTRIBUTE_UNUSED
)
8740 if (TYPE_MODE (type
) != BLKmode
)
8741 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
8743 offs
= int_size_in_bytes (type
);
8746 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
8747 offs
= GET_MODE_SIZE (SImode
);
8748 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
8749 offs
= GET_MODE_SIZE (DImode
);
8751 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
8755 test_hard_reg_class (enum reg_class rclass
, rtx x
)
8757 int regno
= true_regnum (x
);
8761 if (TEST_HARD_REG_CLASS (rclass
, regno
))
8768 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8769 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8772 avr_2word_insn_p (rtx insn
)
8774 if (avr_current_device
->errata_skip
8776 || 2 != get_attr_length (insn
))
8781 switch (INSN_CODE (insn
))
8786 case CODE_FOR_movqi_insn
:
8788 rtx set
= single_set (insn
);
8789 rtx src
= SET_SRC (set
);
8790 rtx dest
= SET_DEST (set
);
8792 /* Factor out LDS and STS from movqi_insn. */
8795 && (REG_P (src
) || src
== const0_rtx
))
8797 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
8799 else if (REG_P (dest
)
8802 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
8808 case CODE_FOR_call_insn
:
8809 case CODE_FOR_call_value_insn
:
8816 jump_over_one_insn_p (rtx insn
, rtx dest
)
8818 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
8821 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
8822 int dest_addr
= INSN_ADDRESSES (uid
);
8823 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
8825 return (jump_offset
== 1
8826 || (jump_offset
== 2
8827 && avr_2word_insn_p (next_active_insn (insn
))));
8830 /* Returns 1 if a value of mode MODE can be stored starting with hard
8831 register number REGNO. On the enhanced core, anything larger than
8832 1 byte must start in even numbered register for "movw" to work
8833 (this way we don't have to check for odd registers everywhere). */
8836 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
8838 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8839 Disallowing QI et al. in these regs might lead to code like
8840 (set (subreg:QI (reg:HI 28) n) ...)
8841 which will result in wrong code because reload does not
8842 handle SUBREGs of hard regsisters like this.
8843 This could be fixed in reload. However, it appears
8844 that fixing reload is not wanted by reload people. */
8846 /* Any GENERAL_REGS register can hold 8-bit values. */
8848 if (GET_MODE_SIZE (mode
) == 1)
8851 /* FIXME: Ideally, the following test is not needed.
8852 However, it turned out that it can reduce the number
8853 of spill fails. AVR and it's poor endowment with
8854 address registers is extreme stress test for reload. */
8856 if (GET_MODE_SIZE (mode
) >= 4
8860 /* All modes larger than 8 bits should start in an even register. */
8862 return !(regno
& 1);
8866 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8869 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
8870 addr_space_t as
, RTX_CODE outer_code
,
8871 RTX_CODE index_code ATTRIBUTE_UNUSED
)
8873 if (!ADDR_SPACE_GENERIC_P (as
))
8875 return POINTER_Z_REGS
;
8879 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
8881 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
8885 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8888 avr_regno_mode_code_ok_for_base_p (int regno
,
8889 enum machine_mode mode ATTRIBUTE_UNUSED
,
8890 addr_space_t as ATTRIBUTE_UNUSED
,
8891 RTX_CODE outer_code
,
8892 RTX_CODE index_code ATTRIBUTE_UNUSED
)
8896 if (!ADDR_SPACE_GENERIC_P (as
))
8898 if (regno
< FIRST_PSEUDO_REGISTER
8906 regno
= reg_renumber
[regno
];
8917 if (regno
< FIRST_PSEUDO_REGISTER
8921 || regno
== ARG_POINTER_REGNUM
))
8925 else if (reg_renumber
)
8927 regno
= reg_renumber
[regno
];
8932 || regno
== ARG_POINTER_REGNUM
)
8939 && PLUS
== outer_code
8949 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8950 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8951 CLOBBER_REG is a QI clobber register or NULL_RTX.
8952 LEN == NULL: output instructions.
8953 LEN != NULL: set *LEN to the length of the instruction sequence
8954 (in words) printed with LEN = NULL.
8955 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8956 If CLEAR_P is false, nothing is known about OP[0].
8958 The effect on cc0 is as follows:
8960 Load 0 to any register except ZERO_REG : NONE
8961 Load ld register with any value : NONE
8962 Anything else: : CLOBBER */
8965 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
8971 int clobber_val
= 1234;
8972 bool cooked_clobber_p
= false;
8974 enum machine_mode mode
= GET_MODE (dest
);
8975 int n
, n_bytes
= GET_MODE_SIZE (mode
);
8977 gcc_assert (REG_P (dest
)
8978 && CONSTANT_P (src
));
8983 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8984 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8986 if (REGNO (dest
) < 16
8987 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
8989 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
8992 /* We might need a clobber reg but don't have one. Look at the value to
8993 be loaded more closely. A clobber is only needed if it is a symbol
8994 or contains a byte that is neither 0, -1 or a power of 2. */
8996 if (NULL_RTX
== clobber_reg
8997 && !test_hard_reg_class (LD_REGS
, dest
)
8998 && (! (CONST_INT_P (src
) || CONST_DOUBLE_P (src
))
8999 || !avr_popcount_each_byte (src
, n_bytes
,
9000 (1 << 0) | (1 << 1) | (1 << 8))))
9002 /* We have no clobber register but need one. Cook one up.
9003 That's cheaper than loading from constant pool. */
9005 cooked_clobber_p
= true;
9006 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
9007 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
9010 /* Now start filling DEST from LSB to MSB. */
9012 for (n
= 0; n
< n_bytes
; n
++)
9015 bool done_byte
= false;
9019 /* Crop the n-th destination byte. */
9021 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
9022 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
9024 if (!CONST_INT_P (src
)
9025 && !CONST_DOUBLE_P (src
))
9027 static const char* const asm_code
[][2] =
9029 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
9030 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
9031 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
9032 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
9037 xop
[2] = clobber_reg
;
9039 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
9044 /* Crop the n-th source byte. */
9046 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
9047 ival
[n
] = INTVAL (xval
);
9049 /* Look if we can reuse the low word by means of MOVW. */
9055 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
9056 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
9058 if (INTVAL (lo16
) == INTVAL (hi16
))
9060 if (0 != INTVAL (lo16
)
9063 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
9070 /* Don't use CLR so that cc0 is set as expected. */
9075 avr_asm_len (ldreg_p
? "ldi %0,0"
9076 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
9077 : "mov %0,__zero_reg__",
9082 if (clobber_val
== ival
[n
]
9083 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
9088 /* LD_REGS can use LDI to move a constant value */
9094 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
9098 /* Try to reuse value already loaded in some lower byte. */
9100 for (j
= 0; j
< n
; j
++)
9101 if (ival
[j
] == ival
[n
])
9106 avr_asm_len ("mov %0,%1", xop
, len
, 1);
9114 /* Need no clobber reg for -1: Use CLR/DEC */
9119 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9121 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
9124 else if (1 == ival
[n
])
9127 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9129 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
9133 /* Use T flag or INC to manage powers of 2 if we have
9136 if (NULL_RTX
== clobber_reg
9137 && single_one_operand (xval
, QImode
))
9140 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
9142 gcc_assert (constm1_rtx
!= xop
[1]);
9147 avr_asm_len ("set", xop
, len
, 1);
9151 avr_asm_len ("clr %0", xop
, len
, 1);
9153 avr_asm_len ("bld %0,%1", xop
, len
, 1);
9157 /* We actually need the LD_REGS clobber reg. */
9159 gcc_assert (NULL_RTX
!= clobber_reg
);
9163 xop
[2] = clobber_reg
;
9164 clobber_val
= ival
[n
];
9166 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9167 "mov %0,%2", xop
, len
, 2);
9170 /* If we cooked up a clobber reg above, restore it. */
9172 if (cooked_clobber_p
)
9174 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
9179 /* Reload the constant OP[1] into the HI register OP[0].
9180 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9181 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9182 need a clobber reg or have to cook one up.
9184 PLEN == NULL: Output instructions.
9185 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9186 by the insns printed.
9191 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
9193 output_reload_in_const (op
, clobber_reg
, plen
, false);
9198 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9199 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9200 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9201 need a clobber reg or have to cook one up.
9203 LEN == NULL: Output instructions.
9205 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9206 by the insns printed.
9211 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
9214 && !test_hard_reg_class (LD_REGS
, op
[0])
9215 && (CONST_INT_P (op
[1])
9216 || CONST_DOUBLE_P (op
[1])))
9218 int len_clr
, len_noclr
;
9220 /* In some cases it is better to clear the destination beforehand, e.g.
9222 CLR R2 CLR R3 MOVW R4,R2 INC R2
9226 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9228 We find it too tedious to work that out in the print function.
9229 Instead, we call the print function twice to get the lengths of
9230 both methods and use the shortest one. */
9232 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
9233 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
9235 if (len_noclr
- len_clr
== 4)
9237 /* Default needs 4 CLR instructions: clear register beforehand. */
9239 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9240 "mov %B0,__zero_reg__" CR_TAB
9241 "movw %C0,%A0", &op
[0], len
, 3);
9243 output_reload_in_const (op
, clobber_reg
, len
, true);
9252 /* Default: destination not pre-cleared. */
9254 output_reload_in_const (op
, clobber_reg
, len
, false);
9259 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
9261 output_reload_in_const (op
, clobber_reg
, len
, false);
9267 avr_output_addr_vec_elt (FILE *stream
, int value
)
9269 if (AVR_HAVE_JMP_CALL
)
9270 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
9272 fprintf (stream
, "\trjmp .L%d\n", value
);
9275 /* Returns true if SCRATCH are safe to be allocated as a scratch
9276 registers (for a define_peephole2) in the current function. */
9279 avr_hard_regno_scratch_ok (unsigned int regno
)
9281 /* Interrupt functions can only use registers that have already been saved
9282 by the prologue, even if they would normally be call-clobbered. */
9284 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9285 && !df_regs_ever_live_p (regno
))
9288 /* Don't allow hard registers that might be part of the frame pointer.
9289 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9290 and don't care for a frame pointer that spans more than one register. */
9292 if ((!reload_completed
|| frame_pointer_needed
)
9293 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
9301 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9304 avr_hard_regno_rename_ok (unsigned int old_reg
,
9305 unsigned int new_reg
)
9307 /* Interrupt functions can only use registers that have already been
9308 saved by the prologue, even if they would normally be
9311 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9312 && !df_regs_ever_live_p (new_reg
))
9315 /* Don't allow hard registers that might be part of the frame pointer.
9316 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9317 and don't care for a frame pointer that spans more than one register. */
9319 if ((!reload_completed
|| frame_pointer_needed
)
9320 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
9321 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
9329 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9330 or memory location in the I/O space (QImode only).
9332 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9333 Operand 1: register operand to test, or CONST_INT memory address.
9334 Operand 2: bit number.
9335 Operand 3: label to jump to if the test is true. */
9338 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
9340 enum rtx_code comp
= GET_CODE (operands
[0]);
9341 bool long_jump
= get_attr_length (insn
) >= 4;
9342 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
9346 else if (comp
== LT
)
9350 comp
= reverse_condition (comp
);
9352 switch (GET_CODE (operands
[1]))
9359 if (low_io_address_operand (operands
[1], QImode
))
9362 output_asm_insn ("sbis %i1,%2", operands
);
9364 output_asm_insn ("sbic %i1,%2", operands
);
9368 output_asm_insn ("in __tmp_reg__,%i1", operands
);
9370 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
9372 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
9375 break; /* CONST_INT */
9380 output_asm_insn ("sbrs %T1%T2", operands
);
9382 output_asm_insn ("sbrc %T1%T2", operands
);
9388 return ("rjmp .+4" CR_TAB
9397 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9400 avr_asm_out_ctor (rtx symbol
, int priority
)
9402 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
9403 default_ctor_section_asm_out_constructor (symbol
, priority
);
9406 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9409 avr_asm_out_dtor (rtx symbol
, int priority
)
9411 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
9412 default_dtor_section_asm_out_destructor (symbol
, priority
);
9415 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9418 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
9420 if (TYPE_MODE (type
) == BLKmode
)
9422 HOST_WIDE_INT size
= int_size_in_bytes (type
);
9423 return (size
== -1 || size
> 8);
9429 /* Worker function for CASE_VALUES_THRESHOLD. */
9432 avr_case_values_threshold (void)
9434 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
9438 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9440 static enum machine_mode
9441 avr_addr_space_address_mode (addr_space_t as
)
9443 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
9447 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9449 static enum machine_mode
9450 avr_addr_space_pointer_mode (addr_space_t as
)
9452 return avr_addr_space_address_mode (as
);
9456 /* Helper for following function. */
9459 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
9466 return REGNO (reg
) == REG_Z
;
9469 /* Avoid combine to propagate hard regs. */
9471 if (can_create_pseudo_p()
9472 && REGNO (reg
) < REG_Z
)
9481 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9484 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
9485 bool strict
, addr_space_t as
)
9494 case ADDR_SPACE_GENERIC
:
9495 return avr_legitimate_address_p (mode
, x
, strict
);
9497 case ADDR_SPACE_FLASH
:
9498 case ADDR_SPACE_FLASH1
:
9499 case ADDR_SPACE_FLASH2
:
9500 case ADDR_SPACE_FLASH3
:
9501 case ADDR_SPACE_FLASH4
:
9502 case ADDR_SPACE_FLASH5
:
9504 switch (GET_CODE (x
))
9507 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
9511 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
9520 case ADDR_SPACE_MEMX
:
9523 && can_create_pseudo_p());
9525 if (LO_SUM
== GET_CODE (x
))
9527 rtx hi
= XEXP (x
, 0);
9528 rtx lo
= XEXP (x
, 1);
9531 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
9533 && REGNO (lo
) == REG_Z
);
9539 if (avr_log
.legitimate_address_p
)
9541 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9542 "reload_completed=%d reload_in_progress=%d %s:",
9543 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
9544 reg_renumber
? "(reg_renumber)" : "");
9546 if (GET_CODE (x
) == PLUS
9547 && REG_P (XEXP (x
, 0))
9548 && CONST_INT_P (XEXP (x
, 1))
9549 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
9552 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
9553 true_regnum (XEXP (x
, 0)));
9556 avr_edump ("\n%r\n", x
);
9563 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9566 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
9567 enum machine_mode mode
, addr_space_t as
)
9569 if (ADDR_SPACE_GENERIC_P (as
))
9570 return avr_legitimize_address (x
, old_x
, mode
);
9572 if (avr_log
.legitimize_address
)
9574 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
9581 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9584 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
9586 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
9587 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
9589 if (avr_log
.progmem
)
9590 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9591 src
, type_from
, type_to
);
9593 /* Up-casting from 16-bit to 24-bit pointer. */
9595 if (as_from
!= ADDR_SPACE_MEMX
9596 && as_to
== ADDR_SPACE_MEMX
)
9600 rtx reg
= gen_reg_rtx (PSImode
);
9602 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
9603 sym
= XEXP (sym
, 0);
9605 /* Look at symbol flags: avr_encode_section_info set the flags
9606 also if attribute progmem was seen so that we get the right
9607 promotion for, e.g. PSTR-like strings that reside in generic space
9608 but are located in flash. In that case we patch the incoming
9611 if (SYMBOL_REF
== GET_CODE (sym
)
9612 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
9614 as_from
= ADDR_SPACE_FLASH
;
9617 /* Linearize memory: RAM has bit 23 set. */
9619 msb
= ADDR_SPACE_GENERIC_P (as_from
)
9621 : avr_addrspace
[as_from
].segment
;
9623 src
= force_reg (Pmode
, src
);
9626 ? gen_zero_extendhipsi2 (reg
, src
)
9627 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
9632 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9634 if (as_from
== ADDR_SPACE_MEMX
9635 && as_to
!= ADDR_SPACE_MEMX
)
9637 rtx new_src
= gen_reg_rtx (Pmode
);
9639 src
= force_reg (PSImode
, src
);
9641 emit_move_insn (new_src
,
9642 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
9650 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9653 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
9654 addr_space_t superset ATTRIBUTE_UNUSED
)
9656 /* Allow any kind of pointer mess. */
9662 /* Worker function for movmemhi expander.
9663 XOP[0] Destination as MEM:BLK
9665 XOP[2] # Bytes to copy
9667 Return TRUE if the expansion is accomplished.
9668 Return FALSE if the operand compination is not supported. */
9671 avr_emit_movmemhi (rtx
*xop
)
9673 HOST_WIDE_INT count
;
9674 enum machine_mode loop_mode
;
9675 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
9676 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
9677 rtx a_hi8
= NULL_RTX
;
9679 if (avr_mem_flash_p (xop
[0]))
9682 if (!CONST_INT_P (xop
[2]))
9685 count
= INTVAL (xop
[2]);
9689 a_src
= XEXP (xop
[1], 0);
9690 a_dest
= XEXP (xop
[0], 0);
9692 if (PSImode
== GET_MODE (a_src
))
9694 gcc_assert (as
== ADDR_SPACE_MEMX
);
9696 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
9697 loop_reg
= gen_rtx_REG (loop_mode
, 24);
9698 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
9700 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
9701 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
9705 int segment
= avr_addrspace
[as
].segment
;
9708 && avr_current_device
->n_flash
> 1)
9710 a_hi8
= GEN_INT (segment
);
9711 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
9713 else if (!ADDR_SPACE_GENERIC_P (as
))
9715 as
= ADDR_SPACE_FLASH
;
9720 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
9721 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
9726 /* FIXME: Register allocator might come up with spill fails if it is left
9727 on its own. Thus, we allocate the pointer registers by hand:
9729 X = destination address */
9731 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
9732 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
9734 /* FIXME: Register allocator does a bad job and might spill address
9735 register(s) inside the loop leading to additional move instruction
9736 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9737 load and store as seperate insns. Instead, we perform the copy
9738 by means of one monolithic insn. */
9740 gcc_assert (TMP_REGNO
== LPM_REGNO
);
9742 if (as
!= ADDR_SPACE_MEMX
)
9744 /* Load instruction ([E]LPM or LD) is known at compile time:
9745 Do the copy-loop inline. */
9747 rtx (*fun
) (rtx
, rtx
, rtx
)
9748 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
9750 insn
= fun (xas
, loop_reg
, loop_reg
);
9754 rtx (*fun
) (rtx
, rtx
)
9755 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
9757 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
9759 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
9762 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
9769 /* Print assembler for movmem_qi, movmem_hi insns...
9771 $1, $2 : Loop register
9773 X : Destination address
9777 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
9779 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
9780 enum machine_mode loop_mode
= GET_MODE (op
[1]);
9781 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
9789 xop
[2] = tmp_reg_rtx
;
9793 avr_asm_len ("0:", xop
, plen
, 0);
9795 /* Load with post-increment */
9802 case ADDR_SPACE_GENERIC
:
9804 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
9807 case ADDR_SPACE_FLASH
:
9810 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
9812 avr_asm_len ("lpm" CR_TAB
9813 "adiw r30,1", xop
, plen
, 2);
9816 case ADDR_SPACE_FLASH1
:
9817 case ADDR_SPACE_FLASH2
:
9818 case ADDR_SPACE_FLASH3
:
9819 case ADDR_SPACE_FLASH4
:
9820 case ADDR_SPACE_FLASH5
:
9823 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
9825 avr_asm_len ("elpm" CR_TAB
9826 "adiw r30,1", xop
, plen
, 2);
9830 /* Store with post-increment */
9832 avr_asm_len ("st X+,%2", xop
, plen
, 1);
9834 /* Decrement loop-counter and set Z-flag */
9836 if (QImode
== loop_mode
)
9838 avr_asm_len ("dec %1", xop
, plen
, 1);
9842 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
9846 avr_asm_len ("subi %A1,1" CR_TAB
9847 "sbci %B1,0", xop
, plen
, 2);
9850 /* Loop until zero */
9852 return avr_asm_len ("brne 0b", xop
, plen
, 1);
9857 /* Helper for __builtin_avr_delay_cycles */
9860 avr_mem_clobber (void)
9862 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
9863 MEM_VOLATILE_P (mem
) = 1;
9868 avr_expand_delay_cycles (rtx operands0
)
9870 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
);
9871 unsigned HOST_WIDE_INT cycles_used
;
9872 unsigned HOST_WIDE_INT loop_count
;
9874 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
9876 loop_count
= ((cycles
- 9) / 6) + 1;
9877 cycles_used
= ((loop_count
- 1) * 6) + 9;
9878 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
9879 avr_mem_clobber()));
9880 cycles
-= cycles_used
;
9883 if (IN_RANGE (cycles
, 262145, 83886081))
9885 loop_count
= ((cycles
- 7) / 5) + 1;
9886 if (loop_count
> 0xFFFFFF)
9887 loop_count
= 0xFFFFFF;
9888 cycles_used
= ((loop_count
- 1) * 5) + 7;
9889 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
9890 avr_mem_clobber()));
9891 cycles
-= cycles_used
;
9894 if (IN_RANGE (cycles
, 768, 262144))
9896 loop_count
= ((cycles
- 5) / 4) + 1;
9897 if (loop_count
> 0xFFFF)
9898 loop_count
= 0xFFFF;
9899 cycles_used
= ((loop_count
- 1) * 4) + 5;
9900 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
9901 avr_mem_clobber()));
9902 cycles
-= cycles_used
;
9905 if (IN_RANGE (cycles
, 6, 767))
9907 loop_count
= cycles
/ 3;
9908 if (loop_count
> 255)
9910 cycles_used
= loop_count
* 3;
9911 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
9912 avr_mem_clobber()));
9913 cycles
-= cycles_used
;
9918 emit_insn (gen_nopv (GEN_INT(2)));
9924 emit_insn (gen_nopv (GEN_INT(1)));
9930 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9933 avr_double_int_push_digit (double_int val
, int base
,
9934 unsigned HOST_WIDE_INT digit
)
9937 ? double_int_lshift (val
, 32, 64, false)
9938 : double_int_mul (val
, uhwi_to_double_int (base
));
9940 return double_int_add (val
, uhwi_to_double_int (digit
));
9944 /* Compute the image of x under f, i.e. perform x --> f(x) */
9947 avr_map (double_int f
, int x
)
9949 return 0xf & double_int_to_uhwi (double_int_rshift (f
, 4*x
, 64, false));
9953 /* Return some metrics of map A. */
9957 /* Number of fixed points in { 0 ... 7 } */
9960 /* Size of preimage of non-fixed points in { 0 ... 7 } */
9963 /* Mask representing the fixed points in { 0 ... 7 } */
9966 /* Size of the preimage of { 0 ... 7 } */
9969 /* Mask that represents the preimage of { f } */
9974 avr_map_metric (double_int a
, int mode
)
9976 unsigned i
, metric
= 0;
9978 for (i
= 0; i
< 8; i
++)
9980 unsigned ai
= avr_map (a
, i
);
9982 if (mode
== MAP_FIXED_0_7
)
9984 else if (mode
== MAP_NONFIXED_0_7
)
9985 metric
+= ai
< 8 && ai
!= i
;
9986 else if (mode
== MAP_MASK_FIXED_0_7
)
9987 metric
|= ((unsigned) (ai
== i
)) << i
;
9988 else if (mode
== MAP_PREIMAGE_0_7
)
9990 else if (mode
== MAP_MASK_PREIMAGE_F
)
9991 metric
|= ((unsigned) (ai
== 0xf)) << i
;
10000 /* Return true if IVAL has a 0xf in its hexadecimal representation
10001 and false, otherwise. Only nibbles 0..7 are taken into account.
10002 Used as constraint helper for C0f and Cxf. */
10005 avr_has_nibble_0xf (rtx ival
)
10007 return 0 != avr_map_metric (rtx_to_double_int (ival
), MAP_MASK_PREIMAGE_F
);
10011 /* We have a set of bits that are mapped by a function F.
10012 Try to decompose F by means of a second function G so that
10018 cost (F o G^-1) + cost (G) < cost (F)
10020 Example: Suppose builtin insert_bits supplies us with the map
10021 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10022 nibble of the result, we can just as well rotate the bits before inserting
10023 them and use the map 0x7654ffff which is cheaper than the original map.
10024 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
10028 /* tree code of binary function G */
10029 enum tree_code code
;
10031 /* The constant second argument of G */
10034 /* G^-1, the inverse of G (*, arg) */
10037 /* The cost of appplying G (*, arg) */
10040 /* The composition F o G^-1 (*, arg) for some function F */
10043 /* For debug purpose only */
10047 static const avr_map_op_t avr_map_op
[] =
10049 { LROTATE_EXPR
, 0, 0x76543210, 0, { 0, 0 }, "id" },
10050 { LROTATE_EXPR
, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10051 { LROTATE_EXPR
, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10052 { LROTATE_EXPR
, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10053 { LROTATE_EXPR
, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10054 { LROTATE_EXPR
, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10055 { LROTATE_EXPR
, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10056 { LROTATE_EXPR
, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10057 { RSHIFT_EXPR
, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10058 { RSHIFT_EXPR
, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10059 { RSHIFT_EXPR
, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10060 { RSHIFT_EXPR
, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10061 { RSHIFT_EXPR
, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10062 { LSHIFT_EXPR
, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10063 { LSHIFT_EXPR
, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10067 /* Try to decompose F as F = (F o G^-1) o G as described above.
10068 The result is a struct representing F o G^-1 and G.
10069 If result.cost < 0 then such a decomposition does not exist. */
10071 static avr_map_op_t
10072 avr_map_decompose (double_int f
, const avr_map_op_t
*g
, bool val_const_p
)
10075 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
10076 avr_map_op_t f_ginv
= *g
;
10077 double_int ginv
= uhwi_to_double_int (g
->ginv
);
10081 /* Step 1: Computing F o G^-1 */
10083 for (i
= 7; i
>= 0; i
--)
10085 int x
= avr_map (f
, i
);
10089 x
= avr_map (ginv
, x
);
10091 /* The bit is no element of the image of G: no avail (cost = -1) */
10097 f_ginv
.map
= avr_double_int_push_digit (f_ginv
.map
, 16, x
);
10100 /* Step 2: Compute the cost of the operations.
10101 The overall cost of doing an operation prior to the insertion is
10102 the cost of the insertion plus the cost of the operation. */
10104 /* Step 2a: Compute cost of F o G^-1 */
10106 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
10108 /* The mapping consists only of fixed points and can be folded
10109 to AND/OR logic in the remainder. Reasonable cost is 3. */
10111 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
10117 /* Get the cost of the insn by calling the output worker with some
10118 fake values. Mimic effect of reloading xop[3]: Unused operands
10119 are mapped to 0 and used operands are reloaded to xop[0]. */
10121 xop
[0] = all_regs_rtx
[24];
10122 xop
[1] = gen_int_mode (double_int_to_uhwi (f_ginv
.map
), SImode
);
10123 xop
[2] = all_regs_rtx
[25];
10124 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
10126 avr_out_insert_bits (xop
, &f_ginv
.cost
);
10128 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
10131 /* Step 2b: Add cost of G */
10133 f_ginv
.cost
+= g
->cost
;
10135 if (avr_log
.builtin
)
10136 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
10142 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10143 XOP[0] and XOP[1] don't overlap.
10144 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10145 If FIXP_P = false: Just move the bit if its position in the destination
10146 is different to its source position. */
10149 avr_move_bits (rtx
*xop
, double_int map
, bool fixp_p
, int *plen
)
10153 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10154 int t_bit_src
= -1;
10156 /* We order the operations according to the requested source bit b. */
10158 for (b
= 0; b
< 8; b
++)
10159 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
10161 int bit_src
= avr_map (map
, bit_dest
);
10165 /* Same position: No need to copy as requested by FIXP_P. */
10166 || (bit_dest
== bit_src
&& !fixp_p
))
10169 if (t_bit_src
!= bit_src
)
10171 /* Source bit is not yet in T: Store it to T. */
10173 t_bit_src
= bit_src
;
10175 xop
[3] = GEN_INT (bit_src
);
10176 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
10179 /* Load destination bit with T. */
10181 xop
[3] = GEN_INT (bit_dest
);
10182 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
10187 /* PLEN == 0: Print assembler code for `insert_bits'.
10188 PLEN != 0: Compute code length in bytes.
10191 OP[1]: The mapping composed of nibbles. If nibble no. N is
10192 0: Bit N of result is copied from bit OP[2].0
10194 7: Bit N of result is copied from bit OP[2].7
10195 0xf: Bit N of result is copied from bit OP[3].N
10196 OP[2]: Bits to be inserted
10197 OP[3]: Target value */
10200 avr_out_insert_bits (rtx
*op
, int *plen
)
10202 double_int map
= rtx_to_double_int (op
[1]);
10203 unsigned mask_fixed
;
10204 bool fixp_p
= true;
10211 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
10215 else if (flag_print_asm_name
)
10216 fprintf (asm_out_file
,
10217 ASM_COMMENT_START
"map = 0x%08" HOST_LONG_FORMAT
"x\n",
10218 double_int_to_uhwi (map
) & GET_MODE_MASK (SImode
));
10220 /* If MAP has fixed points it might be better to initialize the result
10221 with the bits to be inserted instead of moving all bits by hand. */
10223 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
10225 if (REGNO (xop
[0]) == REGNO (xop
[1]))
10227 /* Avoid early-clobber conflicts */
10229 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
10230 xop
[1] = tmp_reg_rtx
;
10234 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
10236 /* XOP[2] is used and reloaded to XOP[0] already */
10238 int n_fix
= 0, n_nofix
= 0;
10240 gcc_assert (REG_P (xop
[2]));
10242 /* Get the code size of the bit insertions; once with all bits
10243 moved and once with fixed points omitted. */
10245 avr_move_bits (xop
, map
, true, &n_fix
);
10246 avr_move_bits (xop
, map
, false, &n_nofix
);
10248 if (fixp_p
&& n_fix
- n_nofix
> 3)
10250 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
10252 avr_asm_len ("eor %0,%1" CR_TAB
10253 "andi %0,%3" CR_TAB
10254 "eor %0,%1", xop
, plen
, 3);
10260 /* XOP[2] is unused */
10262 if (fixp_p
&& mask_fixed
)
10264 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
10269 /* Move/insert remaining bits. */
10271 avr_move_bits (xop
, map
, fixp_p
, plen
);
10277 /* IDs for all the AVR builtins. */
10279 enum avr_builtin_id
10282 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
10283 #include "builtins.def"
10289 struct GTY(()) avr_builtin_description
10291 enum insn_code icode
;
10298 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
10299 that a built-in's ID can be used to access the built-in by means of
10302 static GTY(()) struct avr_builtin_description
10303 avr_bdesc
[AVR_BUILTIN_COUNT
] =
10306 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE) \
10307 { ICODE, NAME, N_ARGS, NULL_TREE },
10308 #include "builtins.def"
10313 /* Implement `TARGET_BUILTIN_DECL'. */
10316 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
10318 if (id
< AVR_BUILTIN_COUNT
)
10319 return avr_bdesc
[id
].fndecl
;
10321 return error_mark_node
;
10326 avr_init_builtin_int24 (void)
10328 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
10329 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
10331 (*lang_hooks
.types
.register_builtin_type
) (int24_type
, "__int24");
10332 (*lang_hooks
.types
.register_builtin_type
) (uint24_type
, "__uint24");
10336 /* Implement `TARGET_INIT_BUILTINS' */
10337 /* Set up all builtin functions for this target. */
10340 avr_init_builtins (void)
10342 tree void_ftype_void
10343 = build_function_type_list (void_type_node
, NULL_TREE
);
10344 tree uchar_ftype_uchar
10345 = build_function_type_list (unsigned_char_type_node
,
10346 unsigned_char_type_node
,
10348 tree uint_ftype_uchar_uchar
10349 = build_function_type_list (unsigned_type_node
,
10350 unsigned_char_type_node
,
10351 unsigned_char_type_node
,
10353 tree int_ftype_char_char
10354 = build_function_type_list (integer_type_node
,
10358 tree int_ftype_char_uchar
10359 = build_function_type_list (integer_type_node
,
10361 unsigned_char_type_node
,
10363 tree void_ftype_ulong
10364 = build_function_type_list (void_type_node
,
10365 long_unsigned_type_node
,
10368 tree uchar_ftype_ulong_uchar_uchar
10369 = build_function_type_list (unsigned_char_type_node
,
10370 long_unsigned_type_node
,
10371 unsigned_char_type_node
,
10372 unsigned_char_type_node
,
10375 tree const_memx_void_node
10376 = build_qualified_type (void_type_node
,
10378 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
10380 tree const_memx_ptr_type_node
10381 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
10383 tree char_ftype_const_memx_ptr
10384 = build_function_type_list (char_type_node
,
10385 const_memx_ptr_type_node
,
10388 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) \
10389 gcc_assert (ID < AVR_BUILTIN_COUNT); \
10390 avr_bdesc[ID].fndecl \
10391 = add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
10392 #include "builtins.def"
10395 avr_init_builtin_int24 ();
10399 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10402 avr_expand_unop_builtin (enum insn_code icode
, tree exp
,
10406 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10407 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10408 enum machine_mode op0mode
= GET_MODE (op0
);
10409 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10410 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10413 || GET_MODE (target
) != tmode
10414 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10416 target
= gen_reg_rtx (tmode
);
10419 if (op0mode
== SImode
&& mode0
== HImode
)
10422 op0
= gen_lowpart (HImode
, op0
);
10425 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
10427 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10428 op0
= copy_to_mode_reg (mode0
, op0
);
10430 pat
= GEN_FCN (icode
) (target
, op0
);
10440 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10443 avr_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
)
10446 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10447 tree arg1
= CALL_EXPR_ARG (exp
, 1);
10448 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10449 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10450 enum machine_mode op0mode
= GET_MODE (op0
);
10451 enum machine_mode op1mode
= GET_MODE (op1
);
10452 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10453 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10454 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
10457 || GET_MODE (target
) != tmode
10458 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10460 target
= gen_reg_rtx (tmode
);
10463 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
10466 op0
= gen_lowpart (HImode
, op0
);
10469 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
10472 op1
= gen_lowpart (HImode
, op1
);
10475 /* In case the insn wants input operands in modes different from
10476 the result, abort. */
10478 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
10479 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
10481 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10482 op0
= copy_to_mode_reg (mode0
, op0
);
10484 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
10485 op1
= copy_to_mode_reg (mode1
, op1
);
10487 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
10496 /* Subroutine of avr_expand_builtin to take care of 3-operand insns. */
10499 avr_expand_triop_builtin (enum insn_code icode
, tree exp
, rtx target
)
10502 tree arg0
= CALL_EXPR_ARG (exp
, 0);
10503 tree arg1
= CALL_EXPR_ARG (exp
, 1);
10504 tree arg2
= CALL_EXPR_ARG (exp
, 2);
10505 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10506 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10507 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10508 enum machine_mode op0mode
= GET_MODE (op0
);
10509 enum machine_mode op1mode
= GET_MODE (op1
);
10510 enum machine_mode op2mode
= GET_MODE (op2
);
10511 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10512 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
10513 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
10514 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
10517 || GET_MODE (target
) != tmode
10518 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
10520 target
= gen_reg_rtx (tmode
);
10523 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
10526 op0
= gen_lowpart (HImode
, op0
);
10529 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
10532 op1
= gen_lowpart (HImode
, op1
);
10535 if ((op2mode
== SImode
|| op2mode
== VOIDmode
) && mode2
== HImode
)
10538 op2
= gen_lowpart (HImode
, op2
);
10541 /* In case the insn wants input operands in modes different from
10542 the result, abort. */
10544 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
10545 && (op1mode
== mode1
|| op1mode
== VOIDmode
)
10546 && (op2mode
== mode2
|| op2mode
== VOIDmode
));
10548 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
10549 op0
= copy_to_mode_reg (mode0
, op0
);
10551 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
10552 op1
= copy_to_mode_reg (mode1
, op1
);
10554 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
10555 op2
= copy_to_mode_reg (mode2
, op2
);
10557 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
10567 /* Implement `TARGET_EXPAND_BUILTIN'. */
10568 /* Expand an expression EXP that calls a built-in function,
10569 with result going to TARGET if that's convenient
10570 (and in mode MODE if that's convenient).
10571 SUBTARGET may be used as the target for computing one of EXP's operands.
10572 IGNORE is nonzero if the value is to be ignored. */
10575 avr_expand_builtin (tree exp
, rtx target
,
10576 rtx subtarget ATTRIBUTE_UNUSED
,
10577 enum machine_mode mode ATTRIBUTE_UNUSED
,
10578 int ignore ATTRIBUTE_UNUSED
)
10580 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
10581 const char* bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
10582 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
10583 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
10587 gcc_assert (id
< AVR_BUILTIN_COUNT
);
10591 case AVR_BUILTIN_NOP
:
10592 emit_insn (gen_nopv (GEN_INT(1)));
10595 case AVR_BUILTIN_DELAY_CYCLES
:
10597 arg0
= CALL_EXPR_ARG (exp
, 0);
10598 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10600 if (!CONST_INT_P (op0
))
10601 error ("%s expects a compile time integer constant", bname
);
10603 avr_expand_delay_cycles (op0
);
10608 case AVR_BUILTIN_INSERT_BITS
:
10610 arg0
= CALL_EXPR_ARG (exp
, 0);
10611 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
10613 if (!CONST_INT_P (op0
))
10615 error ("%s expects a compile time long integer constant"
10616 " as first argument", bname
);
10622 /* No special treatment needed: vanilla expand. */
10627 emit_insn ((GEN_FCN (d
->icode
)) (target
));
10631 return avr_expand_unop_builtin (d
->icode
, exp
, target
);
10634 return avr_expand_binop_builtin (d
->icode
, exp
, target
);
10637 return avr_expand_triop_builtin (d
->icode
, exp
, target
);
10640 gcc_unreachable ();
10644 /* Implement `TARGET_FOLD_BUILTIN'. */
10647 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
10648 bool ignore ATTRIBUTE_UNUSED
)
10650 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
10651 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
10661 case AVR_BUILTIN_SWAP
:
10663 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
10664 build_int_cst (val_type
, 4));
10667 case AVR_BUILTIN_INSERT_BITS
:
10669 tree tbits
= arg
[1];
10670 tree tval
= arg
[2];
10672 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
10674 bool changed
= false;
10676 avr_map_op_t best_g
;
10678 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
10680 /* No constant as first argument: Don't fold this and run into
10681 error in avr_expand_builtin. */
10686 map
= tree_to_double_int (arg
[0]);
10687 tmap
= double_int_to_tree (map_type
, map
);
10689 if (TREE_CODE (tval
) != INTEGER_CST
10690 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
10692 /* There are no F in the map, i.e. 3rd operand is unused.
10693 Replace that argument with some constant to render
10694 respective input unused. */
10696 tval
= build_int_cst (val_type
, 0);
10700 if (TREE_CODE (tbits
) != INTEGER_CST
10701 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
10703 /* Similar for the bits to be inserted. If they are unused,
10704 we can just as well pass 0. */
10706 tbits
= build_int_cst (val_type
, 0);
10709 if (TREE_CODE (tbits
) == INTEGER_CST
)
10711 /* Inserting bits known at compile time is easy and can be
10712 performed by AND and OR with appropriate masks. */
10714 int bits
= TREE_INT_CST_LOW (tbits
);
10715 int mask_ior
= 0, mask_and
= 0xff;
10717 for (i
= 0; i
< 8; i
++)
10719 int mi
= avr_map (map
, i
);
10723 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
10724 else mask_and
&= ~(1 << i
);
10728 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
10729 build_int_cst (val_type
, mask_ior
));
10730 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
10731 build_int_cst (val_type
, mask_and
));
10735 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
10737 /* If bits don't change their position we can use vanilla logic
10738 to merge the two arguments. */
10740 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
10742 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
10743 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
10745 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
10746 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
10747 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
10750 /* Try to decomposing map to reduce overall cost. */
10752 if (avr_log
.builtin
)
10753 avr_edump ("\n%?: %X\n%?: ROL cost: ", map
);
10755 best_g
= avr_map_op
[0];
10756 best_g
.cost
= 1000;
10758 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
10761 = avr_map_decompose (map
, avr_map_op
+ i
,
10762 TREE_CODE (tval
) == INTEGER_CST
);
10764 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
10768 if (avr_log
.builtin
)
10771 if (best_g
.arg
== 0)
10772 /* No optimization found */
10775 /* Apply operation G to the 2nd argument. */
10777 if (avr_log
.builtin
)
10778 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
10779 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
10781 /* Do right-shifts arithmetically: They copy the MSB instead of
10782 shifting in a non-usable value (0) as with logic right-shift. */
10784 tbits
= fold_convert (signed_char_type_node
, tbits
);
10785 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
10786 build_int_cst (val_type
, best_g
.arg
));
10787 tbits
= fold_convert (val_type
, tbits
);
10789 /* Use map o G^-1 instead of original map to undo the effect of G. */
10791 tmap
= double_int_to_tree (map_type
, best_g
.map
);
10793 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
10794 } /* AVR_BUILTIN_INSERT_BITS */
10802 /* Initialize the GCC target structure. */
10804 #undef TARGET_ASM_ALIGNED_HI_OP
10805 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
10806 #undef TARGET_ASM_ALIGNED_SI_OP
10807 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
10808 #undef TARGET_ASM_UNALIGNED_HI_OP
10809 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
10810 #undef TARGET_ASM_UNALIGNED_SI_OP
10811 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
10812 #undef TARGET_ASM_INTEGER
10813 #define TARGET_ASM_INTEGER avr_assemble_integer
10814 #undef TARGET_ASM_FILE_START
10815 #define TARGET_ASM_FILE_START avr_file_start
10816 #undef TARGET_ASM_FILE_END
10817 #define TARGET_ASM_FILE_END avr_file_end
10819 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
10820 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
10821 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
10822 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
10824 #undef TARGET_FUNCTION_VALUE
10825 #define TARGET_FUNCTION_VALUE avr_function_value
10826 #undef TARGET_LIBCALL_VALUE
10827 #define TARGET_LIBCALL_VALUE avr_libcall_value
10828 #undef TARGET_FUNCTION_VALUE_REGNO_P
10829 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
10831 #undef TARGET_ATTRIBUTE_TABLE
10832 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
10833 #undef TARGET_INSERT_ATTRIBUTES
10834 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
10835 #undef TARGET_SECTION_TYPE_FLAGS
10836 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
10838 #undef TARGET_ASM_NAMED_SECTION
10839 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
10840 #undef TARGET_ASM_INIT_SECTIONS
10841 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
10842 #undef TARGET_ENCODE_SECTION_INFO
10843 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
10844 #undef TARGET_ASM_SELECT_SECTION
10845 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
10847 #undef TARGET_REGISTER_MOVE_COST
10848 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
10849 #undef TARGET_MEMORY_MOVE_COST
10850 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
10851 #undef TARGET_RTX_COSTS
10852 #define TARGET_RTX_COSTS avr_rtx_costs
10853 #undef TARGET_ADDRESS_COST
10854 #define TARGET_ADDRESS_COST avr_address_cost
10855 #undef TARGET_MACHINE_DEPENDENT_REORG
10856 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
10857 #undef TARGET_FUNCTION_ARG
10858 #define TARGET_FUNCTION_ARG avr_function_arg
10859 #undef TARGET_FUNCTION_ARG_ADVANCE
10860 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
10862 #undef TARGET_RETURN_IN_MEMORY
10863 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
10865 #undef TARGET_STRICT_ARGUMENT_NAMING
10866 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
10868 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
10869 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
10871 #undef TARGET_HARD_REGNO_SCRATCH_OK
10872 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
10873 #undef TARGET_CASE_VALUES_THRESHOLD
10874 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
10876 #undef TARGET_FRAME_POINTER_REQUIRED
10877 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
10878 #undef TARGET_CAN_ELIMINATE
10879 #define TARGET_CAN_ELIMINATE avr_can_eliminate
10881 #undef TARGET_CLASS_LIKELY_SPILLED_P
10882 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
10884 #undef TARGET_OPTION_OVERRIDE
10885 #define TARGET_OPTION_OVERRIDE avr_option_override
10887 #undef TARGET_CANNOT_MODIFY_JUMPS_P
10888 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
10890 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
10891 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
10893 #undef TARGET_INIT_BUILTINS
10894 #define TARGET_INIT_BUILTINS avr_init_builtins
10896 #undef TARGET_BUILTIN_DECL
10897 #define TARGET_BUILTIN_DECL avr_builtin_decl
10899 #undef TARGET_EXPAND_BUILTIN
10900 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
10902 #undef TARGET_FOLD_BUILTIN
10903 #define TARGET_FOLD_BUILTIN avr_fold_builtin
10905 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
10906 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
10908 #undef TARGET_SCALAR_MODE_SUPPORTED_P
10909 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
10911 #undef TARGET_ADDR_SPACE_SUBSET_P
10912 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
10914 #undef TARGET_ADDR_SPACE_CONVERT
10915 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
10917 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
10918 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
10920 #undef TARGET_ADDR_SPACE_POINTER_MODE
10921 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
10923 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
10924 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
10925 avr_addr_space_legitimate_address_p
10927 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
10928 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
10930 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
10931 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
10933 #undef TARGET_PRINT_OPERAND
10934 #define TARGET_PRINT_OPERAND avr_print_operand
10935 #undef TARGET_PRINT_OPERAND_ADDRESS
10936 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
10937 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
10938 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
10940 struct gcc_target targetm
= TARGET_INITIALIZER
;
10943 #include "gt-avr.h"