1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
/* True iff the rtx X is a CONST_FIXED node.  */
#define CONST_FIXED_P(X) (GET_CODE (X) == CONST_FIXED)
/* Maximal allowed offset for an address used with the LD command.
   (NOTE(review): presumably 64 comes from the 6-bit LDD displacement
   range 0..63, reduced by the access size of MODE — confirm against
   the AVR instruction set.)  */
#define MAX_LD_OFFSET(MODE) (64 - (signed int) GET_MODE_SIZE (MODE))
/* Return true if STR starts with PREFIX and false, otherwise.
   Arguments are parenthesized in the expansion so that expressions
   (e.g. "a ? b : c") may be passed safely.  Note: PREFIX is
   evaluated twice.  */
#define STR_PREFIX_P(STR,PREFIX)                                \
  (0 == strncmp ((STR), (PREFIX), strlen (PREFIX)))
62 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
63 address space where data is to be located.
64 As the only non-generic address spaces are all located in Flash,
65 this can be used to test if data shall go into some .progmem* section.
66 This must be the rightmost field of machine dependent section flags. */
67 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
69 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
70 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fixed to use the macro parameter (SYM) instead of a hard-coded
   lowercase 'sym', so any lvalue may be passed.  Wrapped in
   do { } while (0) so the two statements behave as one statement
   in all contexts.  SYM is evaluated twice.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)
/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Fixed to use the macro parameter (SYM) instead of a hard-coded
   lowercase 'sym', so any expression may be passed.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
85 /* Known address spaces. The order must be the same as in the respective
86 enum from avr.h (or designated initializers must be used). */
87 const avr_addrspace_t avr_addrspace
[] =
89 { ADDR_SPACE_RAM
, 0, 2, "" , 0 },
90 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0 },
91 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1 },
92 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2 },
93 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3 },
94 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4 },
95 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5 },
96 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0 },
100 /* Map 64-k Flash segment to section prefix. */
101 static const char* const progmem_section_prefix
[6] =
111 /* Holding RAM addresses of some SFRs used by the compiler and that
112 are unique over all devices in an architecture like 'avr4'. */
116 /* SREG: The processor status */
119 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
125 /* RAMPZ: The high byte of 24-bit address used with ELPM */
128 /* SP: The stack pointer and its low and high byte */
133 static avr_addr_t avr_addr
;
136 /* Prototypes for local helper functions. */
138 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
139 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
140 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
141 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
142 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
143 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
145 static int get_sequence_length (rtx insns
);
146 static int sequent_regs_live (void);
147 static const char *ptrreg_to_str (int);
148 static const char *cond_string (enum rtx_code
);
149 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
150 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
152 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
153 static struct machine_function
* avr_init_machine_status (void);
156 /* Prototypes for hook implementors if needed before their implementation. */
158 static bool avr_rtx_costs (rtx
, int, int, int, int *, bool);
161 /* Allocate registers from r25 to r8 for parameters for function calls. */
162 #define FIRST_CUM_REG 26
164 /* Implicit target register of LPM instruction (R0) */
165 extern GTY(()) rtx lpm_reg_rtx
;
168 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
169 extern GTY(()) rtx lpm_addr_reg_rtx
;
170 rtx lpm_addr_reg_rtx
;
172 /* Temporary register RTX (reg:QI TMP_REGNO) */
173 extern GTY(()) rtx tmp_reg_rtx
;
176 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
177 extern GTY(()) rtx zero_reg_rtx
;
180 /* RTXs for all general purpose registers as QImode */
181 extern GTY(()) rtx all_regs_rtx
[32];
182 rtx all_regs_rtx
[32];
184 /* SREG, the processor status */
185 extern GTY(()) rtx sreg_rtx
;
188 /* RAMP* special function registers */
189 extern GTY(()) rtx rampd_rtx
;
190 extern GTY(()) rtx rampx_rtx
;
191 extern GTY(()) rtx rampy_rtx
;
192 extern GTY(()) rtx rampz_rtx
;
198 /* RTX containing the strings "" and "e", respectively */
199 static GTY(()) rtx xstring_empty
;
200 static GTY(()) rtx xstring_e
;
202 /* Preprocessor macros to define depending on MCU type. */
203 const char *avr_extra_arch_macro
;
205 /* Current architecture. */
206 const struct base_arch_s
*avr_current_arch
;
208 /* Current device. */
209 const struct mcu_type_s
*avr_current_device
;
211 /* Section to put switch tables in. */
212 static GTY(()) section
*progmem_swtable_section
;
214 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
215 or to address space __flash*. */
216 static GTY(()) section
*progmem_section
[6];
218 /* Condition for insns/expanders from avr-dimode.md. */
219 bool avr_have_dimode
= true;
221 /* To track if code will use .bss and/or .data. */
222 bool avr_need_clear_bss_p
= false;
223 bool avr_need_copy_data_p
= false;
227 /* Custom function to count number of set bits. */
230 avr_popcount (unsigned int val
)
244 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
245 Return true if the least significant N_BYTES bytes of XVAL all have a
246 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
247 of integers which contains an integer N iff bit N of POP_MASK is set. */
250 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
254 enum machine_mode mode
= GET_MODE (xval
);
256 if (VOIDmode
== mode
)
259 for (i
= 0; i
< n_bytes
; i
++)
261 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
262 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
264 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
272 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
273 the bit representation of X by "casting" it to CONST_INT. */
276 avr_to_int_mode (rtx x
)
278 enum machine_mode mode
= GET_MODE (x
);
280 return VOIDmode
== mode
282 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
286 /* Implement `TARGET_OPTION_OVERRIDE'. */
289 avr_option_override (void)
291 flag_delete_null_pointer_checks
= 0;
293 /* caller-save.c looks for call-clobbered hard registers that are assigned
294 to pseudos that cross calls and tries to save-restore them around calls
295 in order to reduce the number of stack slots needed.
297 This might lead to situations where reload is no longer able to cope
298 with the challenge of AVR's very few address registers and fails to
299 perform the requested spills. */
302 flag_caller_saves
= 0;
304 /* Unwind tables currently require a frame pointer for correctness,
305 see toplev.c:process_options(). */
307 if ((flag_unwind_tables
308 || flag_non_call_exceptions
309 || flag_asynchronous_unwind_tables
)
310 && !ACCUMULATE_OUTGOING_ARGS
)
312 flag_omit_frame_pointer
= 0;
315 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
316 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
317 avr_extra_arch_macro
= avr_current_device
->macro
;
319 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
321 /* SREG: Status Register containing flags like I (global IRQ) */
322 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
324 /* RAMPZ: Address' high part when loading via ELPM */
325 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
327 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
328 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
329 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
330 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
332 /* SP: Stack Pointer (SP_H:SP_L) */
333 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
334 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
336 init_machine_status
= avr_init_machine_status
;
338 avr_log_set_avr_log();
341 /* Function to set up the backend function structure. */
343 static struct machine_function
*
344 avr_init_machine_status (void)
346 return ggc_alloc_cleared_machine_function ();
350 /* Implement `INIT_EXPANDERS'. */
351 /* The function works like a singleton. */
354 avr_init_expanders (void)
358 for (regno
= 0; regno
< 32; regno
++)
359 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
361 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
362 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
363 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
365 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
367 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
368 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
369 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
370 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
371 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
373 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
374 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
378 /* Return register class for register R. */
381 avr_regno_reg_class (int r
)
383 static const enum reg_class reg_class_tab
[] =
387 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
388 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
389 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
390 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
392 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
393 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
395 ADDW_REGS
, ADDW_REGS
,
397 POINTER_X_REGS
, POINTER_X_REGS
,
399 POINTER_Y_REGS
, POINTER_Y_REGS
,
401 POINTER_Z_REGS
, POINTER_Z_REGS
,
407 return reg_class_tab
[r
];
413 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
416 avr_scalar_mode_supported_p (enum machine_mode mode
)
418 if (ALL_FIXED_POINT_MODE_P (mode
))
424 return default_scalar_mode_supported_p (mode
);
428 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
431 avr_decl_flash_p (tree decl
)
433 if (TREE_CODE (decl
) != VAR_DECL
434 || TREE_TYPE (decl
) == error_mark_node
)
439 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
443 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
444 address space and FALSE, otherwise. */
447 avr_decl_memx_p (tree decl
)
449 if (TREE_CODE (decl
) != VAR_DECL
450 || TREE_TYPE (decl
) == error_mark_node
)
455 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
459 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
462 avr_mem_flash_p (rtx x
)
465 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
469 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
470 address space and FALSE, otherwise. */
473 avr_mem_memx_p (rtx x
)
476 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
480 /* A helper for the subsequent function attribute used to dig for
481 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
484 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
486 if (FUNCTION_DECL
== TREE_CODE (func
))
488 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
493 func
= TREE_TYPE (func
);
496 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
497 || TREE_CODE (func
) == METHOD_TYPE
);
499 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
502 /* Return nonzero if FUNC is a naked function. */
505 avr_naked_function_p (tree func
)
507 return avr_lookup_function_attribute1 (func
, "naked");
510 /* Return nonzero if FUNC is an interrupt function as specified
511 by the "interrupt" attribute. */
514 avr_interrupt_function_p (tree func
)
516 return avr_lookup_function_attribute1 (func
, "interrupt");
519 /* Return nonzero if FUNC is a signal function as specified
520 by the "signal" attribute. */
523 avr_signal_function_p (tree func
)
525 return avr_lookup_function_attribute1 (func
, "signal");
528 /* Return nonzero if FUNC is an OS_task function. */
531 avr_OS_task_function_p (tree func
)
533 return avr_lookup_function_attribute1 (func
, "OS_task");
536 /* Return nonzero if FUNC is an OS_main function. */
539 avr_OS_main_function_p (tree func
)
541 return avr_lookup_function_attribute1 (func
, "OS_main");
545 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
546 /* Sanity checking for above function attributes. */
549 avr_set_current_function (tree decl
)
554 if (decl
== NULL_TREE
555 || current_function_decl
== NULL_TREE
556 || current_function_decl
== error_mark_node
557 || cfun
->machine
->attributes_checked_p
)
560 loc
= DECL_SOURCE_LOCATION (decl
);
562 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
563 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
564 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
565 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
566 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
568 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
570 /* Too many attributes make no sense as they request conflicting features. */
572 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
573 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
574 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
575 " exclusive", "OS_task", "OS_main", isr
);
577 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
579 if (cfun
->machine
->is_naked
580 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
581 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
582 " no effect on %qs function", "OS_task", "OS_main", "naked");
584 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
586 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
587 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
588 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
590 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC started
591 using this when it switched from SIGNAL and INTERRUPT to ISR. */
593 if (cfun
->machine
->is_interrupt
)
594 cfun
->machine
->is_signal
= 0;
596 /* Interrupt handlers must be void __vector (void) functions. */
598 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
599 error_at (loc
, "%qs function cannot have arguments", isr
);
601 if (TREE_CODE (ret
) != VOID_TYPE
)
602 error_at (loc
, "%qs function cannot return a value", isr
);
604 /* If the function has the 'signal' or 'interrupt' attribute, ensure
605 that the name of the function is "__vector_NN" so as to catch
606 when the user misspells the vector name. */
608 if (!STR_PREFIX_P (name
, "__vector"))
609 warning_at (loc
, 0, "%qs appears to be a misspelled %s handler",
613 /* Avoid the above diagnosis to be printed more than once. */
615 cfun
->machine
->attributes_checked_p
= 1;
619 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
622 avr_accumulate_outgoing_args (void)
625 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
627 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
628 what offset is correct. In some cases it is relative to
629 virtual_outgoing_args_rtx and in others it is relative to
630 virtual_stack_vars_rtx. For example code see
631 gcc.c-torture/execute/built-in-setjmp.c
632 gcc.c-torture/execute/builtins/sprintf-chk.c */
634 return (TARGET_ACCUMULATE_OUTGOING_ARGS
635 && !(cfun
->calls_setjmp
636 || cfun
->has_nonlocal_label
));
640 /* Report contribution of accumulated outgoing arguments to stack size. */
643 avr_outgoing_args_size (void)
645 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
649 /* Implement `STARTING_FRAME_OFFSET'. */
650 /* This is the offset from the frame pointer register to the first stack slot
651 that contains a variable living in the frame. */
654 avr_starting_frame_offset (void)
656 return 1 + avr_outgoing_args_size ();
660 /* Return the number of hard registers to push/pop in the prologue/epilogue
661 of the current function, and optionally store these registers in SET. */
664 avr_regs_to_save (HARD_REG_SET
*set
)
667 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
670 CLEAR_HARD_REG_SET (*set
);
673 /* No need to save any registers if the function never returns or
674 has the "OS_task" or "OS_main" attribute. */
675 if (TREE_THIS_VOLATILE (current_function_decl
)
676 || cfun
->machine
->is_OS_task
677 || cfun
->machine
->is_OS_main
)
680 for (reg
= 0; reg
< 32; reg
++)
682 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
683 any global register variables. */
687 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
688 || (df_regs_ever_live_p (reg
)
689 && (int_or_sig_p
|| !call_used_regs
[reg
])
690 /* Don't record frame pointer registers here. They are treated
691 individually in prologue. */
692 && !(frame_pointer_needed
693 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
696 SET_HARD_REG_BIT (*set
, reg
);
704 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
707 avr_allocate_stack_slots_for_args (void)
709 return !cfun
->machine
->is_naked
;
713 /* Return true if register FROM can be eliminated via register TO. */
716 avr_can_eliminate (const int from
, const int to
)
718 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
719 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
720 || ((from
== FRAME_POINTER_REGNUM
721 || from
== FRAME_POINTER_REGNUM
+ 1)
722 && !frame_pointer_needed
));
726 /* Implement TARGET_WARN_FUNC_RETURN. */
729 avr_warn_func_return (tree decl
)
731 /* Naked functions are implemented entirely in assembly, including the
732 return sequence, so suppress warnings about this. */
733 return !avr_naked_function_p (decl
);
736 /* Compute offset between arg_pointer and frame_pointer. */
739 avr_initial_elimination_offset (int from
, int to
)
741 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
745 int offset
= frame_pointer_needed
? 2 : 0;
746 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
748 offset
+= avr_regs_to_save (NULL
);
749 return (get_frame_size () + avr_outgoing_args_size()
750 + avr_pc_size
+ 1 + offset
);
755 /* Helper for the function below. */
758 avr_adjust_type_node (tree
*node
, enum machine_mode mode
, int sat_p
)
760 *node
= make_node (FIXED_POINT_TYPE
);
761 TYPE_SATURATING (*node
) = sat_p
;
762 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
763 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
764 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
765 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
766 TYPE_ALIGN (*node
) = 8;
767 SET_TYPE_MODE (*node
, mode
);
773 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
776 avr_build_builtin_va_list (void)
778 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
779 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
780 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
781 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
782 to the long long accum modes instead of the desired [U]TAmode.
784 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
785 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
786 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
787 libgcc to detect IBIT and FBIT. */
789 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
790 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
791 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
792 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
794 unsigned_long_long_accum_type_node
= uta_type_node
;
795 long_long_accum_type_node
= ta_type_node
;
796 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
797 sat_long_long_accum_type_node
= sat_ta_type_node
;
799 /* Dispatch to the default handler. */
801 return std_build_builtin_va_list ();
805 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
806 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
807 frame pointer by +STARTING_FRAME_OFFSET.
808 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
809 avoids creating add/sub of offset in nonlocal goto and setjmp. */
812 avr_builtin_setjmp_frame_value (void)
814 rtx xval
= gen_reg_rtx (Pmode
);
815 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
816 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
821 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
822 This is return address of function. */
824 avr_return_addr_rtx (int count
, rtx tem
)
828 /* Can only return this function's return address. Others not supported. */
834 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
835 warning (0, "'builtin_return_address' contains only 2 bytes of address");
838 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
840 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
841 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
842 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
846 /* Return 1 if the function epilogue is just a single "ret". */
849 avr_simple_epilogue (void)
851 return (! frame_pointer_needed
852 && get_frame_size () == 0
853 && avr_outgoing_args_size() == 0
854 && avr_regs_to_save (NULL
) == 0
855 && ! cfun
->machine
->is_interrupt
856 && ! cfun
->machine
->is_signal
857 && ! cfun
->machine
->is_naked
858 && ! TREE_THIS_VOLATILE (current_function_decl
));
861 /* This function checks sequence of live registers. */
864 sequent_regs_live (void)
870 for (reg
= 0; reg
< 18; ++reg
)
874 /* Don't recognize sequences that contain global register
883 if (!call_used_regs
[reg
])
885 if (df_regs_ever_live_p (reg
))
895 if (!frame_pointer_needed
)
897 if (df_regs_ever_live_p (REG_Y
))
905 if (df_regs_ever_live_p (REG_Y
+1))
918 return (cur_seq
== live_seq
) ? live_seq
: 0;
921 /* Obtain the length of the sequence of insns. */
924 get_sequence_length (rtx insns
)
929 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
930 length
+= get_attr_length (insn
);
935 /* Implement INCOMING_RETURN_ADDR_RTX. */
938 avr_incoming_return_addr_rtx (void)
940 /* The return address is at the top of the stack. Note that the push
941 was via post-decrement, which means the actual address is off by one. */
942 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
945 /* Helper for expand_prologue. Emit a push of a byte register. */
948 emit_push_byte (unsigned regno
, bool frame_related_p
)
952 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
953 mem
= gen_frame_mem (QImode
, mem
);
954 reg
= gen_rtx_REG (QImode
, regno
);
956 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
958 RTX_FRAME_RELATED_P (insn
) = 1;
960 cfun
->machine
->stack_usage
++;
964 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
965 SFR is a MEM representing the memory location of the SFR.
966 If CLR_P then clear the SFR after the push using zero_reg. */
969 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
973 gcc_assert (MEM_P (sfr
));
975 /* IN __tmp_reg__, IO(SFR) */
976 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
978 RTX_FRAME_RELATED_P (insn
) = 1;
980 /* PUSH __tmp_reg__ */
981 emit_push_byte (TMP_REGNO
, frame_related_p
);
985 /* OUT IO(SFR), __zero_reg__ */
986 insn
= emit_move_insn (sfr
, const0_rtx
);
988 RTX_FRAME_RELATED_P (insn
) = 1;
993 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
996 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
997 int live_seq
= sequent_regs_live ();
999 HOST_WIDE_INT size_max
1000 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1002 bool minimize
= (TARGET_CALL_PROLOGUES
1006 && !cfun
->machine
->is_OS_task
1007 && !cfun
->machine
->is_OS_main
);
1010 && (frame_pointer_needed
1011 || avr_outgoing_args_size() > 8
1012 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1016 int first_reg
, reg
, offset
;
1018 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1019 gen_int_mode (size
, HImode
));
1021 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1022 gen_int_mode (live_seq
+size
, HImode
));
1023 insn
= emit_insn (pattern
);
1024 RTX_FRAME_RELATED_P (insn
) = 1;
1026 /* Describe the effect of the unspec_volatile call to prologue_saves.
1027 Note that this formulation assumes that add_reg_note pushes the
1028 notes to the front. Thus we build them in the reverse order of
1029 how we want dwarf2out to process them. */
1031 /* The function does always set frame_pointer_rtx, but whether that
1032 is going to be permanent in the function is frame_pointer_needed. */
1034 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1035 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
1037 : stack_pointer_rtx
),
1038 plus_constant (Pmode
, stack_pointer_rtx
,
1039 -(size
+ live_seq
))));
1041 /* Note that live_seq always contains r28+r29, but the other
1042 registers to be saved are all below 18. */
1044 first_reg
= 18 - (live_seq
- 2);
1046 for (reg
= 29, offset
= -live_seq
+ 1;
1048 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1052 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1054 r
= gen_rtx_REG (QImode
, reg
);
1055 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1058 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1060 else /* !minimize */
1064 for (reg
= 0; reg
< 32; ++reg
)
1065 if (TEST_HARD_REG_BIT (set
, reg
))
1066 emit_push_byte (reg
, true);
1068 if (frame_pointer_needed
1069 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1071 /* Push frame pointer. Always be consistent about the
1072 ordering of pushes -- epilogue_restores expects the
1073 register pair to be pushed low byte first. */
1075 emit_push_byte (REG_Y
, true);
1076 emit_push_byte (REG_Y
+ 1, true);
1079 if (frame_pointer_needed
1082 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1083 RTX_FRAME_RELATED_P (insn
) = 1;
1088 /* Creating a frame can be done by direct manipulation of the
1089 stack or via the frame pointer. These two methods are:
1096 the optimum method depends on function type, stack and
1097 frame size. To avoid a complex logic, both methods are
1098 tested and shortest is selected.
1100 There is also the case where SIZE != 0 and no frame pointer is
1101 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1102 In that case, insn (*) is not needed.
1103 We use the X register as scratch. This is safe because in X
1105 In an interrupt routine, the case of SIZE != 0 together with
1106 !frame_pointer_needed can only occur if the function is not a
1107 leaf function and thus X has already been saved. */
1110 HOST_WIDE_INT size_cfa
= size
;
1111 rtx fp_plus_insns
, fp
, my_fp
;
1113 gcc_assert (frame_pointer_needed
1117 fp
= my_fp
= (frame_pointer_needed
1119 : gen_rtx_REG (Pmode
, REG_X
));
1121 if (AVR_HAVE_8BIT_SP
)
1123 /* The high byte (r29) does not change:
1124 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1126 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1129 /* Cut down size and avoid size = 0 so that we don't run
1130 into ICE like PR52488 in the remainder. */
1132 if (size
> size_max
)
1134 /* Don't error so that insane code from newlib still compiles
1135 and does not break building newlib. As PR51345 is implemented
1136 now, there are multilib variants with -msp8.
1138 If user wants sanity checks he can use -Wstack-usage=
1141 For CFA we emit the original, non-saturated size so that
1142 the generic machinery is aware of the real stack usage and
1143 will print the above diagnostic as expected. */
1148 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1150 /************ Method 1: Adjust frame pointer ************/
1154 /* Normally, the dwarf2out frame-related-expr interpreter does
1155 not expect to have the CFA change once the frame pointer is
1156 set up. Thus, we avoid marking the move insn below and
1157 instead indicate that the entire operation is complete after
1158 the frame pointer subtraction is done. */
1160 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1161 if (frame_pointer_needed
)
1163 RTX_FRAME_RELATED_P (insn
) = 1;
1164 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1165 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1168 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1170 if (frame_pointer_needed
)
1172 RTX_FRAME_RELATED_P (insn
) = 1;
1173 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1174 gen_rtx_SET (VOIDmode
, fp
,
1175 plus_constant (Pmode
, fp
,
1179 /* Copy to stack pointer. Note that since we've already
1180 changed the CFA to the frame pointer this operation
1181 need not be annotated if frame pointer is needed.
1182 Always move through unspec, see PR50063.
1183 For meaning of irq_state see movhi_sp_r insn. */
1185 if (cfun
->machine
->is_interrupt
)
1188 if (TARGET_NO_INTERRUPTS
1189 || cfun
->machine
->is_signal
1190 || cfun
->machine
->is_OS_main
)
1193 if (AVR_HAVE_8BIT_SP
)
1196 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1197 fp
, GEN_INT (irq_state
)));
1198 if (!frame_pointer_needed
)
1200 RTX_FRAME_RELATED_P (insn
) = 1;
1201 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1202 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1203 plus_constant (Pmode
,
1208 fp_plus_insns
= get_insns ();
1211 /************ Method 2: Adjust Stack pointer ************/
1213 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1214 can only handle specific offsets. */
1216 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1222 insn
= emit_move_insn (stack_pointer_rtx
,
1223 plus_constant (Pmode
, stack_pointer_rtx
,
1225 RTX_FRAME_RELATED_P (insn
) = 1;
1226 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1227 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1228 plus_constant (Pmode
,
1231 if (frame_pointer_needed
)
1233 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1234 RTX_FRAME_RELATED_P (insn
) = 1;
1237 sp_plus_insns
= get_insns ();
1240 /************ Use shortest method ************/
1242 emit_insn (get_sequence_length (sp_plus_insns
)
1243 < get_sequence_length (fp_plus_insns
)
1249 emit_insn (fp_plus_insns
);
1252 cfun
->machine
->stack_usage
+= size_cfa
;
1253 } /* !minimize && size != 0 */
1258 /* Output function prologue. */
/* Emits the RTL prologue: nothing for naked functions; for interrupt/signal
   handlers, enables interrupts (interrupt only), pushes zero/tmp regs and
   SREG, clears __zero_reg__, and pushes+clears the RAMPD/X/Y/Z segment
   registers where the corresponding pointer regs are live; finally sets up
   the frame via avr_prologue_setup_frame and records stack usage.
   NOTE(review): this chunk is a garbled extraction -- logical lines are
   split and several original lines (braces, conditions) are missing.  */
1261 expand_prologue (void)
1266 size
= get_frame_size() + avr_outgoing_args_size();
1268 cfun
->machine
->stack_usage
= 0;
1270 /* Prologue: naked. */
1271 if (cfun
->machine
->is_naked
)
1276 avr_regs_to_save (&set
);
1278 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1280 /* Enable interrupts. */
1281 if (cfun
->machine
->is_interrupt
)
1282 emit_insn (gen_enable_interrupt ());
1284 /* Push zero reg. */
1285 emit_push_byte (ZERO_REGNO
, true);
1288 emit_push_byte (TMP_REGNO
, true);
1291 /* ??? There's no dwarf2 column reserved for SREG. */
1292 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1294 /* Clear zero reg. */
1295 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1297 /* Prevent any attempt to delete the setting of ZERO_REG! */
1298 emit_use (zero_reg_rtx
);
1300 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1301 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1304 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
/* RAMPX/Y/Z are saved only when both bytes of the pointer pair are live
   (Y additionally when the frame pointer is needed).  */
1307 && TEST_HARD_REG_BIT (set
, REG_X
)
1308 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1310 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1314 && (frame_pointer_needed
1315 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1316 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1318 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1322 && TEST_HARD_REG_BIT (set
, REG_Z
)
1323 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1325 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1327 } /* is_interrupt is_signal */
1329 avr_prologue_setup_frame (size
, set
);
/* Record static stack usage for -fstack-usage reporting.  */
1331 if (flag_stack_usage_info
)
1332 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1335 /* Output summary at end of function prologue. */
/* Writes human-readable comments into the asm output describing the kind
   of prologue (naked / Interrupt / Signal / function), outgoing args size,
   frame size and stack usage, and defines the .L__stack_usage symbol.  */
1338 avr_asm_function_end_prologue (FILE *file
)
1340 if (cfun
->machine
->is_naked
)
1342 fputs ("/* prologue: naked */\n", file
);
1346 if (cfun
->machine
->is_interrupt
)
1348 fputs ("/* prologue: Interrupt */\n", file
);
1350 else if (cfun
->machine
->is_signal
)
1352 fputs ("/* prologue: Signal */\n", file
);
1355 fputs ("/* prologue: function */\n", file
);
1358 if (ACCUMULATE_OUTGOING_ARGS
)
1359 fprintf (file
, "/* outgoing args size = %d */\n",
1360 avr_outgoing_args_size());
1362 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1364 fprintf (file
, "/* stack size = %d */\n",
1365 cfun
->machine
->stack_usage
);
1366 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1367 usage for offset so that SP + .L__stack_offset = return address. */
1368 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1372 /* Implement EPILOGUE_USES. */
/* After reload, interrupt/signal handlers are treated as using registers
   in their epilogue (they restore state before RETI).  */
1375 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1377 if (reload_completed
1379 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1384 /* Helper for expand_epilogue. Emit a pop of a byte register. */
/* Builds a QImode frame-mem with PRE_INC of SP (AVR pop semantics:
   pre-increment SP, then read) and emits REG <- MEM.  */
1387 emit_pop_byte (unsigned regno
)
1391 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1392 mem
= gen_frame_mem (QImode
, mem
);
1393 reg
= gen_rtx_REG (QImode
, regno
);
1395 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1398 /* Output RTL epilogue. */
/* Emits the RTL epilogue.  Naked functions just return.  With
   -mcall-prologues ("minimize") the epilogue_restores helper is used.
   Otherwise the frame is torn down by whichever of two sequences is
   shorter (adjust via frame pointer vs. adjust SP directly), then saved
   registers, RAMP* segment regs, SREG, tmp and zero regs are popped.
   SIBCALL_P suppresses the final return for sibling calls.  */
1401 expand_epilogue (bool sibcall_p
)
1408 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1410 size
= get_frame_size() + avr_outgoing_args_size();
1412 /* epilogue: naked */
1413 if (cfun
->machine
->is_naked
)
1415 gcc_assert (!sibcall_p
);
1417 emit_jump_insn (gen_return ());
1421 avr_regs_to_save (&set
);
1422 live_seq
= sequent_regs_live ();
1424 minimize
= (TARGET_CALL_PROLOGUES
1427 && !cfun
->machine
->is_OS_task
1428 && !cfun
->machine
->is_OS_main
);
1432 || frame_pointer_needed
1435 /* Get rid of frame. */
1437 if (!frame_pointer_needed
)
1439 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1444 emit_move_insn (frame_pointer_rtx
,
1445 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1448 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1454 /* Try two methods to adjust stack and select shortest. */
1459 HOST_WIDE_INT size_max
;
1461 gcc_assert (frame_pointer_needed
1465 fp
= my_fp
= (frame_pointer_needed
1467 : gen_rtx_REG (Pmode
, REG_X
));
1469 if (AVR_HAVE_8BIT_SP
)
1471 /* The high byte (r29) does not change:
1472 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1474 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1477 /* For rationale see comment in prologue generation. */
1479 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1480 if (size
> size_max
)
1482 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1484 /********** Method 1: Adjust fp register **********/
1488 if (!frame_pointer_needed
)
1489 emit_move_insn (fp
, stack_pointer_rtx
);
1491 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1493 /* Copy to stack pointer. */
1495 if (TARGET_NO_INTERRUPTS
)
1498 if (AVR_HAVE_8BIT_SP
)
1501 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1502 GEN_INT (irq_state
)));
1504 fp_plus_insns
= get_insns ();
1507 /********** Method 2: Adjust Stack pointer **********/
1509 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1515 emit_move_insn (stack_pointer_rtx
,
1516 plus_constant (Pmode
, stack_pointer_rtx
, size
))
;
1518 sp_plus_insns
= get_insns ();
1521 /************ Use shortest method ************/
1523 emit_insn (get_sequence_length (sp_plus_insns
)
1524 < get_sequence_length (fp_plus_insns
)
1529 emit_insn (fp_plus_insns
);
1532 if (frame_pointer_needed
1533 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1535 /* Restore previous frame_pointer. See expand_prologue for
1536 rationale for not using pophi. */
1538 emit_pop_byte (REG_Y
+ 1);
1539 emit_pop_byte (REG_Y
);
1542 /* Restore used registers. */
1544 for (reg
= 31; reg
>= 0; --reg
)
1545 if (TEST_HARD_REG_BIT (set
, reg
))
1546 emit_pop_byte (reg
);
1550 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1551 The conditions to restore them must be the same as in prologue. */
1554 && TEST_HARD_REG_BIT (set
, REG_Z
)
1555 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1557 emit_pop_byte (TMP_REGNO
);
1558 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1562 && (frame_pointer_needed
1563 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1564 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1566 emit_pop_byte (TMP_REGNO
);
1567 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1571 && TEST_HARD_REG_BIT (set
, REG_X
)
1572 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1574 emit_pop_byte (TMP_REGNO
);
1575 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1580 emit_pop_byte (TMP_REGNO
);
1581 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1584 /* Restore SREG using tmp_reg as scratch. */
1586 emit_pop_byte (TMP_REGNO
);
1587 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1589 /* Restore tmp REG. */
1590 emit_pop_byte (TMP_REGNO
);
1592 /* Restore zero REG. */
1593 emit_pop_byte (ZERO_REGNO
);
1597 emit_jump_insn (gen_return ());
1600 /* Output summary messages at beginning of function epilogue. */
/* Just prints an "/* epilogue start *\/" marker comment into the asm.  */
1603 avr_asm_function_begin_epilogue (FILE *file
)
1605 fprintf (file
, "/* epilogue start */\n");
1609 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
/* Forbid jump modification after reload inside naked functions, which
   must not get any instructions past their (empty) epilogue.  */
1612 avr_cannot_modify_jumps_p (void)
1615 /* Naked Functions must not have any instructions after
1616 their epilogue, see PR42240 */
1618 if (reload_completed
1620 && cfun
->machine
->is_naked
)
1629 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1631 /* FIXME: PSImode addresses are not mode-dependent in themselves.
1632 This hook just serves to hack around PR rtl-optimization/52543 by
1633 claiming that PSImode addresses (which are used for the 24-bit
1634 address space __memx) were mode-dependent so that lower-subreg.s
1635 will skip these addresses. See also the similar FIXME comment along
1636 with mov<mode> expanders in avr.md. */
/* Any address whose mode is not Pmode (i.e. PSImode __memx addresses)
   is reported as mode-dependent.  */
1639 avr_mode_dependent_address_p (const_rtx addr
)
1641 return GET_MODE (addr
) != Pmode
;
1645 /* Helper function for `avr_legitimate_address_p'. */
/* Checks whether REG may serve as a base register for address space AS
   in context OUTER_CODE; non-strict mode also accepts pseudos.  */
1648 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1649 RTX_CODE outer_code
, bool strict
)
1652 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1653 as
, outer_code
, UNKNOWN
)
1655 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1659 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1660 machine for a memory operand of mode MODE. */
/* Dispatches on the address form (plain reg, auto-inc/dec, reg+const)
   and defers to avr_reg_ok_for_addr_p for base-register validity.
   Dumps a trace via avr_edump when address-legitimacy logging is on.  */
1663 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1665 bool ok
= CONSTANT_ADDRESS_P (x
);
1667 switch (GET_CODE (x
))
1670 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
/* NOTE(review): extraction gap here -- the special-casing of wide modes
   addressed through X appears below; original condition lines missing.  */
1674 && GET_MODE_SIZE (mode
) > 4
1675 && REG_X
== REGNO (x
))
1683 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1684 GET_CODE (x
), strict
);
1689 rtx reg
= XEXP (x
, 0);
1690 rtx op1
= XEXP (x
, 1);
1693 && CONST_INT_P (op1
)
1694 && INTVAL (op1
) >= 0)
1696 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1701 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1704 if (reg
== frame_pointer_rtx
1705 || reg
== arg_pointer_rtx
)
1710 else if (frame_pointer_needed
1711 && reg
== frame_pointer_rtx
)
/* Debug dump of the decision when avr_log.legitimate_address_p is set.  */
1723 if (avr_log
.legitimate_address_p
)
1725 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1726 "reload_completed=%d reload_in_progress=%d %s:",
1727 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1728 reg_renumber
? "(reg_renumber)" : "");
1730 if (GET_CODE (x
) == PLUS
1731 && REG_P (XEXP (x
, 0))
1732 && CONST_INT_P (XEXP (x
, 1))
1733 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1736 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1737 true_regnum (XEXP (x
, 0)));
1740 avr_edump ("\n%r\n", x
);
1747 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1748 now only a helper for avr_addr_space_legitimize_address. */
1749 /* Attempts to replace X with a valid
1750 memory address for an operand of mode MODE */
/* Forces reg+reg addresses, and reg+const addresses whose offset exceeds
   MAX_LD_OFFSET (unless based on the frame pointer), into a register.  */
1753 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1755 bool big_offset_p
= false;
1759 if (GET_CODE (oldx
) == PLUS
1760 && REG_P (XEXP (oldx
, 0)))
1762 if (REG_P (XEXP (oldx
, 1)))
1763 x
= force_reg (GET_MODE (oldx
), oldx
);
1764 else if (CONST_INT_P (XEXP (oldx
, 1)))
1766 int offs
= INTVAL (XEXP (oldx
, 1));
1767 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1768 && offs
> MAX_LD_OFFSET (mode
))
1770 big_offset_p
= true;
1771 x
= force_reg (GET_MODE (oldx
), oldx
);
/* Debug dump when avr_log.legitimize_address is set.  */
1776 if (avr_log
.legitimize_address
)
1778 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1781 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1788 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1789 /* This will allow register R26/27 to be used where it is no worse than normal
1790 base pointers R28/29 or R30/31. For example, if base offset is greater
1791 than 63 bytes or for R++ or --R addressing. */
/* Pushes reloads for auto-inc/dec addresses and for reg+const addresses
   that don't fit the LD offset range, steering the base into POINTER_REGS
   (X allowed) or BASE_POINTER_REGS as appropriate.  MK_MEMLOC builds a
   stack memory location for an equivalenced pseudo.  */
1794 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1795 int opnum
, int type
, int addr_type
,
1796 int ind_levels ATTRIBUTE_UNUSED
,
1797 rtx (*mk_memloc
)(rtx
,int))
1801 if (avr_log
.legitimize_reload_address
)
1802 avr_edump ("\n%?:%m %r\n", mode
, x
);
/* Case 1: R++ / --R addressing -- reload the pointer into POINTER_REGS.  */
1804 if (1 && (GET_CODE (x
) == POST_INC
1805 || GET_CODE (x
) == PRE_DEC
))
1807 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1808 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1809 opnum
, RELOAD_OTHER
);
1811 if (avr_log
.legitimize_reload_address
)
1812 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1813 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
/* Case 2: (reg + positive const) with a non-constant-equivalenced base.  */
1818 if (GET_CODE (x
) == PLUS
1819 && REG_P (XEXP (x
, 0))
1820 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1821 && CONST_INT_P (XEXP (x
, 1))
1822 && INTVAL (XEXP (x
, 1)) >= 1)
1824 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
;
1828 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1830 int regno
= REGNO (XEXP (x
, 0));
1831 rtx mem
= mk_memloc (x
, regno
);
1833 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1834 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1835 1, (enum reload_type
) addr_type
);
1837 if (avr_log
.legitimize_reload_address
)
1838 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1839 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1841 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1842 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1843 opnum
, (enum reload_type
) type
);
1845 if (avr_log
.legitimize_reload_address
)
1846 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1847 BASE_POINTER_REGS
, mem
, NULL_RTX
);
/* Case 3: other big-offset bases (except the needed frame pointer) --
   reload the whole address into POINTER_REGS.  */
1852 else if (! (frame_pointer_needed
1853 && XEXP (x
, 0) == frame_pointer_rtx
))
1855 push_reload (x
, NULL_RTX
, px
, NULL
,
1856 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1857 opnum
, (enum reload_type
) type
);
1859 if (avr_log
.legitimize_reload_address
)
1860 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1861 POINTER_REGS
, x
, NULL_RTX
);
1871 /* Helper function to print assembler resp. track instruction
1872 sequence lengths. Always return "".
1875 Output assembler code from template TPL with operands supplied
1876 by OPERANDS. This is just forwarding to output_asm_insn.
1879 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1880 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1881 Don't output anything.
/* Central emit-or-count helper used by all output_* workers: when PLEN is
   NULL the template is printed, otherwise only the word count is tracked.  */
1885 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1889 output_asm_insn (tpl
, operands
);
1903 /* Return a pointer register name as a string. */
/* Maps REG_X/REG_Y/REG_Z to "X"/"Y"/"Z"; any other regno is an operand
   constraint error.  */
1906 ptrreg_to_str (int regno
)
1910 case REG_X
: return "X";
1911 case REG_Y
: return "Y";
1912 case REG_Z
: return "Z";
1914 output_operand_lossage ("address operand requires constraint for"
1915 " X, Y, or Z register");
1920 /* Return the condition name as a string.
1921 Used in conditional jump constructing */
/* Translates an rtx comparison code into the AVR branch-condition mnemonic
   suffix; for (presumably) GE/LT the choice depends on whether the previous
   CC left the V flag unusable -- TODO confirm against full source, the
   extraction has dropped the case labels here.  */
1924 cond_string (enum rtx_code code
)
1933 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1938 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1954 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1955 /* Output ADDR to FILE as address. */
/* Prints X/Y/Z for register addresses, -R / R+ for pre-dec/post-inc,
   and wraps program-memory constants in the assembler's gs() operator
   (converting a symbol+offset into a byte offset inside gs()).  */
1958 avr_print_operand_address (FILE *file
, rtx addr
)
1960 switch (GET_CODE (addr
))
1963 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1967 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1971 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1975 if (CONSTANT_ADDRESS_P (addr
)
1976 && text_segment_operand (addr
, VOIDmode
))
1979 if (GET_CODE (x
) == CONST
)
1981 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1983 /* Assembler gs() will implant word address. Make offset
1984 a byte offset inside gs() for assembler. This is
1985 needed because the more logical (constant+gs(sym)) is not
1986 accepted by gas. For 128K and lower devices this is ok.
1987 For large devices it will create a Trampoline to offset
1988 from symbol which may not be what the user really wanted. */
1989 fprintf (file
, "gs(");
1990 output_addr_const (file
, XEXP (x
,0));
1991 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
1992 2 * INTVAL (XEXP (x
, 1)));
1994 if (warning (0, "pointer offset from symbol maybe incorrect"))
1996 output_addr_const (stderr
, addr
);
1997 fprintf(stderr
,"\n");
2002 fprintf (file
, "gs(");
2003 output_addr_const (file
, addr
);
2004 fprintf (file
, ")");
2008 output_addr_const (file
, addr
);
2013 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
/* Only '~' and '!' are recognized punctuation codes in operand templates.  */
2016 avr_print_operand_punct_valid_p (unsigned char code
)
2018 return code
== '~' || code
== '!';
2022 /* Implement `TARGET_PRINT_OPERAND'. */
2023 /* Output X as assembler operand to file FILE.
2024 For a description of supported %-codes, see top of avr.md. */
/* Large dispatcher over the operand kind (reg, const_int, mem, const
   double/fixed/string) combined with the %-CODE letter.  Notable cases:
   'A'..'D' select byte 0..3 of a multi-byte register/constant; 'T'/'t'
   cache a reg+bit pair across two calls; I/O addresses are printed by
   their well-known SFR names (__SREG__, __SP_L__, __RAMPZ__, ...).  */
2027 avr_print_operand (FILE *file
, rtx x
, int code
)
2031 if (code
>= 'A' && code
<= 'D')
2036 if (!AVR_HAVE_JMP_CALL
)
2039 else if (code
== '!')
2041 if (AVR_HAVE_EIJMP_EICALL
)
2044 else if (code
== 't'
/* %T stores a (reg, bit) pair; the following %t prints it.  State is
   kept in function-local statics between the two calls.  */
2047 static int t_regno
= -1;
2048 static int t_nbits
= -1;
2050 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2052 t_regno
= REGNO (x
);
2053 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2055 else if (CONST_INT_P (x
) && t_regno
>= 0
2056 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2058 int bpos
= INTVAL (x
);
2060 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2062 fprintf (file
, ",%d", bpos
% 8);
2067 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2071 if (x
== zero_reg_rtx
)
2072 fprintf (file
, "__zero_reg__");
2073 else if (code
== 'r' && REGNO (x
) < 32)
2074 fprintf (file
, "%d", (int) REGNO (x
));
2076 fprintf (file
, reg_names
[REGNO (x
) + abcd
]);
2078 else if (CONST_INT_P (x
))
2080 HOST_WIDE_INT ival
= INTVAL (x
);
2083 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2084 else if (low_io_address_operand (x
, VOIDmode
)
2085 || high_io_address_operand (x
, VOIDmode
))
/* Known special-function registers are printed symbolically.  */
2087 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2088 fprintf (file
, "__RAMPZ__");
2089 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2090 fprintf (file
, "__RAMPY__");
2091 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2092 fprintf (file
, "__RAMPX__");
2093 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2094 fprintf (file
, "__RAMPD__");
2095 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2096 fprintf (file
, "__CCP__");
2097 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2098 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2099 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2102 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2103 ival
- avr_current_arch
->sfr_offset
);
2107 fatal_insn ("bad address, not an I/O address:", x
);
2111 rtx addr
= XEXP (x
, 0);
2115 if (!CONSTANT_P (addr
))
2116 fatal_insn ("bad address, not a constant:", addr
);
2117 /* Assembler template with m-code is data - not progmem section */
2118 if (text_segment_operand (addr
, VOIDmode
))
2119 if (warning (0, "accessing data memory with"
2120 " program memory address"))
2122 output_addr_const (stderr
, addr
);
2123 fprintf(stderr
,"\n");
2125 output_addr_const (file
, addr
);
2127 else if (code
== 'i')
2129 avr_print_operand (file
, addr
, 'i');
2131 else if (code
== 'o')
2133 if (GET_CODE (addr
) != PLUS
)
2134 fatal_insn ("bad address, not (reg+disp):", addr
);
2136 avr_print_operand (file
, XEXP (addr
, 1), 0);
2138 else if (code
== 'p' || code
== 'r')
2140 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2141 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2144 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2146 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2148 else if (GET_CODE (addr
) == PLUS
)
2150 avr_print_operand_address (file
, XEXP (addr
,0));
2151 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2152 fatal_insn ("internal compiler error. Bad address:"
2155 avr_print_operand (file
, XEXP (addr
,1), code
);
2158 avr_print_operand_address (file
, addr
);
2160 else if (code
== 'i')
2162 fatal_insn ("bad address, not an I/O address:", x
);
2164 else if (code
== 'x')
2166 /* Constant progmem address - like used in jmp or call */
2167 if (0 == text_segment_operand (x
, VOIDmode
))
2168 if (warning (0, "accessing program memory"
2169 " with data memory address"))
2171 output_addr_const (stderr
, x
);
2172 fprintf(stderr
,"\n");
2174 /* Use normal symbol for direct address no linker trampoline needed */
2175 output_addr_const (file
, x
);
2177 else if (CONST_FIXED_P (x
))
2179 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2181 output_operand_lossage ("Unsupported code '%c'for fixed-point:",
2183 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2185 else if (GET_CODE (x
) == CONST_DOUBLE
)
2189 if (GET_MODE (x
) != SFmode
)
2190 fatal_insn ("internal compiler error. Unknown mode:", x
);
2191 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2192 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2193 fprintf (file
, "0x%lx", val
);
2195 else if (GET_CODE (x
) == CONST_STRING
)
2196 fputs (XSTR (x
, 0), file
);
2197 else if (code
== 'j')
2198 fputs (cond_string (GET_CODE (x
)), file
);
2199 else if (code
== 'k')
2200 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2202 avr_print_operand_address (file
, x
);
2205 /* Update the condition code in the INSN. */
/* cc0 bookkeeping hook: inspects the insn's "cc" attribute and records in
   cc_status how the emitted instruction affects the condition codes (set
   from dest, overflow unusable, clobbered, or unchanged).  */
2208 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2211 enum attr_cc cc
= get_attr_cc (insn
);
2221 rtx
*op
= recog_data
.operand
;
2224 /* Extract insn's operands. */
2225 extract_constrain_insn_cached (insn
);
/* CC_OUT_PLUS etc.: ask the output worker what the sequence does to CC.  */
2233 avr_out_plus (insn
, op
, &len_dummy
, &icc
);
2234 cc
= (enum attr_cc
) icc
;
2239 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2240 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2241 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2243 /* Any other "r,rL" combination does not alter cc0. */
2247 } /* inner switch */
2251 } /* outer switch */
2256 /* Special values like CC_OUT_PLUS from above have been
2257 mapped to "standard" CC_* values so we never come here. */
2263 /* Insn does not affect CC at all. */
2271 set
= single_set (insn
);
2275 cc_status
.flags
|= CC_NO_OVERFLOW
;
2276 cc_status
.value1
= SET_DEST (set
);
2281 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2282 The V flag may or may not be known but that's ok because
2283 alter_cond will change tests to use EQ/NE. */
2284 set
= single_set (insn
);
2288 cc_status
.value1
= SET_DEST (set
);
2289 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2294 set
= single_set (insn
);
2297 cc_status
.value1
= SET_SRC (set
);
2301 /* Insn doesn't leave CC in a usable state. */
2307 /* Choose mode for jump insn:
2308 1 - relative jump in range -63 <= x <= 62 ;
2309 2 - relative jump in range -2046 <= x <= 2045 ;
2310 3 - absolute jump (only for ATmega[16]03). */
/* Computes the distance between INSN and the (possibly LABEL_REF-wrapped)
   destination X using recorded insn addresses, then buckets it into the
   short-branch / rjmp / jmp ranges above.  */
2313 avr_jump_mode (rtx x
, rtx insn
)
2315 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2316 ? XEXP (x
, 0) : x
));
2317 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2318 int jump_distance
= cur_addr
- dest_addr
;
2320 if (-63 <= jump_distance
&& jump_distance
<= 62)
2322 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2324 else if (AVR_HAVE_JMP_CALL
)
2330 /* return an AVR condition jump commands.
2331 X is a comparison RTX.
2332 LEN is a number returned by avr_jump_mode function.
2333 if REVERSE nonzero then condition code in X must be reversed. */
/* Returns the assembler template for a conditional branch, synthesising
   signed compares without usable V flag as a breq-skip plus branch; longer
   LEN values use branch-around-jump sequences.  NOTE(review): the garbled
   extraction dropped most of the template string lines here.  */
2336 ret_cond_branch (rtx x
, int len
, int reverse
)
2338 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2343 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2344 return (len
== 1 ? ("breq .+2" CR_TAB
2346 len
== 2 ? ("breq .+4" CR_TAB
2354 return (len
== 1 ? ("breq .+2" CR_TAB
2356 len
== 2 ? ("breq .+4" CR_TAB
2363 return (len
== 1 ? ("breq .+2" CR_TAB
2365 len
== 2 ? ("breq .+4" CR_TAB
2372 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2373 return (len
== 1 ? ("breq %0" CR_TAB
2375 len
== 2 ? ("breq .+2" CR_TAB
2382 return (len
== 1 ? ("breq %0" CR_TAB
2384 len
== 2 ? ("breq .+2" CR_TAB
2391 return (len
== 1 ? ("breq %0" CR_TAB
2393 len
== 2 ? ("breq .+2" CR_TAB
2407 return ("br%j1 .+2" CR_TAB
2410 return ("br%j1 .+4" CR_TAB
2421 return ("br%k1 .+2" CR_TAB
2424 return ("br%k1 .+4" CR_TAB
2432 /* Output insn cost for next insn. */
/* When rtx-cost logging is enabled, prints the computed src/pattern costs
   of the upcoming insn as comments into the asm output (debug aid only).  */
2435 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2436 int num_operands ATTRIBUTE_UNUSED
)
2438 if (avr_log
.rtx_costs
)
2440 rtx set
= single_set (insn
);
2443 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2444 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2446 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2447 rtx_cost (PATTERN (insn
), INSN
, 0,
2448 optimize_insn_for_speed_p()));
2452 /* Return 0 if undefined, 1 if always true or always false. */
/* Decides whether comparing a value of MODE against CONST_INT X with
   operator OP has a statically known outcome, based on the mode's value
   mask (QI/HI/PSI/SI).  */
2455 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2457 unsigned int max
= (mode
== QImode
? 0xff :
2458 mode
== HImode
? 0xffff :
2459 mode
== PSImode
? 0xffffff :
2460 mode
== SImode
? 0xffffffff : 0);
2461 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
2463 if (unsigned_condition (op
) != op
)
2466 if (max
!= (INTVAL (x
) & max
)
2467 && INTVAL (x
) != 0xff)
2474 /* Returns nonzero if REGNO is the number of a hard
2475 register in which function arguments are sometimes passed. */
/* Argument registers on AVR are r8..r25.  */
2478 function_arg_regno_p(int r
)
2480 return (r
>= 8 && r
<= 25);
2483 /* Initializing the variable cum for the state at the beginning
2484 of the argument list. */
/* Resets CUM->regno to FIRST_CUM_REG; varargs functions (stdarg fntype
   with no libname) get different handling (line dropped by extraction).
   Also clears the per-function sibcall_fails flag.  */
2487 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2488 tree fndecl ATTRIBUTE_UNUSED
)
2491 cum
->regno
= FIRST_CUM_REG
;
2492 if (!libname
&& stdarg_p (fntype
))
2495 /* Assume the callee may be tail called */
2497 cfun
->machine
->sibcall_fails
= 0;
2500 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments use their type size; otherwise the mode size.  The
   result is rounded up to an even number of bytes/registers.  */
2503 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2507 if (mode
== BLKmode
)
2508 size
= int_size_in_bytes (type
);
2510 size
= GET_MODE_SIZE (mode
);
2512 /* Align all function arguments to start in even-numbered registers.
2513 Odd-sized arguments leave holes above them. */
2515 return (size
+ 1) & ~1;
2518 /* Controls whether a function argument is passed
2519 in a register, and which register. */
/* Returns the REG rtx holding the argument when it still fits in the
   remaining argument registers (registers are assigned downward from
   CUM->regno); otherwise the argument goes on the stack.  */
2522 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2523 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2525 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2526 int bytes
= avr_num_arg_regs (mode
, type
);
2528 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2529 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2534 /* Update the summarizer variable CUM to advance past an argument
2535 in the argument list. */
/* Decrements the remaining-register bookkeeping, flags sibcall failure
   when an argument lands in a call-saved register, warns about fixed
   registers used for parameter passing, and resets CUM->regno once the
   register file is exhausted.  */
2538 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2539 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2541 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2542 int bytes
= avr_num_arg_regs (mode
, type
);
2544 cum
->nregs
-= bytes
;
2545 cum
->regno
-= bytes
;
2547 /* A parameter is being passed in a call-saved register. As the original
2548 contents of these regs has to be restored before leaving the function,
2549 a function must not pass arguments in call-saved regs in order to get
2554 && !call_used_regs
[cum
->regno
])
2556 /* FIXME: We ship info on failing tail-call in struct machine_function.
2557 This uses internals of calls.c:expand_call() and the way args_so_far
2558 is used. targetm.function_ok_for_sibcall() needs to be extended to
2559 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2560 dependent so that such an extension is not wanted. */
2562 cfun
->machine
->sibcall_fails
= 1;
2565 /* Test if all registers needed by the ABI are actually available. If the
2566 user has fixed a GPR needed to pass an argument, an (implicit) function
2567 call will clobber that fixed register. See PR45099 for an example. */
2574 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2575 if (fixed_regs
[regno
])
2576 warning (0, "fixed register %s used to pass parameter to function",
2580 if (cum
->nregs
<= 0)
2583 cum
->regno
= FIRST_CUM_REG
;
2587 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2588 /* Decide whether we can make a sibling call to a function. DECL is the
2589 declaration of the function being targeted by the call and EXP is the
2590 CALL_EXPR representing the call. */
/* Rejects sibcalls when args went to call-saved regs, with
   -mcall-prologues, and when caller/callee epilogue kinds differ
   (interrupt/signal/naked/OS_task/OS_main mismatches).  Walks the callee
   type down to a FUNCTION_TYPE/METHOD_TYPE for the attribute checks.  */
2593 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2597 /* Tail-calling must fail if callee-saved regs are used to pass
2598 function args. We must not tail-call when `epilogue_restores'
2599 is used. Unfortunately, we cannot tell at this point if that
2600 actually will happen or not, and we cannot step back from
2601 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2603 if (cfun
->machine
->sibcall_fails
2604 || TARGET_CALL_PROLOGUES
)
2609 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2613 decl_callee
= TREE_TYPE (decl_callee
);
2617 decl_callee
= fntype_callee
;
2619 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2620 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2622 decl_callee
= TREE_TYPE (decl_callee
);
2626 /* Ensure that caller and callee have compatible epilogues */
2628 if (cfun
->machine
->is_interrupt
2629 || cfun
->machine
->is_signal
2630 || cfun
->machine
->is_naked
2631 || avr_naked_function_p (decl_callee
)
2632 /* FIXME: For OS_task and OS_main, we are over-conservative.
2633 This is due to missing documentation of these attributes
2634 and what they actually should do and should not do. */
2635 || (avr_OS_task_function_p (decl_callee
)
2636 != cfun
->machine
->is_OS_task
)
2637 || (avr_OS_main_function_p (decl_callee
)
2638 != cfun
->machine
->is_OS_main
))
2646 /***********************************************************************
2647 Functions for outputting various mov's for a various modes
2648 ************************************************************************/
2650 /* Return true if a value of mode MODE is read from flash by
2651 __load_* function from libgcc. */
/* True when OP is a MEM in ADDR_SPACE_FLASH whose size requires the
   libgcc __load_* helper (size condition line lost in extraction).  */
2654 avr_load_libgcc_p (rtx op
)
2656 enum machine_mode mode
= GET_MODE (op
);
2657 int n_bytes
= GET_MODE_SIZE (mode
);
2662 && MEM_ADDR_SPACE (op
) == ADDR_SPACE_FLASH
);
2665 /* Return true if a value of mode MODE is read by __xload_* function. */
/* Depends on the mode size and on whether the device has more than one
   flash segment.  */
2668 avr_xload_libgcc_p (enum machine_mode mode
)
2670 int n_bytes
= GET_MODE_SIZE (mode
);
2673 || avr_current_device
->n_flash
> 1);
2677 /* If PLEN == NULL: Output instructions to load a value from a memory location
2678 OP[1] in AS1 to register OP[0].
2679 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
/* Handles only the open-coded 1-byte __flash load via Z (plain or
   post-increment); emits LPM (with mov fallback when LPMX is absent).
   Also warns when the insn would write into a non-generic space.  */
2683 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2687 rtx src
= SET_SRC (single_set (insn
));
2689 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2691 addr_space_t as
= MEM_ADDR_SPACE (src
);
2698 warning (0, "writing to address space %qs not supported",
2699 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2704 addr
= XEXP (src
, 0);
2705 code
= GET_CODE (addr
);
2707 gcc_assert (REG_P (dest
));
2708 gcc_assert (REG
== code
|| POST_INC
== code
);
2710 /* Only 1-byte moves from __flash are represented as open coded
2711 mov insns. All other loads from flash are not handled here but
2712 by some UNSPEC instead, see respective FIXME in machine description. */
2714 gcc_assert (as
== ADDR_SPACE_FLASH
);
2715 gcc_assert (n_bytes
== 1);
2718 xop
[1] = lpm_addr_reg_rtx
;
2719 xop
[2] = lpm_reg_rtx
;
2728 gcc_assert (REG_Z
== REGNO (addr
));
2730 return AVR_HAVE_LPMX
2731 ? avr_asm_len ("lpm %0,%a1", xop
, plen
, 1)
2732 : avr_asm_len ("lpm" CR_TAB
2733 "mov %0,%2", xop
, plen
, 2);
2737 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0)));
2739 return AVR_HAVE_LPMX
2740 ? avr_asm_len ("lpm %0,%a1+", xop
, plen
, 1)
2741 : avr_asm_len ("lpm" CR_TAB
2743 "mov %0,%2", xop
, plen
, 3);
2750 /* If PLEN == NULL: Output instructions to load $0 with a value from
2751 flash address $1:Z. If $1 = 0 we can use LPM to read, otherwise
2753 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
/* Multi-byte (E)LPM load worker: sets RAMPZ from a register segment,
   loads bytes LSB-to-MSB through Z (special-casing a destination that
   overlaps Z), restores Z with SBIW when it is still live afterwards,
   and zeroes RAMPZ again on RAMPD devices.  */
2757 avr_load_lpm (rtx insn
, rtx
*op
, int *plen
)
2760 int n
, n_bytes
= GET_MODE_SIZE (GET_MODE (op
[0]));
2761 rtx xsegment
= op
[1];
2762 bool clobber_z
= PARALLEL
== GET_CODE (PATTERN (insn
));
2763 bool r30_in_tmp
= false;
2768 xop
[1] = lpm_addr_reg_rtx
;
2769 xop
[2] = lpm_reg_rtx
;
2770 xop
[3] = xstring_empty
;
2772 /* Set RAMPZ as needed. */
2774 if (REG_P (xsegment
))
2776 avr_asm_len ("out __RAMPZ__,%0", &xsegment
, plen
, 1);
2780 /* Load the individual bytes from LSB to MSB. */
2782 for (n
= 0; n
< n_bytes
; n
++)
2784 xop
[0] = all_regs_rtx
[REGNO (op
[0]) + n
]
;
2786 if ((CONST_INT_P (xsegment
) && AVR_HAVE_LPMX
)
2787 || (REG_P (xsegment
) && AVR_HAVE_ELPMX
))
2790 avr_asm_len ("%3lpm %0,%a1", xop
, plen
, 1);
2791 else if (REGNO (xop
[0]) == REG_Z
)
2793 avr_asm_len ("%3lpm %2,%a1+", xop
, plen
, 1);
2797 avr_asm_len ("%3lpm %0,%a1+", xop
, plen
, 1);
2801 gcc_assert (clobber_z
);
2803 avr_asm_len ("%3lpm" CR_TAB
2804 "mov %0,%2", xop
, plen
, 2);
2807 avr_asm_len ("adiw %1,1", xop
, plen
, 1);
2812 avr_asm_len ("mov %1,%2", xop
, plen
, 1);
/* Rewind Z when it is still needed and not overwritten by the dest.  */
2816 && !reg_unused_after (insn
, lpm_addr_reg_rtx
)
2817 && !reg_overlap_mentioned_p (op
[0], lpm_addr_reg_rtx
))
2819 xop
[2] = GEN_INT (n_bytes
-1);
2820 avr_asm_len ("sbiw %1,%2", xop
, plen
, 1);
2823 if (REG_P (xsegment
) && AVR_HAVE_RAMPD
)
2825 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2827 avr_asm_len ("out __RAMPZ__,__zero_reg__", xop
, plen
, 1);
2834 /* Worker function for xload_8 insn. */
/* Emits the __memx 1-byte load: tests bit 7 of the segment byte to choose
   between flash (LPM) and RAM access, then moves the result into the
   destination if LPM could not target it directly.  */
2837 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
2843 xop
[2] = lpm_addr_reg_rtx
;
2844 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
2849 avr_asm_len ("sbrc %1,7" CR_TAB
2851 "sbrs %1,7", xop
, plen
, 3);
2853 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, 1);
2855 if (REGNO (xop
[0]) != REGNO (xop
[3]))
2856 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
/* Emit (or measure, via REAL_L) the assembler for a QImode move: flash
   loads go through avr_out_lpm; reg/const/mem sources dispatch to the
   dedicated helpers; a zero store reuses __zero_reg__ as the source.  */
2863 output_movqi (rtx insn
, rtx operands
[], int *real_l
)
2865 rtx dest
= operands
[0];
2866 rtx src
= operands
[1];
2868 if (avr_mem_flash_p (src
)
2869 || avr_mem_flash_p (dest
))
2871 return avr_out_lpm (insn
, operands
, real_l
);
2877 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest
)));
2881 if (REG_P (src
)) /* mov r,r */
2883 if (test_hard_reg_class (STACK_REG
, dest
))
2885 else if (test_hard_reg_class (STACK_REG
, src
))
2890 else if (CONSTANT_P (src
))
2892 output_reload_in_const (operands
, NULL_RTX
, real_l
, false);
2895 else if (MEM_P (src
))
2896 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2898 else if (MEM_P (dest
))
2903 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
2905 return out_movqi_mr_r (insn
, xop
, real_l
);
2912 output_movhi (rtx insn
, rtx xop
[], int *plen
)
2917 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
2919 if (avr_mem_flash_p (src
)
2920 || avr_mem_flash_p (dest
))
2922 return avr_out_lpm (insn
, xop
, plen
);
2925 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest
)));
2929 if (REG_P (src
)) /* mov r,r */
2931 if (test_hard_reg_class (STACK_REG
, dest
))
2933 if (AVR_HAVE_8BIT_SP
)
2934 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
2937 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2938 "out __SP_H__,%B1", xop
, plen
, -2);
2940 /* Use simple load of SP if no interrupts are used. */
2942 return TARGET_NO_INTERRUPTS
2943 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2944 "out __SP_L__,%A1", xop
, plen
, -2)
2945 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2947 "out __SP_H__,%B1" CR_TAB
2948 "out __SREG__,__tmp_reg__" CR_TAB
2949 "out __SP_L__,%A1", xop
, plen
, -5);
2951 else if (test_hard_reg_class (STACK_REG
, src
))
2953 return !AVR_HAVE_SPH
2954 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2955 "clr %B0", xop
, plen
, -2)
2957 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2958 "in %B0,__SP_H__", xop
, plen
, -2);
2961 return AVR_HAVE_MOVW
2962 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
2964 : avr_asm_len ("mov %A0,%A1" CR_TAB
2965 "mov %B0,%B1", xop
, plen
, -2);
2967 else if (CONSTANT_P (src
))
2969 return output_reload_inhi (xop
, NULL
, plen
);
2971 else if (MEM_P (src
))
2973 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
2976 else if (MEM_P (dest
))
2981 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
2983 return out_movhi_mr_r (insn
, xop
, plen
);
2986 fatal_insn ("invalid insn:", insn
);
2992 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
2996 rtx x
= XEXP (src
, 0);
2998 if (CONSTANT_ADDRESS_P (x
))
3000 return optimize
> 0 && io_address_operand (x
, QImode
)
3001 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3002 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
3004 else if (GET_CODE (x
) == PLUS
3005 && REG_P (XEXP (x
, 0))
3006 && CONST_INT_P (XEXP (x
, 1)))
3008 /* memory access by reg+disp */
3010 int disp
= INTVAL (XEXP (x
, 1));
3012 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3014 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3015 fatal_insn ("incorrect insn:",insn
);
3017 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3018 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3019 "ldd %0,Y+63" CR_TAB
3020 "sbiw r28,%o1-63", op
, plen
, -3);
3022 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3023 "sbci r29,hi8(-%o1)" CR_TAB
3025 "subi r28,lo8(%o1)" CR_TAB
3026 "sbci r29,hi8(%o1)", op
, plen
, -5);
3028 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3030 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3031 it but I have this situation with extremal optimizing options. */
3033 avr_asm_len ("adiw r26,%o1" CR_TAB
3034 "ld %0,X", op
, plen
, -2);
3036 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3037 && !reg_unused_after (insn
, XEXP (x
,0)))
3039 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3045 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3048 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
3052 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
3056 rtx base
= XEXP (src
, 0);
3057 int reg_dest
= true_regnum (dest
);
3058 int reg_base
= true_regnum (base
);
3059 /* "volatile" forces reading low byte first, even if less efficient,
3060 for correct operation with 16-bit I/O registers. */
3061 int mem_volatile_p
= MEM_VOLATILE_P (src
);
3065 if (reg_dest
== reg_base
) /* R = (R) */
3066 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3068 "mov %A0,__tmp_reg__", op
, plen
, -3);
3070 if (reg_base
!= REG_X
)
3071 return avr_asm_len ("ld %A0,%1" CR_TAB
3072 "ldd %B0,%1+1", op
, plen
, -2);
3074 avr_asm_len ("ld %A0,X+" CR_TAB
3075 "ld %B0,X", op
, plen
, -2);
3077 if (!reg_unused_after (insn
, base
))
3078 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3082 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3084 int disp
= INTVAL (XEXP (base
, 1));
3085 int reg_base
= true_regnum (XEXP (base
, 0));
3087 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3089 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3090 fatal_insn ("incorrect insn:",insn
);
3092 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3093 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3094 "ldd %A0,Y+62" CR_TAB
3095 "ldd %B0,Y+63" CR_TAB
3096 "sbiw r28,%o1-62", op
, plen
, -4)
3098 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3099 "sbci r29,hi8(-%o1)" CR_TAB
3101 "ldd %B0,Y+1" CR_TAB
3102 "subi r28,lo8(%o1)" CR_TAB
3103 "sbci r29,hi8(%o1)", op
, plen
, -6);
3106 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3107 it but I have this situation with extremal
3108 optimization options. */
3110 if (reg_base
== REG_X
)
3111 return reg_base
== reg_dest
3112 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3113 "ld __tmp_reg__,X+" CR_TAB
3115 "mov %A0,__tmp_reg__", op
, plen
, -4)
3117 : avr_asm_len ("adiw r26,%o1" CR_TAB
3120 "sbiw r26,%o1+1", op
, plen
, -4);
3122 return reg_base
== reg_dest
3123 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3124 "ldd %B0,%B1" CR_TAB
3125 "mov %A0,__tmp_reg__", op
, plen
, -3)
3127 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3128 "ldd %B0,%B1", op
, plen
, -2);
3130 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3132 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3133 fatal_insn ("incorrect insn:", insn
);
3135 if (!mem_volatile_p
)
3136 return avr_asm_len ("ld %B0,%1" CR_TAB
3137 "ld %A0,%1", op
, plen
, -2);
3139 return REGNO (XEXP (base
, 0)) == REG_X
3140 ? avr_asm_len ("sbiw r26,2" CR_TAB
3143 "sbiw r26,1", op
, plen
, -4)
3145 : avr_asm_len ("sbiw %r1,2" CR_TAB
3147 "ldd %B0,%p1+1", op
, plen
, -3);
3149 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3151 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3152 fatal_insn ("incorrect insn:", insn
);
3154 return avr_asm_len ("ld %A0,%1" CR_TAB
3155 "ld %B0,%1", op
, plen
, -2);
3157 else if (CONSTANT_ADDRESS_P (base
))
3159 return optimize
> 0 && io_address_operand (base
, HImode
)
3160 ? avr_asm_len ("in %A0,%i1" CR_TAB
3161 "in %B0,%i1+1", op
, plen
, -2)
3163 : avr_asm_len ("lds %A0,%m1" CR_TAB
3164 "lds %B0,%m1+1", op
, plen
, -4);
3167 fatal_insn ("unknown move insn:",insn
);
3172 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3176 rtx base
= XEXP (src
, 0);
3177 int reg_dest
= true_regnum (dest
);
3178 int reg_base
= true_regnum (base
);
3186 if (reg_base
== REG_X
) /* (R26) */
3188 if (reg_dest
== REG_X
)
3189 /* "ld r26,-X" is undefined */
3190 return *l
=7, ("adiw r26,3" CR_TAB
3193 "ld __tmp_reg__,-X" CR_TAB
3196 "mov r27,__tmp_reg__");
3197 else if (reg_dest
== REG_X
- 2)
3198 return *l
=5, ("ld %A0,X+" CR_TAB
3200 "ld __tmp_reg__,X+" CR_TAB
3202 "mov %C0,__tmp_reg__");
3203 else if (reg_unused_after (insn
, base
))
3204 return *l
=4, ("ld %A0,X+" CR_TAB
3209 return *l
=5, ("ld %A0,X+" CR_TAB
3217 if (reg_dest
== reg_base
)
3218 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3219 "ldd %C0,%1+2" CR_TAB
3220 "ldd __tmp_reg__,%1+1" CR_TAB
3222 "mov %B0,__tmp_reg__");
3223 else if (reg_base
== reg_dest
+ 2)
3224 return *l
=5, ("ld %A0,%1" CR_TAB
3225 "ldd %B0,%1+1" CR_TAB
3226 "ldd __tmp_reg__,%1+2" CR_TAB
3227 "ldd %D0,%1+3" CR_TAB
3228 "mov %C0,__tmp_reg__");
3230 return *l
=4, ("ld %A0,%1" CR_TAB
3231 "ldd %B0,%1+1" CR_TAB
3232 "ldd %C0,%1+2" CR_TAB
3236 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3238 int disp
= INTVAL (XEXP (base
, 1));
3240 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3242 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3243 fatal_insn ("incorrect insn:",insn
);
3245 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3246 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3247 "ldd %A0,Y+60" CR_TAB
3248 "ldd %B0,Y+61" CR_TAB
3249 "ldd %C0,Y+62" CR_TAB
3250 "ldd %D0,Y+63" CR_TAB
3253 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3254 "sbci r29,hi8(-%o1)" CR_TAB
3256 "ldd %B0,Y+1" CR_TAB
3257 "ldd %C0,Y+2" CR_TAB
3258 "ldd %D0,Y+3" CR_TAB
3259 "subi r28,lo8(%o1)" CR_TAB
3260 "sbci r29,hi8(%o1)");
3263 reg_base
= true_regnum (XEXP (base
, 0));
3264 if (reg_base
== REG_X
)
3267 if (reg_dest
== REG_X
)
3270 /* "ld r26,-X" is undefined */
3271 return ("adiw r26,%o1+3" CR_TAB
3274 "ld __tmp_reg__,-X" CR_TAB
3277 "mov r27,__tmp_reg__");
3280 if (reg_dest
== REG_X
- 2)
3281 return ("adiw r26,%o1" CR_TAB
3284 "ld __tmp_reg__,X+" CR_TAB
3286 "mov r26,__tmp_reg__");
3288 return ("adiw r26,%o1" CR_TAB
3295 if (reg_dest
== reg_base
)
3296 return *l
=5, ("ldd %D0,%D1" CR_TAB
3297 "ldd %C0,%C1" CR_TAB
3298 "ldd __tmp_reg__,%B1" CR_TAB
3299 "ldd %A0,%A1" CR_TAB
3300 "mov %B0,__tmp_reg__");
3301 else if (reg_dest
== reg_base
- 2)
3302 return *l
=5, ("ldd %A0,%A1" CR_TAB
3303 "ldd %B0,%B1" CR_TAB
3304 "ldd __tmp_reg__,%C1" CR_TAB
3305 "ldd %D0,%D1" CR_TAB
3306 "mov %C0,__tmp_reg__");
3307 return *l
=4, ("ldd %A0,%A1" CR_TAB
3308 "ldd %B0,%B1" CR_TAB
3309 "ldd %C0,%C1" CR_TAB
3312 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3313 return *l
=4, ("ld %D0,%1" CR_TAB
3317 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3318 return *l
=4, ("ld %A0,%1" CR_TAB
3322 else if (CONSTANT_ADDRESS_P (base
))
3323 return *l
=8, ("lds %A0,%m1" CR_TAB
3324 "lds %B0,%m1+1" CR_TAB
3325 "lds %C0,%m1+2" CR_TAB
3328 fatal_insn ("unknown move insn:",insn
);
3333 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3337 rtx base
= XEXP (dest
, 0);
3338 int reg_base
= true_regnum (base
);
3339 int reg_src
= true_regnum (src
);
3345 if (CONSTANT_ADDRESS_P (base
))
3346 return *l
=8,("sts %m0,%A1" CR_TAB
3347 "sts %m0+1,%B1" CR_TAB
3348 "sts %m0+2,%C1" CR_TAB
3350 if (reg_base
> 0) /* (r) */
3352 if (reg_base
== REG_X
) /* (R26) */
3354 if (reg_src
== REG_X
)
3356 /* "st X+,r26" is undefined */
3357 if (reg_unused_after (insn
, base
))
3358 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3361 "st X+,__tmp_reg__" CR_TAB
3365 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3368 "st X+,__tmp_reg__" CR_TAB
3373 else if (reg_base
== reg_src
+ 2)
3375 if (reg_unused_after (insn
, base
))
3376 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3377 "mov __tmp_reg__,%D1" CR_TAB
3380 "st %0+,__zero_reg__" CR_TAB
3381 "st %0,__tmp_reg__" CR_TAB
3382 "clr __zero_reg__");
3384 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3385 "mov __tmp_reg__,%D1" CR_TAB
3388 "st %0+,__zero_reg__" CR_TAB
3389 "st %0,__tmp_reg__" CR_TAB
3390 "clr __zero_reg__" CR_TAB
3393 return *l
=5, ("st %0+,%A1" CR_TAB
3400 return *l
=4, ("st %0,%A1" CR_TAB
3401 "std %0+1,%B1" CR_TAB
3402 "std %0+2,%C1" CR_TAB
3405 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3407 int disp
= INTVAL (XEXP (base
, 1));
3408 reg_base
= REGNO (XEXP (base
, 0));
3409 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3411 if (reg_base
!= REG_Y
)
3412 fatal_insn ("incorrect insn:",insn
);
3414 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3415 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3416 "std Y+60,%A1" CR_TAB
3417 "std Y+61,%B1" CR_TAB
3418 "std Y+62,%C1" CR_TAB
3419 "std Y+63,%D1" CR_TAB
3422 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3423 "sbci r29,hi8(-%o0)" CR_TAB
3425 "std Y+1,%B1" CR_TAB
3426 "std Y+2,%C1" CR_TAB
3427 "std Y+3,%D1" CR_TAB
3428 "subi r28,lo8(%o0)" CR_TAB
3429 "sbci r29,hi8(%o0)");
3431 if (reg_base
== REG_X
)
3434 if (reg_src
== REG_X
)
3437 return ("mov __tmp_reg__,r26" CR_TAB
3438 "mov __zero_reg__,r27" CR_TAB
3439 "adiw r26,%o0" CR_TAB
3440 "st X+,__tmp_reg__" CR_TAB
3441 "st X+,__zero_reg__" CR_TAB
3444 "clr __zero_reg__" CR_TAB
3447 else if (reg_src
== REG_X
- 2)
3450 return ("mov __tmp_reg__,r26" CR_TAB
3451 "mov __zero_reg__,r27" CR_TAB
3452 "adiw r26,%o0" CR_TAB
3455 "st X+,__tmp_reg__" CR_TAB
3456 "st X,__zero_reg__" CR_TAB
3457 "clr __zero_reg__" CR_TAB
3461 return ("adiw r26,%o0" CR_TAB
3468 return *l
=4, ("std %A0,%A1" CR_TAB
3469 "std %B0,%B1" CR_TAB
3470 "std %C0,%C1" CR_TAB
3473 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3474 return *l
=4, ("st %0,%D1" CR_TAB
3478 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3479 return *l
=4, ("st %0,%A1" CR_TAB
3483 fatal_insn ("unknown move insn:",insn
);
3488 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3491 rtx dest
= operands
[0];
3492 rtx src
= operands
[1];
3495 if (avr_mem_flash_p (src
)
3496 || avr_mem_flash_p (dest
))
3498 return avr_out_lpm (insn
, operands
, real_l
);
3504 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest
)));
3507 if (REG_P (src
)) /* mov r,r */
3509 if (true_regnum (dest
) > true_regnum (src
))
3514 return ("movw %C0,%C1" CR_TAB
3518 return ("mov %D0,%D1" CR_TAB
3519 "mov %C0,%C1" CR_TAB
3520 "mov %B0,%B1" CR_TAB
3528 return ("movw %A0,%A1" CR_TAB
3532 return ("mov %A0,%A1" CR_TAB
3533 "mov %B0,%B1" CR_TAB
3534 "mov %C0,%C1" CR_TAB
3538 else if (CONSTANT_P (src
))
3540 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3542 else if (MEM_P (src
))
3543 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3545 else if (MEM_P (dest
))
3549 if (src
== CONST0_RTX (GET_MODE (dest
)))
3550 operands
[1] = zero_reg_rtx
;
3552 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3555 output_asm_insn (templ
, operands
);
3560 fatal_insn ("invalid insn:", insn
);
3565 /* Handle loads of 24-bit types from memory to register. */
3568 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3572 rtx base
= XEXP (src
, 0);
3573 int reg_dest
= true_regnum (dest
);
3574 int reg_base
= true_regnum (base
);
3578 if (reg_base
== REG_X
) /* (R26) */
3580 if (reg_dest
== REG_X
)
3581 /* "ld r26,-X" is undefined */
3582 return avr_asm_len ("adiw r26,2" CR_TAB
3584 "ld __tmp_reg__,-X" CR_TAB
3587 "mov r27,__tmp_reg__", op
, plen
, -6);
3590 avr_asm_len ("ld %A0,X+" CR_TAB
3592 "ld %C0,X", op
, plen
, -3);
3594 if (reg_dest
!= REG_X
- 2
3595 && !reg_unused_after (insn
, base
))
3597 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3603 else /* reg_base != REG_X */
3605 if (reg_dest
== reg_base
)
3606 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3607 "ldd __tmp_reg__,%1+1" CR_TAB
3609 "mov %B0,__tmp_reg__", op
, plen
, -4);
3611 return avr_asm_len ("ld %A0,%1" CR_TAB
3612 "ldd %B0,%1+1" CR_TAB
3613 "ldd %C0,%1+2", op
, plen
, -3);
3616 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3618 int disp
= INTVAL (XEXP (base
, 1));
3620 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3622 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3623 fatal_insn ("incorrect insn:",insn
);
3625 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3626 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3627 "ldd %A0,Y+61" CR_TAB
3628 "ldd %B0,Y+62" CR_TAB
3629 "ldd %C0,Y+63" CR_TAB
3630 "sbiw r28,%o1-61", op
, plen
, -5);
3632 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3633 "sbci r29,hi8(-%o1)" CR_TAB
3635 "ldd %B0,Y+1" CR_TAB
3636 "ldd %C0,Y+2" CR_TAB
3637 "subi r28,lo8(%o1)" CR_TAB
3638 "sbci r29,hi8(%o1)", op
, plen
, -7);
3641 reg_base
= true_regnum (XEXP (base
, 0));
3642 if (reg_base
== REG_X
)
3645 if (reg_dest
== REG_X
)
3647 /* "ld r26,-X" is undefined */
3648 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3650 "ld __tmp_reg__,-X" CR_TAB
3653 "mov r27,__tmp_reg__", op
, plen
, -6);
3656 avr_asm_len ("adiw r26,%o1" CR_TAB
3659 "ld %C0,X", op
, plen
, -4);
3661 if (reg_dest
!= REG_W
3662 && !reg_unused_after (insn
, XEXP (base
, 0)))
3663 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3668 if (reg_dest
== reg_base
)
3669 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3670 "ldd __tmp_reg__,%B1" CR_TAB
3671 "ldd %A0,%A1" CR_TAB
3672 "mov %B0,__tmp_reg__", op
, plen
, -4);
3674 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3675 "ldd %B0,%B1" CR_TAB
3676 "ldd %C0,%C1", op
, plen
, -3);
3678 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3679 return avr_asm_len ("ld %C0,%1" CR_TAB
3681 "ld %A0,%1", op
, plen
, -3);
3682 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3683 return avr_asm_len ("ld %A0,%1" CR_TAB
3685 "ld %C0,%1", op
, plen
, -3);
3687 else if (CONSTANT_ADDRESS_P (base
))
3688 return avr_asm_len ("lds %A0,%m1" CR_TAB
3689 "lds %B0,%m1+1" CR_TAB
3690 "lds %C0,%m1+2", op
, plen
, -6);
3692 fatal_insn ("unknown move insn:",insn
);
3696 /* Handle store of 24-bit type from register or zero to memory. */
3699 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3703 rtx base
= XEXP (dest
, 0);
3704 int reg_base
= true_regnum (base
);
3706 if (CONSTANT_ADDRESS_P (base
))
3707 return avr_asm_len ("sts %m0,%A1" CR_TAB
3708 "sts %m0+1,%B1" CR_TAB
3709 "sts %m0+2,%C1", op
, plen
, -6);
3711 if (reg_base
> 0) /* (r) */
3713 if (reg_base
== REG_X
) /* (R26) */
3715 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3717 avr_asm_len ("st %0+,%A1" CR_TAB
3719 "st %0,%C1", op
, plen
, -3);
3721 if (!reg_unused_after (insn
, base
))
3722 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3727 return avr_asm_len ("st %0,%A1" CR_TAB
3728 "std %0+1,%B1" CR_TAB
3729 "std %0+2,%C1", op
, plen
, -3);
3731 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3733 int disp
= INTVAL (XEXP (base
, 1));
3734 reg_base
= REGNO (XEXP (base
, 0));
3736 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3738 if (reg_base
!= REG_Y
)
3739 fatal_insn ("incorrect insn:",insn
);
3741 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3742 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3743 "std Y+61,%A1" CR_TAB
3744 "std Y+62,%B1" CR_TAB
3745 "std Y+63,%C1" CR_TAB
3746 "sbiw r28,%o0-60", op
, plen
, -5);
3748 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3749 "sbci r29,hi8(-%o0)" CR_TAB
3751 "std Y+1,%B1" CR_TAB
3752 "std Y+2,%C1" CR_TAB
3753 "subi r28,lo8(%o0)" CR_TAB
3754 "sbci r29,hi8(%o0)", op
, plen
, -7);
3756 if (reg_base
== REG_X
)
3759 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
3761 avr_asm_len ("adiw r26,%o0" CR_TAB
3764 "st X,%C1", op
, plen
, -4);
3766 if (!reg_unused_after (insn
, XEXP (base
, 0)))
3767 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
3772 return avr_asm_len ("std %A0,%A1" CR_TAB
3773 "std %B0,%B1" CR_TAB
3774 "std %C0,%C1", op
, plen
, -3);
3776 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3777 return avr_asm_len ("st %0,%C1" CR_TAB
3779 "st %0,%A1", op
, plen
, -3);
3780 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3781 return avr_asm_len ("st %0,%A1" CR_TAB
3783 "st %0,%C1", op
, plen
, -3);
3785 fatal_insn ("unknown move insn:",insn
);
3790 /* Move around 24-bit stuff. */
3793 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
3798 if (avr_mem_flash_p (src
)
3799 || avr_mem_flash_p (dest
))
3801 return avr_out_lpm (insn
, op
, plen
);
3804 if (register_operand (dest
, VOIDmode
))
3806 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3808 if (true_regnum (dest
) > true_regnum (src
))
3810 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
3813 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
3815 return avr_asm_len ("mov %B0,%B1" CR_TAB
3816 "mov %A0,%A1", op
, plen
, 2);
3821 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
3823 avr_asm_len ("mov %A0,%A1" CR_TAB
3824 "mov %B0,%B1", op
, plen
, -2);
3826 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
3829 else if (CONSTANT_P (src
))
3831 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
3833 else if (MEM_P (src
))
3834 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
3836 else if (MEM_P (dest
))
3841 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3843 return avr_out_store_psi (insn
, xop
, plen
);
3846 fatal_insn ("invalid insn:", insn
);
3852 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
3856 rtx x
= XEXP (dest
, 0);
3858 if (CONSTANT_ADDRESS_P (x
))
3860 return optimize
> 0 && io_address_operand (x
, QImode
)
3861 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
3862 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
3864 else if (GET_CODE (x
) == PLUS
3865 && REG_P (XEXP (x
, 0))
3866 && CONST_INT_P (XEXP (x
, 1)))
3868 /* memory access by reg+disp */
3870 int disp
= INTVAL (XEXP (x
, 1));
3872 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
3874 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3875 fatal_insn ("incorrect insn:",insn
);
3877 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3878 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3879 "std Y+63,%1" CR_TAB
3880 "sbiw r28,%o0-63", op
, plen
, -3);
3882 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3883 "sbci r29,hi8(-%o0)" CR_TAB
3885 "subi r28,lo8(%o0)" CR_TAB
3886 "sbci r29,hi8(%o0)", op
, plen
, -5);
3888 else if (REGNO (XEXP (x
,0)) == REG_X
)
3890 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
3892 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3893 "adiw r26,%o0" CR_TAB
3894 "st X,__tmp_reg__", op
, plen
, -3);
3898 avr_asm_len ("adiw r26,%o0" CR_TAB
3899 "st X,%1", op
, plen
, -2);
3902 if (!reg_unused_after (insn
, XEXP (x
,0)))
3903 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
3908 return avr_asm_len ("std %0,%1", op
, plen
, -1);
3911 return avr_asm_len ("st %0,%1", op
, plen
, -1);
3915 /* Helper for the next function for XMEGA. It does the same
3916 but with low byte first. */
3919 avr_out_movhi_mr_r_xmega (rtx insn
, rtx op
[], int *plen
)
3923 rtx base
= XEXP (dest
, 0);
3924 int reg_base
= true_regnum (base
);
3925 int reg_src
= true_regnum (src
);
3927 /* "volatile" forces writing low byte first, even if less efficient,
3928 for correct operation with 16-bit I/O registers like SP. */
3929 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
3931 if (CONSTANT_ADDRESS_P (base
))
3932 return optimize
> 0 && io_address_operand (base
, HImode
)
3933 ? avr_asm_len ("out %i0,%A1" CR_TAB
3934 "out %i0+1,%B1", op
, plen
, -2)
3936 : avr_asm_len ("sts %m0,%A1" CR_TAB
3937 "sts %m0+1,%B1", op
, plen
, -4);
3941 if (reg_base
!= REG_X
)
3942 return avr_asm_len ("st %0,%A1" CR_TAB
3943 "std %0+1,%B1", op
, plen
, -2);
3945 if (reg_src
== REG_X
)
3946 /* "st X+,r26" and "st -X,r26" are undefined. */
3947 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3950 "st X,__tmp_reg__", op
, plen
, -4);
3952 avr_asm_len ("st X+,%A1" CR_TAB
3953 "st X,%B1", op
, plen
, -2);
3955 return reg_unused_after (insn
, base
)
3957 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3959 else if (GET_CODE (base
) == PLUS
)
3961 int disp
= INTVAL (XEXP (base
, 1));
3962 reg_base
= REGNO (XEXP (base
, 0));
3963 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3965 if (reg_base
!= REG_Y
)
3966 fatal_insn ("incorrect insn:",insn
);
3968 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
3969 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3970 "std Y+62,%A1" CR_TAB
3971 "std Y+63,%B1" CR_TAB
3972 "sbiw r28,%o0-62", op
, plen
, -4)
3974 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3975 "sbci r29,hi8(-%o0)" CR_TAB
3977 "std Y+1,%B1" CR_TAB
3978 "subi r28,lo8(%o0)" CR_TAB
3979 "sbci r29,hi8(%o0)", op
, plen
, -6);
3982 if (reg_base
!= REG_X
)
3983 return avr_asm_len ("std %A0,%A1" CR_TAB
3984 "std %B0,%B1", op
, plen
, -2);
3986 return reg_src
== REG_X
3987 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3988 "mov __zero_reg__,r27" CR_TAB
3989 "adiw r26,%o0" CR_TAB
3990 "st X+,__tmp_reg__" CR_TAB
3991 "st X,__zero_reg__" CR_TAB
3992 "clr __zero_reg__" CR_TAB
3993 "sbiw r26,%o0+1", op
, plen
, -7)
3995 : avr_asm_len ("adiw r26,%o0" CR_TAB
3998 "sbiw r26,%o0+1", op
, plen
, -4);
4000 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4002 if (!mem_volatile_p
)
4003 return avr_asm_len ("st %0,%B1" CR_TAB
4004 "st %0,%A1", op
, plen
, -2);
4006 return REGNO (XEXP (base
, 0)) == REG_X
4007 ? avr_asm_len ("sbiw r26,2" CR_TAB
4010 "sbiw r26,1", op
, plen
, -4)
4012 : avr_asm_len ("sbiw %r0,2" CR_TAB
4014 "std %p0+1,%B1", op
, plen
, -3);
4016 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4018 return avr_asm_len ("st %0,%A1" CR_TAB
4019 "st %0,%B1", op
, plen
, -2);
4022 fatal_insn ("unknown move insn:",insn
);
4028 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
4032 rtx base
= XEXP (dest
, 0);
4033 int reg_base
= true_regnum (base
);
4034 int reg_src
= true_regnum (src
);
4037 /* "volatile" forces writing high-byte first (no-xmega) resp.
4038 low-byte first (xmega) even if less efficient, for correct
4039 operation with 16-bit I/O registers like. */
4042 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
4044 mem_volatile_p
= MEM_VOLATILE_P (dest
);
4046 if (CONSTANT_ADDRESS_P (base
))
4047 return optimize
> 0 && io_address_operand (base
, HImode
)
4048 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4049 "out %i0,%A1", op
, plen
, -2)
4051 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4052 "sts %m0,%A1", op
, plen
, -4);
4056 if (reg_base
!= REG_X
)
4057 return avr_asm_len ("std %0+1,%B1" CR_TAB
4058 "st %0,%A1", op
, plen
, -2);
4060 if (reg_src
== REG_X
)
4061 /* "st X+,r26" and "st -X,r26" are undefined. */
4062 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4063 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4066 "st X,__tmp_reg__", op
, plen
, -4)
4068 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4070 "st X,__tmp_reg__" CR_TAB
4072 "st X,r26", op
, plen
, -5);
4074 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
4075 ? avr_asm_len ("st X+,%A1" CR_TAB
4076 "st X,%B1", op
, plen
, -2)
4077 : avr_asm_len ("adiw r26,1" CR_TAB
4079 "st -X,%A1", op
, plen
, -3);
4081 else if (GET_CODE (base
) == PLUS
)
4083 int disp
= INTVAL (XEXP (base
, 1));
4084 reg_base
= REGNO (XEXP (base
, 0));
4085 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4087 if (reg_base
!= REG_Y
)
4088 fatal_insn ("incorrect insn:",insn
);
4090 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4091 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4092 "std Y+63,%B1" CR_TAB
4093 "std Y+62,%A1" CR_TAB
4094 "sbiw r28,%o0-62", op
, plen
, -4)
4096 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4097 "sbci r29,hi8(-%o0)" CR_TAB
4098 "std Y+1,%B1" CR_TAB
4100 "subi r28,lo8(%o0)" CR_TAB
4101 "sbci r29,hi8(%o0)", op
, plen
, -6);
4104 if (reg_base
!= REG_X
)
4105 return avr_asm_len ("std %B0,%B1" CR_TAB
4106 "std %A0,%A1", op
, plen
, -2);
4108 return reg_src
== REG_X
4109 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4110 "mov __zero_reg__,r27" CR_TAB
4111 "adiw r26,%o0+1" CR_TAB
4112 "st X,__zero_reg__" CR_TAB
4113 "st -X,__tmp_reg__" CR_TAB
4114 "clr __zero_reg__" CR_TAB
4115 "sbiw r26,%o0", op
, plen
, -7)
4117 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4120 "sbiw r26,%o0", op
, plen
, -4);
4122 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4124 return avr_asm_len ("st %0,%B1" CR_TAB
4125 "st %0,%A1", op
, plen
, -2);
4127 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4129 if (!mem_volatile_p
)
4130 return avr_asm_len ("st %0,%A1" CR_TAB
4131 "st %0,%B1", op
, plen
, -2);
4133 return REGNO (XEXP (base
, 0)) == REG_X
4134 ? avr_asm_len ("adiw r26,1" CR_TAB
4137 "adiw r26,2", op
, plen
, -4)
4139 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4141 "adiw %r0,2", op
, plen
, -3);
4143 fatal_insn ("unknown move insn:",insn
);
4147 /* Return 1 if frame pointer for current function required. */
4150 avr_frame_pointer_required_p (void)
4152 return (cfun
->calls_alloca
4153 || cfun
->calls_setjmp
4154 || cfun
->has_nonlocal_label
4155 || crtl
->args
.info
.nregs
== 0
4156 || get_frame_size () > 0);
4159 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4162 compare_condition (rtx insn
)
4164 rtx next
= next_real_insn (insn
);
4166 if (next
&& JUMP_P (next
))
4168 rtx pat
= PATTERN (next
);
4169 rtx src
= SET_SRC (pat
);
4171 if (IF_THEN_ELSE
== GET_CODE (src
))
4172 return GET_CODE (XEXP (src
, 0));
4179 /* Returns true iff INSN is a tst insn that only tests the sign. */
4182 compare_sign_p (rtx insn
)
4184 RTX_CODE cond
= compare_condition (insn
);
4185 return (cond
== GE
|| cond
== LT
);
4189 /* Returns true iff the next insn is a JUMP_INSN with a condition
4190 that needs to be swapped (GT, GTU, LE, LEU). */
4193 compare_diff_p (rtx insn
)
4195 RTX_CODE cond
= compare_condition (insn
);
4196 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4199 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4202 compare_eq_p (rtx insn
)
4204 RTX_CODE cond
= compare_condition (insn
);
4205 return (cond
== EQ
|| cond
== NE
);
4209 /* Output compare instruction
4211 compare (XOP[0], XOP[1])
4213 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4214 XOP[2] is an 8-bit scratch register as needed.
4216 PLEN == NULL: Output instructions.
4217 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4218 Don't output anything. */
4221 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4223 /* Register to compare and value to compare against. */
4227 /* MODE of the comparison. */
4228 enum machine_mode mode
;
4230 /* Number of bytes to operate on. */
4231 int i
, n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
4233 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4234 int clobber_val
= -1;
4236 /* Map fixed mode operands to integer operands with the same binary
4237 representation. They are easier to handle in the remainder. */
4239 if (CONST_FIXED_P (xval
))
4241 xreg
= avr_to_int_mode (xop
[0]);
4242 xval
= avr_to_int_mode (xop
[1]);
4245 mode
= GET_MODE (xreg
);
4247 gcc_assert (REG_P (xreg
));
4248 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4249 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4254 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4255 against 0 by ORing the bytes. This is one instruction shorter.
4256 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4257 and therefore don't use this. */
4259 if (!test_hard_reg_class (LD_REGS
, xreg
)
4260 && compare_eq_p (insn
)
4261 && reg_unused_after (insn
, xreg
))
4263 if (xval
== const1_rtx
)
4265 avr_asm_len ("dec %A0" CR_TAB
4266 "or %A0,%B0", xop
, plen
, 2);
4269 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4272 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4276 else if (xval
== constm1_rtx
)
4279 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4282 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4284 return avr_asm_len ("and %A0,%B0" CR_TAB
4285 "com %A0", xop
, plen
, 2);
4289 for (i
= 0; i
< n_bytes
; i
++)
4291 /* We compare byte-wise. */
4292 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4293 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4295 /* 8-bit value to compare with this byte. */
4296 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4298 /* Registers R16..R31 can operate with immediate. */
4299 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4302 xop
[1] = gen_int_mode (val8
, QImode
);
4304 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4307 && test_hard_reg_class (ADDW_REGS
, reg8
))
4309 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4311 if (IN_RANGE (val16
, 0, 63)
4313 || reg_unused_after (insn
, xreg
)))
4315 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4321 && IN_RANGE (val16
, -63, -1)
4322 && compare_eq_p (insn
)
4323 && reg_unused_after (insn
, xreg
))
4325 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4329 /* Comparing against 0 is easy. */
4334 ? "cp %0,__zero_reg__"
4335 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4339 /* Upper registers can compare and subtract-with-carry immediates.
4340 Notice that compare instructions do the same as respective subtract
4341 instruction; the only difference is that comparisons don't write
4342 the result back to the target register. */
4348 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4351 else if (reg_unused_after (insn
, xreg
))
4353 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4358 /* Must load the value into the scratch register. */
4360 gcc_assert (REG_P (xop
[2]));
4362 if (clobber_val
!= (int) val8
)
4363 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4364 clobber_val
= (int) val8
;
4368 : "cpc %0,%2", xop
, plen
, 1);
4375 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4378 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4382 xop
[0] = gen_rtx_REG (DImode
, 18);
4386 return avr_out_compare (insn
, xop
, plen
);
4389 /* Output test instruction for HImode. */
4392 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4394 if (compare_sign_p (insn
))
4396 avr_asm_len ("tst %B0", op
, plen
, -1);
4398 else if (reg_unused_after (insn
, op
[0])
4399 && compare_eq_p (insn
))
4401 /* Faster than sbiw if we can clobber the operand. */
4402 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4406 avr_out_compare (insn
, op
, plen
);
4413 /* Output test instruction for PSImode. */
4416 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4418 if (compare_sign_p (insn
))
4420 avr_asm_len ("tst %C0", op
, plen
, -1);
4422 else if (reg_unused_after (insn
, op
[0])
4423 && compare_eq_p (insn
))
4425 /* Faster than sbiw if we can clobber the operand. */
4426 avr_asm_len ("or %A0,%B0" CR_TAB
4427 "or %A0,%C0", op
, plen
, -2);
4431 avr_out_compare (insn
, op
, plen
);
4438 /* Output test instruction for SImode. */
4441 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4443 if (compare_sign_p (insn
))
4445 avr_asm_len ("tst %D0", op
, plen
, -1);
4447 else if (reg_unused_after (insn
, op
[0])
4448 && compare_eq_p (insn
))
4450 /* Faster than sbiw if we can clobber the operand. */
4451 avr_asm_len ("or %A0,%B0" CR_TAB
4453 "or %A0,%D0", op
, plen
, -3);
4457 avr_out_compare (insn
, op
, plen
);
4464 /* Generate asm equivalent for various shifts. This only handles cases
4465 that are not already carefully hand-optimized in ?sh??i3_out.
4467 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4468 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4469 OPERANDS[3] is a QImode scratch register from LD regs if
4470 available and SCRATCH, otherwise (no scratch available)
4472 TEMPL is an assembler template that shifts by one position.
4473 T_LEN is the length of this template. */
4476 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4477 int *plen
, int t_len
)
4479 bool second_label
= true;
4480 bool saved_in_tmp
= false;
4481 bool use_zero_reg
= false;
4484 op
[0] = operands
[0];
4485 op
[1] = operands
[1];
4486 op
[2] = operands
[2];
4487 op
[3] = operands
[3];
4492 if (CONST_INT_P (operands
[2]))
4494 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4495 && REG_P (operands
[3]));
4496 int count
= INTVAL (operands
[2]);
4497 int max_len
= 10; /* If larger than this, always use a loop. */
4502 if (count
< 8 && !scratch
)
4503 use_zero_reg
= true;
4506 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4508 if (t_len
* count
<= max_len
)
4510 /* Output shifts inline with no loop - faster. */
4513 avr_asm_len (templ
, op
, plen
, t_len
);
4520 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4522 else if (use_zero_reg
)
4524 /* Hack to save one word: use __zero_reg__ as loop counter.
4525 Set one bit, then shift in a loop until it is 0 again. */
4527 op
[3] = zero_reg_rtx
;
4529 avr_asm_len ("set" CR_TAB
4530 "bld %3,%2-1", op
, plen
, 2);
4534 /* No scratch register available, use one from LD_REGS (saved in
4535 __tmp_reg__) that doesn't overlap with registers to shift. */
4537 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4538 op
[4] = tmp_reg_rtx
;
4539 saved_in_tmp
= true;
4541 avr_asm_len ("mov %4,%3" CR_TAB
4542 "ldi %3,%2", op
, plen
, 2);
4545 second_label
= false;
4547 else if (MEM_P (op
[2]))
4551 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4554 out_movqi_r_mr (insn
, op_mov
, plen
);
4556 else if (register_operand (op
[2], QImode
))
4560 if (!reg_unused_after (insn
, op
[2])
4561 || reg_overlap_mentioned_p (op
[0], op
[2]))
4563 op
[3] = tmp_reg_rtx
;
4564 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4568 fatal_insn ("bad shift insn:", insn
);
4571 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4573 avr_asm_len ("1:", op
, plen
, 0);
4574 avr_asm_len (templ
, op
, plen
, t_len
);
4577 avr_asm_len ("2:", op
, plen
, 0);
4579 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4580 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4583 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4587 /* 8bit shift left ((char)x << i) */
4590 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4592 if (GET_CODE (operands
[2]) == CONST_INT
)
4599 switch (INTVAL (operands
[2]))
4602 if (INTVAL (operands
[2]) < 8)
4614 return ("lsl %0" CR_TAB
4619 return ("lsl %0" CR_TAB
4624 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4627 return ("swap %0" CR_TAB
4631 return ("lsl %0" CR_TAB
4637 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4640 return ("swap %0" CR_TAB
4645 return ("lsl %0" CR_TAB
4652 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4655 return ("swap %0" CR_TAB
4661 return ("lsl %0" CR_TAB
4670 return ("ror %0" CR_TAB
4675 else if (CONSTANT_P (operands
[2]))
4676 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4678 out_shift_with_cnt ("lsl %0",
4679 insn
, operands
, len
, 1);
4684 /* 16bit shift left ((short)x << i) */
4687 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4689 if (GET_CODE (operands
[2]) == CONST_INT
)
4691 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4692 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4699 switch (INTVAL (operands
[2]))
4702 if (INTVAL (operands
[2]) < 16)
4706 return ("clr %B0" CR_TAB
4710 if (optimize_size
&& scratch
)
4715 return ("swap %A0" CR_TAB
4717 "andi %B0,0xf0" CR_TAB
4718 "eor %B0,%A0" CR_TAB
4719 "andi %A0,0xf0" CR_TAB
4725 return ("swap %A0" CR_TAB
4727 "ldi %3,0xf0" CR_TAB
4729 "eor %B0,%A0" CR_TAB
4733 break; /* optimize_size ? 6 : 8 */
4737 break; /* scratch ? 5 : 6 */
4741 return ("lsl %A0" CR_TAB
4745 "andi %B0,0xf0" CR_TAB
4746 "eor %B0,%A0" CR_TAB
4747 "andi %A0,0xf0" CR_TAB
4753 return ("lsl %A0" CR_TAB
4757 "ldi %3,0xf0" CR_TAB
4759 "eor %B0,%A0" CR_TAB
4767 break; /* scratch ? 5 : 6 */
4769 return ("clr __tmp_reg__" CR_TAB
4772 "ror __tmp_reg__" CR_TAB
4775 "ror __tmp_reg__" CR_TAB
4776 "mov %B0,%A0" CR_TAB
4777 "mov %A0,__tmp_reg__");
4781 return ("lsr %B0" CR_TAB
4782 "mov %B0,%A0" CR_TAB
4788 return *len
= 2, ("mov %B0,%A1" CR_TAB
4793 return ("mov %B0,%A0" CR_TAB
4799 return ("mov %B0,%A0" CR_TAB
4806 return ("mov %B0,%A0" CR_TAB
4816 return ("mov %B0,%A0" CR_TAB
4824 return ("mov %B0,%A0" CR_TAB
4827 "ldi %3,0xf0" CR_TAB
4831 return ("mov %B0,%A0" CR_TAB
4842 return ("mov %B0,%A0" CR_TAB
4848 if (AVR_HAVE_MUL
&& scratch
)
4851 return ("ldi %3,0x20" CR_TAB
4855 "clr __zero_reg__");
4857 if (optimize_size
&& scratch
)
4862 return ("mov %B0,%A0" CR_TAB
4866 "ldi %3,0xe0" CR_TAB
4872 return ("set" CR_TAB
4877 "clr __zero_reg__");
4880 return ("mov %B0,%A0" CR_TAB
4889 if (AVR_HAVE_MUL
&& ldi_ok
)
4892 return ("ldi %B0,0x40" CR_TAB
4893 "mul %A0,%B0" CR_TAB
4896 "clr __zero_reg__");
4898 if (AVR_HAVE_MUL
&& scratch
)
4901 return ("ldi %3,0x40" CR_TAB
4905 "clr __zero_reg__");
4907 if (optimize_size
&& ldi_ok
)
4910 return ("mov %B0,%A0" CR_TAB
4911 "ldi %A0,6" "\n1:\t"
4916 if (optimize_size
&& scratch
)
4919 return ("clr %B0" CR_TAB
4928 return ("clr %B0" CR_TAB
4935 out_shift_with_cnt ("lsl %A0" CR_TAB
4936 "rol %B0", insn
, operands
, len
, 2);
4941 /* 24-bit shift left */
4944 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
4949 if (CONST_INT_P (op
[2]))
4951 switch (INTVAL (op
[2]))
4954 if (INTVAL (op
[2]) < 24)
4957 return avr_asm_len ("clr %A0" CR_TAB
4959 "clr %C0", op
, plen
, 3);
4963 int reg0
= REGNO (op
[0]);
4964 int reg1
= REGNO (op
[1]);
4967 return avr_asm_len ("mov %C0,%B1" CR_TAB
4968 "mov %B0,%A1" CR_TAB
4969 "clr %A0", op
, plen
, 3);
4971 return avr_asm_len ("clr %A0" CR_TAB
4972 "mov %B0,%A1" CR_TAB
4973 "mov %C0,%B1", op
, plen
, 3);
4978 int reg0
= REGNO (op
[0]);
4979 int reg1
= REGNO (op
[1]);
4981 if (reg0
+ 2 != reg1
)
4982 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
4984 return avr_asm_len ("clr %B0" CR_TAB
4985 "clr %A0", op
, plen
, 2);
4989 return avr_asm_len ("clr %C0" CR_TAB
4993 "clr %A0", op
, plen
, 5);
4997 out_shift_with_cnt ("lsl %A0" CR_TAB
4999 "rol %C0", insn
, op
, plen
, 3);
5004 /* 32bit shift left ((long)x << i) */
5007 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
5009 if (GET_CODE (operands
[2]) == CONST_INT
)
5017 switch (INTVAL (operands
[2]))
5020 if (INTVAL (operands
[2]) < 32)
5024 return *len
= 3, ("clr %D0" CR_TAB
5028 return ("clr %D0" CR_TAB
5035 int reg0
= true_regnum (operands
[0]);
5036 int reg1
= true_regnum (operands
[1]);
5039 return ("mov %D0,%C1" CR_TAB
5040 "mov %C0,%B1" CR_TAB
5041 "mov %B0,%A1" CR_TAB
5044 return ("clr %A0" CR_TAB
5045 "mov %B0,%A1" CR_TAB
5046 "mov %C0,%B1" CR_TAB
5052 int reg0
= true_regnum (operands
[0]);
5053 int reg1
= true_regnum (operands
[1]);
5054 if (reg0
+ 2 == reg1
)
5055 return *len
= 2, ("clr %B0" CR_TAB
5058 return *len
= 3, ("movw %C0,%A1" CR_TAB
5062 return *len
= 4, ("mov %C0,%A1" CR_TAB
5063 "mov %D0,%B1" CR_TAB
5070 return ("mov %D0,%A1" CR_TAB
5077 return ("clr %D0" CR_TAB
5086 out_shift_with_cnt ("lsl %A0" CR_TAB
5089 "rol %D0", insn
, operands
, len
, 4);
5093 /* 8bit arithmetic shift right ((signed char)x >> i) */
5096 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
5098 if (GET_CODE (operands
[2]) == CONST_INT
)
5105 switch (INTVAL (operands
[2]))
5113 return ("asr %0" CR_TAB
5118 return ("asr %0" CR_TAB
5124 return ("asr %0" CR_TAB
5131 return ("asr %0" CR_TAB
5139 return ("bst %0,6" CR_TAB
5145 if (INTVAL (operands
[2]) < 8)
5152 return ("lsl %0" CR_TAB
5156 else if (CONSTANT_P (operands
[2]))
5157 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5159 out_shift_with_cnt ("asr %0",
5160 insn
, operands
, len
, 1);
5165 /* 16bit arithmetic shift right ((signed short)x >> i) */
5168 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
5170 if (GET_CODE (operands
[2]) == CONST_INT
)
5172 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5173 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5180 switch (INTVAL (operands
[2]))
5184 /* XXX try to optimize this too? */
5189 break; /* scratch ? 5 : 6 */
5191 return ("mov __tmp_reg__,%A0" CR_TAB
5192 "mov %A0,%B0" CR_TAB
5193 "lsl __tmp_reg__" CR_TAB
5195 "sbc %B0,%B0" CR_TAB
5196 "lsl __tmp_reg__" CR_TAB
5202 return ("lsl %A0" CR_TAB
5203 "mov %A0,%B0" CR_TAB
5209 int reg0
= true_regnum (operands
[0]);
5210 int reg1
= true_regnum (operands
[1]);
5213 return *len
= 3, ("mov %A0,%B0" CR_TAB
5217 return *len
= 4, ("mov %A0,%B1" CR_TAB
5225 return ("mov %A0,%B0" CR_TAB
5227 "sbc %B0,%B0" CR_TAB
5232 return ("mov %A0,%B0" CR_TAB
5234 "sbc %B0,%B0" CR_TAB
5239 if (AVR_HAVE_MUL
&& ldi_ok
)
5242 return ("ldi %A0,0x20" CR_TAB
5243 "muls %B0,%A0" CR_TAB
5245 "sbc %B0,%B0" CR_TAB
5246 "clr __zero_reg__");
5248 if (optimize_size
&& scratch
)
5251 return ("mov %A0,%B0" CR_TAB
5253 "sbc %B0,%B0" CR_TAB
5259 if (AVR_HAVE_MUL
&& ldi_ok
)
5262 return ("ldi %A0,0x10" CR_TAB
5263 "muls %B0,%A0" CR_TAB
5265 "sbc %B0,%B0" CR_TAB
5266 "clr __zero_reg__");
5268 if (optimize_size
&& scratch
)
5271 return ("mov %A0,%B0" CR_TAB
5273 "sbc %B0,%B0" CR_TAB
5280 if (AVR_HAVE_MUL
&& ldi_ok
)
5283 return ("ldi %A0,0x08" CR_TAB
5284 "muls %B0,%A0" CR_TAB
5286 "sbc %B0,%B0" CR_TAB
5287 "clr __zero_reg__");
5290 break; /* scratch ? 5 : 7 */
5292 return ("mov %A0,%B0" CR_TAB
5294 "sbc %B0,%B0" CR_TAB
5303 return ("lsl %B0" CR_TAB
5304 "sbc %A0,%A0" CR_TAB
5306 "mov %B0,%A0" CR_TAB
5310 if (INTVAL (operands
[2]) < 16)
5316 return *len
= 3, ("lsl %B0" CR_TAB
5317 "sbc %A0,%A0" CR_TAB
5322 out_shift_with_cnt ("asr %B0" CR_TAB
5323 "ror %A0", insn
, operands
, len
, 2);
5328 /* 24-bit arithmetic shift right */
5331 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5333 int dest
= REGNO (op
[0]);
5334 int src
= REGNO (op
[1]);
5336 if (CONST_INT_P (op
[2]))
5341 switch (INTVAL (op
[2]))
5345 return avr_asm_len ("mov %A0,%B1" CR_TAB
5346 "mov %B0,%C1" CR_TAB
5349 "dec %C0", op
, plen
, 5);
5351 return avr_asm_len ("clr %C0" CR_TAB
5354 "mov %B0,%C1" CR_TAB
5355 "mov %A0,%B1", op
, plen
, 5);
5358 if (dest
!= src
+ 2)
5359 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5361 return avr_asm_len ("clr %B0" CR_TAB
5364 "mov %C0,%B0", op
, plen
, 4);
5367 if (INTVAL (op
[2]) < 24)
5373 return avr_asm_len ("lsl %C0" CR_TAB
5374 "sbc %A0,%A0" CR_TAB
5375 "mov %B0,%A0" CR_TAB
5376 "mov %C0,%A0", op
, plen
, 4);
5380 out_shift_with_cnt ("asr %C0" CR_TAB
5382 "ror %A0", insn
, op
, plen
, 3);
5387 /* 32bit arithmetic shift right ((signed long)x >> i) */
5390 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5392 if (GET_CODE (operands
[2]) == CONST_INT
)
5400 switch (INTVAL (operands
[2]))
5404 int reg0
= true_regnum (operands
[0]);
5405 int reg1
= true_regnum (operands
[1]);
5408 return ("mov %A0,%B1" CR_TAB
5409 "mov %B0,%C1" CR_TAB
5410 "mov %C0,%D1" CR_TAB
5415 return ("clr %D0" CR_TAB
5418 "mov %C0,%D1" CR_TAB
5419 "mov %B0,%C1" CR_TAB
5425 int reg0
= true_regnum (operands
[0]);
5426 int reg1
= true_regnum (operands
[1]);
5428 if (reg0
== reg1
+ 2)
5429 return *len
= 4, ("clr %D0" CR_TAB
5434 return *len
= 5, ("movw %A0,%C1" CR_TAB
5440 return *len
= 6, ("mov %B0,%D1" CR_TAB
5441 "mov %A0,%C1" CR_TAB
5449 return *len
= 6, ("mov %A0,%D1" CR_TAB
5453 "mov %B0,%D0" CR_TAB
5457 if (INTVAL (operands
[2]) < 32)
5464 return *len
= 4, ("lsl %D0" CR_TAB
5465 "sbc %A0,%A0" CR_TAB
5466 "mov %B0,%A0" CR_TAB
5469 return *len
= 5, ("lsl %D0" CR_TAB
5470 "sbc %A0,%A0" CR_TAB
5471 "mov %B0,%A0" CR_TAB
5472 "mov %C0,%A0" CR_TAB
5477 out_shift_with_cnt ("asr %D0" CR_TAB
5480 "ror %A0", insn
, operands
, len
, 4);
5484 /* 8bit logic shift right ((unsigned char)x >> i) */
5487 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5489 if (GET_CODE (operands
[2]) == CONST_INT
)
5496 switch (INTVAL (operands
[2]))
5499 if (INTVAL (operands
[2]) < 8)
5511 return ("lsr %0" CR_TAB
5515 return ("lsr %0" CR_TAB
5520 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5523 return ("swap %0" CR_TAB
5527 return ("lsr %0" CR_TAB
5533 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5536 return ("swap %0" CR_TAB
5541 return ("lsr %0" CR_TAB
5548 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5551 return ("swap %0" CR_TAB
5557 return ("lsr %0" CR_TAB
5566 return ("rol %0" CR_TAB
5571 else if (CONSTANT_P (operands
[2]))
5572 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5574 out_shift_with_cnt ("lsr %0",
5575 insn
, operands
, len
, 1);
5579 /* 16bit logic shift right ((unsigned short)x >> i) */
5582 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5584 if (GET_CODE (operands
[2]) == CONST_INT
)
5586 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5587 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5594 switch (INTVAL (operands
[2]))
5597 if (INTVAL (operands
[2]) < 16)
5601 return ("clr %B0" CR_TAB
5605 if (optimize_size
&& scratch
)
5610 return ("swap %B0" CR_TAB
5612 "andi %A0,0x0f" CR_TAB
5613 "eor %A0,%B0" CR_TAB
5614 "andi %B0,0x0f" CR_TAB
5620 return ("swap %B0" CR_TAB
5622 "ldi %3,0x0f" CR_TAB
5624 "eor %A0,%B0" CR_TAB
5628 break; /* optimize_size ? 6 : 8 */
5632 break; /* scratch ? 5 : 6 */
5636 return ("lsr %B0" CR_TAB
5640 "andi %A0,0x0f" CR_TAB
5641 "eor %A0,%B0" CR_TAB
5642 "andi %B0,0x0f" CR_TAB
5648 return ("lsr %B0" CR_TAB
5652 "ldi %3,0x0f" CR_TAB
5654 "eor %A0,%B0" CR_TAB
5662 break; /* scratch ? 5 : 6 */
5664 return ("clr __tmp_reg__" CR_TAB
5667 "rol __tmp_reg__" CR_TAB
5670 "rol __tmp_reg__" CR_TAB
5671 "mov %A0,%B0" CR_TAB
5672 "mov %B0,__tmp_reg__");
5676 return ("lsl %A0" CR_TAB
5677 "mov %A0,%B0" CR_TAB
5679 "sbc %B0,%B0" CR_TAB
5683 return *len
= 2, ("mov %A0,%B1" CR_TAB
5688 return ("mov %A0,%B0" CR_TAB
5694 return ("mov %A0,%B0" CR_TAB
5701 return ("mov %A0,%B0" CR_TAB
5711 return ("mov %A0,%B0" CR_TAB
5719 return ("mov %A0,%B0" CR_TAB
5722 "ldi %3,0x0f" CR_TAB
5726 return ("mov %A0,%B0" CR_TAB
5737 return ("mov %A0,%B0" CR_TAB
5743 if (AVR_HAVE_MUL
&& scratch
)
5746 return ("ldi %3,0x08" CR_TAB
5750 "clr __zero_reg__");
5752 if (optimize_size
&& scratch
)
5757 return ("mov %A0,%B0" CR_TAB
5761 "ldi %3,0x07" CR_TAB
5767 return ("set" CR_TAB
5772 "clr __zero_reg__");
5775 return ("mov %A0,%B0" CR_TAB
5784 if (AVR_HAVE_MUL
&& ldi_ok
)
5787 return ("ldi %A0,0x04" CR_TAB
5788 "mul %B0,%A0" CR_TAB
5791 "clr __zero_reg__");
5793 if (AVR_HAVE_MUL
&& scratch
)
5796 return ("ldi %3,0x04" CR_TAB
5800 "clr __zero_reg__");
5802 if (optimize_size
&& ldi_ok
)
5805 return ("mov %A0,%B0" CR_TAB
5806 "ldi %B0,6" "\n1:\t"
5811 if (optimize_size
&& scratch
)
5814 return ("clr %A0" CR_TAB
5823 return ("clr %A0" CR_TAB
5830 out_shift_with_cnt ("lsr %B0" CR_TAB
5831 "ror %A0", insn
, operands
, len
, 2);
5836 /* 24-bit logic shift right */
5839 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5841 int dest
= REGNO (op
[0]);
5842 int src
= REGNO (op
[1]);
5844 if (CONST_INT_P (op
[2]))
5849 switch (INTVAL (op
[2]))
5853 return avr_asm_len ("mov %A0,%B1" CR_TAB
5854 "mov %B0,%C1" CR_TAB
5855 "clr %C0", op
, plen
, 3);
5857 return avr_asm_len ("clr %C0" CR_TAB
5858 "mov %B0,%C1" CR_TAB
5859 "mov %A0,%B1", op
, plen
, 3);
5862 if (dest
!= src
+ 2)
5863 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5865 return avr_asm_len ("clr %B0" CR_TAB
5866 "clr %C0", op
, plen
, 2);
5869 if (INTVAL (op
[2]) < 24)
5875 return avr_asm_len ("clr %A0" CR_TAB
5879 "clr %C0", op
, plen
, 5);
5883 out_shift_with_cnt ("lsr %C0" CR_TAB
5885 "ror %A0", insn
, op
, plen
, 3);
5890 /* 32bit logic shift right ((unsigned int)x >> i) */
5893 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
5895 if (GET_CODE (operands
[2]) == CONST_INT
)
5903 switch (INTVAL (operands
[2]))
5906 if (INTVAL (operands
[2]) < 32)
5910 return *len
= 3, ("clr %D0" CR_TAB
5914 return ("clr %D0" CR_TAB
5921 int reg0
= true_regnum (operands
[0]);
5922 int reg1
= true_regnum (operands
[1]);
5925 return ("mov %A0,%B1" CR_TAB
5926 "mov %B0,%C1" CR_TAB
5927 "mov %C0,%D1" CR_TAB
5930 return ("clr %D0" CR_TAB
5931 "mov %C0,%D1" CR_TAB
5932 "mov %B0,%C1" CR_TAB
5938 int reg0
= true_regnum (operands
[0]);
5939 int reg1
= true_regnum (operands
[1]);
5941 if (reg0
== reg1
+ 2)
5942 return *len
= 2, ("clr %C0" CR_TAB
5945 return *len
= 3, ("movw %A0,%C1" CR_TAB
5949 return *len
= 4, ("mov %B0,%D1" CR_TAB
5950 "mov %A0,%C1" CR_TAB
5956 return *len
= 4, ("mov %A0,%D1" CR_TAB
5963 return ("clr %A0" CR_TAB
5972 out_shift_with_cnt ("lsr %D0" CR_TAB
5975 "ror %A0", insn
, operands
, len
, 4);
5980 /* Output addition of register XOP[0] and compile time constant XOP[2].
5981 CODE == PLUS: perform addition by using ADD instructions or
5982 CODE == MINUS: perform addition by using SUB instructions:
5984 XOP[0] = XOP[0] + XOP[2]
5986 Or perform addition/subtraction with register XOP[2] depending on CODE:
5988 XOP[0] = XOP[0] +/- XOP[2]
5990 If PLEN == NULL, print assembler instructions to perform the operation;
5991 otherwise, set *PLEN to the length of the instruction sequence (in words)
5992 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
5993 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
5995 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
5996 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
5997 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
5998 the subtrahend in the original insn, provided it is a compile time constant.
5999 In all other cases, SIGN is 0.
6004 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
6005 enum rtx_code code_sat
= UNKNOWN
, int sign
= 0)
6007 /* MODE of the operation. */
6008 enum machine_mode mode
= GET_MODE (xop
[0]);
6010 /* INT_MODE of the same size. */
6011 enum machine_mode imode
= int_mode_for_mode (mode
);
6013 /* Number of bytes to operate on. */
6014 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6016 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6017 int clobber_val
= -1;
6019 /* op[0]: 8-bit destination register
6020 op[1]: 8-bit const int
6021 op[2]: 8-bit scratch register */
6024 /* Started the operation? Before starting the operation we may skip
6025 adding 0. This is no more true after the operation started because
6026 carry must be taken into account. */
6027 bool started
= false;
6029 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6032 /* Output a BRVC instruction. Only needed with saturation. */
6033 bool out_brvc
= true;
6040 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6042 for (i
= 0; i
< n_bytes
; i
++)
6044 /* We operate byte-wise on the destination. */
6045 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6046 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6049 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
6052 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
6056 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6058 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
6067 /* Except in the case of ADIW with 16-bit register (see below)
6068 addition does not set cc0 in a usable way. */
6070 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
6072 if (CONST_FIXED_P (xval
))
6073 xval
= avr_to_int_mode (xval
);
6075 /* Adding/Subtracting zero is a no-op. */
6077 if (xval
== const0_rtx
)
6084 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
6088 if (SS_PLUS
== code_sat
&& MINUS
== code
6090 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
6091 & GET_MODE_MASK (QImode
)))
6093 /* We compute x + 0x80 by means of SUB instructions. We negated the
6094 constant subtrahend above and are left with x - (-128) so that we
6095 need something like SUBI r,128 which does not exist because SUBI sets
6096 V according to the sign of the subtrahend. Notice the only case
6097 where this must be done is when NEG overflowed in case [2s] because
6098 the V computation needs the right sign of the subtrahend. */
6100 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6102 avr_asm_len ("subi %0,128" CR_TAB
6103 "brmi 0f", &msb
, plen
, 2);
6109 for (i
= 0; i
< n_bytes
; i
++)
6111 /* We operate byte-wise on the destination. */
6112 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6113 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
6115 /* 8-bit value to operate with this byte. */
6116 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6118 /* Registers R16..R31 can operate with immediate. */
6119 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6122 op
[1] = gen_int_mode (val8
, QImode
);
6124 /* To get usable cc0 no low-bytes must have been skipped. */
6132 && test_hard_reg_class (ADDW_REGS
, reg8
))
6134 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
6135 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
6137 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6138 i.e. operate word-wise. */
6145 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
6148 if (n_bytes
== 2 && PLUS
== code
)
6160 avr_asm_len (code
== PLUS
6161 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6165 else if ((val8
== 1 || val8
== 0xff)
6166 && UNKNOWN
== code_sat
6168 && i
== n_bytes
- 1)
6170 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
6179 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
6181 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
6183 /* This belongs to the x + 0x80 corner case. The code with
6184 ADD instruction is not smaller, thus make this case
6185 expensive so that the caller won't pick it. */
6191 if (clobber_val
!= (int) val8
)
6192 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6193 clobber_val
= (int) val8
;
6195 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
6202 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
6205 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6207 if (clobber_val
!= (int) val8
)
6208 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6209 clobber_val
= (int) val8
;
6211 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
6223 } /* for all sub-bytes */
6227 if (UNKNOWN
== code_sat
)
6230 *pcc
= (int) CC_CLOBBER
;
6232 /* Vanilla addition/subtraction is done. We are left with saturation.
6234 We have to compute A = A <op> B where A is a register and
6235 B is a register or a non-zero compile time constant CONST.
6236 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6237 B stands for the original operand $2 in INSN. In the case of B = CONST
6238 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6240 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6244 operation | code | sat if | b is | sat value | case
6245 -----------------+-------+----------+--------------+-----------+-------
6246 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6247 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6248 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6249 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6253 operation | code | sat if | b is | sat value | case
6254 -----------------+-------+----------+--------------+-----------+-------
6255 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6256 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6257 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6258 - as a + (-b) | add | V == 1 | const | s- | [4s]
6260 s+ = b < 0 ? -0x80 : 0x7f
6261 s- = b < 0 ? 0x7f : -0x80
6263 The cases a - b actually perform a - (-(-b)) if B is CONST.
6266 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6268 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
6271 if (!plen
&& flag_print_asm_name
)
6272 avr_fdump (asm_out_file
, ";; %C (%C)\n", code_sat
, code
);
6274 bool need_copy
= true;
6275 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
6284 if (!plen
&& flag_print_asm_name
)
6285 avr_fdump (asm_out_file
, ";; %s = %r\n", sign
< 0 ? "neg" : "pos",
6289 avr_asm_len ("brvc 0f", op
, plen
, 1);
6291 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6296 avr_asm_len ("ldi %0,0x7f" CR_TAB
6297 "adc %0,__zero_reg__", op
, plen
, 2);
6299 avr_asm_len ("ldi %0,0x7f" CR_TAB
6300 "ldi %1,0xff" CR_TAB
6301 "adc %1,__zero_reg__" CR_TAB
6302 "adc %0,__zero_reg__", op
, plen
, 4);
6304 else if (sign
== 0 && PLUS
== code
)
6308 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6311 avr_asm_len ("ldi %0,0x80" CR_TAB
6313 "dec %0", op
, plen
, 3);
6315 avr_asm_len ("ldi %0,0x80" CR_TAB
6318 "sbci %0,0", op
, plen
, 4);
6320 else if (sign
== 0 && MINUS
== code
)
6324 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6327 avr_asm_len ("ldi %0,0x7f" CR_TAB
6329 "inc %0", op
, plen
, 3);
6331 avr_asm_len ("ldi %0,0x7f" CR_TAB
6334 "sbci %0,-1", op
, plen
, 4);
6336 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
6338 /* [1s,const,B < 0] [2s,B < 0] */
6339 /* [3s,const,B > 0] [4s,B > 0] */
6343 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6347 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
6348 if (n_bytes
> 1 && need_copy
)
6349 avr_asm_len ("clr %1", op
, plen
, 1);
6351 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
6353 /* [1s,const,B > 0] [2s,B > 0] */
6354 /* [3s,const,B < 0] [4s,B < 0] */
6358 avr_asm_len ("sec" CR_TAB
6359 "%~call __sbc_8", op
, plen
, 1 + len_call
);
6363 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
6364 if (n_bytes
> 1 && need_copy
)
6365 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
6375 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
6380 avr_asm_len ("sec", op
, plen
, 1);
6381 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
6387 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
6388 avr_asm_len ("sec" CR_TAB
"sbc %0,%0", op
, plen
, 2);
6390 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
6393 break; /* US_PLUS */
6398 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
6402 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6406 avr_asm_len ("clr %0", op
, plen
, 1);
6411 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6412 Now copy the right value to the LSBs. */
6414 if (need_copy
&& n_bytes
> 1)
6416 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
6418 avr_asm_len ("mov %1,%0", op
, plen
, 1);
6424 avr_asm_len ("movw %0,%1", op
, plen
, 1);
6426 avr_asm_len ("mov %A0,%1" CR_TAB
6427 "mov %B0,%1", op
, plen
, 2);
6430 else if (n_bytes
> 2)
6433 avr_asm_len ("mov %A0,%1" CR_TAB
6434 "mov %B0,%1", op
, plen
, 2);
6438 if (need_copy
&& n_bytes
== 8)
6441 avr_asm_len ("movw %r0+2,%0" CR_TAB
6442 "movw %r0+4,%0", xop
, plen
, 2);
6444 avr_asm_len ("mov %r0+2,%0" CR_TAB
6445 "mov %r0+3,%0" CR_TAB
6446 "mov %r0+4,%0" CR_TAB
6447 "mov %r0+5,%0", xop
, plen
, 4);
6450 avr_asm_len ("0:", op
, plen
, 0);
6454 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6455 is not a compile-time constant:
6457 XOP[0] = XOP[0] +/- XOP[2]
6459 This is a helper for the function below. The only insns that need this
6460 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
6463 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
6465 enum machine_mode mode
= GET_MODE (xop
[0]);
6466 int n_bytes
= GET_MODE_SIZE (mode
);
6468 /* Only pointer modes want to add symbols. */
6470 gcc_assert (mode
== HImode
|| mode
== PSImode
);
6472 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6474 avr_asm_len (PLUS
== code
6475 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
6476 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
6480 avr_asm_len (PLUS
== code
6481 ? "sbci %C0,hlo8((-%2))"
6482 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
6487 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6489 INSN is a single_set insn with a binary operation as SET_SRC that is
6490 one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6492 XOP are the operands of INSN. In the case of 64-bit operations with
6493 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6494 The non-saturating insns up to 32 bits may or may not supply a "d" class
/* NOTE(review): this chunk is extraction-damaged — statements are split across
   lines, original file line numbers (e.g. "6510") are fused into the text, and
   numbering gaps show lines are missing.  Restore from upstream avr.c before
   compiling.  Visible logic: avr_out_plus dispatches addition/subtraction
   output; it computes both a PLUS and a MINUS sequence via avr_out_plus_1 and
   picks the shorter, handling saturation via the sign of the MSB.  */
6497 If PLEN == NULL output the instructions.
6498 If PLEN != NULL set *PLEN to the length of the sequence in words.
6500 PCC is a pointer to store the instructions' effect on cc0.
6503 PLEN and PCC default to NULL.
6508 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
)
6510 int cc_plus
, cc_minus
, cc_dummy
;
6511 int len_plus
, len_minus
;
6513 rtx xdest
= SET_DEST (single_set (insn
));
6514 enum machine_mode mode
= GET_MODE (xdest
);
6515 enum machine_mode imode
= int_mode_for_mode (mode
);
6516 int n_bytes
= GET_MODE_SIZE (mode
);
6517 enum rtx_code code_sat
= GET_CODE (SET_SRC (single_set (insn
)));
/* NOTE(review): the lvalue for this test is on a missing line (orig 6518).  */
6519 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
6525 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6527 if (PLUS
== code_sat
|| MINUS
== code_sat
)
6530 if (n_bytes
<= 4 && REG_P (xop
[2]))
6532 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
);
6538 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
6539 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
6540 op
[2] = avr_to_int_mode (xop
[0]);
6545 && !CONST_INT_P (xop
[2])
6546 && !CONST_FIXED_P (xop
[2]))
6548 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
6551 op
[0] = avr_to_int_mode (xop
[0]);
6552 op
[1] = avr_to_int_mode (xop
[1]);
6553 op
[2] = avr_to_int_mode (xop
[2]);
6556 /* Saturations and 64-bit operations don't have a clobber operand.
6557 For the other cases, the caller will provide a proper XOP[3]. */
6559 op
[3] = PARALLEL
== GET_CODE (PATTERN (insn
)) ? xop
[3] : NULL_RTX
;
6561 /* Saturation will need the sign of the original operand. */
6563 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
6564 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
6566 /* If we subtract and the subtrahend is a constant, then negate it
6567 so that avr_out_plus_1 can be used. */
6570 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
6572 /* Work out the shortest sequence. */
6574 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_plus
, code_sat
, sign
);
6575 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_minus
, code_sat
, sign
);
6579 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
6580 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
6582 else if (len_minus
<= len_plus
)
6583 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
);
6585 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
);
/* NOTE(review): extraction-damaged text (split lines, embedded original line
   numbers, missing lines — e.g. the switch heads around orig 6648/6688/6712).
   Visible logic: emit IOR/AND/XOR of register XOP[0] with constant XOP[2]
   byte-by-byte, choosing the cheapest per-byte instruction (ORI/ANDI/COM/
   SET+BLD/CLT+BLD/clobber-register LDI) and caching the clobber value.  */
6591 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6592 time constant XOP[2]:
6594 XOP[0] = XOP[0] <op> XOP[2]
6596 and return "". If PLEN == NULL, print assembler instructions to perform the
6597 operation; otherwise, set *PLEN to the length of the instruction sequence
6598 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6599 register or SCRATCH if no clobber register is needed for the operation. */
6602 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6604 /* CODE and MODE of the operation. */
6605 enum rtx_code code
= GET_CODE (SET_SRC (single_set (insn
)));
6606 enum machine_mode mode
= GET_MODE (xop
[0]);
6608 /* Number of bytes to operate on. */
6609 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6611 /* Value of T-flag (0 or 1) or -1 if unknow. */
6614 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6615 int clobber_val
= -1;
6617 /* op[0]: 8-bit destination register
6618 op[1]: 8-bit const int
6619 op[2]: 8-bit clobber register or SCRATCH
6620 op[3]: 8-bit register containing 0xff or NULL_RTX */
6629 for (i
= 0; i
< n_bytes
; i
++)
6631 /* We operate byte-wise on the destination. */
6632 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6633 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6635 /* 8-bit value to operate with this byte. */
6636 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6638 /* Number of bits set in the current byte of the constant. */
6639 int pop8
= avr_popcount (val8
);
6641 /* Registers R16..R31 can operate with immediate. */
6642 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6645 op
[1] = GEN_INT (val8
);
6654 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6658 avr_asm_len ("set", op
, plen
, 1);
6661 op
[1] = GEN_INT (exact_log2 (val8
));
6662 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6666 if (op
[3] != NULL_RTX
)
6667 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6669 avr_asm_len ("clr %0" CR_TAB
6670 "dec %0", op
, plen
, 2);
6676 if (clobber_val
!= (int) val8
)
6677 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6678 clobber_val
= (int) val8
;
6680 avr_asm_len ("or %0,%2", op
, plen
, 1);
6690 avr_asm_len ("clr %0", op
, plen
, 1);
6692 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6696 avr_asm_len ("clt", op
, plen
, 1);
6699 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6700 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6704 if (clobber_val
!= (int) val8
)
6705 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6706 clobber_val
= (int) val8
;
6708 avr_asm_len ("and %0,%2", op
, plen
, 1);
6718 avr_asm_len ("com %0", op
, plen
, 1);
6719 else if (ld_reg_p
&& val8
== (1 << 7))
6720 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6723 if (clobber_val
!= (int) val8
)
6724 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6725 clobber_val
= (int) val8
;
6727 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6733 /* Unknown rtx_code */
6736 } /* for all sub-bytes */
/* NOTE(review): extraction-damaged text; restore from upstream avr.c.
   Visible logic: adjust SP by a constant — negative addends via RCALL .
   (pc_len bytes each) then PUSH __zero_reg__, positive addends via
   POP __tmp_reg__, with optional verbose-asm comments.  */
6742 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6743 PLEN != NULL: Set *PLEN to the length of that sequence.
6747 avr_out_addto_sp (rtx
*op
, int *plen
)
6749 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6750 int addend
= INTVAL (op
[0]);
6757 if (flag_verbose_asm
|| flag_print_asm_name
)
6758 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
6760 while (addend
<= -pc_len
)
6763 avr_asm_len ("rcall .", op
, plen
, 1);
6766 while (addend
++ < 0)
6767 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
6769 else if (addend
> 0)
6771 if (flag_verbose_asm
|| flag_print_asm_name
)
6772 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
6774 while (addend
-- > 0)
6775 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
/* NOTE(review): extraction-damaged text (split lines, embedded original line
   numbers, missing lines — e.g. the loop heads around orig 6877/6880 and the
   move-struct declaration around 6840-6842).  Visible logic: split byte-sized
   rotates into a list of subreg moves, resolve move-ordering conflicts
   iteratively, and break deadlocked cycles through the scratch register;
   HImode swap on the same register is special-cased with three XORs.  */
6782 /* Create RTL split patterns for byte sized rotate expressions. This
6783 produces a series of move instructions and considers overlap situations.
6784 Overlapping non-HImode operands need a scratch register. */
6787 avr_rotate_bytes (rtx operands
[])
6790 enum machine_mode mode
= GET_MODE (operands
[0]);
6791 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
6792 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
6793 int num
= INTVAL (operands
[2]);
6794 rtx scratch
= operands
[3];
6795 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6796 Word move if no scratch is needed, otherwise use size of scratch. */
6797 enum machine_mode move_mode
= QImode
;
6798 int move_size
, offset
, size
;
6802 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
6805 move_mode
= GET_MODE (scratch
);
6807 /* Force DI rotate to use QI moves since other DI moves are currently split
6808 into QI moves so forward propagation works better. */
6811 /* Make scratch smaller if needed. */
6812 if (SCRATCH
!= GET_CODE (scratch
)
6813 && HImode
== GET_MODE (scratch
)
6814 && QImode
== move_mode
)
6815 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
6817 move_size
= GET_MODE_SIZE (move_mode
);
6818 /* Number of bytes/words to rotate. */
6819 offset
= (num
>> 3) / move_size
;
6820 /* Number of moves needed. */
6821 size
= GET_MODE_SIZE (mode
) / move_size
;
6822 /* Himode byte swap is special case to avoid a scratch register. */
6823 if (mode
== HImode
&& same_reg
)
6825 /* HImode byte swap, using xor. This is as quick as using scratch. */
6827 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
6828 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
6829 if (!rtx_equal_p (dst
, src
))
6831 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6832 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
6833 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6838 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6839 /* Create linked list of moves to determine move order. */
6843 } move
[MAX_SIZE
+ 8];
6846 gcc_assert (size
<= MAX_SIZE
);
6847 /* Generate list of subreg moves. */
6848 for (i
= 0; i
< size
; i
++)
6851 int to
= (from
+ offset
) % size
;
6852 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
6853 mode
, from
* move_size
);
6854 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
6855 mode
, to
* move_size
);
6858 /* Mark dependence where a dst of one move is the src of another move.
6859 The first move is a conflict as it must wait until second is
6860 performed. We ignore moves to self - we catch this later. */
6862 for (i
= 0; i
< size
; i
++)
6863 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
6864 for (j
= 0; j
< size
; j
++)
6865 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
6867 /* The dst of move i is the src of move j. */
6874 /* Go through move list and perform non-conflicting moves. As each
6875 non-overlapping move is made, it may remove other conflicts
6876 so the process is repeated until no conflicts remain. */
6881 /* Emit move where dst is not also a src or we have used that
6883 for (i
= 0; i
< size
; i
++)
6884 if (move
[i
].src
!= NULL_RTX
)
6886 if (move
[i
].links
== -1
6887 || move
[move
[i
].links
].src
== NULL_RTX
)
6890 /* Ignore NOP moves to self. */
6891 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
6892 emit_move_insn (move
[i
].dst
, move
[i
].src
);
6894 /* Remove conflict from list. */
6895 move
[i
].src
= NULL_RTX
;
6901 /* Check for deadlock. This is when no moves occurred and we have
6902 at least one blocked move. */
6903 if (moves
== 0 && blocked
!= -1)
6905 /* Need to use scratch register to break deadlock.
6906 Add move to put dst of blocked move into scratch.
6907 When this move occurs, it will break chain deadlock.
6908 The scratch register is substituted for real move. */
6910 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
6912 move
[size
].src
= move
[blocked
].dst
;
6913 move
[size
].dst
= scratch
;
6914 /* Scratch move is never blocked. */
6915 move
[size
].links
= -1;
6916 /* Make sure we have valid link. */
6917 gcc_assert (move
[blocked
].links
!= -1);
6918 /* Replace src of blocking move with scratch reg. */
6919 move
[move
[blocked
].links
].src
= scratch
;
6920 /* Make dependent on scratch move occuring. */
6921 move
[blocked
].links
= size
;
6925 while (blocked
!= -1);
/* NOTE(review): extraction-damaged text; many original lines are missing
   (numbering gaps, e.g. 6968-6972, 7046-7053, 7108-7112) and the xop/sbit
   declarations are not visible.  Restore from upstream avr.c.  Visible logic:
   fixed-point/integer conversion output — compute integer/fraction byte
   layouts of both operands, move or clear bytes (using MOVW for aligned
   pairs), sign-extend for widening signed conversions, and shift by one bit
   when exactly one side is a signed fractional type.  */
6931 /* Outputs instructions needed for fixed point type conversion.
6932 This includes converting between any fixed point type, as well
6933 as converting to any integer type. Conversion between integer
6934 types is not supported.
6936 The number of instructions generated depends on the types
6937 being converted and the registers assigned to them.
6939 The number of instructions required to complete the conversion
6940 is least if the registers for source and destination are overlapping
6941 and are aligned at the decimal place as actual movement of data is
6942 completely avoided. In some cases, the conversion may already be
6943 complete without any instructions needed.
6945 When converting to signed types from signed types, sign extension
6948 Converting signed fractional types requires a bit shift if converting
6949 to or from any unsigned fractional type because the decimal place is
6950 shifted by 1 bit. When the destination is a signed fractional, the sign
6951 is stored in either the carry or T bit. */
6954 avr_out_fract (rtx insn
, rtx operands
[], bool intsigned
, int *plen
)
6958 /* ilen: Length of integral part (in bytes)
6959 flen: Length of fractional part (in bytes)
6960 tlen: Length of operand (in bytes)
6961 blen: Length of operand (in bits) */
6962 int ilen
[2], flen
[2], tlen
[2], blen
[2];
6963 int rdest
, rsource
, offset
;
6964 int start
, end
, dir
;
6965 bool sign_in_T
= false, sign_in_Carry
= false, sign_done
= false;
6966 bool widening_sign_extend
= false;
6967 int clrword
= -1, lastclr
= 0, clr
= 0;
6973 xop
[dest
] = operands
[dest
];
6974 xop
[src
] = operands
[src
];
6979 /* Determine format (integer and fractional parts)
6980 of types needing conversion. */
6982 for (i
= 0; i
< 2; i
++)
6984 enum machine_mode mode
= GET_MODE (xop
[i
]);
6986 tlen
[i
] = GET_MODE_SIZE (mode
);
6987 blen
[i
] = GET_MODE_BITSIZE (mode
);
6989 if (SCALAR_INT_MODE_P (mode
))
6991 sbit
[i
] = intsigned
;
6992 ilen
[i
] = GET_MODE_SIZE (mode
);
6995 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
6997 sbit
[i
] = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
6998 ilen
[i
] = (GET_MODE_IBIT (mode
) + 1) / 8;
6999 flen
[i
] = (GET_MODE_FBIT (mode
) + 1) / 8;
7002 fatal_insn ("unsupported fixed-point conversion", insn
);
7005 /* Perform sign extension if source and dest are both signed,
7006 and there are more integer parts in dest than in source. */
7008 widening_sign_extend
= sbit
[dest
] && sbit
[src
] && ilen
[dest
] > ilen
[src
];
7010 rdest
= REGNO (xop
[dest
]);
7011 rsource
= REGNO (xop
[src
]);
7012 offset
= flen
[src
] - flen
[dest
];
7014 /* Position of MSB resp. sign bit. */
7016 xop
[2] = GEN_INT (blen
[dest
] - 1);
7017 xop
[3] = GEN_INT (blen
[src
] - 1);
7019 /* Store the sign bit if the destination is a signed fract and the source
7020 has a sign in the integer part. */
7022 if (sbit
[dest
] && ilen
[dest
] == 0 && sbit
[src
] && ilen
[src
] > 0)
7024 /* To avoid using BST and BLD if the source and destination registers
7025 overlap or the source is unused after, we can use LSL to store the
7026 sign bit in carry since we don't need the integral part of the source.
7027 Restoring the sign from carry saves one BLD instruction below. */
7029 if (reg_unused_after (insn
, xop
[src
])
7030 || (rdest
< rsource
+ tlen
[src
]
7031 && rdest
+ tlen
[dest
] > rsource
))
7033 avr_asm_len ("lsl %T1%t3", xop
, plen
, 1);
7034 sign_in_Carry
= true;
7038 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
7043 /* Pick the correct direction to shift bytes. */
7045 if (rdest
< rsource
+ offset
)
7054 start
= tlen
[dest
] - 1;
7058 /* Perform conversion by moving registers into place, clearing
7059 destination registers that do not overlap with any source. */
7061 for (i
= start
; i
!= end
; i
+= dir
)
7063 int destloc
= rdest
+ i
;
7064 int sourceloc
= rsource
+ i
+ offset
;
7066 /* Source register location is outside range of source register,
7067 so clear this byte in the dest. */
7069 if (sourceloc
< rsource
7070 || sourceloc
>= rsource
+ tlen
[src
])
7074 && (sourceloc
+ dir
< rsource
7075 || sourceloc
+ dir
>= rsource
+ tlen
[src
])
7076 && ((dir
== 1 && !(destloc
% 2) && !(sourceloc
% 2))
7077 || (dir
== -1 && (destloc
% 2) && (sourceloc
% 2)))
7080 /* Use already cleared word to clear two bytes at a time. */
7082 int even_i
= i
& ~1;
7083 int even_clrword
= clrword
& ~1;
7085 xop
[4] = GEN_INT (8 * even_i
);
7086 xop
[5] = GEN_INT (8 * even_clrword
);
7087 avr_asm_len ("movw %T0%t4,%T0%t5", xop
, plen
, 1);
7092 if (i
== tlen
[dest
] - 1
7093 && widening_sign_extend
7094 && blen
[src
] - 1 - 8 * offset
< 0)
7096 /* The SBRC below that sign-extends would come
7097 up with a negative bit number because the sign
7098 bit is out of reach. ALso avoid some early-clobber
7099 situations because of premature CLR. */
7101 if (reg_unused_after (insn
, xop
[src
]))
7102 avr_asm_len ("lsl %T1%t3" CR_TAB
7103 "sbc %T0%t2,%T0%t2", xop
, plen
, 2);
7105 avr_asm_len ("mov __tmp_reg__,%T1%t3" CR_TAB
7106 "lsl __tmp_reg__" CR_TAB
7107 "sbc %T0%t2,%T0%t2", xop
, plen
, 3);
7113 /* Do not clear the register if it is going to get
7114 sign extended with a MOV later. */
7116 if (sbit
[dest
] && sbit
[src
]
7117 && i
!= tlen
[dest
] - 1
7123 xop
[4] = GEN_INT (8 * i
);
7124 avr_asm_len ("clr %T0%t4", xop
, plen
, 1);
7126 /* If the last byte was cleared too, we have a cleared
7127 word we can MOVW to clear two bytes at a time. */
7135 else if (destloc
== sourceloc
)
7137 /* Source byte is already in destination: Nothing needed. */
7143 /* Registers do not line up and source register location
7144 is within range: Perform move, shifting with MOV or MOVW. */
7148 && sourceloc
+ dir
>= rsource
7149 && sourceloc
+ dir
< rsource
+ tlen
[src
]
7150 && ((dir
== 1 && !(destloc
% 2) && !(sourceloc
% 2))
7151 || (dir
== -1 && (destloc
% 2) && (sourceloc
% 2))))
7153 int even_i
= i
& ~1;
7154 int even_i_plus_offset
= (i
+ offset
) & ~1;
7156 xop
[4] = GEN_INT (8 * even_i
);
7157 xop
[5] = GEN_INT (8 * even_i_plus_offset
);
7158 avr_asm_len ("movw %T0%t4,%T1%t5", xop
, plen
, 1);
7163 xop
[4] = GEN_INT (8 * i
);
7164 xop
[5] = GEN_INT (8 * (i
+ offset
));
7165 avr_asm_len ("mov %T0%t4,%T1%t5", xop
, plen
, 1);
7173 /* Perform sign extension if source and dest are both signed,
7174 and there are more integer parts in dest than in source. */
7176 if (widening_sign_extend
)
7180 xop
[4] = GEN_INT (blen
[src
] - 1 - 8 * offset
);
7182 /* Register was cleared above, so can become 0xff and extended.
7183 Note: Instead of the CLR/SBRC/COM the sign extension could
7184 be performed after the LSL below by means of a SBC if only
7185 one byte has to be shifted left. */
7187 avr_asm_len ("sbrc %T0%T4" CR_TAB
7188 "com %T0%t2", xop
, plen
, 2);
7191 /* Sign extend additional bytes by MOV and MOVW. */
7193 start
= tlen
[dest
] - 2;
7194 end
= flen
[dest
] + ilen
[src
] - 1;
7196 for (i
= start
; i
!= end
; i
--)
7198 if (AVR_HAVE_MOVW
&& i
!= start
&& i
-1 != end
)
7201 xop
[4] = GEN_INT (8 * i
);
7202 xop
[5] = GEN_INT (8 * (tlen
[dest
] - 2));
7203 avr_asm_len ("movw %T0%t4,%T0%t5", xop
, plen
, 1);
7207 xop
[4] = GEN_INT (8 * i
);
7208 xop
[5] = GEN_INT (8 * (tlen
[dest
] - 1));
7209 avr_asm_len ("mov %T0%t4,%T0%t5", xop
, plen
, 1);
7214 /* If destination is a signed fract, and the source was not, a shift
7215 by 1 bit is needed. Also restore sign from carry or T. */
7217 if (sbit
[dest
] && !ilen
[dest
] && (!sbit
[src
] || ilen
[src
]))
7219 /* We have flen[src] non-zero fractional bytes to shift.
7220 Because of the right shift, handle one byte more so that the
7221 LSB won't be lost. */
7223 int nonzero
= flen
[src
] + 1;
7225 /* If the LSB is in the T flag and there are no fractional
7226 bits, the high byte is zero and no shift needed. */
7228 if (flen
[src
] == 0 && sign_in_T
)
7231 start
= flen
[dest
] - 1;
7232 end
= start
- nonzero
;
7234 for (i
= start
; i
> end
&& i
>= 0; i
--)
7236 xop
[4] = GEN_INT (8 * i
);
7237 if (i
== start
&& !sign_in_Carry
)
7238 avr_asm_len ("lsr %T0%t4", xop
, plen
, 1);
7240 avr_asm_len ("ror %T0%t4", xop
, plen
, 1);
7245 avr_asm_len ("bld %T0%T2", xop
, plen
, 1);
7248 else if (sbit
[src
] && !ilen
[src
] && (!sbit
[dest
] || ilen
[dest
]))
7250 /* If source was a signed fract and dest was not, shift 1 bit
7253 start
= flen
[dest
] - flen
[src
];
7258 for (i
= start
; i
< flen
[dest
]; i
++)
7260 xop
[4] = GEN_INT (8 * i
);
7263 avr_asm_len ("lsl %T0%t4", xop
, plen
, 1);
7265 avr_asm_len ("rol %T0%t4", xop
, plen
, 1);
/* NOTE(review): extraction-damaged text; the switch head (orig ~7310) and
   default/return lines are missing.  Visible logic: look up the insn's
   "adjust_len" attribute and dispatch to the matching output function with
   a length pointer so it computes instead of prints.  */
7273 /* Modifies the length assigned to instruction INSN
7274 LEN is the initially computed length of the insn. */
7277 adjust_insn_length (rtx insn
, int len
)
7279 rtx
*op
= recog_data
.operand
;
7280 enum attr_adjust_len adjust_len
;
7282 /* Some complex insns don't need length adjustment and therefore
7283 the length need not/must not be adjusted for these insns.
7284 It is easier to state this in an insn attribute "adjust_len" than
7285 to clutter up code here... */
7287 if (-1 == recog_memoized (insn
))
7292 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7294 adjust_len
= get_attr_adjust_len (insn
);
7296 if (adjust_len
== ADJUST_LEN_NO
)
7298 /* Nothing to adjust: The length from attribute "length" is fine.
7299 This is the default. */
7304 /* Extract insn's operands. */
7306 extract_constrain_insn_cached (insn
);
7308 /* Dispatch to right function. */
7312 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
7313 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
7314 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
7316 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
7318 case ADJUST_LEN_PLUS
: avr_out_plus (insn
, op
, &len
); break;
7319 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
7321 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
7322 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
7323 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
7324 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
7325 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
7326 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
7327 case ADJUST_LEN_LOAD_LPM
: avr_load_lpm (insn
, op
, &len
); break;
7329 case ADJUST_LEN_SFRACT
: avr_out_fract (insn
, op
, true, &len
); break;
7330 case ADJUST_LEN_UFRACT
: avr_out_fract (insn
, op
, false, &len
); break;
7332 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
7333 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
7334 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
7335 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
7336 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
7338 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
7339 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
7340 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
7342 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
7343 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
7344 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
7346 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
7347 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
7348 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
7350 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
7351 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
7352 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
7354 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
7356 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
/* NOTE(review): extraction-damaged (return type and braces missing).
   Visible logic: REG is dead after INSN if it is dead/set here, or is a
   hard REG that _reg_unused_after proves unused downstream.  */
7365 /* Return nonzero if register REG dead after INSN. */
7368 reg_unused_after (rtx insn
, rtx reg
)
7370 return (dead_or_set_p (insn
, reg
)
7371 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
/* NOTE(review): extraction-damaged; several control lines are missing
   (numbering gaps around orig 7380-7383, 7403-7409, 7441-7450).  Visible
   logic: scan forward from INSN — labels end the scan conservatively,
   SEQUENCEs (delay slots) are handled insn-by-insn, calls kill call-used
   regs, and a SET of REG that is not a MEM store proves REG unused.  */
7374 /* Return nonzero if REG is not used after INSN.
7375 We assume REG is a reload reg, and therefore does
7376 not live past labels. It may live past calls or jumps though. */
7379 _reg_unused_after (rtx insn
, rtx reg
)
7384 /* If the reg is set by this instruction, then it is safe for our
7385 case. Disregard the case where this is a store to memory, since
7386 we are checking a register used in the store address. */
7387 set
= single_set (insn
);
7388 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
7389 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7392 while ((insn
= NEXT_INSN (insn
)))
7395 code
= GET_CODE (insn
);
7398 /* If this is a label that existed before reload, then the register
7399 if dead here. However, if this is a label added by reorg, then
7400 the register may still be live here. We can't tell the difference,
7401 so we just ignore labels completely. */
7402 if (code
== CODE_LABEL
)
7410 if (code
== JUMP_INSN
)
7413 /* If this is a sequence, we must handle them all at once.
7414 We could have for instance a call that sets the target register,
7415 and an insn in a delay slot that uses the register. In this case,
7416 we must return 0. */
7417 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
7422 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
7424 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
7425 rtx set
= single_set (this_insn
);
7427 if (GET_CODE (this_insn
) == CALL_INSN
)
7429 else if (GET_CODE (this_insn
) == JUMP_INSN
)
7431 if (INSN_ANNULLED_BRANCH_P (this_insn
))
7436 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7438 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7440 if (GET_CODE (SET_DEST (set
)) != MEM
)
7446 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
7451 else if (code
== JUMP_INSN
)
7455 if (code
== CALL_INSN
)
7458 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
7459 if (GET_CODE (XEXP (tem
, 0)) == USE
7460 && REG_P (XEXP (XEXP (tem
, 0), 0))
7461 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
7463 if (call_used_regs
[REGNO (reg
)])
7467 set
= single_set (insn
);
7469 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7471 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7472 return GET_CODE (SET_DEST (set
)) != MEM
;
7473 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
/* NOTE(review): extraction-damaged text.  Visible logic: code-segment
   pointers are emitted as ".word gs(...)" stubs; PSImode addresses as
   three lo8/hi8/hh8 bytes (needs binutils 2.23+, PR binutils/13503);
   CONST_FIXED values byte-wise via subregs; everything else falls back
   to default_assemble_integer.  */
7480 /* Target hook for assembling integer objects. The AVR version needs
7481 special handling for references to certain labels. */
7484 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
7486 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
7487 && text_segment_operand (x
, VOIDmode
))
7489 fputs ("\t.word\tgs(", asm_out_file
);
7490 output_addr_const (asm_out_file
, x
);
7491 fputs (")\n", asm_out_file
);
7495 else if (GET_MODE (x
) == PSImode
)
7497 /* This needs binutils 2.23+, see PR binutils/13503 */
7499 fputs ("\t.byte\tlo8(", asm_out_file
);
7500 output_addr_const (asm_out_file
, x
);
7501 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7503 fputs ("\t.byte\thi8(", asm_out_file
);
7504 output_addr_const (asm_out_file
, x
);
7505 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7507 fputs ("\t.byte\thh8(", asm_out_file
);
7508 output_addr_const (asm_out_file
, x
);
7509 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7513 else if (CONST_FIXED_P (x
))
7517 /* varasm fails to handle big fixed modes that don't fit in hwi. */
7519 for (n
= 0; n
< size
; n
++)
7521 rtx xn
= simplify_gen_subreg (QImode
, x
, GET_MODE (x
), n
);
7522 default_assemble_integer (xn
, 1, aligned_p
);
7528 return default_assemble_integer (x
, size
, aligned_p
);
/* NOTE(review): extraction-damaged (return type/braces missing).  Visible
   logic: every register class except ALL_REGS and ADDW_REGS is considered
   likely to be spilled.  */
7532 /* Return value is nonzero if pseudos that have been
7533 assigned to registers of class CLASS would likely be spilled
7534 because registers of CLASS are needed for spill registers. */
7537 avr_class_likely_spilled_p (reg_class_t c
)
7539 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
/* NOTE(review): extraction-damaged text.  Visible logic: on a TYPE_DECL the
   progmem attribute is moved onto the type (GCC 3.0 compat); on static or
   external decls it is kept; otherwise it is warned about and dropped.  */
7542 /* Valid attributes:
7543 progmem - put data to program memory;
7544 signal - make a function to be hardware interrupt. After function
7545 prologue interrupts are disabled;
7546 interrupt - make a function to be hardware interrupt. After function
7547 prologue interrupts are enabled;
7548 naked - don't generate function prologue/epilogue and `ret' command.
7550 Only `progmem' attribute valid for type. */
7552 /* Handle a "progmem" attribute; arguments as in
7553 struct attribute_spec.handler. */
7555 avr_handle_progmem_attribute (tree
*node
, tree name
,
7556 tree args ATTRIBUTE_UNUSED
,
7557 int flags ATTRIBUTE_UNUSED
,
7562 if (TREE_CODE (*node
) == TYPE_DECL
)
7564 /* This is really a decl attribute, not a type attribute,
7565 but try to handle it for GCC 3.0 backwards compatibility. */
7567 tree type
= TREE_TYPE (*node
);
7568 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
7569 tree newtype
= build_type_attribute_variant (type
, attr
);
7571 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
7572 TREE_TYPE (*node
) = newtype
;
7573 *no_add_attrs
= true;
7575 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
7577 *no_add_attrs
= false;
7581 warning (OPT_Wattributes
, "%qE attribute ignored",
7583 *no_add_attrs
= true;
/* NOTE(review): extraction-damaged text.  Visible logic: reject the
   attribute with a warning when the decl is not a FUNCTION_DECL.  */
7590 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
7591 struct attribute_spec.handler. */
7594 avr_handle_fndecl_attribute (tree
*node
, tree name
,
7595 tree args ATTRIBUTE_UNUSED
,
7596 int flags ATTRIBUTE_UNUSED
,
7599 if (TREE_CODE (*node
) != FUNCTION_DECL
)
7601 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
7603 *no_add_attrs
= true;
/* NOTE(review): extraction-damaged text.  Visible logic: reject the
   attribute with a warning when the node is not a FUNCTION_TYPE.  */
7610 avr_handle_fntype_attribute (tree
*node
, tree name
,
7611 tree args ATTRIBUTE_UNUSED
,
7612 int flags ATTRIBUTE_UNUSED
,
7615 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
7617 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
7619 *no_add_attrs
= true;
/* NOTE(review): extraction-damaged — the affects_type_identity fields of
   each entry (and some braces) are on missing lines.  Visible logic: the
   backend's attribute table mapping progmem/signal/interrupt/naked/
   OS_task/OS_main to their handlers, NULL-terminated.  */
7626 /* AVR attributes. */
7627 static const struct attribute_spec
7628 avr_attribute_table
[] =
7630 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7631 affects_type_identity } */
7632 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
7634 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7636 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7638 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7640 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7642 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7644 { NULL
, 0, 0, false, false, false, NULL
, false }
/* NOTE(review): extraction-damaged — several branch bodies/returns are on
   missing lines.  Visible logic: classify VAR_DECLs by flash placement
   (memx → 2, flash → 1, progmem attribute on decl or peeled array element
   type → -1, otherwise 0).  */
7648 /* Look if DECL shall be placed in program memory space by
7649 means of attribute `progmem' or some address-space qualifier.
7650 Return non-zero if DECL is data that must end up in Flash and
7651 zero if the data lives in RAM (.bss, .data, .rodata, ...).
7653 Return 2 if DECL is located in 24-bit flash address-space
7654 Return 1 if DECL is located in 16-bit flash address-space
7655 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
7656 Return 0 otherwise */
7659 avr_progmem_p (tree decl
, tree attributes
)
7663 if (TREE_CODE (decl
) != VAR_DECL
)
7666 if (avr_decl_memx_p (decl
))
7669 if (avr_decl_flash_p (decl
))
7673 != lookup_attribute ("progmem", attributes
))
7680 while (TREE_CODE (a
) == ARRAY_TYPE
);
7682 if (a
== error_mark_node
)
7685 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
/* NOTE(review): extraction-damaged text.  Visible logic: recursively walk
   arrays and pointer targets; a pointer into a non-generic address space
   must be const and within the device's flash segments, otherwise the
   offending address space is returned (non-zero).  */
7692 /* Scan type TYP for pointer references to address space ASn.
7693 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
7694 the AS are also declared to be CONST.
7695 Otherwise, return the respective address space, i.e. a value != 0. */
7698 avr_nonconst_pointer_addrspace (tree typ
)
7700 while (ARRAY_TYPE
== TREE_CODE (typ
))
7701 typ
= TREE_TYPE (typ
);
7703 if (POINTER_TYPE_P (typ
))
7706 tree target
= TREE_TYPE (typ
);
7708 /* Pointer to function: Test the function's return type. */
7710 if (FUNCTION_TYPE
== TREE_CODE (target
))
7711 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
7713 /* "Ordinary" pointers... */
7715 while (TREE_CODE (target
) == ARRAY_TYPE
)
7716 target
= TREE_TYPE (target
);
7718 /* Pointers to non-generic address space must be const.
7719 Refuse address spaces outside the device's flash. */
7721 as
= TYPE_ADDR_SPACE (target
);
7723 if (!ADDR_SPACE_GENERIC_P (as
)
7724 && (!TYPE_READONLY (target
)
7725 || avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
))
7730 /* Scan pointer's target type. */
7732 return avr_nonconst_pointer_addrspace (target
);
7735 return ADDR_SPACE_GENERIC
;
/* NOTE(review): extraction-damaged — switch labels (VAR_DECL, PARM_DECL,
   FIELD_DECL, FUNCTION_DECL, TYPE cases) sit on missing lines; only the
   per-case bodies survive.  Visible logic: find non-const pointers into
   non-generic address spaces in a decl/type and emit the matching error,
   distinguishing whether a REASON string (decl kind) is available.  */
7739 /* Sanity check NODE so that all pointers targeting non-generic address spaces
7740 go along with CONST qualifier. Writing to these address spaces should
7741 be detected and complained about as early as possible. */
7744 avr_pgm_check_var_decl (tree node
)
7746 const char *reason
= NULL
;
7748 addr_space_t as
= ADDR_SPACE_GENERIC
;
7750 gcc_assert (as
== 0);
7752 if (avr_log
.progmem
)
7753 avr_edump ("%?: %t\n", node
);
7755 switch (TREE_CODE (node
))
7761 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7762 reason
= "variable";
7766 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7767 reason
= "function parameter";
7771 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7772 reason
= "structure field";
7776 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
7778 reason
= "return type of function";
7782 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
7789 avr_edump ("%?: %s, %d, %d\n",
7790 avr_addrspace
[as
].name
,
7791 avr_addrspace
[as
].segment
, avr_current_device
->n_flash
);
7792 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7795 error ("%qT uses address space %qs beyond flash of %qs",
7796 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7798 error ("%s %q+D uses address space %qs beyond flash of %qs",
7799 reason
, node
, avr_addrspace
[as
].name
,
7800 avr_current_device
->name
);
7805 error ("pointer targeting address space %qs must be const in %qT",
7806 avr_addrspace
[as
].name
, node
);
7808 error ("pointer targeting address space %qs must be const"
7810 avr_addrspace
[as
].name
, reason
, node
);
7814 return reason
== NULL
;
/* NOTE(review): extraction-damaged text.  Visible logic: for static/external
   VAR_DECLs marked progmem, peel array types to check readonlyness, reject
   address spaces beyond the device's flash, and require const for data that
   ends up in a read-only section.  */
7818 /* Add the section attribute if the variable is in progmem. */
7821 avr_insert_attributes (tree node
, tree
*attributes
)
7823 avr_pgm_check_var_decl (node
);
7825 if (TREE_CODE (node
) == VAR_DECL
7826 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
7827 && avr_progmem_p (node
, *attributes
))
7832 /* For C++, we have to peel arrays in order to get correct
7833 determination of readonlyness. */
7836 node0
= TREE_TYPE (node0
);
7837 while (TREE_CODE (node0
) == ARRAY_TYPE
);
7839 if (error_mark_node
== node0
)
7842 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
7844 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7846 error ("variable %q+D located in address space %qs"
7847 " beyond flash of %qs",
7848 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7851 if (!TYPE_READONLY (node0
)
7852 && !TREE_READONLY (node
))
7854 const char *reason
= "__attribute__((progmem))";
7856 if (!ADDR_SPACE_GENERIC_P (as
))
7857 reason
= avr_addrspace
[as
].name
;
7859 if (avr_log
.progmem
)
7860 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
7862 error ("variable %q+D must be const in order to be put into"
7863 " read-only section by means of %qs", node
, reason
);
7869 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7870 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7871 /* Track need of __do_clear_bss. */
7874 avr_asm_output_aligned_decl_common (FILE * stream
,
7875 const_tree decl ATTRIBUTE_UNUSED
,
7877 unsigned HOST_WIDE_INT size
,
7878 unsigned int align
, bool local_p
)
7880 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7881 There is no need to trigger __do_clear_bss code for them. */
7883 if (!STR_PREFIX_P (name
, "__gnu_lto"))
7884 avr_need_clear_bss_p
= true;
7887 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
7889 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
7893 /* Unnamed section callback for data_section
7894 to track need of __do_copy_data. */
7897 avr_output_data_section_asm_op (const void *data
)
7899 avr_need_copy_data_p
= true;
7901 /* Dispatch to default. */
7902 output_section_asm_op (data
);
7906 /* Unnamed section callback for bss_section
7907 to track need of __do_clear_bss. */
7910 avr_output_bss_section_asm_op (const void *data
)
7912 avr_need_clear_bss_p
= true;
7914 /* Dispatch to default. */
7915 output_section_asm_op (data
);
7919 /* Unnamed section callback for progmem*.data sections. */
7922 avr_output_progmem_section_asm_op (const void *data
)
7924 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
7925 (const char*) data
);
7929 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7932 avr_asm_init_sections (void)
7936 /* Set up a section for jump tables. Alignment is handled by
7937 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7939 if (AVR_HAVE_JMP_CALL
)
7941 progmem_swtable_section
7942 = get_unnamed_section (0, output_section_asm_op
,
7943 "\t.section\t.progmem.gcc_sw_table"
7944 ",\"a\",@progbits");
7948 progmem_swtable_section
7949 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7950 "\t.section\t.progmem.gcc_sw_table"
7951 ",\"ax\",@progbits");
7954 for (n
= 0; n
< sizeof (progmem_section
) / sizeof (*progmem_section
); n
++)
7957 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
7958 progmem_section_prefix
[n
]);
7961 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7962 resp. `avr_need_copy_data_p'. */
7964 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7965 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7966 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
7970 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7973 avr_asm_function_rodata_section (tree decl
)
7975 /* If a function is unused and optimized out by -ffunction-sections
7976 and --gc-sections, ensure that the same will happen for its jump
7977 tables by putting them into individual sections. */
7982 /* Get the frodata section from the default function in varasm.c
7983 but treat function-associated data-like jump tables as code
7984 rather than as user defined data. AVR has no constant pools. */
7986 int fdata
= flag_data_sections
;
7988 flag_data_sections
= flag_function_sections
;
7989 frodata
= default_function_rodata_section (decl
);
7990 flag_data_sections
= fdata
;
7991 flags
= frodata
->common
.flags
;
7994 if (frodata
!= readonly_data_section
7995 && flags
& SECTION_NAMED
)
7997 /* Adjust section flags and replace section name prefix. */
8001 static const char* const prefix
[] =
8003 ".rodata", ".progmem.gcc_sw_table",
8004 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8007 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
8009 const char * old_prefix
= prefix
[i
];
8010 const char * new_prefix
= prefix
[i
+1];
8011 const char * name
= frodata
->named
.name
;
8013 if (STR_PREFIX_P (name
, old_prefix
))
8015 const char *rname
= ACONCAT ((new_prefix
,
8016 name
+ strlen (old_prefix
), NULL
));
8017 flags
&= ~SECTION_CODE
;
8018 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
8020 return get_section (rname
, flags
, frodata
->named
.decl
);
8025 return progmem_swtable_section
;
8029 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8030 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8033 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
8035 if (flags
& AVR_SECTION_PROGMEM
)
8037 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
8038 int segment
= avr_addrspace
[as
].segment
;
8039 const char *old_prefix
= ".rodata";
8040 const char *new_prefix
= progmem_section_prefix
[segment
];
8042 if (STR_PREFIX_P (name
, old_prefix
))
8044 const char *sname
= ACONCAT ((new_prefix
,
8045 name
+ strlen (old_prefix
), NULL
));
8046 default_elf_asm_named_section (sname
, flags
, decl
);
8050 default_elf_asm_named_section (new_prefix
, flags
, decl
);
8054 if (!avr_need_copy_data_p
)
8055 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
8056 || STR_PREFIX_P (name
, ".rodata")
8057 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
8059 if (!avr_need_clear_bss_p
)
8060 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
8062 default_elf_asm_named_section (name
, flags
, decl
);
8066 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
8068 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
8070 if (STR_PREFIX_P (name
, ".noinit"))
8072 if (decl
&& TREE_CODE (decl
) == VAR_DECL
8073 && DECL_INITIAL (decl
) == NULL_TREE
)
8074 flags
|= SECTION_BSS
; /* @nobits */
8076 warning (0, "only uninitialized variables can be placed in the "
8080 if (decl
&& DECL_P (decl
)
8081 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8083 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8085 /* Attribute progmem puts data in generic address space.
8086 Set section flags as if it was in __flash to get the right
8087 section prefix in the remainder. */
8089 if (ADDR_SPACE_GENERIC_P (as
))
8090 as
= ADDR_SPACE_FLASH
;
8092 flags
|= as
* SECTION_MACH_DEP
;
8093 flags
&= ~SECTION_WRITE
;
8094 flags
&= ~SECTION_BSS
;
8101 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8104 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
8106 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8107 readily available, see PR34734. So we postpone the warning
8108 about uninitialized data in program memory section until here. */
8111 && decl
&& DECL_P (decl
)
8112 && NULL_TREE
== DECL_INITIAL (decl
)
8113 && !DECL_EXTERNAL (decl
)
8114 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8116 warning (OPT_Wuninitialized
,
8117 "uninitialized variable %q+D put into "
8118 "program memory area", decl
);
8121 default_encode_section_info (decl
, rtl
, new_decl_p
);
8123 if (decl
&& DECL_P (decl
)
8124 && TREE_CODE (decl
) != FUNCTION_DECL
8126 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
8128 rtx sym
= XEXP (rtl
, 0);
8129 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8131 /* PSTR strings are in generic space but located in flash:
8132 patch address space. */
8134 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8135 as
= ADDR_SPACE_FLASH
;
8137 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
8142 /* Implement `TARGET_ASM_SELECT_SECTION' */
8145 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
8147 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
8149 if (decl
&& DECL_P (decl
)
8150 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8152 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8153 int segment
= avr_addrspace
[as
].segment
;
8155 if (sect
->common
.flags
& SECTION_NAMED
)
8157 const char * name
= sect
->named
.name
;
8158 const char * old_prefix
= ".rodata";
8159 const char * new_prefix
= progmem_section_prefix
[segment
];
8161 if (STR_PREFIX_P (name
, old_prefix
))
8163 const char *sname
= ACONCAT ((new_prefix
,
8164 name
+ strlen (old_prefix
), NULL
));
8165 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
8169 return progmem_section
[segment
];
8175 /* Implement `TARGET_ASM_FILE_START'. */
8176 /* Outputs some text at the start of each assembler file. */
8179 avr_file_start (void)
8181 int sfr_offset
= avr_current_arch
->sfr_offset
;
8183 if (avr_current_arch
->asm_only
)
8184 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
8186 default_file_start ();
8188 /* Print I/O addresses of some SFRs used with IN and OUT. */
8191 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
8193 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
8194 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
8196 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
8198 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
8200 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
8202 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
8204 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
8205 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
8206 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
8210 /* Implement `TARGET_ASM_FILE_END'. */
8211 /* Outputs to the stdio stream FILE some
8212 appropriate text to go at the end of an assembler file. */
8217 /* Output these only if there is anything in the
8218 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
8219 input section(s) - some code size can be saved by not
8220 linking in the initialization code from libgcc if resp.
8221 sections are empty. */
8223 if (avr_need_copy_data_p
)
8224 fputs (".global __do_copy_data\n", asm_out_file
);
8226 if (avr_need_clear_bss_p
)
8227 fputs (".global __do_clear_bss\n", asm_out_file
);
8230 /* Choose the order in which to allocate hard registers for
8231 pseudo-registers local to a basic block.
8233 Store the desired register order in the array `reg_alloc_order'.
8234 Element 0 should be the register to allocate first; element 1, the
8235 next register; and so on. */
8238 order_regs_for_local_alloc (void)
8241 static const int order_0
[] = {
8249 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
8253 static const int order_1
[] = {
8261 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
8265 static const int order_2
[] = {
8274 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
8279 const int *order
= (TARGET_ORDER_1
? order_1
:
8280 TARGET_ORDER_2
? order_2
:
8282 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
8283 reg_alloc_order
[i
] = order
[i
];
8287 /* Implement `TARGET_REGISTER_MOVE_COST' */
8290 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
8291 reg_class_t from
, reg_class_t to
)
8293 return (from
== STACK_REG
? 6
8294 : to
== STACK_REG
? 12
8299 /* Implement `TARGET_MEMORY_MOVE_COST' */
8302 avr_memory_move_cost (enum machine_mode mode
,
8303 reg_class_t rclass ATTRIBUTE_UNUSED
,
8304 bool in ATTRIBUTE_UNUSED
)
8306 return (mode
== QImode
? 2
8307 : mode
== HImode
? 4
8308 : mode
== SImode
? 8
8309 : mode
== SFmode
? 8
8314 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8315 cost of an RTX operand given its context. X is the rtx of the
8316 operand, MODE is its mode, and OUTER is the rtx_code of this
8317 operand's parent operator. */
8320 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
8321 int opno
, bool speed
)
8323 enum rtx_code code
= GET_CODE (x
);
8335 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8342 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
8346 /* Worker function for AVR backend's rtx_cost function.
8347 X is rtx expression whose cost is to be calculated.
8348 Return true if the complete cost has been computed.
8349 Return false if subexpressions should be scanned.
8350 In either case, *TOTAL contains the cost result. */
8353 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
8354 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
8356 enum rtx_code code
= (enum rtx_code
) codearg
;
8357 enum machine_mode mode
= GET_MODE (x
);
8368 /* Immediate constants are as cheap as registers. */
8373 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8381 *total
= COSTS_N_INSNS (1);
8387 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
8393 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8401 *total
= COSTS_N_INSNS (1);
8407 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8411 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8412 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8416 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
8417 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8418 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8422 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
8423 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8424 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8432 && MULT
== GET_CODE (XEXP (x
, 0))
8433 && register_operand (XEXP (x
, 1), QImode
))
8436 *total
= COSTS_N_INSNS (speed
? 4 : 3);
8437 /* multiply-add with constant: will be split and load constant. */
8438 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8439 *total
= COSTS_N_INSNS (1) + *total
;
8442 *total
= COSTS_N_INSNS (1);
8443 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8444 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8449 && (MULT
== GET_CODE (XEXP (x
, 0))
8450 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
8451 && register_operand (XEXP (x
, 1), HImode
)
8452 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8453 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
8456 *total
= COSTS_N_INSNS (speed
? 5 : 4);
8457 /* multiply-add with constant: will be split and load constant. */
8458 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8459 *total
= COSTS_N_INSNS (1) + *total
;
8462 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8464 *total
= COSTS_N_INSNS (2);
8465 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8468 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8469 *total
= COSTS_N_INSNS (1);
8471 *total
= COSTS_N_INSNS (2);
8475 if (!CONST_INT_P (XEXP (x
, 1)))
8477 *total
= COSTS_N_INSNS (3);
8478 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8481 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8482 *total
= COSTS_N_INSNS (2);
8484 *total
= COSTS_N_INSNS (3);
8488 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8490 *total
= COSTS_N_INSNS (4);
8491 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8494 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8495 *total
= COSTS_N_INSNS (1);
8497 *total
= COSTS_N_INSNS (4);
8503 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8509 && register_operand (XEXP (x
, 0), QImode
)
8510 && MULT
== GET_CODE (XEXP (x
, 1)))
8513 *total
= COSTS_N_INSNS (speed
? 4 : 3);
8514 /* multiply-sub with constant: will be split and load constant. */
8515 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
8516 *total
= COSTS_N_INSNS (1) + *total
;
8521 && register_operand (XEXP (x
, 0), HImode
)
8522 && (MULT
== GET_CODE (XEXP (x
, 1))
8523 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
8524 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
8525 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
8528 *total
= COSTS_N_INSNS (speed
? 5 : 4);
8529 /* multiply-sub with constant: will be split and load constant. */
8530 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
8531 *total
= COSTS_N_INSNS (1) + *total
;
8537 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8538 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8539 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8540 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8544 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8545 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8546 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8554 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
8556 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8564 rtx op0
= XEXP (x
, 0);
8565 rtx op1
= XEXP (x
, 1);
8566 enum rtx_code code0
= GET_CODE (op0
);
8567 enum rtx_code code1
= GET_CODE (op1
);
8568 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
8569 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
8572 && (u8_operand (op1
, HImode
)
8573 || s8_operand (op1
, HImode
)))
8575 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
8579 && register_operand (op1
, HImode
))
8581 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8584 else if (ex0
|| ex1
)
8586 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
8589 else if (register_operand (op0
, HImode
)
8590 && (u8_operand (op1
, HImode
)
8591 || s8_operand (op1
, HImode
)))
8593 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
8597 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
8600 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8607 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8617 /* Add some additional costs besides CALL like moves etc. */
8619 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
8623 /* Just a rough estimate. Even with -O2 we don't want bulky
8624 code expanded inline. */
8626 *total
= COSTS_N_INSNS (25);
8632 *total
= COSTS_N_INSNS (300);
8634 /* Add some additional costs besides CALL like moves etc. */
8635 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
8643 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8644 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8652 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8654 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
8655 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8656 /* For div/mod with const-int divisor we have at least the cost of
8657 loading the divisor. */
8658 if (CONST_INT_P (XEXP (x
, 1)))
8659 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8660 /* Add some overall penaly for clobbering and moving around registers */
8661 *total
+= COSTS_N_INSNS (2);
8668 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
8669 *total
= COSTS_N_INSNS (1);
8674 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
8675 *total
= COSTS_N_INSNS (3);
8680 if (CONST_INT_P (XEXP (x
, 1)))
8681 switch (INTVAL (XEXP (x
, 1)))
8685 *total
= COSTS_N_INSNS (5);
8688 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
8696 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8703 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8705 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8706 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8711 val
= INTVAL (XEXP (x
, 1));
8713 *total
= COSTS_N_INSNS (3);
8714 else if (val
>= 0 && val
<= 7)
8715 *total
= COSTS_N_INSNS (val
);
8717 *total
= COSTS_N_INSNS (1);
8724 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
8725 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
8726 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
8728 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
8733 if (const1_rtx
== (XEXP (x
, 1))
8734 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
8736 *total
= COSTS_N_INSNS (2);
8740 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8742 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8743 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8747 switch (INTVAL (XEXP (x
, 1)))
8754 *total
= COSTS_N_INSNS (2);
8757 *total
= COSTS_N_INSNS (3);
8763 *total
= COSTS_N_INSNS (4);
8768 *total
= COSTS_N_INSNS (5);
8771 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8774 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8777 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
8780 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8781 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8787 if (!CONST_INT_P (XEXP (x
, 1)))
8789 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8792 switch (INTVAL (XEXP (x
, 1)))
8800 *total
= COSTS_N_INSNS (3);
8803 *total
= COSTS_N_INSNS (5);
8806 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8812 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8814 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8815 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8819 switch (INTVAL (XEXP (x
, 1)))
8825 *total
= COSTS_N_INSNS (3);
8830 *total
= COSTS_N_INSNS (4);
8833 *total
= COSTS_N_INSNS (6);
8836 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8839 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8840 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8848 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8855 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8857 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8858 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8863 val
= INTVAL (XEXP (x
, 1));
8865 *total
= COSTS_N_INSNS (4);
8867 *total
= COSTS_N_INSNS (2);
8868 else if (val
>= 0 && val
<= 7)
8869 *total
= COSTS_N_INSNS (val
);
8871 *total
= COSTS_N_INSNS (1);
8876 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8878 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8879 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8883 switch (INTVAL (XEXP (x
, 1)))
8889 *total
= COSTS_N_INSNS (2);
8892 *total
= COSTS_N_INSNS (3);
8898 *total
= COSTS_N_INSNS (4);
8902 *total
= COSTS_N_INSNS (5);
8905 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8908 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8912 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8915 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8916 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8922 if (!CONST_INT_P (XEXP (x
, 1)))
8924 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8927 switch (INTVAL (XEXP (x
, 1)))
8933 *total
= COSTS_N_INSNS (3);
8937 *total
= COSTS_N_INSNS (5);
8940 *total
= COSTS_N_INSNS (4);
8943 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8949 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8951 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8952 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8956 switch (INTVAL (XEXP (x
, 1)))
8962 *total
= COSTS_N_INSNS (4);
8967 *total
= COSTS_N_INSNS (6);
8970 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8973 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
8976 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8977 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8985 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8992 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8994 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8995 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9000 val
= INTVAL (XEXP (x
, 1));
9002 *total
= COSTS_N_INSNS (3);
9003 else if (val
>= 0 && val
<= 7)
9004 *total
= COSTS_N_INSNS (val
);
9006 *total
= COSTS_N_INSNS (1);
9011 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9013 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9014 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9018 switch (INTVAL (XEXP (x
, 1)))
9025 *total
= COSTS_N_INSNS (2);
9028 *total
= COSTS_N_INSNS (3);
9033 *total
= COSTS_N_INSNS (4);
9037 *total
= COSTS_N_INSNS (5);
9043 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9046 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9050 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9053 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9054 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9060 if (!CONST_INT_P (XEXP (x
, 1)))
9062 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9065 switch (INTVAL (XEXP (x
, 1)))
9073 *total
= COSTS_N_INSNS (3);
9076 *total
= COSTS_N_INSNS (5);
9079 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9085 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9087 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9088 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9092 switch (INTVAL (XEXP (x
, 1)))
9098 *total
= COSTS_N_INSNS (4);
9101 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9106 *total
= COSTS_N_INSNS (4);
9109 *total
= COSTS_N_INSNS (6);
9112 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9113 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9121 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9125 switch (GET_MODE (XEXP (x
, 0)))
9128 *total
= COSTS_N_INSNS (1);
9129 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9130 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9134 *total
= COSTS_N_INSNS (2);
9135 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9136 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9137 else if (INTVAL (XEXP (x
, 1)) != 0)
9138 *total
+= COSTS_N_INSNS (1);
9142 *total
= COSTS_N_INSNS (3);
9143 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
9144 *total
+= COSTS_N_INSNS (2);
9148 *total
= COSTS_N_INSNS (4);
9149 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9150 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9151 else if (INTVAL (XEXP (x
, 1)) != 0)
9152 *total
+= COSTS_N_INSNS (3);
9158 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9163 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
9164 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
9165 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
9167 if (QImode
== mode
|| HImode
== mode
)
9169 *total
= COSTS_N_INSNS (2);
9182 /* Implement `TARGET_RTX_COSTS'. */
9185 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
9186 int opno
, int *total
, bool speed
)
9188 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
9189 opno
, total
, speed
);
9191 if (avr_log
.rtx_costs
)
9193 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9194 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
9201 /* Implement `TARGET_ADDRESS_COST'. */
9204 avr_address_cost (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
,
9205 addr_space_t as ATTRIBUTE_UNUSED
,
9206 bool speed ATTRIBUTE_UNUSED
)
9210 if (GET_CODE (x
) == PLUS
9211 && CONST_INT_P (XEXP (x
, 1))
9212 && (REG_P (XEXP (x
, 0))
9213 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
9215 if (INTVAL (XEXP (x
, 1)) >= 61)
9218 else if (CONSTANT_ADDRESS_P (x
))
9221 && io_address_operand (x
, QImode
))
9225 if (avr_log
.address_cost
)
9226 avr_edump ("\n%?: %d = %r\n", cost
, x
);
9231 /* Test for extra memory constraint 'Q'.
9232 It's a memory address based on Y or Z pointer with valid displacement. */
9235 extra_constraint_Q (rtx x
)
9239 if (GET_CODE (XEXP (x
,0)) == PLUS
9240 && REG_P (XEXP (XEXP (x
,0), 0))
9241 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
9242 && (INTVAL (XEXP (XEXP (x
,0), 1))
9243 <= MAX_LD_OFFSET (GET_MODE (x
))))
9245 rtx xx
= XEXP (XEXP (x
,0), 0);
9246 int regno
= REGNO (xx
);
9248 ok
= (/* allocate pseudos */
9249 regno
>= FIRST_PSEUDO_REGISTER
9250 /* strictly check */
9251 || regno
== REG_Z
|| regno
== REG_Y
9252 /* XXX frame & arg pointer checks */
9253 || xx
== frame_pointer_rtx
9254 || xx
== arg_pointer_rtx
);
9256 if (avr_log
.constraints
)
9257 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9258 ok
, reload_completed
, reload_in_progress
, x
);
9264 /* Convert condition code CONDITION to the valid AVR condition code. */
9267 avr_normalize_condition (RTX_CODE condition
)
9284 /* Helper function for `avr_reorg'. */
9287 avr_compare_pattern (rtx insn
)
9289 rtx pattern
= single_set (insn
);
9292 && NONJUMP_INSN_P (insn
)
9293 && SET_DEST (pattern
) == cc0_rtx
9294 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
9296 enum machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
9297 enum machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
9299 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9300 They must not be swapped, thus skip them. */
9302 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
9303 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
9310 /* Helper function for `avr_reorg'. */
9312 /* Expansion of switch/case decision trees leads to code like
9314 cc0 = compare (Reg, Num)
9318 cc0 = compare (Reg, Num)
9322 The second comparison is superfluous and can be deleted.
9323 The second jump condition can be transformed from a
9324 "difficult" one to a "simple" one because "cc0 > 0" and
9325 "cc0 >= 0" will have the same effect here.
9327 This function relies on the way switch/case is being expaned
9328 as binary decision tree. For example code see PR 49903.
9330 Return TRUE if optimization performed.
9331 Return FALSE if nothing changed.
9333 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9335 We don't want to do this in text peephole because it is
9336 tedious to work out jump offsets there and the second comparison
9337 might have been transormed by `avr_reorg'.
9339 RTL peephole won't do because peephole2 does not scan across
9343 avr_reorg_remove_redundant_compare (rtx insn1
)
9345 rtx comp1
, ifelse1
, xcond1
, branch1
;
9346 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
9348 rtx jump
, target
, cond
;
9350 /* Look out for: compare1 - branch1 - compare2 - branch2 */
9352 branch1
= next_nonnote_nondebug_insn (insn1
);
9353 if (!branch1
|| !JUMP_P (branch1
))
9356 insn2
= next_nonnote_nondebug_insn (branch1
);
9357 if (!insn2
|| !avr_compare_pattern (insn2
))
9360 branch2
= next_nonnote_nondebug_insn (insn2
);
9361 if (!branch2
|| !JUMP_P (branch2
))
9364 comp1
= avr_compare_pattern (insn1
);
9365 comp2
= avr_compare_pattern (insn2
);
9366 xcond1
= single_set (branch1
);
9367 xcond2
= single_set (branch2
);
9369 if (!comp1
|| !comp2
9370 || !rtx_equal_p (comp1
, comp2
)
9371 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
9372 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
9373 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
9374 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
9379 comp1
= SET_SRC (comp1
);
9380 ifelse1
= SET_SRC (xcond1
);
9381 ifelse2
= SET_SRC (xcond2
);
9383 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
9385 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
9386 || !REG_P (XEXP (comp1
, 0))
9387 || !CONST_INT_P (XEXP (comp1
, 1))
9388 || XEXP (ifelse1
, 2) != pc_rtx
9389 || XEXP (ifelse2
, 2) != pc_rtx
9390 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
9391 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
9392 || !COMPARISON_P (XEXP (ifelse2
, 0))
9393 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
9394 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
9395 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
9396 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
9401 /* We filtered the insn sequence to look like
9407 (if_then_else (eq (cc0)
9416 (if_then_else (CODE (cc0)
9422 code
= GET_CODE (XEXP (ifelse2
, 0));
9424 /* Map GT/GTU to GE/GEU which is easier for AVR.
9425 The first two instructions compare/branch on EQ
9426 so we may replace the difficult
9428 if (x == VAL) goto L1;
9429 if (x > VAL) goto L2;
9433 if (x == VAL) goto L1;
9434 if (x >= VAL) goto L2;
9436 Similarly, replace LE/LEU by LT/LTU. */
9447 code
= avr_normalize_condition (code
);
9454 /* Wrap the branches into UNSPECs so they won't be changed or
9455 optimized in the remainder. */
9457 target
= XEXP (XEXP (ifelse1
, 1), 0);
9458 cond
= XEXP (ifelse1
, 0);
9459 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
9461 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
9463 target
= XEXP (XEXP (ifelse2
, 1), 0);
9464 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
9465 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
9467 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
9469 /* The comparisons in insn1 and insn2 are exactly the same;
9470 insn2 is superfluous so delete it. */
9472 delete_insn (insn2
);
9473 delete_insn (branch1
);
9474 delete_insn (branch2
);
9480 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9481 /* Optimize conditional jumps. */
9486 rtx insn
= get_insns();
9488 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
9490 rtx pattern
= avr_compare_pattern (insn
);
9496 && avr_reorg_remove_redundant_compare (insn
))
9501 if (compare_diff_p (insn
))
9503 /* Now we work under compare insn with difficult branch. */
9505 rtx next
= next_real_insn (insn
);
9506 rtx pat
= PATTERN (next
);
9508 pattern
= SET_SRC (pattern
);
9510 if (true_regnum (XEXP (pattern
, 0)) >= 0
9511 && true_regnum (XEXP (pattern
, 1)) >= 0)
9513 rtx x
= XEXP (pattern
, 0);
9514 rtx src
= SET_SRC (pat
);
9515 rtx t
= XEXP (src
,0);
9516 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
9517 XEXP (pattern
, 0) = XEXP (pattern
, 1);
9518 XEXP (pattern
, 1) = x
;
9519 INSN_CODE (next
) = -1;
9521 else if (true_regnum (XEXP (pattern
, 0)) >= 0
9522 && XEXP (pattern
, 1) == const0_rtx
)
9524 /* This is a tst insn, we can reverse it. */
9525 rtx src
= SET_SRC (pat
);
9526 rtx t
= XEXP (src
,0);
9528 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
9529 XEXP (pattern
, 1) = XEXP (pattern
, 0);
9530 XEXP (pattern
, 0) = const0_rtx
;
9531 INSN_CODE (next
) = -1;
9532 INSN_CODE (insn
) = -1;
9534 else if (true_regnum (XEXP (pattern
, 0)) >= 0
9535 && CONST_INT_P (XEXP (pattern
, 1)))
9537 rtx x
= XEXP (pattern
, 1);
9538 rtx src
= SET_SRC (pat
);
9539 rtx t
= XEXP (src
,0);
9540 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
9542 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
9544 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
9545 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
9546 INSN_CODE (next
) = -1;
9547 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.*/

static inline unsigned int
avr_ret_register (void)
{
  /* By the AVR ABI, function return values start at R24
     (counting downwards for wider modes).  */
  return 24;
}
9562 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
9565 avr_function_value_regno_p (const unsigned int regno
)
9567 return (regno
== avr_ret_register ());
9570 /* Create an RTX representing the place where a
9571 library function returns a value of mode MODE. */
9574 avr_libcall_value (enum machine_mode mode
,
9575 const_rtx func ATTRIBUTE_UNUSED
)
9577 int offs
= GET_MODE_SIZE (mode
);
9580 offs
= (offs
+ 1) & ~1;
9582 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
9585 /* Create an RTX representing the place where a
9586 function returns a value of data type VALTYPE. */
9589 avr_function_value (const_tree type
,
9590 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
9591 bool outgoing ATTRIBUTE_UNUSED
)
9595 if (TYPE_MODE (type
) != BLKmode
)
9596 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
9598 offs
= int_size_in_bytes (type
);
9601 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
9602 offs
= GET_MODE_SIZE (SImode
);
9603 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
9604 offs
= GET_MODE_SIZE (DImode
);
9606 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
9610 test_hard_reg_class (enum reg_class rclass
, rtx x
)
9612 int regno
= true_regnum (x
);
9616 if (TEST_HARD_REG_CLASS (rclass
, regno
))
9623 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
9624 and thus is suitable to be skipped by CPSE, SBRC, etc. */
9627 avr_2word_insn_p (rtx insn
)
9629 if (avr_current_device
->errata_skip
9631 || 2 != get_attr_length (insn
))
9636 switch (INSN_CODE (insn
))
9641 case CODE_FOR_movqi_insn
:
9642 case CODE_FOR_movuqq_insn
:
9643 case CODE_FOR_movqq_insn
:
9645 rtx set
= single_set (insn
);
9646 rtx src
= SET_SRC (set
);
9647 rtx dest
= SET_DEST (set
);
9649 /* Factor out LDS and STS from movqi_insn. */
9652 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
9654 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
9656 else if (REG_P (dest
)
9659 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
9665 case CODE_FOR_call_insn
:
9666 case CODE_FOR_call_value_insn
:
9673 jump_over_one_insn_p (rtx insn
, rtx dest
)
9675 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
9678 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
9679 int dest_addr
= INSN_ADDRESSES (uid
);
9680 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
9682 return (jump_offset
== 1
9683 || (jump_offset
== 2
9684 && avr_2word_insn_p (next_active_insn (insn
))));
9687 /* Returns 1 if a value of mode MODE can be stored starting with hard
9688 register number REGNO. On the enhanced core, anything larger than
9689 1 byte must start in even numbered register for "movw" to work
9690 (this way we don't have to check for odd registers everywhere). */
9693 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
9695 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9696 Disallowing QI et al. in these regs might lead to code like
9697 (set (subreg:QI (reg:HI 28) n) ...)
9698 which will result in wrong code because reload does not
9699 handle SUBREGs of hard regsisters like this.
9700 This could be fixed in reload. However, it appears
9701 that fixing reload is not wanted by reload people. */
9703 /* Any GENERAL_REGS register can hold 8-bit values. */
9705 if (GET_MODE_SIZE (mode
) == 1)
9708 /* FIXME: Ideally, the following test is not needed.
9709 However, it turned out that it can reduce the number
9710 of spill fails. AVR and it's poor endowment with
9711 address registers is extreme stress test for reload. */
9713 if (GET_MODE_SIZE (mode
) >= 4
9717 /* All modes larger than 8 bits should start in an even register. */
9719 return !(regno
& 1);
9723 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
9726 avr_hard_regno_call_part_clobbered (unsigned regno
, enum machine_mode mode
)
9728 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
9729 represent valid hard registers like, e.g. HI:29. Returning TRUE
9730 for such registers can lead to performance degradation as mentioned
9731 in PR53595. Thus, report invalid hard registers as FALSE. */
9733 if (!avr_hard_regno_mode_ok (regno
, mode
))
9736 /* Return true if any of the following boundaries is crossed:
9737 17/18, 27/28 and 29/30. */
9739 return ((regno
< 18 && regno
+ GET_MODE_SIZE (mode
) > 18)
9740 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
9741 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
9745 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
9748 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
9749 addr_space_t as
, RTX_CODE outer_code
,
9750 RTX_CODE index_code ATTRIBUTE_UNUSED
)
9752 if (!ADDR_SPACE_GENERIC_P (as
))
9754 return POINTER_Z_REGS
;
9758 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
9760 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
9764 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9767 avr_regno_mode_code_ok_for_base_p (int regno
,
9768 enum machine_mode mode ATTRIBUTE_UNUSED
,
9769 addr_space_t as ATTRIBUTE_UNUSED
,
9770 RTX_CODE outer_code
,
9771 RTX_CODE index_code ATTRIBUTE_UNUSED
)
9775 if (!ADDR_SPACE_GENERIC_P (as
))
9777 if (regno
< FIRST_PSEUDO_REGISTER
9785 regno
= reg_renumber
[regno
];
9796 if (regno
< FIRST_PSEUDO_REGISTER
9800 || regno
== ARG_POINTER_REGNUM
))
9804 else if (reg_renumber
)
9806 regno
= reg_renumber
[regno
];
9811 || regno
== ARG_POINTER_REGNUM
)
9818 && PLUS
== outer_code
9828 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9829 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9830 CLOBBER_REG is a QI clobber register or NULL_RTX.
9831 LEN == NULL: output instructions.
9832 LEN != NULL: set *LEN to the length of the instruction sequence
9833 (in words) printed with LEN = NULL.
9834 If CLEAR_P is true, OP[0] had been cleard to Zero already.
9835 If CLEAR_P is false, nothing is known about OP[0].
9837 The effect on cc0 is as follows:
9839 Load 0 to any register except ZERO_REG : NONE
9840 Load ld register with any value : NONE
9841 Anything else: : CLOBBER */
9844 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
9850 int clobber_val
= 1234;
9851 bool cooked_clobber_p
= false;
9853 enum machine_mode mode
= GET_MODE (dest
);
9854 int n
, n_bytes
= GET_MODE_SIZE (mode
);
9856 gcc_assert (REG_P (dest
)
9857 && CONSTANT_P (src
));
9862 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9863 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9865 if (REGNO (dest
) < 16
9866 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
9868 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
9871 /* We might need a clobber reg but don't have one. Look at the value to
9872 be loaded more closely. A clobber is only needed if it is a symbol
9873 or contains a byte that is neither 0, -1 or a power of 2. */
9875 if (NULL_RTX
== clobber_reg
9876 && !test_hard_reg_class (LD_REGS
, dest
)
9877 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
9878 || !avr_popcount_each_byte (src
, n_bytes
,
9879 (1 << 0) | (1 << 1) | (1 << 8))))
9881 /* We have no clobber register but need one. Cook one up.
9882 That's cheaper than loading from constant pool. */
9884 cooked_clobber_p
= true;
9885 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
9886 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
9889 /* Now start filling DEST from LSB to MSB. */
9891 for (n
= 0; n
< n_bytes
; n
++)
9894 bool done_byte
= false;
9898 /* Crop the n-th destination byte. */
9900 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
9901 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
9903 if (!CONST_INT_P (src
)
9904 && !CONST_FIXED_P (src
)
9905 && !CONST_DOUBLE_P (src
))
9907 static const char* const asm_code
[][2] =
9909 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
9910 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
9911 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
9912 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
9917 xop
[2] = clobber_reg
;
9919 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
9924 /* Crop the n-th source byte. */
9926 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
9927 ival
[n
] = INTVAL (xval
);
9929 /* Look if we can reuse the low word by means of MOVW. */
9935 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
9936 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
9938 if (INTVAL (lo16
) == INTVAL (hi16
))
9940 if (0 != INTVAL (lo16
)
9943 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
9950 /* Don't use CLR so that cc0 is set as expected. */
9955 avr_asm_len (ldreg_p
? "ldi %0,0"
9956 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
9957 : "mov %0,__zero_reg__",
9962 if (clobber_val
== ival
[n
]
9963 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
9968 /* LD_REGS can use LDI to move a constant value */
9974 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
9978 /* Try to reuse value already loaded in some lower byte. */
9980 for (j
= 0; j
< n
; j
++)
9981 if (ival
[j
] == ival
[n
])
9986 avr_asm_len ("mov %0,%1", xop
, len
, 1);
9994 /* Need no clobber reg for -1: Use CLR/DEC */
9999 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10001 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
10004 else if (1 == ival
[n
])
10007 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10009 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
10013 /* Use T flag or INC to manage powers of 2 if we have
10016 if (NULL_RTX
== clobber_reg
10017 && single_one_operand (xval
, QImode
))
10020 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
10022 gcc_assert (constm1_rtx
!= xop
[1]);
10027 avr_asm_len ("set", xop
, len
, 1);
10031 avr_asm_len ("clr %0", xop
, len
, 1);
10033 avr_asm_len ("bld %0,%1", xop
, len
, 1);
10037 /* We actually need the LD_REGS clobber reg. */
10039 gcc_assert (NULL_RTX
!= clobber_reg
);
10043 xop
[2] = clobber_reg
;
10044 clobber_val
= ival
[n
];
10046 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10047 "mov %0,%2", xop
, len
, 2);
10050 /* If we cooked up a clobber reg above, restore it. */
10052 if (cooked_clobber_p
)
10054 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
10059 /* Reload the constant OP[1] into the HI register OP[0].
10060 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10061 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10062 need a clobber reg or have to cook one up.
10064 PLEN == NULL: Output instructions.
10065 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10066 by the insns printed.
10071 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
10073 output_reload_in_const (op
, clobber_reg
, plen
, false);
10078 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10079 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10080 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10081 need a clobber reg or have to cook one up.
10083 LEN == NULL: Output instructions.
10085 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10086 by the insns printed.
10091 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
10094 && !test_hard_reg_class (LD_REGS
, op
[0])
10095 && (CONST_INT_P (op
[1])
10096 || CONST_FIXED_P (op
[1])
10097 || CONST_DOUBLE_P (op
[1])))
10099 int len_clr
, len_noclr
;
10101 /* In some cases it is better to clear the destination beforehand, e.g.
10103 CLR R2 CLR R3 MOVW R4,R2 INC R2
10107 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10109 We find it too tedious to work that out in the print function.
10110 Instead, we call the print function twice to get the lengths of
10111 both methods and use the shortest one. */
10113 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
10114 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
10116 if (len_noclr
- len_clr
== 4)
10118 /* Default needs 4 CLR instructions: clear register beforehand. */
10120 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10121 "mov %B0,__zero_reg__" CR_TAB
10122 "movw %C0,%A0", &op
[0], len
, 3);
10124 output_reload_in_const (op
, clobber_reg
, len
, true);
10133 /* Default: destination not pre-cleared. */
10135 output_reload_in_const (op
, clobber_reg
, len
, false);
10140 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
10142 output_reload_in_const (op
, clobber_reg
, len
, false);
10148 avr_output_addr_vec_elt (FILE *stream
, int value
)
10150 if (AVR_HAVE_JMP_CALL
)
10151 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
10153 fprintf (stream
, "\trjmp .L%d\n", value
);
10156 /* Returns true if SCRATCH are safe to be allocated as a scratch
10157 registers (for a define_peephole2) in the current function. */
10160 avr_hard_regno_scratch_ok (unsigned int regno
)
10162 /* Interrupt functions can only use registers that have already been saved
10163 by the prologue, even if they would normally be call-clobbered. */
10165 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10166 && !df_regs_ever_live_p (regno
))
10169 /* Don't allow hard registers that might be part of the frame pointer.
10170 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10171 and don't care for a frame pointer that spans more than one register. */
10173 if ((!reload_completed
|| frame_pointer_needed
)
10174 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
10182 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10185 avr_hard_regno_rename_ok (unsigned int old_reg
,
10186 unsigned int new_reg
)
10188 /* Interrupt functions can only use registers that have already been
10189 saved by the prologue, even if they would normally be
10192 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10193 && !df_regs_ever_live_p (new_reg
))
10196 /* Don't allow hard registers that might be part of the frame pointer.
10197 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10198 and don't care for a frame pointer that spans more than one register. */
10200 if ((!reload_completed
|| frame_pointer_needed
)
10201 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
10202 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
10210 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10211 or memory location in the I/O space (QImode only).
10213 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10214 Operand 1: register operand to test, or CONST_INT memory address.
10215 Operand 2: bit number.
10216 Operand 3: label to jump to if the test is true. */
10219 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
10221 enum rtx_code comp
= GET_CODE (operands
[0]);
10222 bool long_jump
= get_attr_length (insn
) >= 4;
10223 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
10227 else if (comp
== LT
)
10231 comp
= reverse_condition (comp
);
10233 switch (GET_CODE (operands
[1]))
10240 if (low_io_address_operand (operands
[1], QImode
))
10243 output_asm_insn ("sbis %i1,%2", operands
);
10245 output_asm_insn ("sbic %i1,%2", operands
);
10249 output_asm_insn ("in __tmp_reg__,%i1", operands
);
10251 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
10253 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
10256 break; /* CONST_INT */
10261 output_asm_insn ("sbrs %T1%T2", operands
);
10263 output_asm_insn ("sbrc %T1%T2", operands
);
10269 return ("rjmp .+4" CR_TAB
10278 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
10281 avr_asm_out_ctor (rtx symbol
, int priority
)
10283 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
10284 default_ctor_section_asm_out_constructor (symbol
, priority
);
10287 /* Worker function for TARGET_ASM_DESTRUCTOR. */
10290 avr_asm_out_dtor (rtx symbol
, int priority
)
10292 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
10293 default_dtor_section_asm_out_destructor (symbol
, priority
);
10296 /* Worker function for TARGET_RETURN_IN_MEMORY. */
10299 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
10301 if (TYPE_MODE (type
) == BLKmode
)
10303 HOST_WIDE_INT size
= int_size_in_bytes (type
);
10304 return (size
== -1 || size
> 8);
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
10327 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10329 static enum machine_mode
10330 avr_addr_space_address_mode (addr_space_t as
)
10332 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
10336 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10338 static enum machine_mode
10339 avr_addr_space_pointer_mode (addr_space_t as
)
10341 return avr_addr_space_address_mode (as
);
10345 /* Helper for following function. */
10348 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
10355 return REGNO (reg
) == REG_Z
;
10358 /* Avoid combine to propagate hard regs. */
10360 if (can_create_pseudo_p()
10361 && REGNO (reg
) < REG_Z
)
10370 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
10373 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
10374 bool strict
, addr_space_t as
)
10383 case ADDR_SPACE_GENERIC
:
10384 return avr_legitimate_address_p (mode
, x
, strict
);
10386 case ADDR_SPACE_FLASH
:
10387 case ADDR_SPACE_FLASH1
:
10388 case ADDR_SPACE_FLASH2
:
10389 case ADDR_SPACE_FLASH3
:
10390 case ADDR_SPACE_FLASH4
:
10391 case ADDR_SPACE_FLASH5
:
10393 switch (GET_CODE (x
))
10396 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
10400 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
10409 case ADDR_SPACE_MEMX
:
10412 && can_create_pseudo_p());
10414 if (LO_SUM
== GET_CODE (x
))
10416 rtx hi
= XEXP (x
, 0);
10417 rtx lo
= XEXP (x
, 1);
10420 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
10422 && REGNO (lo
) == REG_Z
);
10428 if (avr_log
.legitimate_address_p
)
10430 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
10431 "reload_completed=%d reload_in_progress=%d %s:",
10432 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
10433 reg_renumber
? "(reg_renumber)" : "");
10435 if (GET_CODE (x
) == PLUS
10436 && REG_P (XEXP (x
, 0))
10437 && CONST_INT_P (XEXP (x
, 1))
10438 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
10441 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
10442 true_regnum (XEXP (x
, 0)));
10445 avr_edump ("\n%r\n", x
);
10452 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10455 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
10456 enum machine_mode mode
, addr_space_t as
)
10458 if (ADDR_SPACE_GENERIC_P (as
))
10459 return avr_legitimize_address (x
, old_x
, mode
);
10461 if (avr_log
.legitimize_address
)
10463 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
10470 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
10473 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
10475 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
10476 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
10478 if (avr_log
.progmem
)
10479 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
10480 src
, type_from
, type_to
);
10482 /* Up-casting from 16-bit to 24-bit pointer. */
10484 if (as_from
!= ADDR_SPACE_MEMX
10485 && as_to
== ADDR_SPACE_MEMX
)
10489 rtx reg
= gen_reg_rtx (PSImode
);
10491 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
10492 sym
= XEXP (sym
, 0);
10494 /* Look at symbol flags: avr_encode_section_info set the flags
10495 also if attribute progmem was seen so that we get the right
10496 promotion for, e.g. PSTR-like strings that reside in generic space
10497 but are located in flash. In that case we patch the incoming
10500 if (SYMBOL_REF
== GET_CODE (sym
)
10501 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
10503 as_from
= ADDR_SPACE_FLASH
;
10506 /* Linearize memory: RAM has bit 23 set. */
10508 msb
= ADDR_SPACE_GENERIC_P (as_from
)
10510 : avr_addrspace
[as_from
].segment
;
10512 src
= force_reg (Pmode
, src
);
10514 emit_insn (msb
== 0
10515 ? gen_zero_extendhipsi2 (reg
, src
)
10516 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
10521 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
10523 if (as_from
== ADDR_SPACE_MEMX
10524 && as_to
!= ADDR_SPACE_MEMX
)
10526 rtx new_src
= gen_reg_rtx (Pmode
);
10528 src
= force_reg (PSImode
, src
);
10530 emit_move_insn (new_src
,
10531 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
10539 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
10542 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
10543 addr_space_t superset ATTRIBUTE_UNUSED
)
10545 /* Allow any kind of pointer mess. */
10551 /* Worker function for movmemhi expander.
10552 XOP[0] Destination as MEM:BLK
10554 XOP[2] # Bytes to copy
10556 Return TRUE if the expansion is accomplished.
10557 Return FALSE if the operand compination is not supported. */
10560 avr_emit_movmemhi (rtx
*xop
)
10562 HOST_WIDE_INT count
;
10563 enum machine_mode loop_mode
;
10564 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
10565 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
10566 rtx a_hi8
= NULL_RTX
;
10568 if (avr_mem_flash_p (xop
[0]))
10571 if (!CONST_INT_P (xop
[2]))
10574 count
= INTVAL (xop
[2]);
10578 a_src
= XEXP (xop
[1], 0);
10579 a_dest
= XEXP (xop
[0], 0);
10581 if (PSImode
== GET_MODE (a_src
))
10583 gcc_assert (as
== ADDR_SPACE_MEMX
);
10585 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
10586 loop_reg
= gen_rtx_REG (loop_mode
, 24);
10587 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
10589 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
10590 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
10594 int segment
= avr_addrspace
[as
].segment
;
10597 && avr_current_device
->n_flash
> 1)
10599 a_hi8
= GEN_INT (segment
);
10600 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
10602 else if (!ADDR_SPACE_GENERIC_P (as
))
10604 as
= ADDR_SPACE_FLASH
;
10609 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
10610 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
10613 xas
= GEN_INT (as
);
10615 /* FIXME: Register allocator might come up with spill fails if it is left
10616 on its own. Thus, we allocate the pointer registers by hand:
10618 X = destination address */
10620 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
10621 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
10623 /* FIXME: Register allocator does a bad job and might spill address
10624 register(s) inside the loop leading to additional move instruction
10625 to/from stack which could clobber tmp_reg. Thus, do *not* emit
10626 load and store as separate insns. Instead, we perform the copy
10627 by means of one monolithic insn. */
10629 gcc_assert (TMP_REGNO
== LPM_REGNO
);
10631 if (as
!= ADDR_SPACE_MEMX
)
10633 /* Load instruction ([E]LPM or LD) is known at compile time:
10634 Do the copy-loop inline. */
10636 rtx (*fun
) (rtx
, rtx
, rtx
)
10637 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
10639 insn
= fun (xas
, loop_reg
, loop_reg
);
10643 rtx (*fun
) (rtx
, rtx
)
10644 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
10646 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
10648 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
10651 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
10658 /* Print assembler for movmem_qi, movmem_hi insns...
10660 $1, $2 : Loop register
10662 X : Destination address
10666 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
10668 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
10669 enum machine_mode loop_mode
= GET_MODE (op
[1]);
10670 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
10678 xop
[2] = tmp_reg_rtx
;
10682 avr_asm_len ("0:", xop
, plen
, 0);
10684 /* Load with post-increment */
10691 case ADDR_SPACE_GENERIC
:
10693 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
10696 case ADDR_SPACE_FLASH
:
10699 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
10701 avr_asm_len ("lpm" CR_TAB
10702 "adiw r30,1", xop
, plen
, 2);
10705 case ADDR_SPACE_FLASH1
:
10706 case ADDR_SPACE_FLASH2
:
10707 case ADDR_SPACE_FLASH3
:
10708 case ADDR_SPACE_FLASH4
:
10709 case ADDR_SPACE_FLASH5
:
10711 if (AVR_HAVE_ELPMX
)
10712 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
10714 avr_asm_len ("elpm" CR_TAB
10715 "adiw r30,1", xop
, plen
, 2);
10719 /* Store with post-increment */
10721 avr_asm_len ("st X+,%2", xop
, plen
, 1);
10723 /* Decrement loop-counter and set Z-flag */
10725 if (QImode
== loop_mode
)
10727 avr_asm_len ("dec %1", xop
, plen
, 1);
10731 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
10735 avr_asm_len ("subi %A1,1" CR_TAB
10736 "sbci %B1,0", xop
, plen
, 2);
10739 /* Loop until zero */
10741 return avr_asm_len ("brne 0b", xop
, plen
, 1);
10746 /* Helper for __builtin_avr_delay_cycles */
10749 avr_mem_clobber (void)
10751 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
10752 MEM_VOLATILE_P (mem
) = 1;
10757 avr_expand_delay_cycles (rtx operands0
)
10759 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
10760 unsigned HOST_WIDE_INT cycles_used
;
10761 unsigned HOST_WIDE_INT loop_count
;
10763 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
10765 loop_count
= ((cycles
- 9) / 6) + 1;
10766 cycles_used
= ((loop_count
- 1) * 6) + 9;
10767 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
10768 avr_mem_clobber()));
10769 cycles
-= cycles_used
;
10772 if (IN_RANGE (cycles
, 262145, 83886081))
10774 loop_count
= ((cycles
- 7) / 5) + 1;
10775 if (loop_count
> 0xFFFFFF)
10776 loop_count
= 0xFFFFFF;
10777 cycles_used
= ((loop_count
- 1) * 5) + 7;
10778 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
10779 avr_mem_clobber()));
10780 cycles
-= cycles_used
;
10783 if (IN_RANGE (cycles
, 768, 262144))
10785 loop_count
= ((cycles
- 5) / 4) + 1;
10786 if (loop_count
> 0xFFFF)
10787 loop_count
= 0xFFFF;
10788 cycles_used
= ((loop_count
- 1) * 4) + 5;
10789 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
10790 avr_mem_clobber()));
10791 cycles
-= cycles_used
;
10794 if (IN_RANGE (cycles
, 6, 767))
10796 loop_count
= cycles
/ 3;
10797 if (loop_count
> 255)
10799 cycles_used
= loop_count
* 3;
10800 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
10801 avr_mem_clobber()));
10802 cycles
-= cycles_used
;
10805 while (cycles
>= 2)
10807 emit_insn (gen_nopv (GEN_INT(2)));
10813 emit_insn (gen_nopv (GEN_INT(1)));
10819 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
10822 avr_double_int_push_digit (double_int val
, int base
,
10823 unsigned HOST_WIDE_INT digit
)
10826 ? double_int_lshift (val
, 32, 64, false)
10827 : double_int_mul (val
, uhwi_to_double_int (base
));
10829 return double_int_add (val
, uhwi_to_double_int (digit
));
10833 /* Compute the image of x under f, i.e. perform x --> f(x) */
10836 avr_map (double_int f
, int x
)
10838 return 0xf & double_int_to_uhwi (double_int_rshift (f
, 4*x
, 64, false));
10842 /* Return some metrics of map A. */
10846 /* Number of fixed points in { 0 ... 7 } */
10849 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10852 /* Mask representing the fixed points in { 0 ... 7 } */
10853 MAP_MASK_FIXED_0_7
,
10855 /* Size of the preimage of { 0 ... 7 } */
10858 /* Mask that represents the preimage of { f } */
10859 MAP_MASK_PREIMAGE_F
10863 avr_map_metric (double_int a
, int mode
)
10865 unsigned i
, metric
= 0;
10867 for (i
= 0; i
< 8; i
++)
10869 unsigned ai
= avr_map (a
, i
);
10871 if (mode
== MAP_FIXED_0_7
)
10873 else if (mode
== MAP_NONFIXED_0_7
)
10874 metric
+= ai
< 8 && ai
!= i
;
10875 else if (mode
== MAP_MASK_FIXED_0_7
)
10876 metric
|= ((unsigned) (ai
== i
)) << i
;
10877 else if (mode
== MAP_PREIMAGE_0_7
)
10879 else if (mode
== MAP_MASK_PREIMAGE_F
)
10880 metric
|= ((unsigned) (ai
== 0xf)) << i
;
10889 /* Return true if IVAL has a 0xf in its hexadecimal representation
10890 and false, otherwise. Only nibbles 0..7 are taken into account.
10891 Used as constraint helper for C0f and Cxf. */
10894 avr_has_nibble_0xf (rtx ival
)
10896 return 0 != avr_map_metric (rtx_to_double_int (ival
), MAP_MASK_PREIMAGE_F
);
10900 /* We have a set of bits that are mapped by a function F.
10901 Try to decompose F by means of a second function G so that
10907 cost (F o G^-1) + cost (G) < cost (F)
10909 Example: Suppose builtin insert_bits supplies us with the map
10910 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10911 nibble of the result, we can just as well rotate the bits before inserting
10912 them and use the map 0x7654ffff which is cheaper than the original map.
10913 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
10917 /* tree code of binary function G */
10918 enum tree_code code
;
10920 /* The constant second argument of G */
10923 /* G^-1, the inverse of G (*, arg) */
10926 /* The cost of appplying G (*, arg) */
10929 /* The composition F o G^-1 (*, arg) for some function F */
10932 /* For debug purpose only */
10936 static const avr_map_op_t avr_map_op
[] =
10938 { LROTATE_EXPR
, 0, 0x76543210, 0, { 0, 0 }, "id" },
10939 { LROTATE_EXPR
, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10940 { LROTATE_EXPR
, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10941 { LROTATE_EXPR
, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10942 { LROTATE_EXPR
, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10943 { LROTATE_EXPR
, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10944 { LROTATE_EXPR
, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10945 { LROTATE_EXPR
, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10946 { RSHIFT_EXPR
, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10947 { RSHIFT_EXPR
, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10948 { RSHIFT_EXPR
, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10949 { RSHIFT_EXPR
, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10950 { RSHIFT_EXPR
, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10951 { LSHIFT_EXPR
, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10952 { LSHIFT_EXPR
, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10956 /* Try to decompose F as F = (F o G^-1) o G as described above.
10957 The result is a struct representing F o G^-1 and G.
10958 If result.cost < 0 then such a decomposition does not exist. */
10960 static avr_map_op_t
10961 avr_map_decompose (double_int f
, const avr_map_op_t
*g
, bool val_const_p
)
10964 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
10965 avr_map_op_t f_ginv
= *g
;
10966 double_int ginv
= uhwi_to_double_int (g
->ginv
);
10970 /* Step 1: Computing F o G^-1 */
10972 for (i
= 7; i
>= 0; i
--)
10974 int x
= avr_map (f
, i
);
10978 x
= avr_map (ginv
, x
);
10980 /* The bit is no element of the image of G: no avail (cost = -1) */
10986 f_ginv
.map
= avr_double_int_push_digit (f_ginv
.map
, 16, x
);
10989 /* Step 2: Compute the cost of the operations.
10990 The overall cost of doing an operation prior to the insertion is
10991 the cost of the insertion plus the cost of the operation. */
10993 /* Step 2a: Compute cost of F o G^-1 */
10995 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
10997 /* The mapping consists only of fixed points and can be folded
10998 to AND/OR logic in the remainder. Reasonable cost is 3. */
11000 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
11006 /* Get the cost of the insn by calling the output worker with some
11007 fake values. Mimic effect of reloading xop[3]: Unused operands
11008 are mapped to 0 and used operands are reloaded to xop[0]. */
11010 xop
[0] = all_regs_rtx
[24];
11011 xop
[1] = gen_int_mode (double_int_to_uhwi (f_ginv
.map
), SImode
);
11012 xop
[2] = all_regs_rtx
[25];
11013 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
11015 avr_out_insert_bits (xop
, &f_ginv
.cost
);
11017 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
11020 /* Step 2b: Add cost of G */
11022 f_ginv
.cost
+= g
->cost
;
11024 if (avr_log
.builtin
)
11025 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
11031 /* Insert bits from XOP[1] into XOP[0] according to MAP.
11032 XOP[0] and XOP[1] don't overlap.
11033 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11034 If FIXP_P = false: Just move the bit if its position in the destination
11035 is different to its source position. */
11038 avr_move_bits (rtx
*xop
, double_int map
, bool fixp_p
, int *plen
)
11042 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11043 int t_bit_src
= -1;
11045 /* We order the operations according to the requested source bit b. */
11047 for (b
= 0; b
< 8; b
++)
11048 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
11050 int bit_src
= avr_map (map
, bit_dest
);
11054 /* Same position: No need to copy as requested by FIXP_P. */
11055 || (bit_dest
== bit_src
&& !fixp_p
))
11058 if (t_bit_src
!= bit_src
)
11060 /* Source bit is not yet in T: Store it to T. */
11062 t_bit_src
= bit_src
;
11064 xop
[3] = GEN_INT (bit_src
);
11065 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
11068 /* Load destination bit with T. */
11070 xop
[3] = GEN_INT (bit_dest
);
11071 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
11076 /* PLEN == 0: Print assembler code for `insert_bits'.
11077 PLEN != 0: Compute code length in bytes.
11080 OP[1]: The mapping composed of nibbles. If nibble no. N is
11081 0: Bit N of result is copied from bit OP[2].0
11083 7: Bit N of result is copied from bit OP[2].7
11084 0xf: Bit N of result is copied from bit OP[3].N
11085 OP[2]: Bits to be inserted
11086 OP[3]: Target value */
11089 avr_out_insert_bits (rtx
*op
, int *plen
)
11091 double_int map
= rtx_to_double_int (op
[1]);
11092 unsigned mask_fixed
;
11093 bool fixp_p
= true;
11100 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
11104 else if (flag_print_asm_name
)
11105 fprintf (asm_out_file
,
11106 ASM_COMMENT_START
"map = 0x%08" HOST_LONG_FORMAT
"x\n",
11107 double_int_to_uhwi (map
) & GET_MODE_MASK (SImode
));
11109 /* If MAP has fixed points it might be better to initialize the result
11110 with the bits to be inserted instead of moving all bits by hand. */
11112 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
11114 if (REGNO (xop
[0]) == REGNO (xop
[1]))
11116 /* Avoid early-clobber conflicts */
11118 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
11119 xop
[1] = tmp_reg_rtx
;
11123 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
11125 /* XOP[2] is used and reloaded to XOP[0] already */
11127 int n_fix
= 0, n_nofix
= 0;
11129 gcc_assert (REG_P (xop
[2]));
11131 /* Get the code size of the bit insertions; once with all bits
11132 moved and once with fixed points omitted. */
11134 avr_move_bits (xop
, map
, true, &n_fix
);
11135 avr_move_bits (xop
, map
, false, &n_nofix
);
11137 if (fixp_p
&& n_fix
- n_nofix
> 3)
11139 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
11141 avr_asm_len ("eor %0,%1" CR_TAB
11142 "andi %0,%3" CR_TAB
11143 "eor %0,%1", xop
, plen
, 3);
11149 /* XOP[2] is unused */
11151 if (fixp_p
&& mask_fixed
)
11153 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
11158 /* Move/insert remaining bits. */
11160 avr_move_bits (xop
, map
, fixp_p
, plen
);
11166 /* IDs for all the AVR builtins. */
11168 enum avr_builtin_id
11171 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
11172 #include "builtins.def"
11178 struct GTY(()) avr_builtin_description
11180 enum insn_code icode
;
11187 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11188 that a built-in's ID can be used to access the built-in by means of
11191 static GTY(()) struct avr_builtin_description
11192 avr_bdesc
[AVR_BUILTIN_COUNT
] =
11195 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE) \
11196 { (enum insn_code) ICODE, NAME, N_ARGS, NULL_TREE },
11197 #include "builtins.def"
11202 /* Implement `TARGET_BUILTIN_DECL'. */
11205 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
11207 if (id
< AVR_BUILTIN_COUNT
)
11208 return avr_bdesc
[id
].fndecl
;
11210 return error_mark_node
;
11215 avr_init_builtin_int24 (void)
11217 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
11218 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
11220 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
11221 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
11225 /* Implement `TARGET_INIT_BUILTINS' */
11226 /* Set up all builtin functions for this target. */
11229 avr_init_builtins (void)
11231 tree void_ftype_void
11232 = build_function_type_list (void_type_node
, NULL_TREE
);
11233 tree uchar_ftype_uchar
11234 = build_function_type_list (unsigned_char_type_node
,
11235 unsigned_char_type_node
,
11237 tree uint_ftype_uchar_uchar
11238 = build_function_type_list (unsigned_type_node
,
11239 unsigned_char_type_node
,
11240 unsigned_char_type_node
,
11242 tree int_ftype_char_char
11243 = build_function_type_list (integer_type_node
,
11247 tree int_ftype_char_uchar
11248 = build_function_type_list (integer_type_node
,
11250 unsigned_char_type_node
,
11252 tree void_ftype_ulong
11253 = build_function_type_list (void_type_node
,
11254 long_unsigned_type_node
,
11257 tree uchar_ftype_ulong_uchar_uchar
11258 = build_function_type_list (unsigned_char_type_node
,
11259 long_unsigned_type_node
,
11260 unsigned_char_type_node
,
11261 unsigned_char_type_node
,
11264 tree const_memx_void_node
11265 = build_qualified_type (void_type_node
,
11267 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
11269 tree const_memx_ptr_type_node
11270 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
11272 tree char_ftype_const_memx_ptr
11273 = build_function_type_list (char_type_node
,
11274 const_memx_ptr_type_node
,
11277 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) \
11278 gcc_assert (ID < AVR_BUILTIN_COUNT); \
11279 avr_bdesc[ID].fndecl \
11280 = add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
11281 #include "builtins.def"
11284 avr_init_builtin_int24 ();
11288 /* Subroutine of avr_expand_builtin to expand vanilla builtins
11289 with non-void result and 1 ... 3 arguments. */
11292 avr_default_expand_builtin (enum insn_code icode
, tree exp
, rtx target
)
11295 int n
, n_args
= call_expr_nargs (exp
);
11296 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
11298 gcc_assert (n_args
>= 1 && n_args
<= 3);
11300 if (target
== NULL_RTX
11301 || GET_MODE (target
) != tmode
11302 || !insn_data
[icode
].operand
[0].predicate (target
, tmode
))
11304 target
= gen_reg_rtx (tmode
);
11307 for (n
= 0; n
< n_args
; n
++)
11309 tree arg
= CALL_EXPR_ARG (exp
, n
);
11310 rtx op
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
11311 enum machine_mode opmode
= GET_MODE (op
);
11312 enum machine_mode mode
= insn_data
[icode
].operand
[n
+1].mode
;
11314 if ((opmode
== SImode
|| opmode
== VOIDmode
) && mode
== HImode
)
11317 op
= gen_lowpart (HImode
, op
);
11320 /* In case the insn wants input operands in modes different from
11321 the result, abort. */
11323 gcc_assert (opmode
== mode
|| opmode
== VOIDmode
);
11325 if (!insn_data
[icode
].operand
[n
+1].predicate (op
, mode
))
11326 op
= copy_to_mode_reg (mode
, op
);
11333 case 1: pat
= GEN_FCN (icode
) (target
, xop
[0]); break;
11334 case 2: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1]); break;
11335 case 3: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1], xop
[2]); break;
11341 if (pat
== NULL_RTX
)
11350 /* Implement `TARGET_EXPAND_BUILTIN'. */
11351 /* Expand an expression EXP that calls a built-in function,
11352 with result going to TARGET if that's convenient
11353 (and in mode MODE if that's convenient).
11354 SUBTARGET may be used as the target for computing one of EXP's operands.
11355 IGNORE is nonzero if the value is to be ignored. */
11358 avr_expand_builtin (tree exp
, rtx target
,
11359 rtx subtarget ATTRIBUTE_UNUSED
,
11360 enum machine_mode mode ATTRIBUTE_UNUSED
,
11361 int ignore ATTRIBUTE_UNUSED
)
11363 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
11364 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
11365 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
11366 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
11370 gcc_assert (id
< AVR_BUILTIN_COUNT
);
11374 case AVR_BUILTIN_NOP
:
11375 emit_insn (gen_nopv (GEN_INT(1)));
11378 case AVR_BUILTIN_DELAY_CYCLES
:
11380 arg0
= CALL_EXPR_ARG (exp
, 0);
11381 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
11383 if (!CONST_INT_P (op0
))
11384 error ("%s expects a compile time integer constant", bname
);
11386 avr_expand_delay_cycles (op0
);
11391 case AVR_BUILTIN_INSERT_BITS
:
11393 arg0
= CALL_EXPR_ARG (exp
, 0);
11394 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
11396 if (!CONST_INT_P (op0
))
11398 error ("%s expects a compile time long integer constant"
11399 " as first argument", bname
);
11405 /* No special treatment needed: vanilla expand. */
11407 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
11409 if (d
->n_args
== 0)
11411 emit_insn ((GEN_FCN (d
->icode
)) (target
));
11415 return avr_default_expand_builtin (d
->icode
, exp
, target
);
11419 /* Implement `TARGET_FOLD_BUILTIN'. */
11422 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
11423 bool ignore ATTRIBUTE_UNUSED
)
11425 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
11426 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
11436 case AVR_BUILTIN_SWAP
:
11438 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
11439 build_int_cst (val_type
, 4));
11442 case AVR_BUILTIN_INSERT_BITS
:
11444 tree tbits
= arg
[1];
11445 tree tval
= arg
[2];
11447 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
11449 bool changed
= false;
11451 avr_map_op_t best_g
;
11453 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
11455 /* No constant as first argument: Don't fold this and run into
11456 error in avr_expand_builtin. */
11461 map
= tree_to_double_int (arg
[0]);
11462 tmap
= double_int_to_tree (map_type
, map
);
11464 if (TREE_CODE (tval
) != INTEGER_CST
11465 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
11467 /* There are no F in the map, i.e. 3rd operand is unused.
11468 Replace that argument with some constant to render
11469 respective input unused. */
11471 tval
= build_int_cst (val_type
, 0);
11475 if (TREE_CODE (tbits
) != INTEGER_CST
11476 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
11478 /* Similar for the bits to be inserted. If they are unused,
11479 we can just as well pass 0. */
11481 tbits
= build_int_cst (val_type
, 0);
11484 if (TREE_CODE (tbits
) == INTEGER_CST
)
11486 /* Inserting bits known at compile time is easy and can be
11487 performed by AND and OR with appropriate masks. */
11489 int bits
= TREE_INT_CST_LOW (tbits
);
11490 int mask_ior
= 0, mask_and
= 0xff;
11492 for (i
= 0; i
< 8; i
++)
11494 int mi
= avr_map (map
, i
);
11498 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
11499 else mask_and
&= ~(1 << i
);
11503 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
11504 build_int_cst (val_type
, mask_ior
));
11505 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
11506 build_int_cst (val_type
, mask_and
));
11510 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
11512 /* If bits don't change their position we can use vanilla logic
11513 to merge the two arguments. */
11515 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
11517 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
11518 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
11520 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
11521 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
11522 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
11525 /* Try to decomposing map to reduce overall cost. */
11527 if (avr_log
.builtin
)
11528 avr_edump ("\n%?: %X\n%?: ROL cost: ", map
);
11530 best_g
= avr_map_op
[0];
11531 best_g
.cost
= 1000;
11533 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
11536 = avr_map_decompose (map
, avr_map_op
+ i
,
11537 TREE_CODE (tval
) == INTEGER_CST
);
11539 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
11543 if (avr_log
.builtin
)
11546 if (best_g
.arg
== 0)
11547 /* No optimization found */
11550 /* Apply operation G to the 2nd argument. */
11552 if (avr_log
.builtin
)
11553 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
11554 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
11556 /* Do right-shifts arithmetically: They copy the MSB instead of
11557 shifting in a non-usable value (0) as with logic right-shift. */
11559 tbits
= fold_convert (signed_char_type_node
, tbits
);
11560 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
11561 build_int_cst (val_type
, best_g
.arg
));
11562 tbits
= fold_convert (val_type
, tbits
);
11564 /* Use map o G^-1 instead of original map to undo the effect of G. */
11566 tmap
= double_int_to_tree (map_type
, best_g
.map
);
11568 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
11569 } /* AVR_BUILTIN_INSERT_BITS */
11577 /* Initialize the GCC target structure. */
11579 #undef TARGET_ASM_ALIGNED_HI_OP
11580 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
11581 #undef TARGET_ASM_ALIGNED_SI_OP
11582 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
11583 #undef TARGET_ASM_UNALIGNED_HI_OP
11584 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
11585 #undef TARGET_ASM_UNALIGNED_SI_OP
11586 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
11587 #undef TARGET_ASM_INTEGER
11588 #define TARGET_ASM_INTEGER avr_assemble_integer
11589 #undef TARGET_ASM_FILE_START
11590 #define TARGET_ASM_FILE_START avr_file_start
11591 #undef TARGET_ASM_FILE_END
11592 #define TARGET_ASM_FILE_END avr_file_end
11594 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
11595 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
11596 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
11597 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
11599 #undef TARGET_FUNCTION_VALUE
11600 #define TARGET_FUNCTION_VALUE avr_function_value
11601 #undef TARGET_LIBCALL_VALUE
11602 #define TARGET_LIBCALL_VALUE avr_libcall_value
11603 #undef TARGET_FUNCTION_VALUE_REGNO_P
11604 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
11606 #undef TARGET_ATTRIBUTE_TABLE
11607 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
11608 #undef TARGET_INSERT_ATTRIBUTES
11609 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
11610 #undef TARGET_SECTION_TYPE_FLAGS
11611 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
11613 #undef TARGET_ASM_NAMED_SECTION
11614 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
11615 #undef TARGET_ASM_INIT_SECTIONS
11616 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
11617 #undef TARGET_ENCODE_SECTION_INFO
11618 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
11619 #undef TARGET_ASM_SELECT_SECTION
11620 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
11622 #undef TARGET_REGISTER_MOVE_COST
11623 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
11624 #undef TARGET_MEMORY_MOVE_COST
11625 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
11626 #undef TARGET_RTX_COSTS
11627 #define TARGET_RTX_COSTS avr_rtx_costs
11628 #undef TARGET_ADDRESS_COST
11629 #define TARGET_ADDRESS_COST avr_address_cost
11630 #undef TARGET_MACHINE_DEPENDENT_REORG
11631 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
11632 #undef TARGET_FUNCTION_ARG
11633 #define TARGET_FUNCTION_ARG avr_function_arg
11634 #undef TARGET_FUNCTION_ARG_ADVANCE
11635 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
11637 #undef TARGET_SET_CURRENT_FUNCTION
11638 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
11640 #undef TARGET_RETURN_IN_MEMORY
11641 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
11643 #undef TARGET_STRICT_ARGUMENT_NAMING
11644 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
11646 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
11647 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
11649 #undef TARGET_HARD_REGNO_SCRATCH_OK
11650 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
11651 #undef TARGET_CASE_VALUES_THRESHOLD
11652 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
11654 #undef TARGET_FRAME_POINTER_REQUIRED
11655 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
11656 #undef TARGET_CAN_ELIMINATE
11657 #define TARGET_CAN_ELIMINATE avr_can_eliminate
11659 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
11660 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
11662 #undef TARGET_WARN_FUNC_RETURN
11663 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
11665 #undef TARGET_CLASS_LIKELY_SPILLED_P
11666 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
11668 #undef TARGET_OPTION_OVERRIDE
11669 #define TARGET_OPTION_OVERRIDE avr_option_override
11671 #undef TARGET_CANNOT_MODIFY_JUMPS_P
11672 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
11674 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
11675 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
11677 #undef TARGET_INIT_BUILTINS
11678 #define TARGET_INIT_BUILTINS avr_init_builtins
11680 #undef TARGET_BUILTIN_DECL
11681 #define TARGET_BUILTIN_DECL avr_builtin_decl
11683 #undef TARGET_EXPAND_BUILTIN
11684 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
11686 #undef TARGET_FOLD_BUILTIN
11687 #define TARGET_FOLD_BUILTIN avr_fold_builtin
11689 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
11690 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
11692 #undef TARGET_SCALAR_MODE_SUPPORTED_P
11693 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
11695 #undef TARGET_BUILD_BUILTIN_VA_LIST
11696 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
11698 #undef TARGET_FIXED_POINT_SUPPORTED_P
11699 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
11701 #undef TARGET_ADDR_SPACE_SUBSET_P
11702 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
11704 #undef TARGET_ADDR_SPACE_CONVERT
11705 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
11707 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
11708 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
11710 #undef TARGET_ADDR_SPACE_POINTER_MODE
11711 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
11713 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
11714 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
11715 avr_addr_space_legitimate_address_p
11717 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
11718 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
11720 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
11721 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
11723 #undef TARGET_PRINT_OPERAND
11724 #define TARGET_PRINT_OPERAND avr_print_operand
11725 #undef TARGET_PRINT_OPERAND_ADDRESS
11726 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
11727 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
11728 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
11730 struct gcc_target targetm
= TARGET_INITIALIZER
;
11733 #include "gt-avr.h"