1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise. */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in Flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags. */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS. */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)

/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Wrapped in do/while(0) so it behaves as a single statement; use the
   macro parameter SYM consistently (the body previously referenced a
   lowercase `sym', which only worked when the argument happened to be
   a variable of that name).  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace
[] =
85 { ADDR_SPACE_RAM
, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0 },
/* Map 64-k Flash segment to section prefix.  */

static const char* const progmem_section_prefix[6] =
  {
    ".progmem.data",
    ".progmem1.data",
    ".progmem2.data",
    ".progmem3.data",
    ".progmem4.data",
    ".progmem5.data"
  };
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

static avr_addr_t avr_addr;
132 /* Prototypes for local helper functions. */
134 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
135 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
136 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
137 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
138 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
139 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
141 static int get_sequence_length (rtx insns
);
142 static int sequent_regs_live (void);
143 static const char *ptrreg_to_str (int);
144 static const char *cond_string (enum rtx_code
);
145 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
146 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
148 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
149 static struct machine_function
* avr_init_machine_status (void);
152 /* Prototypes for hook implementors if needed before their implementation. */
154 static bool avr_rtx_costs (rtx
, int, int, int, int *, bool);
157 /* Allocate registers from r25 to r8 for parameters for function calls. */
158 #define FIRST_CUM_REG 26
160 /* Implicit target register of LPM instruction (R0) */
161 extern GTY(()) rtx lpm_reg_rtx
;
164 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
165 extern GTY(()) rtx lpm_addr_reg_rtx
;
166 rtx lpm_addr_reg_rtx
;
168 /* Temporary register RTX (reg:QI TMP_REGNO) */
169 extern GTY(()) rtx tmp_reg_rtx
;
172 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
173 extern GTY(()) rtx zero_reg_rtx
;
176 /* RTXs for all general purpose registers as QImode */
177 extern GTY(()) rtx all_regs_rtx
[32];
178 rtx all_regs_rtx
[32];
180 /* SREG, the processor status */
181 extern GTY(()) rtx sreg_rtx
;
184 /* RAMP* special function registers */
185 extern GTY(()) rtx rampd_rtx
;
186 extern GTY(()) rtx rampx_rtx
;
187 extern GTY(()) rtx rampy_rtx
;
188 extern GTY(()) rtx rampz_rtx
;
194 /* RTX containing the strings "" and "e", respectively */
195 static GTY(()) rtx xstring_empty
;
196 static GTY(()) rtx xstring_e
;
198 /* Preprocessor macros to define depending on MCU type. */
199 const char *avr_extra_arch_macro
;
201 /* Current architecture. */
202 const struct base_arch_s
*avr_current_arch
;
204 /* Current device. */
205 const struct mcu_type_s
*avr_current_device
;
207 /* Section to put switch tables in. */
208 static GTY(()) section
*progmem_swtable_section
;
210 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
211 or to address space __flash*. */
212 static GTY(()) section
*progmem_section
[6];
214 /* Condition for insns/expanders from avr-dimode.md. */
215 bool avr_have_dimode
= true;
217 /* To track if code will use .bss and/or .data. */
218 bool avr_need_clear_bss_p
= false;
219 bool avr_need_copy_data_p
= false;
/* Custom function to count number of set bits in VAL.  */

int
avr_popcount (unsigned int val)
{
  int pop = 0;

  /* Kernighan's trick: clear the lowest set bit each iteration.  */
  while (val)
    {
      val &= val-1;
      pop++;
    }

  return pop;
}
240 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
241 Return true if the least significant N_BYTES bytes of XVAL all have a
242 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
243 of integers which contains an integer N iff bit N of POP_MASK is set. */
246 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
250 enum machine_mode mode
= GET_MODE (xval
);
252 if (VOIDmode
== mode
)
255 for (i
= 0; i
< n_bytes
; i
++)
257 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
258 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
260 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
268 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
269 the bit representation of X by "casting" it to CONST_INT. */
272 avr_to_int_mode (rtx x
)
274 enum machine_mode mode
= GET_MODE (x
);
276 return VOIDmode
== mode
278 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
282 /* Implement `TARGET_OPTION_OVERRIDE'. */
285 avr_option_override (void)
287 flag_delete_null_pointer_checks
= 0;
289 /* caller-save.c looks for call-clobbered hard registers that are assigned
290 to pseudos that cross calls and tries so save-restore them around calls
291 in order to reduce the number of stack slots needed.
293 This might leads to situations where reload is no more able to cope
294 with the challenge of AVR's very few address registers and fails to
295 perform the requested spills. */
298 flag_caller_saves
= 0;
300 /* Unwind tables currently require a frame pointer for correctness,
301 see toplev.c:process_options(). */
303 if ((flag_unwind_tables
304 || flag_non_call_exceptions
305 || flag_asynchronous_unwind_tables
)
306 && !ACCUMULATE_OUTGOING_ARGS
)
308 flag_omit_frame_pointer
= 0;
311 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
312 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
313 avr_extra_arch_macro
= avr_current_device
->macro
;
315 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
317 /* SREG: Status Register containing flags like I (global IRQ) */
318 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
320 /* RAMPZ: Address' high part when loading via ELPM */
321 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
323 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
324 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
325 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
326 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
328 /* SP: Stack Pointer (SP_H:SP_L) */
329 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
330 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
332 init_machine_status
= avr_init_machine_status
;
334 avr_log_set_avr_log();
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
346 /* Implement `INIT_EXPANDERS'. */
347 /* The function works like a singleton. */
350 avr_init_expanders (void)
354 for (regno
= 0; regno
< 32; regno
++)
355 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
357 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
358 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
359 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
361 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
363 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
364 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
365 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
366 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
367 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
369 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
370 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
374 /* Return register class for register R. */
377 avr_regno_reg_class (int r
)
379 static const enum reg_class reg_class_tab
[] =
383 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
384 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
385 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
386 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
388 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
389 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
391 ADDW_REGS
, ADDW_REGS
,
393 POINTER_X_REGS
, POINTER_X_REGS
,
395 POINTER_Y_REGS
, POINTER_Y_REGS
,
397 POINTER_Z_REGS
, POINTER_Z_REGS
,
403 return reg_class_tab
[r
];
409 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
412 avr_scalar_mode_supported_p (enum machine_mode mode
)
414 if (ALL_FIXED_POINT_MODE_P (mode
))
420 return default_scalar_mode_supported_p (mode
);
424 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
427 avr_decl_flash_p (tree decl
)
429 if (TREE_CODE (decl
) != VAR_DECL
430 || TREE_TYPE (decl
) == error_mark_node
)
435 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
439 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
440 address space and FALSE, otherwise. */
443 avr_decl_memx_p (tree decl
)
445 if (TREE_CODE (decl
) != VAR_DECL
446 || TREE_TYPE (decl
) == error_mark_node
)
451 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
455 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
458 avr_mem_flash_p (rtx x
)
461 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
465 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
466 address space and FALSE, otherwise. */
469 avr_mem_memx_p (rtx x
)
472 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
476 /* A helper for the subsequent function attribute used to dig for
477 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
480 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
482 if (FUNCTION_DECL
== TREE_CODE (func
))
484 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
489 func
= TREE_TYPE (func
);
492 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
493 || TREE_CODE (func
) == METHOD_TYPE
);
495 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
498 /* Return nonzero if FUNC is a naked function. */
501 avr_naked_function_p (tree func
)
503 return avr_lookup_function_attribute1 (func
, "naked");
506 /* Return nonzero if FUNC is an interrupt function as specified
507 by the "interrupt" attribute. */
510 avr_interrupt_function_p (tree func
)
512 return avr_lookup_function_attribute1 (func
, "interrupt");
515 /* Return nonzero if FUNC is a signal function as specified
516 by the "signal" attribute. */
519 avr_signal_function_p (tree func
)
521 return avr_lookup_function_attribute1 (func
, "signal");
524 /* Return nonzero if FUNC is an OS_task function. */
527 avr_OS_task_function_p (tree func
)
529 return avr_lookup_function_attribute1 (func
, "OS_task");
532 /* Return nonzero if FUNC is an OS_main function. */
535 avr_OS_main_function_p (tree func
)
537 return avr_lookup_function_attribute1 (func
, "OS_main");
541 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
542 /* Sanity cheching for above function attributes. */
545 avr_set_current_function (tree decl
)
550 if (decl
== NULL_TREE
551 || current_function_decl
== NULL_TREE
552 || current_function_decl
== error_mark_node
554 || cfun
->machine
->attributes_checked_p
)
557 loc
= DECL_SOURCE_LOCATION (decl
);
559 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
560 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
561 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
562 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
563 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
565 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
567 /* Too much attributes make no sense as they request conflicting features. */
569 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
570 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
571 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
572 " exclusive", "OS_task", "OS_main", isr
);
574 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
576 if (cfun
->machine
->is_naked
577 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
578 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
579 " no effect on %qs function", "OS_task", "OS_main", "naked");
581 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
583 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
584 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
585 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
587 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
588 using this when it switched from SIGNAL and INTERRUPT to ISR. */
590 if (cfun
->machine
->is_interrupt
)
591 cfun
->machine
->is_signal
= 0;
593 /* Interrupt handlers must be void __vector (void) functions. */
595 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
596 error_at (loc
, "%qs function cannot have arguments", isr
);
598 if (TREE_CODE (ret
) != VOID_TYPE
)
599 error_at (loc
, "%qs function cannot return a value", isr
);
601 /* If the function has the 'signal' or 'interrupt' attribute, ensure
602 that the name of the function is "__vector_NN" so as to catch
603 when the user misspells the vector name. */
605 if (!STR_PREFIX_P (name
, "__vector"))
606 warning_at (loc
, 0, "%qs appears to be a misspelled %s handler",
610 /* Avoid the above diagnosis to be printed more than once. */
612 cfun
->machine
->attributes_checked_p
= 1;
616 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
619 avr_accumulate_outgoing_args (void)
622 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
624 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
625 what offset is correct. In some cases it is relative to
626 virtual_outgoing_args_rtx and in others it is relative to
627 virtual_stack_vars_rtx. For example code see
628 gcc.c-torture/execute/built-in-setjmp.c
629 gcc.c-torture/execute/builtins/sprintf-chk.c */
631 return (TARGET_ACCUMULATE_OUTGOING_ARGS
632 && !(cfun
->calls_setjmp
633 || cfun
->has_nonlocal_label
));
637 /* Report contribution of accumulated outgoing arguments to stack size. */
640 avr_outgoing_args_size (void)
642 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
657 /* Return the number of hard registers to push/pop in the prologue/epilogue
658 of the current function, and optionally store these registers in SET. */
661 avr_regs_to_save (HARD_REG_SET
*set
)
664 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
667 CLEAR_HARD_REG_SET (*set
);
670 /* No need to save any registers if the function never returns or
671 has the "OS_task" or "OS_main" attribute. */
672 if (TREE_THIS_VOLATILE (current_function_decl
)
673 || cfun
->machine
->is_OS_task
674 || cfun
->machine
->is_OS_main
)
677 for (reg
= 0; reg
< 32; reg
++)
679 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
680 any global register variables. */
684 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
685 || (df_regs_ever_live_p (reg
)
686 && (int_or_sig_p
|| !call_used_regs
[reg
])
687 /* Don't record frame pointer registers here. They are treated
688 indivitually in prologue. */
689 && !(frame_pointer_needed
690 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
693 SET_HARD_REG_BIT (*set
, reg
);
701 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
704 avr_allocate_stack_slots_for_args (void)
706 return !cfun
->machine
->is_naked
;
710 /* Return true if register FROM can be eliminated via register TO. */
713 avr_can_eliminate (const int from
, const int to
)
715 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
716 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
717 || ((from
== FRAME_POINTER_REGNUM
718 || from
== FRAME_POINTER_REGNUM
+ 1)
719 && !frame_pointer_needed
));
723 /* Implement TARGET_WARN_FUNC_RETURN. */
726 avr_warn_func_return (tree decl
)
728 /* Naked functions are implemented entirely in assembly, including the
729 return sequence, so suppress warnings about this. */
730 return !avr_naked_function_p (decl
);
733 /* Compute offset between arg_pointer and frame_pointer. */
736 avr_initial_elimination_offset (int from
, int to
)
738 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
742 int offset
= frame_pointer_needed
? 2 : 0;
743 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
745 offset
+= avr_regs_to_save (NULL
);
746 return (get_frame_size () + avr_outgoing_args_size()
747 + avr_pc_size
+ 1 + offset
);
752 /* Helper for the function below. */
755 avr_adjust_type_node (tree
*node
, enum machine_mode mode
, int sat_p
)
757 *node
= make_node (FIXED_POINT_TYPE
);
758 TYPE_SATURATING (*node
) = sat_p
;
759 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
760 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
761 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
762 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
763 TYPE_ALIGN (*node
) = 8;
764 SET_TYPE_MODE (*node
, mode
);
770 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
773 avr_build_builtin_va_list (void)
775 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
776 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
777 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
778 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
779 to the long long accum modes instead of the desired [U]TAmode.
781 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
782 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
783 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
784 libgcc to detect IBIT and FBIT. */
786 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
787 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
788 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
789 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
791 unsigned_long_long_accum_type_node
= uta_type_node
;
792 long_long_accum_type_node
= ta_type_node
;
793 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
794 sat_long_long_accum_type_node
= sat_ta_type_node
;
796 /* Dispatch to the default handler. */
798 return std_build_builtin_va_list ();
802 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
803 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
804 frame pointer by +STARTING_FRAME_OFFSET.
805 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
806 avoids creating add/sub of offset in nonlocal goto and setjmp. */
809 avr_builtin_setjmp_frame_value (void)
811 rtx xval
= gen_reg_rtx (Pmode
);
812 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
813 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
818 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
819 This is return address of function. */
821 avr_return_addr_rtx (int count
, rtx tem
)
825 /* Can only return this function's return address. Others not supported. */
831 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
832 warning (0, "'builtin_return_address' contains only 2 bytes of address");
835 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
837 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
838 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
839 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
843 /* Return 1 if the function epilogue is just a single "ret". */
846 avr_simple_epilogue (void)
848 return (! frame_pointer_needed
849 && get_frame_size () == 0
850 && avr_outgoing_args_size() == 0
851 && avr_regs_to_save (NULL
) == 0
852 && ! cfun
->machine
->is_interrupt
853 && ! cfun
->machine
->is_signal
854 && ! cfun
->machine
->is_naked
855 && ! TREE_THIS_VOLATILE (current_function_decl
));
858 /* This function checks sequence of live registers. */
861 sequent_regs_live (void)
867 for (reg
= 0; reg
< 18; ++reg
)
871 /* Don't recognize sequences that contain global register
880 if (!call_used_regs
[reg
])
882 if (df_regs_ever_live_p (reg
))
892 if (!frame_pointer_needed
)
894 if (df_regs_ever_live_p (REG_Y
))
902 if (df_regs_ever_live_p (REG_Y
+1))
915 return (cur_seq
== live_seq
) ? live_seq
: 0;
918 /* Obtain the length sequence of insns. */
921 get_sequence_length (rtx insns
)
926 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
927 length
+= get_attr_length (insn
);
932 /* Implement INCOMING_RETURN_ADDR_RTX. */
935 avr_incoming_return_addr_rtx (void)
937 /* The return address is at the top of the stack. Note that the push
938 was via post-decrement, which means the actual address is off by one. */
939 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
942 /* Helper for expand_prologue. Emit a push of a byte register. */
945 emit_push_byte (unsigned regno
, bool frame_related_p
)
949 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
950 mem
= gen_frame_mem (QImode
, mem
);
951 reg
= gen_rtx_REG (QImode
, regno
);
953 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
955 RTX_FRAME_RELATED_P (insn
) = 1;
957 cfun
->machine
->stack_usage
++;
961 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
962 SFR is a MEM representing the memory location of the SFR.
963 If CLR_P then clear the SFR after the push using zero_reg. */
966 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
970 gcc_assert (MEM_P (sfr
));
972 /* IN __tmp_reg__, IO(SFR) */
973 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
975 RTX_FRAME_RELATED_P (insn
) = 1;
977 /* PUSH __tmp_reg__ */
978 emit_push_byte (TMP_REGNO
, frame_related_p
);
982 /* OUT IO(SFR), __zero_reg__ */
983 insn
= emit_move_insn (sfr
, const0_rtx
);
985 RTX_FRAME_RELATED_P (insn
) = 1;
990 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
993 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
994 int live_seq
= sequent_regs_live ();
996 HOST_WIDE_INT size_max
997 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
999 bool minimize
= (TARGET_CALL_PROLOGUES
1003 && !cfun
->machine
->is_OS_task
1004 && !cfun
->machine
->is_OS_main
);
1007 && (frame_pointer_needed
1008 || avr_outgoing_args_size() > 8
1009 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1013 int first_reg
, reg
, offset
;
1015 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1016 gen_int_mode (size
, HImode
));
1018 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1019 gen_int_mode (live_seq
+size
, HImode
));
1020 insn
= emit_insn (pattern
);
1021 RTX_FRAME_RELATED_P (insn
) = 1;
1023 /* Describe the effect of the unspec_volatile call to prologue_saves.
1024 Note that this formulation assumes that add_reg_note pushes the
1025 notes to the front. Thus we build them in the reverse order of
1026 how we want dwarf2out to process them. */
1028 /* The function does always set frame_pointer_rtx, but whether that
1029 is going to be permanent in the function is frame_pointer_needed. */
1031 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1032 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
1034 : stack_pointer_rtx
),
1035 plus_constant (Pmode
, stack_pointer_rtx
,
1036 -(size
+ live_seq
))));
1038 /* Note that live_seq always contains r28+r29, but the other
1039 registers to be saved are all below 18. */
1041 first_reg
= 18 - (live_seq
- 2);
1043 for (reg
= 29, offset
= -live_seq
+ 1;
1045 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1049 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1051 r
= gen_rtx_REG (QImode
, reg
);
1052 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1055 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1057 else /* !minimize */
1061 for (reg
= 0; reg
< 32; ++reg
)
1062 if (TEST_HARD_REG_BIT (set
, reg
))
1063 emit_push_byte (reg
, true);
1065 if (frame_pointer_needed
1066 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1068 /* Push frame pointer. Always be consistent about the
1069 ordering of pushes -- epilogue_restores expects the
1070 register pair to be pushed low byte first. */
1072 emit_push_byte (REG_Y
, true);
1073 emit_push_byte (REG_Y
+ 1, true);
1076 if (frame_pointer_needed
1079 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1080 RTX_FRAME_RELATED_P (insn
) = 1;
1085 /* Creating a frame can be done by direct manipulation of the
1086 stack or via the frame pointer. These two methods are:
1093 the optimum method depends on function type, stack and
1094 frame size. To avoid a complex logic, both methods are
1095 tested and shortest is selected.
1097 There is also the case where SIZE != 0 and no frame pointer is
1098 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1099 In that case, insn (*) is not needed in that case.
1100 We use the X register as scratch. This is save because in X
1102 In an interrupt routine, the case of SIZE != 0 together with
1103 !frame_pointer_needed can only occur if the function is not a
1104 leaf function and thus X has already been saved. */
1107 HOST_WIDE_INT size_cfa
= size
;
1108 rtx fp_plus_insns
, fp
, my_fp
;
1110 gcc_assert (frame_pointer_needed
1114 fp
= my_fp
= (frame_pointer_needed
1116 : gen_rtx_REG (Pmode
, REG_X
));
1118 if (AVR_HAVE_8BIT_SP
)
1120 /* The high byte (r29) does not change:
1121 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1123 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1126 /* Cut down size and avoid size = 0 so that we don't run
1127 into ICE like PR52488 in the remainder. */
1129 if (size
> size_max
)
1131 /* Don't error so that insane code from newlib still compiles
1132 and does not break building newlib. As PR51345 is implemented
1133 now, there are multilib variants with -msp8.
1135 If user wants sanity checks he can use -Wstack-usage=
1138 For CFA we emit the original, non-saturated size so that
1139 the generic machinery is aware of the real stack usage and
1140 will print the above diagnostic as expected. */
1145 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1147 /************ Method 1: Adjust frame pointer ************/
1151 /* Normally, the dwarf2out frame-related-expr interpreter does
1152 not expect to have the CFA change once the frame pointer is
1153 set up. Thus, we avoid marking the move insn below and
1154 instead indicate that the entire operation is complete after
1155 the frame pointer subtraction is done. */
1157 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1158 if (frame_pointer_needed
)
1160 RTX_FRAME_RELATED_P (insn
) = 1;
1161 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1162 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1165 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1167 if (frame_pointer_needed
)
1169 RTX_FRAME_RELATED_P (insn
) = 1;
1170 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1171 gen_rtx_SET (VOIDmode
, fp
,
1172 plus_constant (Pmode
, fp
,
1176 /* Copy to stack pointer. Note that since we've already
1177 changed the CFA to the frame pointer this operation
1178 need not be annotated if frame pointer is needed.
1179 Always move through unspec, see PR50063.
1180 For meaning of irq_state see movhi_sp_r insn. */
1182 if (cfun
->machine
->is_interrupt
)
1185 if (TARGET_NO_INTERRUPTS
1186 || cfun
->machine
->is_signal
1187 || cfun
->machine
->is_OS_main
)
1190 if (AVR_HAVE_8BIT_SP
)
1193 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1194 fp
, GEN_INT (irq_state
)));
1195 if (!frame_pointer_needed
)
1197 RTX_FRAME_RELATED_P (insn
) = 1;
1198 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1199 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1200 plus_constant (Pmode
,
1205 fp_plus_insns
= get_insns ();
1208 /************ Method 2: Adjust Stack pointer ************/
1210 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1211 can only handle specific offsets. */
1213 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1219 insn
= emit_move_insn (stack_pointer_rtx
,
1220 plus_constant (Pmode
, stack_pointer_rtx
,
1222 RTX_FRAME_RELATED_P (insn
) = 1;
1223 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1224 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1225 plus_constant (Pmode
,
1228 if (frame_pointer_needed
)
1230 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1231 RTX_FRAME_RELATED_P (insn
) = 1;
1234 sp_plus_insns
= get_insns ();
1237 /************ Use shortest method ************/
1239 emit_insn (get_sequence_length (sp_plus_insns
)
1240 < get_sequence_length (fp_plus_insns
)
1246 emit_insn (fp_plus_insns
);
1249 cfun
->machine
->stack_usage
+= size_cfa
;
1250 } /* !minimize && size != 0 */
1255 /* Output function prologue. */
1258 expand_prologue (void)
1263 size
= get_frame_size() + avr_outgoing_args_size();
1265 cfun
->machine
->stack_usage
= 0;
1267 /* Prologue: naked. */
1268 if (cfun
->machine
->is_naked
)
1273 avr_regs_to_save (&set
);
1275 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1277 /* Enable interrupts. */
1278 if (cfun
->machine
->is_interrupt
)
1279 emit_insn (gen_enable_interrupt ());
1281 /* Push zero reg. */
1282 emit_push_byte (ZERO_REGNO
, true);
1285 emit_push_byte (TMP_REGNO
, true);
1288 /* ??? There's no dwarf2 column reserved for SREG. */
1289 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1291 /* Clear zero reg. */
1292 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1294 /* Prevent any attempt to delete the setting of ZERO_REG! */
1295 emit_use (zero_reg_rtx
);
1297 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1298 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1301 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1304 && TEST_HARD_REG_BIT (set
, REG_X
)
1305 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1307 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1311 && (frame_pointer_needed
1312 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1313 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1315 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1319 && TEST_HARD_REG_BIT (set
, REG_Z
)
1320 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1322 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1324 } /* is_interrupt is_signal */
1326 avr_prologue_setup_frame (size
, set
);
1328 if (flag_stack_usage_info
)
1329 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1332 /* Output summary at end of function prologue. */
1335 avr_asm_function_end_prologue (FILE *file
)
1337 if (cfun
->machine
->is_naked
)
1339 fputs ("/* prologue: naked */\n", file
);
1343 if (cfun
->machine
->is_interrupt
)
1345 fputs ("/* prologue: Interrupt */\n", file
);
1347 else if (cfun
->machine
->is_signal
)
1349 fputs ("/* prologue: Signal */\n", file
);
1352 fputs ("/* prologue: function */\n", file
);
1355 if (ACCUMULATE_OUTGOING_ARGS
)
1356 fprintf (file
, "/* outgoing args size = %d */\n",
1357 avr_outgoing_args_size());
1359 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1361 fprintf (file
, "/* stack size = %d */\n",
1362 cfun
->machine
->stack_usage
);
1363 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1364 usage for offset so that SP + .L__stack_offset = return address. */
1365 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1369 /* Implement EPILOGUE_USES. */
1372 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1374 if (reload_completed
1376 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1381 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1384 emit_pop_byte (unsigned regno
)
1388 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1389 mem
= gen_frame_mem (QImode
, mem
);
1390 reg
= gen_rtx_REG (QImode
, regno
);
1392 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1395 /* Output RTL epilogue. */
1398 expand_epilogue (bool sibcall_p
)
1405 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1407 size
= get_frame_size() + avr_outgoing_args_size();
1409 /* epilogue: naked */
1410 if (cfun
->machine
->is_naked
)
1412 gcc_assert (!sibcall_p
);
1414 emit_jump_insn (gen_return ());
1418 avr_regs_to_save (&set
);
1419 live_seq
= sequent_regs_live ();
1421 minimize
= (TARGET_CALL_PROLOGUES
1424 && !cfun
->machine
->is_OS_task
1425 && !cfun
->machine
->is_OS_main
);
1429 || frame_pointer_needed
1432 /* Get rid of frame. */
1434 if (!frame_pointer_needed
)
1436 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1441 emit_move_insn (frame_pointer_rtx
,
1442 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1445 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1451 /* Try two methods to adjust stack and select shortest. */
1456 HOST_WIDE_INT size_max
;
1458 gcc_assert (frame_pointer_needed
1462 fp
= my_fp
= (frame_pointer_needed
1464 : gen_rtx_REG (Pmode
, REG_X
));
1466 if (AVR_HAVE_8BIT_SP
)
1468 /* The high byte (r29) does not change:
1469 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1471 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1474 /* For rationale see comment in prologue generation. */
1476 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1477 if (size
> size_max
)
1479 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1481 /********** Method 1: Adjust fp register **********/
1485 if (!frame_pointer_needed
)
1486 emit_move_insn (fp
, stack_pointer_rtx
);
1488 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1490 /* Copy to stack pointer. */
1492 if (TARGET_NO_INTERRUPTS
)
1495 if (AVR_HAVE_8BIT_SP
)
1498 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1499 GEN_INT (irq_state
)));
1501 fp_plus_insns
= get_insns ();
1504 /********** Method 2: Adjust Stack pointer **********/
1506 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1512 emit_move_insn (stack_pointer_rtx
,
1513 plus_constant (Pmode
, stack_pointer_rtx
, size
));
1515 sp_plus_insns
= get_insns ();
1518 /************ Use shortest method ************/
1520 emit_insn (get_sequence_length (sp_plus_insns
)
1521 < get_sequence_length (fp_plus_insns
)
1526 emit_insn (fp_plus_insns
);
1529 if (frame_pointer_needed
1530 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1532 /* Restore previous frame_pointer. See expand_prologue for
1533 rationale for not using pophi. */
1535 emit_pop_byte (REG_Y
+ 1);
1536 emit_pop_byte (REG_Y
);
1539 /* Restore used registers. */
1541 for (reg
= 31; reg
>= 0; --reg
)
1542 if (TEST_HARD_REG_BIT (set
, reg
))
1543 emit_pop_byte (reg
);
1547 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1548 The conditions to restore them must be tha same as in prologue. */
1551 && TEST_HARD_REG_BIT (set
, REG_Z
)
1552 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1554 emit_pop_byte (TMP_REGNO
);
1555 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1559 && (frame_pointer_needed
1560 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1561 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1563 emit_pop_byte (TMP_REGNO
);
1564 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1568 && TEST_HARD_REG_BIT (set
, REG_X
)
1569 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1571 emit_pop_byte (TMP_REGNO
);
1572 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1577 emit_pop_byte (TMP_REGNO
);
1578 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1581 /* Restore SREG using tmp_reg as scratch. */
1583 emit_pop_byte (TMP_REGNO
);
1584 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1586 /* Restore tmp REG. */
1587 emit_pop_byte (TMP_REGNO
);
1589 /* Restore zero REG. */
1590 emit_pop_byte (ZERO_REGNO
);
1594 emit_jump_insn (gen_return ());
1597 /* Output summary messages at beginning of function epilogue. */
1600 avr_asm_function_begin_epilogue (FILE *file
)
1602 fprintf (file
, "/* epilogue start */\n");
1606 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1609 avr_cannot_modify_jumps_p (void)
1612 /* Naked Functions must not have any instructions after
1613 their epilogue, see PR42240 */
1615 if (reload_completed
1617 && cfun
->machine
->is_naked
)
1626 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1628 /* FIXME: PSImode addresses are not mode-dependent in themselves.
1629 This hook just serves to hack around PR rtl-optimization/52543 by
1630 claiming that PSImode addresses (which are used for the 24-bit
1631 address space __memx) were mode-dependent so that lower-subreg.s
1632 will skip these addresses. See also the similar FIXME comment along
1633 with mov<mode> expanders in avr.md. */
1636 avr_mode_dependent_address_p (const_rtx addr
, addr_space_t as ATTRIBUTE_UNUSED
)
1638 return GET_MODE (addr
) != Pmode
;
1642 /* Helper function for `avr_legitimate_address_p'. */
1645 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1646 RTX_CODE outer_code
, bool strict
)
1649 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1650 as
, outer_code
, UNKNOWN
)
1652 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1656 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1657 machine for a memory operand of mode MODE. */
1660 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1662 bool ok
= CONSTANT_ADDRESS_P (x
);
1664 switch (GET_CODE (x
))
1667 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1671 && GET_MODE_SIZE (mode
) > 4
1672 && REG_X
== REGNO (x
))
1680 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1681 GET_CODE (x
), strict
);
1686 rtx reg
= XEXP (x
, 0);
1687 rtx op1
= XEXP (x
, 1);
1690 && CONST_INT_P (op1
)
1691 && INTVAL (op1
) >= 0)
1693 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1698 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1701 if (reg
== frame_pointer_rtx
1702 || reg
== arg_pointer_rtx
)
1707 else if (frame_pointer_needed
1708 && reg
== frame_pointer_rtx
)
1720 if (avr_log
.legitimate_address_p
)
1722 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1723 "reload_completed=%d reload_in_progress=%d %s:",
1724 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1725 reg_renumber
? "(reg_renumber)" : "");
1727 if (GET_CODE (x
) == PLUS
1728 && REG_P (XEXP (x
, 0))
1729 && CONST_INT_P (XEXP (x
, 1))
1730 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1733 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1734 true_regnum (XEXP (x
, 0)));
1737 avr_edump ("\n%r\n", x
);
1744 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1745 now only a helper for avr_addr_space_legitimize_address. */
1746 /* Attempts to replace X with a valid
1747 memory address for an operand of mode MODE */
1750 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1752 bool big_offset_p
= false;
1756 if (GET_CODE (oldx
) == PLUS
1757 && REG_P (XEXP (oldx
, 0)))
1759 if (REG_P (XEXP (oldx
, 1)))
1760 x
= force_reg (GET_MODE (oldx
), oldx
);
1761 else if (CONST_INT_P (XEXP (oldx
, 1)))
1763 int offs
= INTVAL (XEXP (oldx
, 1));
1764 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1765 && offs
> MAX_LD_OFFSET (mode
))
1767 big_offset_p
= true;
1768 x
= force_reg (GET_MODE (oldx
), oldx
);
1773 if (avr_log
.legitimize_address
)
1775 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1778 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1785 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1786 /* This will allow register R26/27 to be used where it is no worse than normal
1787 base pointers R28/29 or R30/31. For example, if base offset is greater
1788 than 63 bytes or for R++ or --R addressing. */
1791 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1792 int opnum
, int type
, int addr_type
,
1793 int ind_levels ATTRIBUTE_UNUSED
,
1794 rtx (*mk_memloc
)(rtx
,int))
1798 if (avr_log
.legitimize_reload_address
)
1799 avr_edump ("\n%?:%m %r\n", mode
, x
);
1801 if (1 && (GET_CODE (x
) == POST_INC
1802 || GET_CODE (x
) == PRE_DEC
))
1804 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1805 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1806 opnum
, RELOAD_OTHER
);
1808 if (avr_log
.legitimize_reload_address
)
1809 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1810 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1815 if (GET_CODE (x
) == PLUS
1816 && REG_P (XEXP (x
, 0))
1817 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1818 && CONST_INT_P (XEXP (x
, 1))
1819 && INTVAL (XEXP (x
, 1)) >= 1)
1821 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1825 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1827 int regno
= REGNO (XEXP (x
, 0));
1828 rtx mem
= mk_memloc (x
, regno
);
1830 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1831 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1832 1, (enum reload_type
) addr_type
);
1834 if (avr_log
.legitimize_reload_address
)
1835 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1836 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1838 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1839 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1840 opnum
, (enum reload_type
) type
);
1842 if (avr_log
.legitimize_reload_address
)
1843 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1844 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1849 else if (! (frame_pointer_needed
1850 && XEXP (x
, 0) == frame_pointer_rtx
))
1852 push_reload (x
, NULL_RTX
, px
, NULL
,
1853 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1854 opnum
, (enum reload_type
) type
);
1856 if (avr_log
.legitimize_reload_address
)
1857 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1858 POINTER_REGS
, x
, NULL_RTX
);
1868 /* Helper function to print assembler resp. track instruction
1869 sequence lengths. Always return "".
1872 Output assembler code from template TPL with operands supplied
1873 by OPERANDS. This is just forwarding to output_asm_insn.
1876 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1877 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1878 Don't output anything.
1882 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1886 output_asm_insn (tpl
, operands
);
1900 /* Return a pointer register name as a string. */
1903 ptrreg_to_str (int regno
)
1907 case REG_X
: return "X";
1908 case REG_Y
: return "Y";
1909 case REG_Z
: return "Z";
1911 output_operand_lossage ("address operand requires constraint for"
1912 " X, Y, or Z register");
1917 /* Return the condition name as a string.
1918 Used in conditional jump constructing */
1921 cond_string (enum rtx_code code
)
1930 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1935 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1951 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1952 /* Output ADDR to FILE as address. */
1955 avr_print_operand_address (FILE *file
, rtx addr
)
1957 switch (GET_CODE (addr
))
1960 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1964 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1968 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1972 if (CONSTANT_ADDRESS_P (addr
)
1973 && text_segment_operand (addr
, VOIDmode
))
1976 if (GET_CODE (x
) == CONST
)
1978 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1980 /* Assembler gs() will implant word address. Make offset
1981 a byte offset inside gs() for assembler. This is
1982 needed because the more logical (constant+gs(sym)) is not
1983 accepted by gas. For 128K and lower devices this is ok.
1984 For large devices it will create a Trampoline to offset
1985 from symbol which may not be what the user really wanted. */
1986 fprintf (file
, "gs(");
1987 output_addr_const (file
, XEXP (x
,0));
1988 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
1989 2 * INTVAL (XEXP (x
, 1)));
1991 if (warning (0, "pointer offset from symbol maybe incorrect"))
1993 output_addr_const (stderr
, addr
);
1994 fprintf(stderr
,"\n");
1999 fprintf (file
, "gs(");
2000 output_addr_const (file
, addr
);
2001 fprintf (file
, ")");
2005 output_addr_const (file
, addr
);
2010 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2013 avr_print_operand_punct_valid_p (unsigned char code
)
2015 return code
== '~' || code
== '!';
2019 /* Implement `TARGET_PRINT_OPERAND'. */
2020 /* Output X as assembler operand to file FILE.
2021 For a description of supported %-codes, see top of avr.md. */
2024 avr_print_operand (FILE *file
, rtx x
, int code
)
2028 if (code
>= 'A' && code
<= 'D')
2033 if (!AVR_HAVE_JMP_CALL
)
2036 else if (code
== '!')
2038 if (AVR_HAVE_EIJMP_EICALL
)
2041 else if (code
== 't'
2044 static int t_regno
= -1;
2045 static int t_nbits
= -1;
2047 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2049 t_regno
= REGNO (x
);
2050 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2052 else if (CONST_INT_P (x
) && t_regno
>= 0
2053 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2055 int bpos
= INTVAL (x
);
2057 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2059 fprintf (file
, ",%d", bpos
% 8);
2064 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2068 if (x
== zero_reg_rtx
)
2069 fprintf (file
, "__zero_reg__");
2070 else if (code
== 'r' && REGNO (x
) < 32)
2071 fprintf (file
, "%d", (int) REGNO (x
));
2073 fprintf (file
, reg_names
[REGNO (x
) + abcd
]);
2075 else if (CONST_INT_P (x
))
2077 HOST_WIDE_INT ival
= INTVAL (x
);
2080 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2081 else if (low_io_address_operand (x
, VOIDmode
)
2082 || high_io_address_operand (x
, VOIDmode
))
2084 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2085 fprintf (file
, "__RAMPZ__");
2086 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2087 fprintf (file
, "__RAMPY__");
2088 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2089 fprintf (file
, "__RAMPX__");
2090 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2091 fprintf (file
, "__RAMPD__");
2092 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2093 fprintf (file
, "__CCP__");
2094 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2095 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2096 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2099 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2100 ival
- avr_current_arch
->sfr_offset
);
2104 fatal_insn ("bad address, not an I/O address:", x
);
2108 rtx addr
= XEXP (x
, 0);
2112 if (!CONSTANT_P (addr
))
2113 fatal_insn ("bad address, not a constant:", addr
);
2114 /* Assembler template with m-code is data - not progmem section */
2115 if (text_segment_operand (addr
, VOIDmode
))
2116 if (warning (0, "accessing data memory with"
2117 " program memory address"))
2119 output_addr_const (stderr
, addr
);
2120 fprintf(stderr
,"\n");
2122 output_addr_const (file
, addr
);
2124 else if (code
== 'i')
2126 avr_print_operand (file
, addr
, 'i');
2128 else if (code
== 'o')
2130 if (GET_CODE (addr
) != PLUS
)
2131 fatal_insn ("bad address, not (reg+disp):", addr
);
2133 avr_print_operand (file
, XEXP (addr
, 1), 0);
2135 else if (code
== 'p' || code
== 'r')
2137 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2138 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2141 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2143 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2145 else if (GET_CODE (addr
) == PLUS
)
2147 avr_print_operand_address (file
, XEXP (addr
,0));
2148 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2149 fatal_insn ("internal compiler error. Bad address:"
2152 avr_print_operand (file
, XEXP (addr
,1), code
);
2155 avr_print_operand_address (file
, addr
);
2157 else if (code
== 'i')
2159 fatal_insn ("bad address, not an I/O address:", x
);
2161 else if (code
== 'x')
2163 /* Constant progmem address - like used in jmp or call */
2164 if (0 == text_segment_operand (x
, VOIDmode
))
2165 if (warning (0, "accessing program memory"
2166 " with data memory address"))
2168 output_addr_const (stderr
, x
);
2169 fprintf(stderr
,"\n");
2171 /* Use normal symbol for direct address no linker trampoline needed */
2172 output_addr_const (file
, x
);
2174 else if (CONST_FIXED_P (x
))
2176 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2178 output_operand_lossage ("Unsupported code '%c'for fixed-point:",
2180 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2182 else if (GET_CODE (x
) == CONST_DOUBLE
)
2186 if (GET_MODE (x
) != SFmode
)
2187 fatal_insn ("internal compiler error. Unknown mode:", x
);
2188 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2189 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2190 fprintf (file
, "0x%lx", val
);
2192 else if (GET_CODE (x
) == CONST_STRING
)
2193 fputs (XSTR (x
, 0), file
);
2194 else if (code
== 'j')
2195 fputs (cond_string (GET_CODE (x
)), file
);
2196 else if (code
== 'k')
2197 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2199 avr_print_operand_address (file
, x
);
2202 /* Update the condition code in the INSN. */
2205 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2208 enum attr_cc cc
= get_attr_cc (insn
);
2218 rtx
*op
= recog_data
.operand
;
2221 /* Extract insn's operands. */
2222 extract_constrain_insn_cached (insn
);
2230 avr_out_plus (insn
, op
, &len_dummy
, &icc
);
2231 cc
= (enum attr_cc
) icc
;
2236 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2237 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2238 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2240 /* Any other "r,rL" combination does not alter cc0. */
2244 } /* inner switch */
2248 } /* outer swicth */
2253 /* Special values like CC_OUT_PLUS from above have been
2254 mapped to "standard" CC_* values so we never come here. */
2260 /* Insn does not affect CC at all. */
2268 set
= single_set (insn
);
2272 cc_status
.flags
|= CC_NO_OVERFLOW
;
2273 cc_status
.value1
= SET_DEST (set
);
2278 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2279 The V flag may or may not be known but that's ok because
2280 alter_cond will change tests to use EQ/NE. */
2281 set
= single_set (insn
);
2285 cc_status
.value1
= SET_DEST (set
);
2286 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2291 set
= single_set (insn
);
2294 cc_status
.value1
= SET_SRC (set
);
2298 /* Insn doesn't leave CC in a usable state. */
2304 /* Choose mode for jump insn:
2305 1 - relative jump in range -63 <= x <= 62 ;
2306 2 - relative jump in range -2046 <= x <= 2045 ;
2307 3 - absolute jump (only for ATmega[16]03). */
2310 avr_jump_mode (rtx x
, rtx insn
)
2312 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2313 ? XEXP (x
, 0) : x
));
2314 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2315 int jump_distance
= cur_addr
- dest_addr
;
2317 if (-63 <= jump_distance
&& jump_distance
<= 62)
2319 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2321 else if (AVR_HAVE_JMP_CALL
)
2327 /* return an AVR condition jump commands.
2328 X is a comparison RTX.
2329 LEN is a number returned by avr_jump_mode function.
2330 if REVERSE nonzero then condition code in X must be reversed. */
2333 ret_cond_branch (rtx x
, int len
, int reverse
)
2335 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2340 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2341 return (len
== 1 ? ("breq .+2" CR_TAB
2343 len
== 2 ? ("breq .+4" CR_TAB
2351 return (len
== 1 ? ("breq .+2" CR_TAB
2353 len
== 2 ? ("breq .+4" CR_TAB
2360 return (len
== 1 ? ("breq .+2" CR_TAB
2362 len
== 2 ? ("breq .+4" CR_TAB
2369 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2370 return (len
== 1 ? ("breq %0" CR_TAB
2372 len
== 2 ? ("breq .+2" CR_TAB
2379 return (len
== 1 ? ("breq %0" CR_TAB
2381 len
== 2 ? ("breq .+2" CR_TAB
2388 return (len
== 1 ? ("breq %0" CR_TAB
2390 len
== 2 ? ("breq .+2" CR_TAB
2404 return ("br%j1 .+2" CR_TAB
2407 return ("br%j1 .+4" CR_TAB
2418 return ("br%k1 .+2" CR_TAB
2421 return ("br%k1 .+4" CR_TAB
2429 /* Output insn cost for next insn. */
2432 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2433 int num_operands ATTRIBUTE_UNUSED
)
2435 if (avr_log
.rtx_costs
)
2437 rtx set
= single_set (insn
);
2440 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2441 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2443 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2444 rtx_cost (PATTERN (insn
), INSN
, 0,
2445 optimize_insn_for_speed_p()));
2449 /* Return 0 if undefined, 1 if always true or always false. */
2452 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2454 unsigned int max
= (mode
== QImode
? 0xff :
2455 mode
== HImode
? 0xffff :
2456 mode
== PSImode
? 0xffffff :
2457 mode
== SImode
? 0xffffffff : 0);
2458 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
2460 if (unsigned_condition (op
) != op
)
2463 if (max
!= (INTVAL (x
) & max
)
2464 && INTVAL (x
) != 0xff)
2471 /* Returns nonzero if REGNO is the number of a hard
2472 register in which function arguments are sometimes passed. */
2475 function_arg_regno_p(int r
)
2477 return (r
>= 8 && r
<= 25);
2480 /* Initializing the variable cum for the state at the beginning
2481 of the argument list. */
2484 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2485 tree fndecl ATTRIBUTE_UNUSED
)
2488 cum
->regno
= FIRST_CUM_REG
;
2489 if (!libname
&& stdarg_p (fntype
))
2492 /* Assume the calle may be tail called */
2494 cfun
->machine
->sibcall_fails
= 0;
2497 /* Returns the number of registers to allocate for a function argument. */
2500 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2504 if (mode
== BLKmode
)
2505 size
= int_size_in_bytes (type
);
2507 size
= GET_MODE_SIZE (mode
);
2509 /* Align all function arguments to start in even-numbered registers.
2510 Odd-sized arguments leave holes above them. */
2512 return (size
+ 1) & ~1;
2515 /* Controls whether a function argument is passed
2516 in a register, and which register. */
2519 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2520 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2522 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2523 int bytes
= avr_num_arg_regs (mode
, type
);
2525 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2526 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2531 /* Update the summarizer variable CUM to advance past an argument
2532 in the argument list. */
2535 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2536 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2538 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2539 int bytes
= avr_num_arg_regs (mode
, type
);
2541 cum
->nregs
-= bytes
;
2542 cum
->regno
-= bytes
;
2544 /* A parameter is being passed in a call-saved register. As the original
2545 contents of these regs has to be restored before leaving the function,
2546 a function must not pass arguments in call-saved regs in order to get
2551 && !call_used_regs
[cum
->regno
])
2553 /* FIXME: We ship info on failing tail-call in struct machine_function.
2554 This uses internals of calls.c:expand_call() and the way args_so_far
2555 is used. targetm.function_ok_for_sibcall() needs to be extended to
2556 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2557 dependent so that such an extension is not wanted. */
2559 cfun
->machine
->sibcall_fails
= 1;
2562 /* Test if all registers needed by the ABI are actually available. If the
2563 user has fixed a GPR needed to pass an argument, an (implicit) function
2564 call will clobber that fixed register. See PR45099 for an example. */
2571 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2572 if (fixed_regs
[regno
])
2573 warning (0, "fixed register %s used to pass parameter to function",
2577 if (cum
->nregs
<= 0)
2580 cum
->regno
= FIRST_CUM_REG
;
2584 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2585 /* Decide whether we can make a sibling call to a function. DECL is the
2586 declaration of the function being targeted by the call and EXP is the
2587 CALL_EXPR representing the call. */
2590 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2594 /* Tail-calling must fail if callee-saved regs are used to pass
2595 function args. We must not tail-call when `epilogue_restores'
2596 is used. Unfortunately, we cannot tell at this point if that
2597 actually will happen or not, and we cannot step back from
2598 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2600 if (cfun
->machine
->sibcall_fails
2601 || TARGET_CALL_PROLOGUES
)
2606 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2610 decl_callee
= TREE_TYPE (decl_callee
);
2614 decl_callee
= fntype_callee
;
2616 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2617 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2619 decl_callee
= TREE_TYPE (decl_callee
);
2623 /* Ensure that caller and callee have compatible epilogues */
2625 if (cfun
->machine
->is_interrupt
2626 || cfun
->machine
->is_signal
2627 || cfun
->machine
->is_naked
2628 || avr_naked_function_p (decl_callee
)
2629 /* FIXME: For OS_task and OS_main, we are over-conservative.
2630 This is due to missing documentation of these attributes
2631 and what they actually should do and should not do. */
2632 || (avr_OS_task_function_p (decl_callee
)
2633 != cfun
->machine
->is_OS_task
)
2634 || (avr_OS_main_function_p (decl_callee
)
2635 != cfun
->machine
->is_OS_main
))
2643 /***********************************************************************
2644 Functions for outputting various mov's for a various modes
2645 ************************************************************************/
2647 /* Return true if a value of mode MODE is read from flash by
2648 __load_* function from libgcc. */
2651 avr_load_libgcc_p (rtx op
)
2653 enum machine_mode mode
= GET_MODE (op
);
2654 int n_bytes
= GET_MODE_SIZE (mode
);
2659 && MEM_ADDR_SPACE (op
) == ADDR_SPACE_FLASH
);
2662 /* Return true if a value of mode MODE is read by __xload_* function. */
2665 avr_xload_libgcc_p (enum machine_mode mode
)
2667 int n_bytes
= GET_MODE_SIZE (mode
);
2670 || avr_current_device
->n_flash
> 1);
2674 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2675 OP[1] in AS1 to register OP[0].
2676 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2680 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2684 rtx src
= SET_SRC (single_set (insn
));
2686 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2688 addr_space_t as
= MEM_ADDR_SPACE (src
);
2695 warning (0, "writing to address space %qs not supported",
2696 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2701 addr
= XEXP (src
, 0);
2702 code
= GET_CODE (addr
);
2704 gcc_assert (REG_P (dest
));
2705 gcc_assert (REG
== code
|| POST_INC
== code
);
2707 /* Only 1-byte moves from __flash are representes as open coded
2708 mov insns. All other loads from flash are not handled here but
2709 by some UNSPEC instead, see respective FIXME in machine description. */
2711 gcc_assert (as
== ADDR_SPACE_FLASH
);
2712 gcc_assert (n_bytes
== 1);
2715 xop
[1] = lpm_addr_reg_rtx
;
2716 xop
[2] = lpm_reg_rtx
;
2725 gcc_assert (REG_Z
== REGNO (addr
));
2727 return AVR_HAVE_LPMX
2728 ? avr_asm_len ("lpm %0,%a1", xop
, plen
, 1)
2729 : avr_asm_len ("lpm" CR_TAB
2730 "mov %0,%2", xop
, plen
, 2);
2734 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0)));
2736 return AVR_HAVE_LPMX
2737 ? avr_asm_len ("lpm %0,%a1+", xop
, plen
, 1)
2738 : avr_asm_len ("lpm" CR_TAB
2740 "mov %0,%2", xop
, plen
, 3);
2747 /* If PLEN == NULL: Ouput instructions to load $0 with a value from
2748 flash address $1:Z. If $1 = 0 we can use LPM to read, otherwise
2750 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2754 avr_load_lpm (rtx insn
, rtx
*op
, int *plen
)
2757 int n
, n_bytes
= GET_MODE_SIZE (GET_MODE (op
[0]));
2758 rtx xsegment
= op
[1];
2759 bool clobber_z
= PARALLEL
== GET_CODE (PATTERN (insn
));
2760 bool r30_in_tmp
= false;
2765 xop
[1] = lpm_addr_reg_rtx
;
2766 xop
[2] = lpm_reg_rtx
;
2767 xop
[3] = xstring_empty
;
2769 /* Set RAMPZ as needed. */
2771 if (REG_P (xsegment
))
2773 avr_asm_len ("out __RAMPZ__,%0", &xsegment
, plen
, 1);
2777 /* Load the individual bytes from LSB to MSB. */
2779 for (n
= 0; n
< n_bytes
; n
++)
2781 xop
[0] = all_regs_rtx
[REGNO (op
[0]) + n
];
2783 if ((CONST_INT_P (xsegment
) && AVR_HAVE_LPMX
)
2784 || (REG_P (xsegment
) && AVR_HAVE_ELPMX
))
2787 avr_asm_len ("%3lpm %0,%a1", xop
, plen
, 1);
2788 else if (REGNO (xop
[0]) == REG_Z
)
2790 avr_asm_len ("%3lpm %2,%a1+", xop
, plen
, 1);
2794 avr_asm_len ("%3lpm %0,%a1+", xop
, plen
, 1);
2798 gcc_assert (clobber_z
);
2800 avr_asm_len ("%3lpm" CR_TAB
2801 "mov %0,%2", xop
, plen
, 2);
2804 avr_asm_len ("adiw %1,1", xop
, plen
, 1);
2809 avr_asm_len ("mov %1,%2", xop
, plen
, 1);
2813 && !reg_unused_after (insn
, lpm_addr_reg_rtx
)
2814 && !reg_overlap_mentioned_p (op
[0], lpm_addr_reg_rtx
))
2816 xop
[2] = GEN_INT (n_bytes
-1);
2817 avr_asm_len ("sbiw %1,%2", xop
, plen
, 1);
2820 if (REG_P (xsegment
) && AVR_HAVE_RAMPD
)
2822 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2824 avr_asm_len ("out __RAMPZ__,__zero_reg__", xop
, plen
, 1);
2831 /* Worker function for xload_8 insn. */
2834 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
2840 xop
[2] = lpm_addr_reg_rtx
;
2841 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
2846 avr_asm_len ("sbrc %1,7" CR_TAB
2848 "sbrs %1,7", xop
, plen
, 3);
2850 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, 1);
2852 if (REGNO (xop
[0]) != REGNO (xop
[3]))
2853 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
/* Output asm for a QImode (1-byte) move.  Dispatches on operand kinds:
   flash source/dest -> avr_out_lpm; reg<-reg (with stack-pointer special
   cases); constant -> output_reload_in_const; memory source/dest ->
   out_movqi_r_mr / out_movqi_mr_r, substituting __zero_reg__ for a zero
   store.  NOTE(review): lossy extraction — braces/statements missing. */
2860 output_movqi (rtx insn
, rtx operands
[], int *real_l
)
2862 rtx dest
= operands
[0];
2863 rtx src
= operands
[1];
/* Accesses into flash are handled by the LPM output worker.  */
2865 if (avr_mem_flash_p (src
)
2866 || avr_mem_flash_p (dest
))
2868 return avr_out_lpm (insn
, operands
, real_l
);
2874 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest
)));
2878 if (REG_P (src
)) /* mov r,r */
2880 if (test_hard_reg_class (STACK_REG
, dest
))
2882 else if (test_hard_reg_class (STACK_REG
, src
))
2887 else if (CONSTANT_P (src
))
2889 output_reload_in_const (operands
, NULL_RTX
, real_l
, false);
2892 else if (MEM_P (src
))
2893 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2895 else if (MEM_P (dest
))
/* Storing constant 0: use __zero_reg__ instead of loading 0 first.  */
2900 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
2902 return out_movqi_mr_r (insn
, xop
, real_l
);
/* Output asm for an HImode (2-byte) move.  Handles flash accesses via
   avr_out_lpm, stack-pointer reads/writes (with SREG save/restore when
   interrupts may run), reg-reg moves (MOVW when available), constants,
   and memory moves via out_movhi_r_mr / out_movhi_mr_r.
   NOTE(review): lossy extraction — braces/statements missing. */
2909 output_movhi (rtx insn
, rtx xop
[], int *plen
)
2914 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
2916 if (avr_mem_flash_p (src
)
2917 || avr_mem_flash_p (dest
))
2919 return avr_out_lpm (insn
, xop
, plen
);
2922 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest
)));
2926 if (REG_P (src
)) /* mov r,r */
2928 if (test_hard_reg_class (STACK_REG
, dest
))
/* Writing SP: 8-bit SP devices only need the low byte.  */
2930 if (AVR_HAVE_8BIT_SP
)
2931 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
2934 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2935 "out __SP_H__,%B1", xop
, plen
, -2);
2937 /* Use simple load of SP if no interrupts are used. */
2939 return TARGET_NO_INTERRUPTS
2940 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2941 "out __SP_L__,%A1", xop
, plen
, -2)
/* Otherwise disable interrupts around the two-byte SP update by
   saving/restoring SREG.  */
2942 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2944 "out __SP_H__,%B1" CR_TAB
2945 "out __SREG__,__tmp_reg__" CR_TAB
2946 "out __SP_L__,%A1", xop
, plen
, -5);
2948 else if (test_hard_reg_class (STACK_REG
, src
))
/* Reading SP: devices without SPH get 0 in the high byte.  */
2950 return !AVR_HAVE_SPH
2951 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2952 "clr %B0", xop
, plen
, -2)
2954 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2955 "in %B0,__SP_H__", xop
, plen
, -2);
/* Plain reg-reg: MOVW when available, else two MOVs.  */
2958 return AVR_HAVE_MOVW
2959 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
2961 : avr_asm_len ("mov %A0,%A1" CR_TAB
2962 "mov %B0,%B1", xop
, plen
, -2);
2964 else if (CONSTANT_P (src
))
2966 return output_reload_inhi (xop
, NULL
, plen
);
2968 else if (MEM_P (src
))
2970 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
2973 else if (MEM_P (dest
))
/* Storing constant 0: use __zero_reg__.  */
2978 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
2980 return out_movhi_mr_r (insn
, xop
, plen
);
2983 fatal_insn ("invalid insn:", insn
);
/* Output asm to load one byte from memory (op[1]) into a register (op[0]).
   Cases: constant address (IN for I/O range, else LDS); reg+displacement
   with fix-ups when the displacement exceeds LDD's 63-byte reach (only Y
   allowed there) or when the base is X (no displacement addressing on X);
   plain LDD/LD otherwise.  NOTE(review): lossy extraction — some lines
   of the original are missing. */
2989 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
2993 rtx x
= XEXP (src
, 0);
2995 if (CONSTANT_ADDRESS_P (x
))
/* I/O addresses can use the shorter IN instruction when optimizing.  */
2997 return optimize
> 0 && io_address_operand (x
, QImode
)
2998 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
2999 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
3001 else if (GET_CODE (x
) == PLUS
3002 && REG_P (XEXP (x
, 0))
3003 && CONST_INT_P (XEXP (x
, 1)))
3005 /* memory access by reg+disp */
3007 int disp
= INTVAL (XEXP (x
, 1));
3009 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
/* Over-range displacements are only supported on Y (r28/r29).  */
3011 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3012 fatal_insn ("incorrect insn:",insn
);
/* Adjust Y temporarily with ADIW/SBIW when the offset is close enough,
   else with SUBI/SBCI pairs.  */
3014 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3015 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3016 "ldd %0,Y+63" CR_TAB
3017 "sbiw r28,%o1-63", op
, plen
, -3);
3019 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3020 "sbci r29,hi8(-%o1)" CR_TAB
3022 "subi r28,lo8(%o1)" CR_TAB
3023 "sbci r29,hi8(%o1)", op
, plen
, -5);
3025 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3027 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3028 it but I have this situation with extremal optimizing options. */
3030 avr_asm_len ("adiw r26,%o1" CR_TAB
3031 "ld %0,X", op
, plen
, -2);
/* Restore X unless it is dead or clobbered by the destination.  */
3033 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3034 && !reg_unused_after (insn
, XEXP (x
,0)))
3036 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3042 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3045 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
/* Output asm to load a 16-bit value from memory (op[1]) into registers
   (op[0]).  Covers: plain register base (with dest==base overlap and the
   X register, which has no LDD), reg+displacement (Y fix-ups beyond LDD
   range), pre-decrement, post-increment, and constant addresses (IN/LDS).
   NOTE(review): lossy extraction — some original lines are missing. */
3049 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
3053 rtx base
= XEXP (src
, 0);
3054 int reg_dest
= true_regnum (dest
);
3055 int reg_base
= true_regnum (base
);
3056 /* "volatile" forces reading low byte first, even if less efficient,
3057 for correct operation with 16-bit I/O registers. */
3058 int mem_volatile_p
= MEM_VOLATILE_P (src
);
/* Destination overlaps the pointer: stage the low byte in __tmp_reg__.  */
3062 if (reg_dest
== reg_base
) /* R = (R) */
3063 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3065 "mov %A0,__tmp_reg__", op
, plen
, -3);
3067 if (reg_base
!= REG_X
)
3068 return avr_asm_len ("ld %A0,%1" CR_TAB
3069 "ldd %B0,%1+1", op
, plen
, -2);
/* X has no displacement form; post-increment then undo if X is live.  */
3071 avr_asm_len ("ld %A0,X+" CR_TAB
3072 "ld %B0,X", op
, plen
, -2);
3074 if (!reg_unused_after (insn
, base
))
3075 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3079 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3081 int disp
= INTVAL (XEXP (base
, 1));
3082 int reg_base
= true_regnum (XEXP (base
, 0));
3084 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
/* Over-range displacements are only supported on Y.  */
3086 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3087 fatal_insn ("incorrect insn:",insn
);
3089 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3090 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3091 "ldd %A0,Y+62" CR_TAB
3092 "ldd %B0,Y+63" CR_TAB
3093 "sbiw r28,%o1-62", op
, plen
, -4)
3095 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3096 "sbci r29,hi8(-%o1)" CR_TAB
3098 "ldd %B0,Y+1" CR_TAB
3099 "subi r28,lo8(%o1)" CR_TAB
3100 "sbci r29,hi8(%o1)", op
, plen
, -6);
3103 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3104 it but I have this situation with extremal
3105 optimization options. */
3107 if (reg_base
== REG_X
)
3108 return reg_base
== reg_dest
3109 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3110 "ld __tmp_reg__,X+" CR_TAB
3112 "mov %A0,__tmp_reg__", op
, plen
, -4)
3114 : avr_asm_len ("adiw r26,%o1" CR_TAB
3117 "sbiw r26,%o1+1", op
, plen
, -4);
/* Base != X: use LDD, staging through __tmp_reg__ on overlap.  */
3119 return reg_base
== reg_dest
3120 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3121 "ldd %B0,%B1" CR_TAB
3122 "mov %A0,__tmp_reg__", op
, plen
, -3)
3124 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3125 "ldd %B0,%B1", op
, plen
, -2);
3127 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3129 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3130 fatal_insn ("incorrect insn:", insn
);
3132 if (!mem_volatile_p
)
3133 return avr_asm_len ("ld %B0,%1" CR_TAB
3134 "ld %A0,%1", op
, plen
, -2);
/* Volatile pre-dec: adjust the pointer first so bytes read low-first.  */
3136 return REGNO (XEXP (base
, 0)) == REG_X
3137 ? avr_asm_len ("sbiw r26,2" CR_TAB
3140 "sbiw r26,1", op
, plen
, -4)
3142 : avr_asm_len ("sbiw %r1,2" CR_TAB
3144 "ldd %B0,%p1+1", op
, plen
, -3);
3146 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3148 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3149 fatal_insn ("incorrect insn:", insn
);
3151 return avr_asm_len ("ld %A0,%1" CR_TAB
3152 "ld %B0,%1", op
, plen
, -2);
3154 else if (CONSTANT_ADDRESS_P (base
))
3156 return optimize
> 0 && io_address_operand (base
, HImode
)
3157 ? avr_asm_len ("in %A0,%i1" CR_TAB
3158 "in %B0,%i1+1", op
, plen
, -2)
3160 : avr_asm_len ("lds %A0,%m1" CR_TAB
3161 "lds %B0,%m1+1", op
, plen
, -4);
3164 fatal_insn ("unknown move insn:",insn
);
/* Output asm to load a 32-bit value from memory (op[1]) into registers
   (op[0]), setting *l to the instruction count.  Special-cases the X base
   register (no LDD, "ld r26,-X" undefined) and destination/base overlap,
   plus reg+disp (Y fix-ups), pre-dec, post-inc and constant addresses.
   NOTE(review): lossy extraction — some asm lines and braces missing. */
3169 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3173 rtx base
= XEXP (src
, 0);
3174 int reg_dest
= true_regnum (dest
);
3175 int reg_base
= true_regnum (base
);
3183 if (reg_base
== REG_X
) /* (R26) */
3185 if (reg_dest
== REG_X
)
3186 /* "ld r26,-X" is undefined */
3187 return *l
=7, ("adiw r26,3" CR_TAB
3190 "ld __tmp_reg__,-X" CR_TAB
3193 "mov r27,__tmp_reg__");
3194 else if (reg_dest
== REG_X
- 2)
3195 return *l
=5, ("ld %A0,X+" CR_TAB
3197 "ld __tmp_reg__,X+" CR_TAB
3199 "mov %C0,__tmp_reg__");
3200 else if (reg_unused_after (insn
, base
))
3201 return *l
=4, ("ld %A0,X+" CR_TAB
3206 return *l
=5, ("ld %A0,X+" CR_TAB
/* Base != X: LDD sequences; reorder/stage on overlap with dest.  */
3214 if (reg_dest
== reg_base
)
3215 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3216 "ldd %C0,%1+2" CR_TAB
3217 "ldd __tmp_reg__,%1+1" CR_TAB
3219 "mov %B0,__tmp_reg__");
3220 else if (reg_base
== reg_dest
+ 2)
3221 return *l
=5, ("ld %A0,%1" CR_TAB
3222 "ldd %B0,%1+1" CR_TAB
3223 "ldd __tmp_reg__,%1+2" CR_TAB
3224 "ldd %D0,%1+3" CR_TAB
3225 "mov %C0,__tmp_reg__");
3227 return *l
=4, ("ld %A0,%1" CR_TAB
3228 "ldd %B0,%1+1" CR_TAB
3229 "ldd %C0,%1+2" CR_TAB
3233 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3235 int disp
= INTVAL (XEXP (base
, 1));
3237 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3239 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3240 fatal_insn ("incorrect insn:",insn
);
3242 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3243 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3244 "ldd %A0,Y+60" CR_TAB
3245 "ldd %B0,Y+61" CR_TAB
3246 "ldd %C0,Y+62" CR_TAB
3247 "ldd %D0,Y+63" CR_TAB
3250 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3251 "sbci r29,hi8(-%o1)" CR_TAB
3253 "ldd %B0,Y+1" CR_TAB
3254 "ldd %C0,Y+2" CR_TAB
3255 "ldd %D0,Y+3" CR_TAB
3256 "subi r28,lo8(%o1)" CR_TAB
3257 "sbci r29,hi8(%o1)");
3260 reg_base
= true_regnum (XEXP (base
, 0));
3261 if (reg_base
== REG_X
)
3264 if (reg_dest
== REG_X
)
3267 /* "ld r26,-X" is undefined */
3268 return ("adiw r26,%o1+3" CR_TAB
3271 "ld __tmp_reg__,-X" CR_TAB
3274 "mov r27,__tmp_reg__");
3277 if (reg_dest
== REG_X
- 2)
3278 return ("adiw r26,%o1" CR_TAB
3281 "ld __tmp_reg__,X+" CR_TAB
3283 "mov r26,__tmp_reg__");
3285 return ("adiw r26,%o1" CR_TAB
3292 if (reg_dest
== reg_base
)
3293 return *l
=5, ("ldd %D0,%D1" CR_TAB
3294 "ldd %C0,%C1" CR_TAB
3295 "ldd __tmp_reg__,%B1" CR_TAB
3296 "ldd %A0,%A1" CR_TAB
3297 "mov %B0,__tmp_reg__");
3298 else if (reg_dest
== reg_base
- 2)
3299 return *l
=5, ("ldd %A0,%A1" CR_TAB
3300 "ldd %B0,%B1" CR_TAB
3301 "ldd __tmp_reg__,%C1" CR_TAB
3302 "ldd %D0,%D1" CR_TAB
3303 "mov %C0,__tmp_reg__");
3304 return *l
=4, ("ldd %A0,%A1" CR_TAB
3305 "ldd %B0,%B1" CR_TAB
3306 "ldd %C0,%C1" CR_TAB
3309 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3310 return *l
=4, ("ld %D0,%1" CR_TAB
3314 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3315 return *l
=4, ("ld %A0,%1" CR_TAB
3319 else if (CONSTANT_ADDRESS_P (base
))
3320 return *l
=8, ("lds %A0,%m1" CR_TAB
3321 "lds %B0,%m1+1" CR_TAB
3322 "lds %C0,%m1+2" CR_TAB
3325 fatal_insn ("unknown move insn:",insn
);
/* Output asm to store a 32-bit register value (op[1]) into memory (op[0]),
   setting *l to the instruction count.  Mirrors out_movsi_r_mr: constant
   address (STS), plain base (X needs staging through __tmp_reg__ /
   __zero_reg__ when the source overlaps the pointer), reg+disp with Y
   fix-ups, pre-dec and post-inc.
   NOTE(review): lossy extraction — some asm lines and braces missing. */
3330 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3334 rtx base
= XEXP (dest
, 0);
3335 int reg_base
= true_regnum (base
);
3336 int reg_src
= true_regnum (src
);
3342 if (CONSTANT_ADDRESS_P (base
))
3343 return *l
=8,("sts %m0,%A1" CR_TAB
3344 "sts %m0+1,%B1" CR_TAB
3345 "sts %m0+2,%C1" CR_TAB
3347 if (reg_base
> 0) /* (r) */
3349 if (reg_base
== REG_X
) /* (R26) */
3351 if (reg_src
== REG_X
)
3353 /* "st X+,r26" is undefined */
/* Stage r27 in __tmp_reg__; extra SBIW restores X when still live.  */
3354 if (reg_unused_after (insn
, base
))
3355 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3358 "st X+,__tmp_reg__" CR_TAB
3362 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3365 "st X+,__tmp_reg__" CR_TAB
3370 else if (reg_base
== reg_src
+ 2)
/* Source's high word is the pointer itself: save %C1/%D1 first.  */
3372 if (reg_unused_after (insn
, base
))
3373 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3374 "mov __tmp_reg__,%D1" CR_TAB
3377 "st %0+,__zero_reg__" CR_TAB
3378 "st %0,__tmp_reg__" CR_TAB
3379 "clr __zero_reg__");
3381 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3382 "mov __tmp_reg__,%D1" CR_TAB
3385 "st %0+,__zero_reg__" CR_TAB
3386 "st %0,__tmp_reg__" CR_TAB
3387 "clr __zero_reg__" CR_TAB
3390 return *l
=5, ("st %0+,%A1" CR_TAB
3397 return *l
=4, ("st %0,%A1" CR_TAB
3398 "std %0+1,%B1" CR_TAB
3399 "std %0+2,%C1" CR_TAB
3402 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3404 int disp
= INTVAL (XEXP (base
, 1));
3405 reg_base
= REGNO (XEXP (base
, 0));
3406 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3408 if (reg_base
!= REG_Y
)
3409 fatal_insn ("incorrect insn:",insn
);
3411 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3412 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3413 "std Y+60,%A1" CR_TAB
3414 "std Y+61,%B1" CR_TAB
3415 "std Y+62,%C1" CR_TAB
3416 "std Y+63,%D1" CR_TAB
3419 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3420 "sbci r29,hi8(-%o0)" CR_TAB
3422 "std Y+1,%B1" CR_TAB
3423 "std Y+2,%C1" CR_TAB
3424 "std Y+3,%D1" CR_TAB
3425 "subi r28,lo8(%o0)" CR_TAB
3426 "sbci r29,hi8(%o0)");
3428 if (reg_base
== REG_X
)
3431 if (reg_src
== REG_X
)
3434 return ("mov __tmp_reg__,r26" CR_TAB
3435 "mov __zero_reg__,r27" CR_TAB
3436 "adiw r26,%o0" CR_TAB
3437 "st X+,__tmp_reg__" CR_TAB
3438 "st X+,__zero_reg__" CR_TAB
3441 "clr __zero_reg__" CR_TAB
3444 else if (reg_src
== REG_X
- 2)
3447 return ("mov __tmp_reg__,r26" CR_TAB
3448 "mov __zero_reg__,r27" CR_TAB
3449 "adiw r26,%o0" CR_TAB
3452 "st X+,__tmp_reg__" CR_TAB
3453 "st X,__zero_reg__" CR_TAB
3454 "clr __zero_reg__" CR_TAB
3458 return ("adiw r26,%o0" CR_TAB
3465 return *l
=4, ("std %A0,%A1" CR_TAB
3466 "std %B0,%B1" CR_TAB
3467 "std %C0,%C1" CR_TAB
3470 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3471 return *l
=4, ("st %0,%D1" CR_TAB
3475 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3476 return *l
=4, ("st %0,%A1" CR_TAB
3480 fatal_insn ("unknown move insn:",insn
);
/* Output asm for a 4-byte (SImode/SFmode) move.  Flash -> avr_out_lpm;
   reg-reg using MOVW where possible with byte order chosen by register
   numbering to avoid clobbering; constants -> output_reload_insisf;
   memory -> out_movsi_r_mr / out_movsi_mr_r (0 stored via __zero_reg__).
   NOTE(review): lossy extraction — braces/statements missing. */
3485 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3488 rtx dest
= operands
[0];
3489 rtx src
= operands
[1];
3492 if (avr_mem_flash_p (src
)
3493 || avr_mem_flash_p (dest
))
3495 return avr_out_lpm (insn
, operands
, real_l
);
3501 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest
)));
3504 if (REG_P (src
)) /* mov r,r */
/* Copy high-to-low or low-to-high depending on register overlap.  */
3506 if (true_regnum (dest
) > true_regnum (src
))
3511 return ("movw %C0,%C1" CR_TAB
3515 return ("mov %D0,%D1" CR_TAB
3516 "mov %C0,%C1" CR_TAB
3517 "mov %B0,%B1" CR_TAB
3525 return ("movw %A0,%A1" CR_TAB
3529 return ("mov %A0,%A1" CR_TAB
3530 "mov %B0,%B1" CR_TAB
3531 "mov %C0,%C1" CR_TAB
3535 else if (CONSTANT_P (src
))
3537 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3539 else if (MEM_P (src
))
3540 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3542 else if (MEM_P (dest
))
3546 if (src
== CONST0_RTX (GET_MODE (dest
)))
3547 operands
[1] = zero_reg_rtx
;
3549 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3552 output_asm_insn (templ
, operands
);
3557 fatal_insn ("invalid insn:", insn
);
3562 /* Handle loads of 24-bit types from memory to register. */
/* NOTE(review): lossy extraction — braces and some statements missing.
   Same case structure as out_movsi_r_mr but for 3 bytes: X base (staging
   via __tmp_reg__, "ld r26,-X" undefined), other bases via LDD,
   reg+disp with Y fix-ups, pre-dec, post-inc, constant address (LDS). */
3565 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3569 rtx base
= XEXP (src
, 0);
3570 int reg_dest
= true_regnum (dest
);
3571 int reg_base
= true_regnum (base
);
3575 if (reg_base
== REG_X
) /* (R26) */
3577 if (reg_dest
== REG_X
)
3578 /* "ld r26,-X" is undefined */
3579 return avr_asm_len ("adiw r26,2" CR_TAB
3581 "ld __tmp_reg__,-X" CR_TAB
3584 "mov r27,__tmp_reg__", op
, plen
, -6);
3587 avr_asm_len ("ld %A0,X+" CR_TAB
3589 "ld %C0,X", op
, plen
, -3);
/* Restore X unless it was consumed into the destination or is dead.  */
3591 if (reg_dest
!= REG_X
- 2
3592 && !reg_unused_after (insn
, base
))
3594 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3600 else /* reg_base != REG_X */
3602 if (reg_dest
== reg_base
)
3603 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3604 "ldd __tmp_reg__,%1+1" CR_TAB
3606 "mov %B0,__tmp_reg__", op
, plen
, -4);
3608 return avr_asm_len ("ld %A0,%1" CR_TAB
3609 "ldd %B0,%1+1" CR_TAB
3610 "ldd %C0,%1+2", op
, plen
, -3);
3613 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3615 int disp
= INTVAL (XEXP (base
, 1));
3617 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3619 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3620 fatal_insn ("incorrect insn:",insn
);
3622 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3623 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3624 "ldd %A0,Y+61" CR_TAB
3625 "ldd %B0,Y+62" CR_TAB
3626 "ldd %C0,Y+63" CR_TAB
3627 "sbiw r28,%o1-61", op
, plen
, -5);
3629 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3630 "sbci r29,hi8(-%o1)" CR_TAB
3632 "ldd %B0,Y+1" CR_TAB
3633 "ldd %C0,Y+2" CR_TAB
3634 "subi r28,lo8(%o1)" CR_TAB
3635 "sbci r29,hi8(%o1)", op
, plen
, -7);
3638 reg_base
= true_regnum (XEXP (base
, 0));
3639 if (reg_base
== REG_X
)
3642 if (reg_dest
== REG_X
)
3644 /* "ld r26,-X" is undefined */
3645 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3647 "ld __tmp_reg__,-X" CR_TAB
3650 "mov r27,__tmp_reg__", op
, plen
, -6);
3653 avr_asm_len ("adiw r26,%o1" CR_TAB
3656 "ld %C0,X", op
, plen
, -4);
3658 if (reg_dest
!= REG_W
3659 && !reg_unused_after (insn
, XEXP (base
, 0)))
3660 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3665 if (reg_dest
== reg_base
)
3666 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3667 "ldd __tmp_reg__,%B1" CR_TAB
3668 "ldd %A0,%A1" CR_TAB
3669 "mov %B0,__tmp_reg__", op
, plen
, -4);
3671 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3672 "ldd %B0,%B1" CR_TAB
3673 "ldd %C0,%C1", op
, plen
, -3);
3675 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3676 return avr_asm_len ("ld %C0,%1" CR_TAB
3678 "ld %A0,%1", op
, plen
, -3);
3679 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3680 return avr_asm_len ("ld %A0,%1" CR_TAB
3682 "ld %C0,%1", op
, plen
, -3);
3684 else if (CONSTANT_ADDRESS_P (base
))
3685 return avr_asm_len ("lds %A0,%m1" CR_TAB
3686 "lds %B0,%m1+1" CR_TAB
3687 "lds %C0,%m1+2", op
, plen
, -6);
3689 fatal_insn ("unknown move insn:",insn
);
3693 /* Handle store of 24-bit type from register or zero to memory. */
/* NOTE(review): lossy extraction — braces and some statements missing.
   Cases: constant address (STS), plain base (X asserted not to overlap
   the source), reg+disp with Y fix-ups, pre-dec, post-inc. */
3696 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3700 rtx base
= XEXP (dest
, 0);
3701 int reg_base
= true_regnum (base
);
3703 if (CONSTANT_ADDRESS_P (base
))
3704 return avr_asm_len ("sts %m0,%A1" CR_TAB
3705 "sts %m0+1,%B1" CR_TAB
3706 "sts %m0+2,%C1", op
, plen
, -6);
3708 if (reg_base
> 0) /* (r) */
3710 if (reg_base
== REG_X
) /* (R26) */
3712 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3714 avr_asm_len ("st %0+,%A1" CR_TAB
3716 "st %0,%C1", op
, plen
, -3);
/* Restore X only when it is still live after this insn.  */
3718 if (!reg_unused_after (insn
, base
))
3719 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3724 return avr_asm_len ("st %0,%A1" CR_TAB
3725 "std %0+1,%B1" CR_TAB
3726 "std %0+2,%C1", op
, plen
, -3);
3728 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3730 int disp
= INTVAL (XEXP (base
, 1));
3731 reg_base
= REGNO (XEXP (base
, 0));
3733 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3735 if (reg_base
!= REG_Y
)
3736 fatal_insn ("incorrect insn:",insn
);
3738 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3739 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3740 "std Y+61,%A1" CR_TAB
3741 "std Y+62,%B1" CR_TAB
3742 "std Y+63,%C1" CR_TAB
3743 "sbiw r28,%o0-60", op
, plen
, -5);
3745 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3746 "sbci r29,hi8(-%o0)" CR_TAB
3748 "std Y+1,%B1" CR_TAB
3749 "std Y+2,%C1" CR_TAB
3750 "subi r28,lo8(%o0)" CR_TAB
3751 "sbci r29,hi8(%o0)", op
, plen
, -7);
3753 if (reg_base
== REG_X
)
3756 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
3758 avr_asm_len ("adiw r26,%o0" CR_TAB
3761 "st X,%C1", op
, plen
, -4);
3763 if (!reg_unused_after (insn
, XEXP (base
, 0)))
3764 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
3769 return avr_asm_len ("std %A0,%A1" CR_TAB
3770 "std %B0,%B1" CR_TAB
3771 "std %C0,%C1", op
, plen
, -3);
3773 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3774 return avr_asm_len ("st %0,%C1" CR_TAB
3776 "st %0,%A1", op
, plen
, -3);
3777 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3778 return avr_asm_len ("st %0,%A1" CR_TAB
3780 "st %0,%C1", op
, plen
, -3);
3782 fatal_insn ("unknown move insn:",insn
);
3787 /* Move around 24-bit stuff. */
/* NOTE(review): lossy extraction — braces and some statements missing.
   Dispatcher for PSImode moves: flash -> avr_out_lpm; reg-reg with MOVW
   where possible and byte order chosen by register numbering; constant
   -> avr_out_reload_inpsi; memory via avr_out_load_psi/avr_out_store_psi
   (0 stored through __zero_reg__). */
3790 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
3795 if (avr_mem_flash_p (src
)
3796 || avr_mem_flash_p (dest
))
3798 return avr_out_lpm (insn
, op
, plen
);
3801 if (register_operand (dest
, VOIDmode
))
3803 if (register_operand (src
, VOIDmode
)) /* mov r,r */
/* Copy order depends on whether dest overlaps src from above/below.  */
3805 if (true_regnum (dest
) > true_regnum (src
))
3807 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
3810 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
3812 return avr_asm_len ("mov %B0,%B1" CR_TAB
3813 "mov %A0,%A1", op
, plen
, 2);
3818 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
3820 avr_asm_len ("mov %A0,%A1" CR_TAB
3821 "mov %B0,%B1", op
, plen
, -2);
3823 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
3826 else if (CONSTANT_P (src
))
3828 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
3830 else if (MEM_P (src
))
3831 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
3833 else if (MEM_P (dest
))
3838 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3840 return avr_out_store_psi (insn
, xop
, plen
);
3843 fatal_insn ("invalid insn:", insn
);
/* Output asm to store one byte (op[1]) into memory (op[0]).  Cases:
   constant address (OUT for I/O range, else STS), reg+disp with Y fix-ups
   and an X-base path that stages through __tmp_reg__ when the source
   overlaps the pointer, plain STD/ST otherwise.
   NOTE(review): lossy extraction — some original lines are missing. */
3849 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
3853 rtx x
= XEXP (dest
, 0);
3855 if (CONSTANT_ADDRESS_P (x
))
3857 return optimize
> 0 && io_address_operand (x
, QImode
)
3858 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
3859 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
3861 else if (GET_CODE (x
) == PLUS
3862 && REG_P (XEXP (x
, 0))
3863 && CONST_INT_P (XEXP (x
, 1)))
3865 /* memory access by reg+disp */
3867 int disp
= INTVAL (XEXP (x
, 1));
3869 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
3871 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3872 fatal_insn ("incorrect insn:",insn
);
3874 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3875 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3876 "std Y+63,%1" CR_TAB
3877 "sbiw r28,%o0-63", op
, plen
, -3);
3879 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3880 "sbci r29,hi8(-%o0)" CR_TAB
3882 "subi r28,lo8(%o0)" CR_TAB
3883 "sbci r29,hi8(%o0)", op
, plen
, -5);
3885 else if (REGNO (XEXP (x
,0)) == REG_X
)
/* Save the value first if the ADIW on X would clobber it.  */
3887 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
3889 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3890 "adiw r26,%o0" CR_TAB
3891 "st X,__tmp_reg__", op
, plen
, -3);
3895 avr_asm_len ("adiw r26,%o0" CR_TAB
3896 "st X,%1", op
, plen
, -2);
3899 if (!reg_unused_after (insn
, XEXP (x
,0)))
3900 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
3905 return avr_asm_len ("std %0,%1", op
, plen
, -1);
3908 return avr_asm_len ("st %0,%1", op
, plen
, -1);
3912 /* Helper for the next function for XMEGA. It does the same
3913 but with low byte first. */
/* NOTE(review): lossy extraction — braces and some statements missing.
   XMEGA variant of the 16-bit store: writes the LOW byte first (XMEGA
   16-bit I/O registers latch low-then-high), covering constant address
   (OUT/STS), plain base incl. X special cases, reg+disp, pre-dec,
   post-inc. */
3916 avr_out_movhi_mr_r_xmega (rtx insn
, rtx op
[], int *plen
)
3920 rtx base
= XEXP (dest
, 0);
3921 int reg_base
= true_regnum (base
);
3922 int reg_src
= true_regnum (src
);
3924 /* "volatile" forces writing low byte first, even if less efficient,
3925 for correct operation with 16-bit I/O registers like SP. */
3926 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
3928 if (CONSTANT_ADDRESS_P (base
))
3929 return optimize
> 0 && io_address_operand (base
, HImode
)
3930 ? avr_asm_len ("out %i0,%A1" CR_TAB
3931 "out %i0+1,%B1", op
, plen
, -2)
3933 : avr_asm_len ("sts %m0,%A1" CR_TAB
3934 "sts %m0+1,%B1", op
, plen
, -4);
3938 if (reg_base
!= REG_X
)
3939 return avr_asm_len ("st %0,%A1" CR_TAB
3940 "std %0+1,%B1", op
, plen
, -2);
3942 if (reg_src
== REG_X
)
3943 /* "st X+,r26" and "st -X,r26" are undefined. */
3944 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3947 "st X,__tmp_reg__", op
, plen
, -4);
3949 avr_asm_len ("st X+,%A1" CR_TAB
3950 "st X,%B1", op
, plen
, -2);
/* Undo the post-increment only when X is still live.  */
3952 return reg_unused_after (insn
, base
)
3954 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3956 else if (GET_CODE (base
) == PLUS
)
3958 int disp
= INTVAL (XEXP (base
, 1));
3959 reg_base
= REGNO (XEXP (base
, 0));
3960 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3962 if (reg_base
!= REG_Y
)
3963 fatal_insn ("incorrect insn:",insn
);
3965 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
3966 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3967 "std Y+62,%A1" CR_TAB
3968 "std Y+63,%B1" CR_TAB
3969 "sbiw r28,%o0-62", op
, plen
, -4)
3971 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3972 "sbci r29,hi8(-%o0)" CR_TAB
3974 "std Y+1,%B1" CR_TAB
3975 "subi r28,lo8(%o0)" CR_TAB
3976 "sbci r29,hi8(%o0)", op
, plen
, -6);
3979 if (reg_base
!= REG_X
)
3980 return avr_asm_len ("std %A0,%A1" CR_TAB
3981 "std %B0,%B1", op
, plen
, -2);
3983 return reg_src
== REG_X
3984 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3985 "mov __zero_reg__,r27" CR_TAB
3986 "adiw r26,%o0" CR_TAB
3987 "st X+,__tmp_reg__" CR_TAB
3988 "st X,__zero_reg__" CR_TAB
3989 "clr __zero_reg__" CR_TAB
3990 "sbiw r26,%o0+1", op
, plen
, -7)
3992 : avr_asm_len ("adiw r26,%o0" CR_TAB
3995 "sbiw r26,%o0+1", op
, plen
, -4);
3997 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3999 if (!mem_volatile_p
)
4000 return avr_asm_len ("st %0,%B1" CR_TAB
4001 "st %0,%A1", op
, plen
, -2);
/* Volatile pre-dec: move the pointer first, then store low-first.  */
4003 return REGNO (XEXP (base
, 0)) == REG_X
4004 ? avr_asm_len ("sbiw r26,2" CR_TAB
4007 "sbiw r26,1", op
, plen
, -4)
4009 : avr_asm_len ("sbiw %r0,2" CR_TAB
4011 "std %p0+1,%B1", op
, plen
, -3);
4013 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4015 return avr_asm_len ("st %0,%A1" CR_TAB
4016 "st %0,%B1", op
, plen
, -2);
4019 fatal_insn ("unknown move insn:",insn
);
/* Output asm to store a 16-bit register value (op[1]) into memory (op[0]).
   Delegates to avr_out_movhi_mr_r_xmega on XMEGA; otherwise writes the
   HIGH byte first (classic AVR convention for 16-bit I/O registers).
   Covers constant address (OUT/STS), plain base incl. X special cases,
   reg+disp with Y fix-ups, pre-dec, post-inc.
   NOTE(review): lossy extraction — braces and some statements missing. */
4025 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
4029 rtx base
= XEXP (dest
, 0);
4030 int reg_base
= true_regnum (base
);
4031 int reg_src
= true_regnum (src
);
4034 /* "volatile" forces writing high-byte first (no-xmega) resp.
4035 low-byte first (xmega) even if less efficient, for correct
4036 operation with 16-bit I/O registers like. */
4039 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
4041 mem_volatile_p
= MEM_VOLATILE_P (dest
);
4043 if (CONSTANT_ADDRESS_P (base
))
4044 return optimize
> 0 && io_address_operand (base
, HImode
)
4045 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4046 "out %i0,%A1", op
, plen
, -2)
4048 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4049 "sts %m0,%A1", op
, plen
, -4);
4053 if (reg_base
!= REG_X
)
4054 return avr_asm_len ("std %0+1,%B1" CR_TAB
4055 "st %0,%A1", op
, plen
, -2);
4057 if (reg_src
== REG_X
)
4058 /* "st X+,r26" and "st -X,r26" are undefined. */
4059 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4060 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4063 "st X,__tmp_reg__", op
, plen
, -4)
4065 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4067 "st X,__tmp_reg__" CR_TAB
4069 "st X,r26", op
, plen
, -5);
4071 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
4072 ? avr_asm_len ("st X+,%A1" CR_TAB
4073 "st X,%B1", op
, plen
, -2)
4074 : avr_asm_len ("adiw r26,1" CR_TAB
4076 "st -X,%A1", op
, plen
, -3);
4078 else if (GET_CODE (base
) == PLUS
)
4080 int disp
= INTVAL (XEXP (base
, 1));
4081 reg_base
= REGNO (XEXP (base
, 0));
4082 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4084 if (reg_base
!= REG_Y
)
4085 fatal_insn ("incorrect insn:",insn
);
4087 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4088 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4089 "std Y+63,%B1" CR_TAB
4090 "std Y+62,%A1" CR_TAB
4091 "sbiw r28,%o0-62", op
, plen
, -4)
4093 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4094 "sbci r29,hi8(-%o0)" CR_TAB
4095 "std Y+1,%B1" CR_TAB
4097 "subi r28,lo8(%o0)" CR_TAB
4098 "sbci r29,hi8(%o0)", op
, plen
, -6);
4101 if (reg_base
!= REG_X
)
4102 return avr_asm_len ("std %B0,%B1" CR_TAB
4103 "std %A0,%A1", op
, plen
, -2);
4105 return reg_src
== REG_X
4106 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4107 "mov __zero_reg__,r27" CR_TAB
4108 "adiw r26,%o0+1" CR_TAB
4109 "st X,__zero_reg__" CR_TAB
4110 "st -X,__tmp_reg__" CR_TAB
4111 "clr __zero_reg__" CR_TAB
4112 "sbiw r26,%o0", op
, plen
, -7)
4114 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4117 "sbiw r26,%o0", op
, plen
, -4);
4119 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4121 return avr_asm_len ("st %0,%B1" CR_TAB
4122 "st %0,%A1", op
, plen
, -2);
4124 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
/* Non-volatile post-inc can store in natural low/high order.  */
4126 if (!mem_volatile_p
)
4127 return avr_asm_len ("st %0,%A1" CR_TAB
4128 "st %0,%B1", op
, plen
, -2);
4130 return REGNO (XEXP (base
, 0)) == REG_X
4131 ? avr_asm_len ("adiw r26,1" CR_TAB
4134 "adiw r26,2", op
, plen
, -4)
4136 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4138 "adiw %r0,2", op
, plen
, -3);
4140 fatal_insn ("unknown move insn:",insn
);
4144 /* Return 1 if frame pointer for current function required. */
/* A frame pointer is needed for alloca, setjmp, nonlocal labels, when no
   argument registers are used (crtl->args.info.nregs == 0), or when the
   function has a nonzero frame.  NOTE(review): lossy extraction — the
   return type/braces are missing from this text. */
4147 avr_frame_pointer_required_p (void)
4149 return (cfun
->calls_alloca
4150 || cfun
->calls_setjmp
4151 || cfun
->has_nonlocal_label
4152 || crtl
->args
.info
.nregs
== 0
4153 || get_frame_size () > 0);
4156 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Looks at the next real insn: if it is a conditional jump whose SET_SRC
   is an IF_THEN_ELSE, return the RTX code of its condition.
   NOTE(review): lossy extraction — the fallthrough return is missing
   from this text. */
4159 compare_condition (rtx insn
)
4161 rtx next
= next_real_insn (insn
);
4163 if (next
&& JUMP_P (next
))
4165 rtx pat
= PATTERN (next
);
4166 rtx src
= SET_SRC (pat
);
4168 if (IF_THEN_ELSE
== GET_CODE (src
))
4169 return GET_CODE (XEXP (src
, 0));
4176 /* Returns true iff INSN is a tst insn that only tests the sign. */
/* True when the following branch condition is GE or LT, i.e. only the
   sign bit of the compared value matters.  */
4179 compare_sign_p (rtx insn
)
4181 RTX_CODE cond
= compare_condition (insn
);
4182 return (cond
== GE
|| cond
== LT
);
4186 /* Returns true iff the next insn is a JUMP_INSN with a condition
4187 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself when it is one of GT/GTU/LE/LEU
   (truthy), or 0 otherwise — callers can use it both as a predicate
   and to recover the code.  */
4190 compare_diff_p (rtx insn
)
4192 RTX_CODE cond
= compare_condition (insn
);
4193 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4196 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
/* True when only (in)equality is tested, allowing cheaper sequences
   (e.g. OR-ing bytes instead of a full compare).  */
4199 compare_eq_p (rtx insn
)
4201 RTX_CODE cond
= compare_condition (insn
);
4202 return (cond
== EQ
|| cond
== NE
);
4206 /* Output compare instruction
4208 compare (XOP[0], XOP[1])
4210 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4211 XOP[2] is an 8-bit scratch register as needed.
4213 PLEN == NULL: Output instructions.
4214 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4215 Don't output anything. */
/* NOTE(review): lossy extraction — braces and some statements are
   missing from this text; comments below stick to visible code. */
4218 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4220 /* Register to compare and value to compare against. */
4224 /* MODE of the comparison. */
4225 enum machine_mode mode
;
4227 /* Number of bytes to operate on. */
4228 int i
, n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
4230 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4231 int clobber_val
= -1;
4233 /* Map fixed mode operands to integer operands with the same binary
4234 representation. They are easier to handle in the remainder. */
4236 if (CONST_FIXED_P (xval
))
4238 xreg
= avr_to_int_mode (xop
[0]);
4239 xval
= avr_to_int_mode (xop
[1]);
4242 mode
= GET_MODE (xreg
);
4244 gcc_assert (REG_P (xreg
));
4245 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4246 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4251 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4252 against 0 by ORing the bytes. This is one instruction shorter.
4253 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4254 and therefore don't use this. */
4256 if (!test_hard_reg_class (LD_REGS
, xreg
)
4257 && compare_eq_p (insn
)
4258 && reg_unused_after (insn
, xreg
))
/* Compare against +1: decrement low byte, then OR all bytes.  */
4260 if (xval
== const1_rtx
)
4262 avr_asm_len ("dec %A0" CR_TAB
4263 "or %A0,%B0", xop
, plen
, 2);
4266 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4269 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
/* Compare against -1: AND all bytes, then COM — zero iff all 0xff.  */
4273 else if (xval
== constm1_rtx
)
4276 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4279 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4281 return avr_asm_len ("and %A0,%B0" CR_TAB
4282 "com %A0", xop
, plen
, 2);
4286 for (i
= 0; i
< n_bytes
; i
++)
4288 /* We compare byte-wise. */
4289 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4290 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4292 /* 8-bit value to compare with this byte. */
4293 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4295 /* Registers R16..R31 can operate with immediate. */
4296 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4299 xop
[1] = gen_int_mode (val8
, QImode
);
4301 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4304 && test_hard_reg_class (ADDW_REGS
, reg8
))
4306 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4308 if (IN_RANGE (val16
, 0, 63)
4310 || reg_unused_after (insn
, xreg
)))
4312 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
/* Negative 16-bit value with EQ/NE and dead register: ADIW of the
   negated value works too.  */
4318 && IN_RANGE (val16
, -63, -1)
4319 && compare_eq_p (insn
)
4320 && reg_unused_after (insn
, xreg
))
4322 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4326 /* Comparing against 0 is easy. */
4331 ? "cp %0,__zero_reg__"
4332 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4336 /* Upper registers can compare and subtract-with-carry immediates.
4337 Notice that compare instructions do the same as respective subtract
4338 instruction; the only difference is that comparisons don't write
4339 the result back to the target register. */
4345 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4348 else if (reg_unused_after (insn
, xreg
))
4350 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4355 /* Must load the value into the scratch register. */
4357 gcc_assert (REG_P (xop
[2]));
/* Re-load the scratch only when the byte value changes.  */
4359 if (clobber_val
!= (int) val8
)
4360 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4361 clobber_val
= (int) val8
;
4365 : "cpc %0,%2", xop
, plen
, 1);
4372 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4375 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4379 xop
[0] = gen_rtx_REG (DImode
, 18);
4383 return avr_out_compare (insn
, xop
, plen
);
4386 /* Output test instruction for HImode. */
4389 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4391 if (compare_sign_p (insn
))
4393 avr_asm_len ("tst %B0", op
, plen
, -1);
4395 else if (reg_unused_after (insn
, op
[0])
4396 && compare_eq_p (insn
))
4398 /* Faster than sbiw if we can clobber the operand. */
4399 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4403 avr_out_compare (insn
, op
, plen
);
4410 /* Output test instruction for PSImode. */
4413 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4415 if (compare_sign_p (insn
))
4417 avr_asm_len ("tst %C0", op
, plen
, -1);
4419 else if (reg_unused_after (insn
, op
[0])
4420 && compare_eq_p (insn
))
4422 /* Faster than sbiw if we can clobber the operand. */
4423 avr_asm_len ("or %A0,%B0" CR_TAB
4424 "or %A0,%C0", op
, plen
, -2);
4428 avr_out_compare (insn
, op
, plen
);
4435 /* Output test instruction for SImode. */
4438 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4440 if (compare_sign_p (insn
))
4442 avr_asm_len ("tst %D0", op
, plen
, -1);
4444 else if (reg_unused_after (insn
, op
[0])
4445 && compare_eq_p (insn
))
4447 /* Faster than sbiw if we can clobber the operand. */
4448 avr_asm_len ("or %A0,%B0" CR_TAB
4450 "or %A0,%D0", op
, plen
, -3);
4454 avr_out_compare (insn
, op
, plen
);
4461 /* Generate asm equivalent for various shifts. This only handles cases
4462 that are not already carefully hand-optimized in ?sh??i3_out.
4464 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4465 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4466 OPERANDS[3] is a QImode scratch register from LD regs if
4467 available and SCRATCH, otherwise (no scratch available)
4469 TEMPL is an assembler template that shifts by one position.
4470 T_LEN is the length of this template. */
4473 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4474 int *plen
, int t_len
)
4476 bool second_label
= true;
4477 bool saved_in_tmp
= false;
4478 bool use_zero_reg
= false;
4481 op
[0] = operands
[0];
4482 op
[1] = operands
[1];
4483 op
[2] = operands
[2];
4484 op
[3] = operands
[3];
4489 if (CONST_INT_P (operands
[2]))
4491 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4492 && REG_P (operands
[3]));
4493 int count
= INTVAL (operands
[2]);
4494 int max_len
= 10; /* If larger than this, always use a loop. */
4499 if (count
< 8 && !scratch
)
4500 use_zero_reg
= true;
4503 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4505 if (t_len
* count
<= max_len
)
4507 /* Output shifts inline with no loop - faster. */
4510 avr_asm_len (templ
, op
, plen
, t_len
);
4517 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4519 else if (use_zero_reg
)
4521 /* Hack to save one word: use __zero_reg__ as loop counter.
4522 Set one bit, then shift in a loop until it is 0 again. */
4524 op
[3] = zero_reg_rtx
;
4526 avr_asm_len ("set" CR_TAB
4527 "bld %3,%2-1", op
, plen
, 2);
4531 /* No scratch register available, use one from LD_REGS (saved in
4532 __tmp_reg__) that doesn't overlap with registers to shift. */
4534 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4535 op
[4] = tmp_reg_rtx
;
4536 saved_in_tmp
= true;
4538 avr_asm_len ("mov %4,%3" CR_TAB
4539 "ldi %3,%2", op
, plen
, 2);
4542 second_label
= false;
4544 else if (MEM_P (op
[2]))
4548 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4551 out_movqi_r_mr (insn
, op_mov
, plen
);
4553 else if (register_operand (op
[2], QImode
))
4557 if (!reg_unused_after (insn
, op
[2])
4558 || reg_overlap_mentioned_p (op
[0], op
[2]))
4560 op
[3] = tmp_reg_rtx
;
4561 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4565 fatal_insn ("bad shift insn:", insn
);
4568 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4570 avr_asm_len ("1:", op
, plen
, 0);
4571 avr_asm_len (templ
, op
, plen
, t_len
);
4574 avr_asm_len ("2:", op
, plen
, 0);
4576 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4577 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4580 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4584 /* 8bit shift left ((char)x << i) */
4587 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4589 if (GET_CODE (operands
[2]) == CONST_INT
)
4596 switch (INTVAL (operands
[2]))
4599 if (INTVAL (operands
[2]) < 8)
4611 return ("lsl %0" CR_TAB
4616 return ("lsl %0" CR_TAB
4621 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4624 return ("swap %0" CR_TAB
4628 return ("lsl %0" CR_TAB
4634 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4637 return ("swap %0" CR_TAB
4642 return ("lsl %0" CR_TAB
4649 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4652 return ("swap %0" CR_TAB
4658 return ("lsl %0" CR_TAB
4667 return ("ror %0" CR_TAB
4672 else if (CONSTANT_P (operands
[2]))
4673 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4675 out_shift_with_cnt ("lsl %0",
4676 insn
, operands
, len
, 1);
4681 /* 16bit shift left ((short)x << i) */
4684 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4686 if (GET_CODE (operands
[2]) == CONST_INT
)
4688 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4689 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4696 switch (INTVAL (operands
[2]))
4699 if (INTVAL (operands
[2]) < 16)
4703 return ("clr %B0" CR_TAB
4707 if (optimize_size
&& scratch
)
4712 return ("swap %A0" CR_TAB
4714 "andi %B0,0xf0" CR_TAB
4715 "eor %B0,%A0" CR_TAB
4716 "andi %A0,0xf0" CR_TAB
4722 return ("swap %A0" CR_TAB
4724 "ldi %3,0xf0" CR_TAB
4726 "eor %B0,%A0" CR_TAB
4730 break; /* optimize_size ? 6 : 8 */
4734 break; /* scratch ? 5 : 6 */
4738 return ("lsl %A0" CR_TAB
4742 "andi %B0,0xf0" CR_TAB
4743 "eor %B0,%A0" CR_TAB
4744 "andi %A0,0xf0" CR_TAB
4750 return ("lsl %A0" CR_TAB
4754 "ldi %3,0xf0" CR_TAB
4756 "eor %B0,%A0" CR_TAB
4764 break; /* scratch ? 5 : 6 */
4766 return ("clr __tmp_reg__" CR_TAB
4769 "ror __tmp_reg__" CR_TAB
4772 "ror __tmp_reg__" CR_TAB
4773 "mov %B0,%A0" CR_TAB
4774 "mov %A0,__tmp_reg__");
4778 return ("lsr %B0" CR_TAB
4779 "mov %B0,%A0" CR_TAB
4785 return *len
= 2, ("mov %B0,%A1" CR_TAB
4790 return ("mov %B0,%A0" CR_TAB
4796 return ("mov %B0,%A0" CR_TAB
4803 return ("mov %B0,%A0" CR_TAB
4813 return ("mov %B0,%A0" CR_TAB
4821 return ("mov %B0,%A0" CR_TAB
4824 "ldi %3,0xf0" CR_TAB
4828 return ("mov %B0,%A0" CR_TAB
4839 return ("mov %B0,%A0" CR_TAB
4845 if (AVR_HAVE_MUL
&& scratch
)
4848 return ("ldi %3,0x20" CR_TAB
4852 "clr __zero_reg__");
4854 if (optimize_size
&& scratch
)
4859 return ("mov %B0,%A0" CR_TAB
4863 "ldi %3,0xe0" CR_TAB
4869 return ("set" CR_TAB
4874 "clr __zero_reg__");
4877 return ("mov %B0,%A0" CR_TAB
4886 if (AVR_HAVE_MUL
&& ldi_ok
)
4889 return ("ldi %B0,0x40" CR_TAB
4890 "mul %A0,%B0" CR_TAB
4893 "clr __zero_reg__");
4895 if (AVR_HAVE_MUL
&& scratch
)
4898 return ("ldi %3,0x40" CR_TAB
4902 "clr __zero_reg__");
4904 if (optimize_size
&& ldi_ok
)
4907 return ("mov %B0,%A0" CR_TAB
4908 "ldi %A0,6" "\n1:\t"
4913 if (optimize_size
&& scratch
)
4916 return ("clr %B0" CR_TAB
4925 return ("clr %B0" CR_TAB
4932 out_shift_with_cnt ("lsl %A0" CR_TAB
4933 "rol %B0", insn
, operands
, len
, 2);
4938 /* 24-bit shift left */
4941 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
4946 if (CONST_INT_P (op
[2]))
4948 switch (INTVAL (op
[2]))
4951 if (INTVAL (op
[2]) < 24)
4954 return avr_asm_len ("clr %A0" CR_TAB
4956 "clr %C0", op
, plen
, 3);
4960 int reg0
= REGNO (op
[0]);
4961 int reg1
= REGNO (op
[1]);
4964 return avr_asm_len ("mov %C0,%B1" CR_TAB
4965 "mov %B0,%A1" CR_TAB
4966 "clr %A0", op
, plen
, 3);
4968 return avr_asm_len ("clr %A0" CR_TAB
4969 "mov %B0,%A1" CR_TAB
4970 "mov %C0,%B1", op
, plen
, 3);
4975 int reg0
= REGNO (op
[0]);
4976 int reg1
= REGNO (op
[1]);
4978 if (reg0
+ 2 != reg1
)
4979 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
4981 return avr_asm_len ("clr %B0" CR_TAB
4982 "clr %A0", op
, plen
, 2);
4986 return avr_asm_len ("clr %C0" CR_TAB
4990 "clr %A0", op
, plen
, 5);
4994 out_shift_with_cnt ("lsl %A0" CR_TAB
4996 "rol %C0", insn
, op
, plen
, 3);
5001 /* 32bit shift left ((long)x << i) */
5004 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
5006 if (GET_CODE (operands
[2]) == CONST_INT
)
5014 switch (INTVAL (operands
[2]))
5017 if (INTVAL (operands
[2]) < 32)
5021 return *len
= 3, ("clr %D0" CR_TAB
5025 return ("clr %D0" CR_TAB
5032 int reg0
= true_regnum (operands
[0]);
5033 int reg1
= true_regnum (operands
[1]);
5036 return ("mov %D0,%C1" CR_TAB
5037 "mov %C0,%B1" CR_TAB
5038 "mov %B0,%A1" CR_TAB
5041 return ("clr %A0" CR_TAB
5042 "mov %B0,%A1" CR_TAB
5043 "mov %C0,%B1" CR_TAB
5049 int reg0
= true_regnum (operands
[0]);
5050 int reg1
= true_regnum (operands
[1]);
5051 if (reg0
+ 2 == reg1
)
5052 return *len
= 2, ("clr %B0" CR_TAB
5055 return *len
= 3, ("movw %C0,%A1" CR_TAB
5059 return *len
= 4, ("mov %C0,%A1" CR_TAB
5060 "mov %D0,%B1" CR_TAB
5067 return ("mov %D0,%A1" CR_TAB
5074 return ("clr %D0" CR_TAB
5083 out_shift_with_cnt ("lsl %A0" CR_TAB
5086 "rol %D0", insn
, operands
, len
, 4);
5090 /* 8bit arithmetic shift right ((signed char)x >> i) */
5093 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
5095 if (GET_CODE (operands
[2]) == CONST_INT
)
5102 switch (INTVAL (operands
[2]))
5110 return ("asr %0" CR_TAB
5115 return ("asr %0" CR_TAB
5121 return ("asr %0" CR_TAB
5128 return ("asr %0" CR_TAB
5136 return ("bst %0,6" CR_TAB
5142 if (INTVAL (operands
[2]) < 8)
5149 return ("lsl %0" CR_TAB
5153 else if (CONSTANT_P (operands
[2]))
5154 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5156 out_shift_with_cnt ("asr %0",
5157 insn
, operands
, len
, 1);
5162 /* 16bit arithmetic shift right ((signed short)x >> i) */
5165 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
5167 if (GET_CODE (operands
[2]) == CONST_INT
)
5169 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5170 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5177 switch (INTVAL (operands
[2]))
5181 /* XXX try to optimize this too? */
5186 break; /* scratch ? 5 : 6 */
5188 return ("mov __tmp_reg__,%A0" CR_TAB
5189 "mov %A0,%B0" CR_TAB
5190 "lsl __tmp_reg__" CR_TAB
5192 "sbc %B0,%B0" CR_TAB
5193 "lsl __tmp_reg__" CR_TAB
5199 return ("lsl %A0" CR_TAB
5200 "mov %A0,%B0" CR_TAB
5206 int reg0
= true_regnum (operands
[0]);
5207 int reg1
= true_regnum (operands
[1]);
5210 return *len
= 3, ("mov %A0,%B0" CR_TAB
5214 return *len
= 4, ("mov %A0,%B1" CR_TAB
5222 return ("mov %A0,%B0" CR_TAB
5224 "sbc %B0,%B0" CR_TAB
5229 return ("mov %A0,%B0" CR_TAB
5231 "sbc %B0,%B0" CR_TAB
5236 if (AVR_HAVE_MUL
&& ldi_ok
)
5239 return ("ldi %A0,0x20" CR_TAB
5240 "muls %B0,%A0" CR_TAB
5242 "sbc %B0,%B0" CR_TAB
5243 "clr __zero_reg__");
5245 if (optimize_size
&& scratch
)
5248 return ("mov %A0,%B0" CR_TAB
5250 "sbc %B0,%B0" CR_TAB
5256 if (AVR_HAVE_MUL
&& ldi_ok
)
5259 return ("ldi %A0,0x10" CR_TAB
5260 "muls %B0,%A0" CR_TAB
5262 "sbc %B0,%B0" CR_TAB
5263 "clr __zero_reg__");
5265 if (optimize_size
&& scratch
)
5268 return ("mov %A0,%B0" CR_TAB
5270 "sbc %B0,%B0" CR_TAB
5277 if (AVR_HAVE_MUL
&& ldi_ok
)
5280 return ("ldi %A0,0x08" CR_TAB
5281 "muls %B0,%A0" CR_TAB
5283 "sbc %B0,%B0" CR_TAB
5284 "clr __zero_reg__");
5287 break; /* scratch ? 5 : 7 */
5289 return ("mov %A0,%B0" CR_TAB
5291 "sbc %B0,%B0" CR_TAB
5300 return ("lsl %B0" CR_TAB
5301 "sbc %A0,%A0" CR_TAB
5303 "mov %B0,%A0" CR_TAB
5307 if (INTVAL (operands
[2]) < 16)
5313 return *len
= 3, ("lsl %B0" CR_TAB
5314 "sbc %A0,%A0" CR_TAB
5319 out_shift_with_cnt ("asr %B0" CR_TAB
5320 "ror %A0", insn
, operands
, len
, 2);
5325 /* 24-bit arithmetic shift right */
5328 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5330 int dest
= REGNO (op
[0]);
5331 int src
= REGNO (op
[1]);
5333 if (CONST_INT_P (op
[2]))
5338 switch (INTVAL (op
[2]))
5342 return avr_asm_len ("mov %A0,%B1" CR_TAB
5343 "mov %B0,%C1" CR_TAB
5346 "dec %C0", op
, plen
, 5);
5348 return avr_asm_len ("clr %C0" CR_TAB
5351 "mov %B0,%C1" CR_TAB
5352 "mov %A0,%B1", op
, plen
, 5);
5355 if (dest
!= src
+ 2)
5356 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5358 return avr_asm_len ("clr %B0" CR_TAB
5361 "mov %C0,%B0", op
, plen
, 4);
5364 if (INTVAL (op
[2]) < 24)
5370 return avr_asm_len ("lsl %C0" CR_TAB
5371 "sbc %A0,%A0" CR_TAB
5372 "mov %B0,%A0" CR_TAB
5373 "mov %C0,%A0", op
, plen
, 4);
5377 out_shift_with_cnt ("asr %C0" CR_TAB
5379 "ror %A0", insn
, op
, plen
, 3);
5384 /* 32bit arithmetic shift right ((signed long)x >> i) */
5387 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5389 if (GET_CODE (operands
[2]) == CONST_INT
)
5397 switch (INTVAL (operands
[2]))
5401 int reg0
= true_regnum (operands
[0]);
5402 int reg1
= true_regnum (operands
[1]);
5405 return ("mov %A0,%B1" CR_TAB
5406 "mov %B0,%C1" CR_TAB
5407 "mov %C0,%D1" CR_TAB
5412 return ("clr %D0" CR_TAB
5415 "mov %C0,%D1" CR_TAB
5416 "mov %B0,%C1" CR_TAB
5422 int reg0
= true_regnum (operands
[0]);
5423 int reg1
= true_regnum (operands
[1]);
5425 if (reg0
== reg1
+ 2)
5426 return *len
= 4, ("clr %D0" CR_TAB
5431 return *len
= 5, ("movw %A0,%C1" CR_TAB
5437 return *len
= 6, ("mov %B0,%D1" CR_TAB
5438 "mov %A0,%C1" CR_TAB
5446 return *len
= 6, ("mov %A0,%D1" CR_TAB
5450 "mov %B0,%D0" CR_TAB
5454 if (INTVAL (operands
[2]) < 32)
5461 return *len
= 4, ("lsl %D0" CR_TAB
5462 "sbc %A0,%A0" CR_TAB
5463 "mov %B0,%A0" CR_TAB
5466 return *len
= 5, ("lsl %D0" CR_TAB
5467 "sbc %A0,%A0" CR_TAB
5468 "mov %B0,%A0" CR_TAB
5469 "mov %C0,%A0" CR_TAB
5474 out_shift_with_cnt ("asr %D0" CR_TAB
5477 "ror %A0", insn
, operands
, len
, 4);
5481 /* 8bit logic shift right ((unsigned char)x >> i) */
5484 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5486 if (GET_CODE (operands
[2]) == CONST_INT
)
5493 switch (INTVAL (operands
[2]))
5496 if (INTVAL (operands
[2]) < 8)
5508 return ("lsr %0" CR_TAB
5512 return ("lsr %0" CR_TAB
5517 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5520 return ("swap %0" CR_TAB
5524 return ("lsr %0" CR_TAB
5530 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5533 return ("swap %0" CR_TAB
5538 return ("lsr %0" CR_TAB
5545 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5548 return ("swap %0" CR_TAB
5554 return ("lsr %0" CR_TAB
5563 return ("rol %0" CR_TAB
5568 else if (CONSTANT_P (operands
[2]))
5569 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5571 out_shift_with_cnt ("lsr %0",
5572 insn
, operands
, len
, 1);
5576 /* 16bit logic shift right ((unsigned short)x >> i) */
5579 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5581 if (GET_CODE (operands
[2]) == CONST_INT
)
5583 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5584 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5591 switch (INTVAL (operands
[2]))
5594 if (INTVAL (operands
[2]) < 16)
5598 return ("clr %B0" CR_TAB
5602 if (optimize_size
&& scratch
)
5607 return ("swap %B0" CR_TAB
5609 "andi %A0,0x0f" CR_TAB
5610 "eor %A0,%B0" CR_TAB
5611 "andi %B0,0x0f" CR_TAB
5617 return ("swap %B0" CR_TAB
5619 "ldi %3,0x0f" CR_TAB
5621 "eor %A0,%B0" CR_TAB
5625 break; /* optimize_size ? 6 : 8 */
5629 break; /* scratch ? 5 : 6 */
5633 return ("lsr %B0" CR_TAB
5637 "andi %A0,0x0f" CR_TAB
5638 "eor %A0,%B0" CR_TAB
5639 "andi %B0,0x0f" CR_TAB
5645 return ("lsr %B0" CR_TAB
5649 "ldi %3,0x0f" CR_TAB
5651 "eor %A0,%B0" CR_TAB
5659 break; /* scratch ? 5 : 6 */
5661 return ("clr __tmp_reg__" CR_TAB
5664 "rol __tmp_reg__" CR_TAB
5667 "rol __tmp_reg__" CR_TAB
5668 "mov %A0,%B0" CR_TAB
5669 "mov %B0,__tmp_reg__");
5673 return ("lsl %A0" CR_TAB
5674 "mov %A0,%B0" CR_TAB
5676 "sbc %B0,%B0" CR_TAB
5680 return *len
= 2, ("mov %A0,%B1" CR_TAB
5685 return ("mov %A0,%B0" CR_TAB
5691 return ("mov %A0,%B0" CR_TAB
5698 return ("mov %A0,%B0" CR_TAB
5708 return ("mov %A0,%B0" CR_TAB
5716 return ("mov %A0,%B0" CR_TAB
5719 "ldi %3,0x0f" CR_TAB
5723 return ("mov %A0,%B0" CR_TAB
5734 return ("mov %A0,%B0" CR_TAB
5740 if (AVR_HAVE_MUL
&& scratch
)
5743 return ("ldi %3,0x08" CR_TAB
5747 "clr __zero_reg__");
5749 if (optimize_size
&& scratch
)
5754 return ("mov %A0,%B0" CR_TAB
5758 "ldi %3,0x07" CR_TAB
5764 return ("set" CR_TAB
5769 "clr __zero_reg__");
5772 return ("mov %A0,%B0" CR_TAB
5781 if (AVR_HAVE_MUL
&& ldi_ok
)
5784 return ("ldi %A0,0x04" CR_TAB
5785 "mul %B0,%A0" CR_TAB
5788 "clr __zero_reg__");
5790 if (AVR_HAVE_MUL
&& scratch
)
5793 return ("ldi %3,0x04" CR_TAB
5797 "clr __zero_reg__");
5799 if (optimize_size
&& ldi_ok
)
5802 return ("mov %A0,%B0" CR_TAB
5803 "ldi %B0,6" "\n1:\t"
5808 if (optimize_size
&& scratch
)
5811 return ("clr %A0" CR_TAB
5820 return ("clr %A0" CR_TAB
5827 out_shift_with_cnt ("lsr %B0" CR_TAB
5828 "ror %A0", insn
, operands
, len
, 2);
5833 /* 24-bit logic shift right */
5836 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5838 int dest
= REGNO (op
[0]);
5839 int src
= REGNO (op
[1]);
5841 if (CONST_INT_P (op
[2]))
5846 switch (INTVAL (op
[2]))
5850 return avr_asm_len ("mov %A0,%B1" CR_TAB
5851 "mov %B0,%C1" CR_TAB
5852 "clr %C0", op
, plen
, 3);
5854 return avr_asm_len ("clr %C0" CR_TAB
5855 "mov %B0,%C1" CR_TAB
5856 "mov %A0,%B1", op
, plen
, 3);
5859 if (dest
!= src
+ 2)
5860 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5862 return avr_asm_len ("clr %B0" CR_TAB
5863 "clr %C0", op
, plen
, 2);
5866 if (INTVAL (op
[2]) < 24)
5872 return avr_asm_len ("clr %A0" CR_TAB
5876 "clr %C0", op
, plen
, 5);
5880 out_shift_with_cnt ("lsr %C0" CR_TAB
5882 "ror %A0", insn
, op
, plen
, 3);
5887 /* 32bit logic shift right ((unsigned int)x >> i) */
5890 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
5892 if (GET_CODE (operands
[2]) == CONST_INT
)
5900 switch (INTVAL (operands
[2]))
5903 if (INTVAL (operands
[2]) < 32)
5907 return *len
= 3, ("clr %D0" CR_TAB
5911 return ("clr %D0" CR_TAB
5918 int reg0
= true_regnum (operands
[0]);
5919 int reg1
= true_regnum (operands
[1]);
5922 return ("mov %A0,%B1" CR_TAB
5923 "mov %B0,%C1" CR_TAB
5924 "mov %C0,%D1" CR_TAB
5927 return ("clr %D0" CR_TAB
5928 "mov %C0,%D1" CR_TAB
5929 "mov %B0,%C1" CR_TAB
5935 int reg0
= true_regnum (operands
[0]);
5936 int reg1
= true_regnum (operands
[1]);
5938 if (reg0
== reg1
+ 2)
5939 return *len
= 2, ("clr %C0" CR_TAB
5942 return *len
= 3, ("movw %A0,%C1" CR_TAB
5946 return *len
= 4, ("mov %B0,%D1" CR_TAB
5947 "mov %A0,%C1" CR_TAB
5953 return *len
= 4, ("mov %A0,%D1" CR_TAB
5960 return ("clr %A0" CR_TAB
5969 out_shift_with_cnt ("lsr %D0" CR_TAB
5972 "ror %A0", insn
, operands
, len
, 4);
5977 /* Output addition of register XOP[0] and compile time constant XOP[2].
5978 CODE == PLUS: perform addition by using ADD instructions or
5979 CODE == MINUS: perform addition by using SUB instructions:
5981 XOP[0] = XOP[0] + XOP[2]
5983 Or perform addition/subtraction with register XOP[2] depending on CODE:
5985 XOP[0] = XOP[0] +/- XOP[2]
5987 If PLEN == NULL, print assembler instructions to perform the operation;
5988 otherwise, set *PLEN to the length of the instruction sequence (in words)
5989 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
5990 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
5992 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
5993 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
5994 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
5995 the subtrahend in the original insn, provided it is a compile time constant.
5996 In all other cases, SIGN is 0.
6001 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
6002 enum rtx_code code_sat
= UNKNOWN
, int sign
= 0)
6004 /* MODE of the operation. */
6005 enum machine_mode mode
= GET_MODE (xop
[0]);
6007 /* INT_MODE of the same size. */
6008 enum machine_mode imode
= int_mode_for_mode (mode
);
6010 /* Number of bytes to operate on. */
6011 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6013 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6014 int clobber_val
= -1;
6016 /* op[0]: 8-bit destination register
6017 op[1]: 8-bit const int
6018 op[2]: 8-bit scratch register */
6021 /* Started the operation? Before starting the operation we may skip
6022 adding 0. This is no more true after the operation started because
6023 carry must be taken into account. */
6024 bool started
= false;
6026 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6029 /* Output a BRVC instruction. Only needed with saturation. */
6030 bool out_brvc
= true;
6037 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6039 for (i
= 0; i
< n_bytes
; i
++)
6041 /* We operate byte-wise on the destination. */
6042 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6043 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6046 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
6049 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
6053 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6055 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
6064 /* Except in the case of ADIW with 16-bit register (see below)
6065 addition does not set cc0 in a usable way. */
6067 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
6069 if (CONST_FIXED_P (xval
))
6070 xval
= avr_to_int_mode (xval
);
6072 /* Adding/Subtracting zero is a no-op. */
6074 if (xval
== const0_rtx
)
6081 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
6085 if (SS_PLUS
== code_sat
&& MINUS
== code
6087 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
6088 & GET_MODE_MASK (QImode
)))
6090 /* We compute x + 0x80 by means of SUB instructions. We negated the
6091 constant subtrahend above and are left with x - (-128) so that we
6092 need something like SUBI r,128 which does not exist because SUBI sets
6093 V according to the sign of the subtrahend. Notice the only case
6094 where this must be done is when NEG overflowed in case [2s] because
6095 the V computation needs the right sign of the subtrahend. */
6097 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6099 avr_asm_len ("subi %0,128" CR_TAB
6100 "brmi 0f", &msb
, plen
, 2);
6106 for (i
= 0; i
< n_bytes
; i
++)
6108 /* We operate byte-wise on the destination. */
6109 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6110 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
6112 /* 8-bit value to operate with this byte. */
6113 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6115 /* Registers R16..R31 can operate with immediate. */
6116 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6119 op
[1] = gen_int_mode (val8
, QImode
);
6121 /* To get usable cc0 no low-bytes must have been skipped. */
6129 && test_hard_reg_class (ADDW_REGS
, reg8
))
6131 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
6132 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
6134 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6135 i.e. operate word-wise. */
6142 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
6145 if (n_bytes
== 2 && PLUS
== code
)
6157 avr_asm_len (code
== PLUS
6158 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6162 else if ((val8
== 1 || val8
== 0xff)
6163 && UNKNOWN
== code_sat
6165 && i
== n_bytes
- 1)
6167 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
6176 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
6178 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
6180 /* This belongs to the x + 0x80 corner case. The code with
6181 ADD instruction is not smaller, thus make this case
6182 expensive so that the caller won't pick it. */
6188 if (clobber_val
!= (int) val8
)
6189 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6190 clobber_val
= (int) val8
;
6192 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
6199 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
6202 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6204 if (clobber_val
!= (int) val8
)
6205 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6206 clobber_val
= (int) val8
;
6208 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
6220 } /* for all sub-bytes */
6224 if (UNKNOWN
== code_sat
)
6227 *pcc
= (int) CC_CLOBBER
;
6229 /* Vanilla addition/subtraction is done. We are left with saturation.
6231 We have to compute A = A <op> B where A is a register and
6232 B is a register or a non-zero compile time constant CONST.
6233 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6234 B stands for the original operand $2 in INSN. In the case of B = CONST
6235 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6237 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6241 operation | code | sat if | b is | sat value | case
6242 -----------------+-------+----------+--------------+-----------+-------
6243 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6244 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6245 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6246 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6250 operation | code | sat if | b is | sat value | case
6251 -----------------+-------+----------+--------------+-----------+-------
6252 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6253 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6254 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6255 - as a + (-b) | add | V == 1 | const | s- | [4s]
6257 s+ = b < 0 ? -0x80 : 0x7f
6258 s- = b < 0 ? 0x7f : -0x80
6260 The cases a - b actually perform a - (-(-b)) if B is CONST.
6263 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6265 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
6268 bool need_copy
= true;
6269 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
6280 avr_asm_len ("brvc 0f", op
, plen
, 1);
6282 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6287 avr_asm_len ("ldi %0,0x7f" CR_TAB
6288 "adc %0,__zero_reg__", op
, plen
, 2);
6290 avr_asm_len ("ldi %0,0x7f" CR_TAB
6291 "ldi %1,0xff" CR_TAB
6292 "adc %1,__zero_reg__" CR_TAB
6293 "adc %0,__zero_reg__", op
, plen
, 4);
6295 else if (sign
== 0 && PLUS
== code
)
6299 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6302 avr_asm_len ("ldi %0,0x80" CR_TAB
6304 "dec %0", op
, plen
, 3);
6306 avr_asm_len ("ldi %0,0x80" CR_TAB
6309 "sbci %0,0", op
, plen
, 4);
6311 else if (sign
== 0 && MINUS
== code
)
6315 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6318 avr_asm_len ("ldi %0,0x7f" CR_TAB
6320 "inc %0", op
, plen
, 3);
6322 avr_asm_len ("ldi %0,0x7f" CR_TAB
6325 "sbci %0,-1", op
, plen
, 4);
6327 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
6329 /* [1s,const,B < 0] [2s,B < 0] */
6330 /* [3s,const,B > 0] [4s,B > 0] */
6334 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6338 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
6339 if (n_bytes
> 1 && need_copy
)
6340 avr_asm_len ("clr %1", op
, plen
, 1);
6342 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
6344 /* [1s,const,B > 0] [2s,B > 0] */
6345 /* [3s,const,B < 0] [4s,B < 0] */
6349 avr_asm_len ("sec" CR_TAB
6350 "%~call __sbc_8", op
, plen
, 1 + len_call
);
6354 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
6355 if (n_bytes
> 1 && need_copy
)
6356 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
6366 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
6371 avr_asm_len ("sec", op
, plen
, 1);
6372 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
6378 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
6379 avr_asm_len ("sec" CR_TAB
"sbc %0,%0", op
, plen
, 2);
6381 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
6384 break; /* US_PLUS */
6389 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
6393 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6397 avr_asm_len ("clr %0", op
, plen
, 1);
6402 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6403 Now copy the right value to the LSBs. */
6405 if (need_copy
&& n_bytes
> 1)
6407 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
6409 avr_asm_len ("mov %1,%0", op
, plen
, 1);
6415 avr_asm_len ("movw %0,%1", op
, plen
, 1);
6417 avr_asm_len ("mov %A0,%1" CR_TAB
6418 "mov %B0,%1", op
, plen
, 2);
6421 else if (n_bytes
> 2)
6424 avr_asm_len ("mov %A0,%1" CR_TAB
6425 "mov %B0,%1", op
, plen
, 2);
6429 if (need_copy
&& n_bytes
== 8)
6432 avr_asm_len ("movw %r0+2,%0" CR_TAB
6433 "movw %r0+4,%0", xop
, plen
, 2);
6435 avr_asm_len ("mov %r0+2,%0" CR_TAB
6436 "mov %r0+3,%0" CR_TAB
6437 "mov %r0+4,%0" CR_TAB
6438 "mov %r0+5,%0", xop
, plen
, 4);
6441 avr_asm_len ("0:", op
, plen
, 0);
6445 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6446 is ont a compile-time constant:
6448 XOP[0] = XOP[0] +/- XOP[2]
6450 This is a helper for the function below. The only insns that need this
6451 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
6454 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
6456 enum machine_mode mode
= GET_MODE (xop
[0]);
6458 /* Only pointer modes want to add symbols. */
6460 gcc_assert (mode
== HImode
|| mode
== PSImode
);
6462 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6464 avr_asm_len (PLUS
== code
6465 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
6466 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
6469 if (PSImode
== mode
)
6470 avr_asm_len (PLUS
== code
6471 ? "sbci %C0,hlo8(-(%2))"
6472 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
6477 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6479 INSN is a single_set insn with a binary operation as SET_SRC that is
6480 one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6482 XOP are the operands of INSN. In the case of 64-bit operations with
6483 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6484 The non-saturating insns up to 32 bits may or may not supply a "d" class
6487 If PLEN == NULL output the instructions.
6488 If PLEN != NULL set *PLEN to the length of the sequence in words.
6490 PCC is a pointer to store the instructions' effect on cc0.
6493 PLEN and PCC default to NULL.
6498 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
)
6500 int cc_plus
, cc_minus
, cc_dummy
;
6501 int len_plus
, len_minus
;
6503 rtx xdest
= SET_DEST (single_set (insn
));
6504 enum machine_mode mode
= GET_MODE (xdest
);
6505 enum machine_mode imode
= int_mode_for_mode (mode
);
6506 int n_bytes
= GET_MODE_SIZE (mode
);
6507 enum rtx_code code_sat
= GET_CODE (SET_SRC (single_set (insn
)));
6509 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
6515 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6517 if (PLUS
== code_sat
|| MINUS
== code_sat
)
6520 if (n_bytes
<= 4 && REG_P (xop
[2]))
6522 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
);
6528 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
6529 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
6530 op
[2] = avr_to_int_mode (xop
[0]);
6535 && !CONST_INT_P (xop
[2])
6536 && !CONST_FIXED_P (xop
[2]))
6538 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
6541 op
[0] = avr_to_int_mode (xop
[0]);
6542 op
[1] = avr_to_int_mode (xop
[1]);
6543 op
[2] = avr_to_int_mode (xop
[2]);
6546 /* Saturations and 64-bit operations don't have a clobber operand.
6547 For the other cases, the caller will provide a proper XOP[3]. */
6549 op
[3] = PARALLEL
== GET_CODE (PATTERN (insn
)) ? xop
[3] : NULL_RTX
;
6551 /* Saturation will need the sign of the original operand. */
6553 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
6554 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
6556 /* If we subtract and the subtrahend is a constant, then negate it
6557 so that avr_out_plus_1 can be used. */
6560 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
6562 /* Work out the shortest sequence. */
6564 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_plus
, code_sat
, sign
);
6565 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_minus
, code_sat
, sign
);
6569 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
6570 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
6572 else if (len_minus
<= len_plus
)
6573 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
);
6575 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
);
6581 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6582 time constant XOP[2]:
6584 XOP[0] = XOP[0] <op> XOP[2]
6586 and return "". If PLEN == NULL, print assembler instructions to perform the
6587 operation; otherwise, set *PLEN to the length of the instruction sequence
6588 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6589 register or SCRATCH if no clobber register is needed for the operation. */
6592 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6594 /* CODE and MODE of the operation. */
6595 enum rtx_code code
= GET_CODE (SET_SRC (single_set (insn
)));
6596 enum machine_mode mode
= GET_MODE (xop
[0]);
6598 /* Number of bytes to operate on. */
6599 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6601 /* Value of T-flag (0 or 1) or -1 if unknown. */
6604 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6605 int clobber_val
= -1;
6607 /* op[0]: 8-bit destination register
6608 op[1]: 8-bit const int
6609 op[2]: 8-bit clobber register or SCRATCH
6610 op[3]: 8-bit register containing 0xff or NULL_RTX */
6619 for (i
= 0; i
< n_bytes
; i
++)
6621 /* We operate byte-wise on the destination. */
6622 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6623 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6625 /* 8-bit value to operate with this byte. */
6626 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6628 /* Number of bits set in the current byte of the constant. */
6629 int pop8
= avr_popcount (val8
);
6631 /* Registers R16..R31 can operate with immediate. */
6632 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6635 op
[1] = GEN_INT (val8
);
6644 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6648 avr_asm_len ("set", op
, plen
, 1);
6651 op
[1] = GEN_INT (exact_log2 (val8
));
6652 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6656 if (op
[3] != NULL_RTX
)
6657 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6659 avr_asm_len ("clr %0" CR_TAB
6660 "dec %0", op
, plen
, 2);
6666 if (clobber_val
!= (int) val8
)
6667 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6668 clobber_val
= (int) val8
;
6670 avr_asm_len ("or %0,%2", op
, plen
, 1);
6680 avr_asm_len ("clr %0", op
, plen
, 1);
6682 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6686 avr_asm_len ("clt", op
, plen
, 1);
6689 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6690 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6694 if (clobber_val
!= (int) val8
)
6695 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6696 clobber_val
= (int) val8
;
6698 avr_asm_len ("and %0,%2", op
, plen
, 1);
6708 avr_asm_len ("com %0", op
, plen
, 1);
6709 else if (ld_reg_p
&& val8
== (1 << 7))
6710 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6713 if (clobber_val
!= (int) val8
)
6714 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6715 clobber_val
= (int) val8
;
6717 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6723 /* Unknown rtx_code */
6726 } /* for all sub-bytes */
6732 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6733 PLEN != NULL: Set *PLEN to the length of that sequence.
6737 avr_out_addto_sp (rtx
*op
, int *plen
)
6739 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6740 int addend
= INTVAL (op
[0]);
6747 if (flag_verbose_asm
|| flag_print_asm_name
)
6748 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
6750 while (addend
<= -pc_len
)
6753 avr_asm_len ("rcall .", op
, plen
, 1);
6756 while (addend
++ < 0)
6757 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
6759 else if (addend
> 0)
6761 if (flag_verbose_asm
|| flag_print_asm_name
)
6762 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
6764 while (addend
-- > 0)
6765 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
6772 /* Create RTL split patterns for byte sized rotate expressions. This
6773 produces a series of move instructions and considers overlap situations.
6774 Overlapping non-HImode operands need a scratch register. */
6777 avr_rotate_bytes (rtx operands
[])
6780 enum machine_mode mode
= GET_MODE (operands
[0]);
6781 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
6782 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
6783 int num
= INTVAL (operands
[2]);
6784 rtx scratch
= operands
[3];
6785 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6786 Word move if no scratch is needed, otherwise use size of scratch. */
6787 enum machine_mode move_mode
= QImode
;
6788 int move_size
, offset
, size
;
6792 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
6795 move_mode
= GET_MODE (scratch
);
6797 /* Force DI rotate to use QI moves since other DI moves are currently split
6798 into QI moves so forward propagation works better. */
6801 /* Make scratch smaller if needed. */
6802 if (SCRATCH
!= GET_CODE (scratch
)
6803 && HImode
== GET_MODE (scratch
)
6804 && QImode
== move_mode
)
6805 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
6807 move_size
= GET_MODE_SIZE (move_mode
);
6808 /* Number of bytes/words to rotate. */
6809 offset
= (num
>> 3) / move_size
;
6810 /* Number of moves needed. */
6811 size
= GET_MODE_SIZE (mode
) / move_size
;
6812 /* HImode byte swap is a special case to avoid a scratch register. */
6813 if (mode
== HImode
&& same_reg
)
6815 /* HImode byte swap, using xor. This is as quick as using scratch. */
6817 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
6818 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
6819 if (!rtx_equal_p (dst
, src
))
6821 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6822 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
6823 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6828 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6829 /* Create linked list of moves to determine move order. */
6833 } move
[MAX_SIZE
+ 8];
6836 gcc_assert (size
<= MAX_SIZE
);
6837 /* Generate list of subreg moves. */
6838 for (i
= 0; i
< size
; i
++)
6841 int to
= (from
+ offset
) % size
;
6842 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
6843 mode
, from
* move_size
);
6844 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
6845 mode
, to
* move_size
);
6848 /* Mark dependence where a dst of one move is the src of another move.
6849 The first move is a conflict as it must wait until second is
6850 performed. We ignore moves to self - we catch this later. */
6852 for (i
= 0; i
< size
; i
++)
6853 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
6854 for (j
= 0; j
< size
; j
++)
6855 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
6857 /* The dst of move i is the src of move j. */
6864 /* Go through move list and perform non-conflicting moves. As each
6865 non-overlapping move is made, it may remove other conflicts
6866 so the process is repeated until no conflicts remain. */
6871 /* Emit move where dst is not also a src or we have used that
6873 for (i
= 0; i
< size
; i
++)
6874 if (move
[i
].src
!= NULL_RTX
)
6876 if (move
[i
].links
== -1
6877 || move
[move
[i
].links
].src
== NULL_RTX
)
6880 /* Ignore NOP moves to self. */
6881 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
6882 emit_move_insn (move
[i
].dst
, move
[i
].src
);
6884 /* Remove conflict from list. */
6885 move
[i
].src
= NULL_RTX
;
6891 /* Check for deadlock. This is when no moves occurred and we have
6892 at least one blocked move. */
6893 if (moves
== 0 && blocked
!= -1)
6895 /* Need to use scratch register to break deadlock.
6896 Add move to put dst of blocked move into scratch.
6897 When this move occurs, it will break chain deadlock.
6898 The scratch register is substituted for real move. */
6900 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
6902 move
[size
].src
= move
[blocked
].dst
;
6903 move
[size
].dst
= scratch
;
6904 /* Scratch move is never blocked. */
6905 move
[size
].links
= -1;
6906 /* Make sure we have valid link. */
6907 gcc_assert (move
[blocked
].links
!= -1);
6908 /* Replace src of blocking move with scratch reg. */
6909 move
[move
[blocked
].links
].src
= scratch
;
6910 /* Make dependent on scratch move occurring. */
6911 move
[blocked
].links
= size
;
6915 while (blocked
!= -1);
6921 /* Outputs instructions needed for fixed point type conversion.
6922 This includes converting between any fixed point type, as well
6923 as converting to any integer type. Conversion between integer
6924 types is not supported.
6926 The number of instructions generated depends on the types
6927 being converted and the registers assigned to them.
6929 The number of instructions required to complete the conversion
6930 is least if the registers for source and destination are overlapping
6931 and are aligned at the decimal place as actual movement of data is
6932 completely avoided. In some cases, the conversion may already be
6933 complete without any instructions needed.
6935 When converting to signed types from signed types, sign extension
6938 Converting signed fractional types requires a bit shift if converting
6939 to or from any unsigned fractional type because the decimal place is
6940 shifted by 1 bit. When the destination is a signed fractional, the sign
6941 is stored in either the carry or T bit. */
6944 avr_out_fract (rtx insn
, rtx operands
[], bool intsigned
, int *plen
)
6948 /* ilen: Length of integral part (in bytes)
6949 flen: Length of fractional part (in bytes)
6950 tlen: Length of operand (in bytes)
6951 blen: Length of operand (in bits) */
6952 int ilen
[2], flen
[2], tlen
[2], blen
[2];
6953 int rdest
, rsource
, offset
;
6954 int start
, end
, dir
;
6955 bool sign_in_T
= false, sign_in_Carry
= false, sign_done
= false;
6956 bool widening_sign_extend
= false;
6957 int clrword
= -1, lastclr
= 0, clr
= 0;
6963 xop
[dest
] = operands
[dest
];
6964 xop
[src
] = operands
[src
];
6969 /* Determine format (integer and fractional parts)
6970 of types needing conversion. */
6972 for (i
= 0; i
< 2; i
++)
6974 enum machine_mode mode
= GET_MODE (xop
[i
]);
6976 tlen
[i
] = GET_MODE_SIZE (mode
);
6977 blen
[i
] = GET_MODE_BITSIZE (mode
);
6979 if (SCALAR_INT_MODE_P (mode
))
6981 sbit
[i
] = intsigned
;
6982 ilen
[i
] = GET_MODE_SIZE (mode
);
6985 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
6987 sbit
[i
] = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
6988 ilen
[i
] = (GET_MODE_IBIT (mode
) + 1) / 8;
6989 flen
[i
] = (GET_MODE_FBIT (mode
) + 1) / 8;
6992 fatal_insn ("unsupported fixed-point conversion", insn
);
6995 /* Perform sign extension if source and dest are both signed,
6996 and there are more integer parts in dest than in source. */
6998 widening_sign_extend
= sbit
[dest
] && sbit
[src
] && ilen
[dest
] > ilen
[src
];
7000 rdest
= REGNO (xop
[dest
]);
7001 rsource
= REGNO (xop
[src
]);
7002 offset
= flen
[src
] - flen
[dest
];
7004 /* Position of MSB resp. sign bit. */
7006 xop
[2] = GEN_INT (blen
[dest
] - 1);
7007 xop
[3] = GEN_INT (blen
[src
] - 1);
7009 /* Store the sign bit if the destination is a signed fract and the source
7010 has a sign in the integer part. */
7012 if (sbit
[dest
] && ilen
[dest
] == 0 && sbit
[src
] && ilen
[src
] > 0)
7014 /* To avoid using BST and BLD if the source and destination registers
7015 overlap or the source is unused after, we can use LSL to store the
7016 sign bit in carry since we don't need the integral part of the source.
7017 Restoring the sign from carry saves one BLD instruction below. */
7019 if (reg_unused_after (insn
, xop
[src
])
7020 || (rdest
< rsource
+ tlen
[src
]
7021 && rdest
+ tlen
[dest
] > rsource
))
7023 avr_asm_len ("lsl %T1%t3", xop
, plen
, 1);
7024 sign_in_Carry
= true;
7028 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
7033 /* Pick the correct direction to shift bytes. */
7035 if (rdest
< rsource
+ offset
)
7044 start
= tlen
[dest
] - 1;
7048 /* Perform conversion by moving registers into place, clearing
7049 destination registers that do not overlap with any source. */
7051 for (i
= start
; i
!= end
; i
+= dir
)
7053 int destloc
= rdest
+ i
;
7054 int sourceloc
= rsource
+ i
+ offset
;
7056 /* Source register location is outside range of source register,
7057 so clear this byte in the dest. */
7059 if (sourceloc
< rsource
7060 || sourceloc
>= rsource
+ tlen
[src
])
7064 && (sourceloc
+ dir
< rsource
7065 || sourceloc
+ dir
>= rsource
+ tlen
[src
])
7066 && ((dir
== 1 && !(destloc
% 2) && !(sourceloc
% 2))
7067 || (dir
== -1 && (destloc
% 2) && (sourceloc
% 2)))
7070 /* Use already cleared word to clear two bytes at a time. */
7072 int even_i
= i
& ~1;
7073 int even_clrword
= clrword
& ~1;
7075 xop
[4] = GEN_INT (8 * even_i
);
7076 xop
[5] = GEN_INT (8 * even_clrword
);
7077 avr_asm_len ("movw %T0%t4,%T0%t5", xop
, plen
, 1);
7082 if (i
== tlen
[dest
] - 1
7083 && widening_sign_extend
7084 && blen
[src
] - 1 - 8 * offset
< 0)
7086 /* The SBRC below that sign-extends would come
7087 up with a negative bit number because the sign
7088 bit is out of reach. Also avoid some early-clobber
7089 situations because of premature CLR. */
7091 if (reg_unused_after (insn
, xop
[src
]))
7092 avr_asm_len ("lsl %T1%t3" CR_TAB
7093 "sbc %T0%t2,%T0%t2", xop
, plen
, 2);
7095 avr_asm_len ("mov __tmp_reg__,%T1%t3" CR_TAB
7096 "lsl __tmp_reg__" CR_TAB
7097 "sbc %T0%t2,%T0%t2", xop
, plen
, 3);
7103 /* Do not clear the register if it is going to get
7104 sign extended with a MOV later. */
7106 if (sbit
[dest
] && sbit
[src
]
7107 && i
!= tlen
[dest
] - 1
7113 xop
[4] = GEN_INT (8 * i
);
7114 avr_asm_len ("clr %T0%t4", xop
, plen
, 1);
7116 /* If the last byte was cleared too, we have a cleared
7117 word we can MOVW to clear two bytes at a time. */
7125 else if (destloc
== sourceloc
)
7127 /* Source byte is already in destination: Nothing needed. */
7133 /* Registers do not line up and source register location
7134 is within range: Perform move, shifting with MOV or MOVW. */
7138 && sourceloc
+ dir
>= rsource
7139 && sourceloc
+ dir
< rsource
+ tlen
[src
]
7140 && ((dir
== 1 && !(destloc
% 2) && !(sourceloc
% 2))
7141 || (dir
== -1 && (destloc
% 2) && (sourceloc
% 2))))
7143 int even_i
= i
& ~1;
7144 int even_i_plus_offset
= (i
+ offset
) & ~1;
7146 xop
[4] = GEN_INT (8 * even_i
);
7147 xop
[5] = GEN_INT (8 * even_i_plus_offset
);
7148 avr_asm_len ("movw %T0%t4,%T1%t5", xop
, plen
, 1);
7153 xop
[4] = GEN_INT (8 * i
);
7154 xop
[5] = GEN_INT (8 * (i
+ offset
));
7155 avr_asm_len ("mov %T0%t4,%T1%t5", xop
, plen
, 1);
7163 /* Perform sign extension if source and dest are both signed,
7164 and there are more integer parts in dest than in source. */
7166 if (widening_sign_extend
)
7170 xop
[4] = GEN_INT (blen
[src
] - 1 - 8 * offset
);
7172 /* Register was cleared above, so can become 0xff and extended.
7173 Note: Instead of the CLR/SBRC/COM the sign extension could
7174 be performed after the LSL below by means of a SBC if only
7175 one byte has to be shifted left. */
7177 avr_asm_len ("sbrc %T0%T4" CR_TAB
7178 "com %T0%t2", xop
, plen
, 2);
7181 /* Sign extend additional bytes by MOV and MOVW. */
7183 start
= tlen
[dest
] - 2;
7184 end
= flen
[dest
] + ilen
[src
] - 1;
7186 for (i
= start
; i
!= end
; i
--)
7188 if (AVR_HAVE_MOVW
&& i
!= start
&& i
-1 != end
)
7191 xop
[4] = GEN_INT (8 * i
);
7192 xop
[5] = GEN_INT (8 * (tlen
[dest
] - 2));
7193 avr_asm_len ("movw %T0%t4,%T0%t5", xop
, plen
, 1);
7197 xop
[4] = GEN_INT (8 * i
);
7198 xop
[5] = GEN_INT (8 * (tlen
[dest
] - 1));
7199 avr_asm_len ("mov %T0%t4,%T0%t5", xop
, plen
, 1);
7204 /* If destination is a signed fract, and the source was not, a shift
7205 by 1 bit is needed. Also restore sign from carry or T. */
7207 if (sbit
[dest
] && !ilen
[dest
] && (!sbit
[src
] || ilen
[src
]))
7209 /* We have flen[src] non-zero fractional bytes to shift.
7210 Because of the right shift, handle one byte more so that the
7211 LSB won't be lost. */
7213 int nonzero
= flen
[src
] + 1;
7215 /* If the LSB is in the T flag and there are no fractional
7216 bits, the high byte is zero and no shift needed. */
7218 if (flen
[src
] == 0 && sign_in_T
)
7221 start
= flen
[dest
] - 1;
7222 end
= start
- nonzero
;
7224 for (i
= start
; i
> end
&& i
>= 0; i
--)
7226 xop
[4] = GEN_INT (8 * i
);
7227 if (i
== start
&& !sign_in_Carry
)
7228 avr_asm_len ("lsr %T0%t4", xop
, plen
, 1);
7230 avr_asm_len ("ror %T0%t4", xop
, plen
, 1);
7235 avr_asm_len ("bld %T0%T2", xop
, plen
, 1);
7238 else if (sbit
[src
] && !ilen
[src
] && (!sbit
[dest
] || ilen
[dest
]))
7240 /* If source was a signed fract and dest was not, shift 1 bit
7243 start
= flen
[dest
] - flen
[src
];
7248 for (i
= start
; i
< flen
[dest
]; i
++)
7250 xop
[4] = GEN_INT (8 * i
);
7253 avr_asm_len ("lsl %T0%t4", xop
, plen
, 1);
7255 avr_asm_len ("rol %T0%t4", xop
, plen
, 1);
7263 /* Modifies the length assigned to instruction INSN
7264 LEN is the initially computed length of the insn. */
7267 adjust_insn_length (rtx insn
, int len
)
7269 rtx
*op
= recog_data
.operand
;
7270 enum attr_adjust_len adjust_len
;
7272 /* Some complex insns don't need length adjustment and therefore
7273 the length need not/must not be adjusted for these insns.
7274 It is easier to state this in an insn attribute "adjust_len" than
7275 to clutter up code here... */
7277 if (-1 == recog_memoized (insn
))
7282 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7284 adjust_len
= get_attr_adjust_len (insn
);
7286 if (adjust_len
== ADJUST_LEN_NO
)
7288 /* Nothing to adjust: The length from attribute "length" is fine.
7289 This is the default. */
7294 /* Extract insn's operands. */
7296 extract_constrain_insn_cached (insn
);
7298 /* Dispatch to right function. */
7302 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
7303 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
7304 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
7306 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
7308 case ADJUST_LEN_PLUS
: avr_out_plus (insn
, op
, &len
); break;
7309 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
7311 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
7312 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
7313 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
7314 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
7315 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
7316 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
7317 case ADJUST_LEN_LOAD_LPM
: avr_load_lpm (insn
, op
, &len
); break;
7319 case ADJUST_LEN_SFRACT
: avr_out_fract (insn
, op
, true, &len
); break;
7320 case ADJUST_LEN_UFRACT
: avr_out_fract (insn
, op
, false, &len
); break;
7322 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
7323 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
7324 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
7325 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
7326 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
7328 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
7329 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
7330 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
7332 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
7333 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
7334 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
7336 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
7337 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
7338 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
7340 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
7341 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
7342 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
7344 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
7346 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
7355 /* Return nonzero if register REG is dead after INSN. */
7358 reg_unused_after (rtx insn
, rtx reg
)
7360 return (dead_or_set_p (insn
, reg
)
7361 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
7364 /* Return nonzero if REG is not used after INSN.
7365 We assume REG is a reload reg, and therefore does
7366 not live past labels. It may live past calls or jumps though. */
7369 _reg_unused_after (rtx insn
, rtx reg
)
7374 /* If the reg is set by this instruction, then it is safe for our
7375 case. Disregard the case where this is a store to memory, since
7376 we are checking a register used in the store address. */
7377 set
= single_set (insn
);
7378 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
7379 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7382 while ((insn
= NEXT_INSN (insn
)))
7385 code
= GET_CODE (insn
);
7388 /* If this is a label that existed before reload, then the register
7389 is dead here. However, if this is a label added by reorg, then
7390 the register may still be live here. We can't tell the difference,
7391 so we just ignore labels completely. */
7392 if (code
== CODE_LABEL
)
7400 if (code
== JUMP_INSN
)
7403 /* If this is a sequence, we must handle them all at once.
7404 We could have for instance a call that sets the target register,
7405 and an insn in a delay slot that uses the register. In this case,
7406 we must return 0. */
7407 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
7412 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
7414 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
7415 rtx set
= single_set (this_insn
);
7417 if (GET_CODE (this_insn
) == CALL_INSN
)
7419 else if (GET_CODE (this_insn
) == JUMP_INSN
)
7421 if (INSN_ANNULLED_BRANCH_P (this_insn
))
7426 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7428 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7430 if (GET_CODE (SET_DEST (set
)) != MEM
)
7436 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
7441 else if (code
== JUMP_INSN
)
7445 if (code
== CALL_INSN
)
7448 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
7449 if (GET_CODE (XEXP (tem
, 0)) == USE
7450 && REG_P (XEXP (XEXP (tem
, 0), 0))
7451 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
7453 if (call_used_regs
[REGNO (reg
)])
7457 set
= single_set (insn
);
7459 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7461 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7462 return GET_CODE (SET_DEST (set
)) != MEM
;
7463 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
7470 /* Target hook for assembling integer objects. The AVR version needs
7471 special handling for references to certain labels. */
7474 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
7476 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
7477 && text_segment_operand (x
, VOIDmode
))
7479 fputs ("\t.word\tgs(", asm_out_file
);
7480 output_addr_const (asm_out_file
, x
);
7481 fputs (")\n", asm_out_file
);
7485 else if (GET_MODE (x
) == PSImode
)
7487 /* This needs binutils 2.23+, see PR binutils/13503 */
7489 fputs ("\t.byte\tlo8(", asm_out_file
);
7490 output_addr_const (asm_out_file
, x
);
7491 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7493 fputs ("\t.byte\thi8(", asm_out_file
);
7494 output_addr_const (asm_out_file
, x
);
7495 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7497 fputs ("\t.byte\thh8(", asm_out_file
);
7498 output_addr_const (asm_out_file
, x
);
7499 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7503 else if (CONST_FIXED_P (x
))
7507 /* varasm fails to handle big fixed modes that don't fit in hwi. */
7509 for (n
= 0; n
< size
; n
++)
7511 rtx xn
= simplify_gen_subreg (QImode
, x
, GET_MODE (x
), n
);
7512 default_assemble_integer (xn
, 1, aligned_p
);
7518 return default_assemble_integer (x
, size
, aligned_p
);
7522 /* Return value is nonzero if pseudos that have been
7523 assigned to registers of class CLASS would likely be spilled
7524 because registers of CLASS are needed for spill registers. */
7527 avr_class_likely_spilled_p (reg_class_t c
)
7529 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
7532 /* Valid attributes:
7533 progmem - put data to program memory;
7534 signal - make a function to be hardware interrupt. After function
7535 prologue interrupts are disabled;
7536 interrupt - make a function to be hardware interrupt. After function
7537 prologue interrupts are enabled;
7538 naked - don't generate function prologue/epilogue and `ret' command.
7540 Only `progmem' attribute valid for type. */
7542 /* Handle a "progmem" attribute; arguments as in
7543 struct attribute_spec.handler. */
7545 avr_handle_progmem_attribute (tree
*node
, tree name
,
7546 tree args ATTRIBUTE_UNUSED
,
7547 int flags ATTRIBUTE_UNUSED
,
7552 if (TREE_CODE (*node
) == TYPE_DECL
)
7554 /* This is really a decl attribute, not a type attribute,
7555 but try to handle it for GCC 3.0 backwards compatibility. */
7557 tree type
= TREE_TYPE (*node
);
7558 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
7559 tree newtype
= build_type_attribute_variant (type
, attr
);
7561 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
7562 TREE_TYPE (*node
) = newtype
;
7563 *no_add_attrs
= true;
7565 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
7567 *no_add_attrs
= false;
7571 warning (OPT_Wattributes
, "%qE attribute ignored",
7573 *no_add_attrs
= true;
7580 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
7581 struct attribute_spec.handler. */
7584 avr_handle_fndecl_attribute (tree
*node
, tree name
,
7585 tree args ATTRIBUTE_UNUSED
,
7586 int flags ATTRIBUTE_UNUSED
,
7589 if (TREE_CODE (*node
) != FUNCTION_DECL
)
7591 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
7593 *no_add_attrs
= true;
7600 avr_handle_fntype_attribute (tree
*node
, tree name
,
7601 tree args ATTRIBUTE_UNUSED
,
7602 int flags ATTRIBUTE_UNUSED
,
7605 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
7607 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
7609 *no_add_attrs
= true;
7616 /* AVR attributes. */
7617 static const struct attribute_spec
7618 avr_attribute_table
[] =
7620 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7621 affects_type_identity } */
7622 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
7624 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7626 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7628 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7630 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7632 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7634 { NULL
, 0, 0, false, false, false, NULL
, false }
7638 /* Look if DECL shall be placed in program memory space by
7639 means of attribute `progmem' or some address-space qualifier.
7640 Return non-zero if DECL is data that must end up in Flash and
7641 zero if the data lives in RAM (.bss, .data, .rodata, ...).
7643 Return 2 if DECL is located in 24-bit flash address-space
7644 Return 1 if DECL is located in 16-bit flash address-space
7645 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
7646 Return 0 otherwise */
7649 avr_progmem_p (tree decl
, tree attributes
)
7653 if (TREE_CODE (decl
) != VAR_DECL
)
7656 if (avr_decl_memx_p (decl
))
7659 if (avr_decl_flash_p (decl
))
7663 != lookup_attribute ("progmem", attributes
))
7670 while (TREE_CODE (a
) == ARRAY_TYPE
);
7672 if (a
== error_mark_node
)
7675 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
7682 /* Scan type TYP for pointer references to address space ASn.
7683 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
7684 the AS are also declared to be CONST.
7685 Otherwise, return the respective address space, i.e. a value != 0. */
7688 avr_nonconst_pointer_addrspace (tree typ
)
7690 while (ARRAY_TYPE
== TREE_CODE (typ
))
7691 typ
= TREE_TYPE (typ
);
7693 if (POINTER_TYPE_P (typ
))
7696 tree target
= TREE_TYPE (typ
);
7698 /* Pointer to function: Test the function's return type. */
7700 if (FUNCTION_TYPE
== TREE_CODE (target
))
7701 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
7703 /* "Ordinary" pointers... */
7705 while (TREE_CODE (target
) == ARRAY_TYPE
)
7706 target
= TREE_TYPE (target
);
7708 /* Pointers to non-generic address space must be const.
7709 Refuse address spaces outside the device's flash. */
7711 as
= TYPE_ADDR_SPACE (target
);
7713 if (!ADDR_SPACE_GENERIC_P (as
)
7714 && (!TYPE_READONLY (target
)
7715 || avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
))
7720 /* Scan pointer's target type. */
7722 return avr_nonconst_pointer_addrspace (target
);
7725 return ADDR_SPACE_GENERIC
;
7729 /* Sanity check NODE so that all pointers targeting non-generic address spaces
7730 go along with CONST qualifier. Writing to these address spaces should
7731 be detected and complained about as early as possible. */
7734 avr_pgm_check_var_decl (tree node
)
7736 const char *reason
= NULL
;
7738 addr_space_t as
= ADDR_SPACE_GENERIC
;
7740 gcc_assert (as
== 0);
7742 if (avr_log
.progmem
)
7743 avr_edump ("%?: %t\n", node
);
7745 switch (TREE_CODE (node
))
7751 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7752 reason
= "variable";
7756 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7757 reason
= "function parameter";
7761 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7762 reason
= "structure field";
7766 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
7768 reason
= "return type of function";
7772 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
7779 avr_edump ("%?: %s, %d, %d\n",
7780 avr_addrspace
[as
].name
,
7781 avr_addrspace
[as
].segment
, avr_current_device
->n_flash
);
7782 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7785 error ("%qT uses address space %qs beyond flash of %qs",
7786 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7788 error ("%s %q+D uses address space %qs beyond flash of %qs",
7789 reason
, node
, avr_addrspace
[as
].name
,
7790 avr_current_device
->name
);
7795 error ("pointer targeting address space %qs must be const in %qT",
7796 avr_addrspace
[as
].name
, node
);
7798 error ("pointer targeting address space %qs must be const"
7800 avr_addrspace
[as
].name
, reason
, node
);
7804 return reason
== NULL
;
7808 /* Add the section attribute if the variable is in progmem. */
7811 avr_insert_attributes (tree node
, tree
*attributes
)
7813 avr_pgm_check_var_decl (node
);
7815 if (TREE_CODE (node
) == VAR_DECL
7816 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
7817 && avr_progmem_p (node
, *attributes
))
7822 /* For C++, we have to peel arrays in order to get correct
7823 determination of readonlyness. */
7826 node0
= TREE_TYPE (node0
);
7827 while (TREE_CODE (node0
) == ARRAY_TYPE
);
7829 if (error_mark_node
== node0
)
7832 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
7834 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7836 error ("variable %q+D located in address space %qs"
7837 " beyond flash of %qs",
7838 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7841 if (!TYPE_READONLY (node0
)
7842 && !TREE_READONLY (node
))
7844 const char *reason
= "__attribute__((progmem))";
7846 if (!ADDR_SPACE_GENERIC_P (as
))
7847 reason
= avr_addrspace
[as
].name
;
7849 if (avr_log
.progmem
)
7850 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
7852 error ("variable %q+D must be const in order to be put into"
7853 " read-only section by means of %qs", node
, reason
);
7859 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7860 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7861 /* Track need of __do_clear_bss. */
7864 avr_asm_output_aligned_decl_common (FILE * stream
,
7865 const_tree decl ATTRIBUTE_UNUSED
,
7867 unsigned HOST_WIDE_INT size
,
7868 unsigned int align
, bool local_p
)
7870 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7871 There is no need to trigger __do_clear_bss code for them. */
7873 if (!STR_PREFIX_P (name
, "__gnu_lto"))
7874 avr_need_clear_bss_p
= true;
7877 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
7879 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
7883 /* Unnamed section callback for data_section
7884 to track need of __do_copy_data. */
7887 avr_output_data_section_asm_op (const void *data
)
7889 avr_need_copy_data_p
= true;
7891 /* Dispatch to default. */
7892 output_section_asm_op (data
);
7896 /* Unnamed section callback for bss_section
7897 to track need of __do_clear_bss. */
7900 avr_output_bss_section_asm_op (const void *data
)
7902 avr_need_clear_bss_p
= true;
7904 /* Dispatch to default. */
7905 output_section_asm_op (data
);
7909 /* Unnamed section callback for progmem*.data sections. */
7912 avr_output_progmem_section_asm_op (const void *data
)
7914 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
7915 (const char*) data
);
7919 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7922 avr_asm_init_sections (void)
7926 /* Set up a section for jump tables. Alignment is handled by
7927 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7929 if (AVR_HAVE_JMP_CALL
)
7931 progmem_swtable_section
7932 = get_unnamed_section (0, output_section_asm_op
,
7933 "\t.section\t.progmem.gcc_sw_table"
7934 ",\"a\",@progbits");
7938 progmem_swtable_section
7939 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7940 "\t.section\t.progmem.gcc_sw_table"
7941 ",\"ax\",@progbits");
7944 for (n
= 0; n
< sizeof (progmem_section
) / sizeof (*progmem_section
); n
++)
7947 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
7948 progmem_section_prefix
[n
]);
7951 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7952 resp. `avr_need_copy_data_p'. */
7954 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7955 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7956 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
7960 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7963 avr_asm_function_rodata_section (tree decl
)
7965 /* If a function is unused and optimized out by -ffunction-sections
7966 and --gc-sections, ensure that the same will happen for its jump
7967 tables by putting them into individual sections. */
7972 /* Get the frodata section from the default function in varasm.c
7973 but treat function-associated data-like jump tables as code
7974 rather than as user defined data. AVR has no constant pools. */
7976 int fdata
= flag_data_sections
;
7978 flag_data_sections
= flag_function_sections
;
7979 frodata
= default_function_rodata_section (decl
);
7980 flag_data_sections
= fdata
;
7981 flags
= frodata
->common
.flags
;
7984 if (frodata
!= readonly_data_section
7985 && flags
& SECTION_NAMED
)
7987 /* Adjust section flags and replace section name prefix. */
7991 static const char* const prefix
[] =
7993 ".rodata", ".progmem.gcc_sw_table",
7994 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7997 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
7999 const char * old_prefix
= prefix
[i
];
8000 const char * new_prefix
= prefix
[i
+1];
8001 const char * name
= frodata
->named
.name
;
8003 if (STR_PREFIX_P (name
, old_prefix
))
8005 const char *rname
= ACONCAT ((new_prefix
,
8006 name
+ strlen (old_prefix
), NULL
));
8007 flags
&= ~SECTION_CODE
;
8008 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
8010 return get_section (rname
, flags
, frodata
->named
.decl
);
8015 return progmem_swtable_section
;
8019 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8020 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8023 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
8025 if (flags
& AVR_SECTION_PROGMEM
)
8027 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
8028 int segment
= avr_addrspace
[as
].segment
;
8029 const char *old_prefix
= ".rodata";
8030 const char *new_prefix
= progmem_section_prefix
[segment
];
8032 if (STR_PREFIX_P (name
, old_prefix
))
8034 const char *sname
= ACONCAT ((new_prefix
,
8035 name
+ strlen (old_prefix
), NULL
));
8036 default_elf_asm_named_section (sname
, flags
, decl
);
8040 default_elf_asm_named_section (new_prefix
, flags
, decl
);
8044 if (!avr_need_copy_data_p
)
8045 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
8046 || STR_PREFIX_P (name
, ".rodata")
8047 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
8049 if (!avr_need_clear_bss_p
)
8050 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
8052 default_elf_asm_named_section (name
, flags
, decl
);
8056 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
8058 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
8060 if (STR_PREFIX_P (name
, ".noinit"))
8062 if (decl
&& TREE_CODE (decl
) == VAR_DECL
8063 && DECL_INITIAL (decl
) == NULL_TREE
)
8064 flags
|= SECTION_BSS
; /* @nobits */
8066 warning (0, "only uninitialized variables can be placed in the "
8070 if (decl
&& DECL_P (decl
)
8071 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8073 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8075 /* Attribute progmem puts data in generic address space.
8076 Set section flags as if it was in __flash to get the right
8077 section prefix in the remainder. */
8079 if (ADDR_SPACE_GENERIC_P (as
))
8080 as
= ADDR_SPACE_FLASH
;
8082 flags
|= as
* SECTION_MACH_DEP
;
8083 flags
&= ~SECTION_WRITE
;
8084 flags
&= ~SECTION_BSS
;
8091 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8094 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
8096 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8097 readily available, see PR34734. So we postpone the warning
8098 about uninitialized data in program memory section until here. */
8101 && decl
&& DECL_P (decl
)
8102 && NULL_TREE
== DECL_INITIAL (decl
)
8103 && !DECL_EXTERNAL (decl
)
8104 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8106 warning (OPT_Wuninitialized
,
8107 "uninitialized variable %q+D put into "
8108 "program memory area", decl
);
8111 default_encode_section_info (decl
, rtl
, new_decl_p
);
8113 if (decl
&& DECL_P (decl
)
8114 && TREE_CODE (decl
) != FUNCTION_DECL
8116 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
8118 rtx sym
= XEXP (rtl
, 0);
8119 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8121 /* PSTR strings are in generic space but located in flash:
8122 patch address space. */
8124 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8125 as
= ADDR_SPACE_FLASH
;
8127 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
8132 /* Implement `TARGET_ASM_SELECT_SECTION' */
8135 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
8137 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
8139 if (decl
&& DECL_P (decl
)
8140 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8142 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8143 int segment
= avr_addrspace
[as
].segment
;
8145 if (sect
->common
.flags
& SECTION_NAMED
)
8147 const char * name
= sect
->named
.name
;
8148 const char * old_prefix
= ".rodata";
8149 const char * new_prefix
= progmem_section_prefix
[segment
];
8151 if (STR_PREFIX_P (name
, old_prefix
))
8153 const char *sname
= ACONCAT ((new_prefix
,
8154 name
+ strlen (old_prefix
), NULL
));
8155 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
8159 return progmem_section
[segment
];
8165 /* Implement `TARGET_ASM_FILE_START'. */
8166 /* Outputs some text at the start of each assembler file. */
8169 avr_file_start (void)
8171 int sfr_offset
= avr_current_arch
->sfr_offset
;
8173 if (avr_current_arch
->asm_only
)
8174 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
8176 default_file_start ();
8178 /* Print I/O addresses of some SFRs used with IN and OUT. */
8181 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
8183 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
8184 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
8186 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
8188 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
8190 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
8192 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
8194 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
8195 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
8196 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
8200 /* Implement `TARGET_ASM_FILE_END'. */
8201 /* Outputs to the stdio stream FILE some
8202 appropriate text to go at the end of an assembler file. */
8207 /* Output these only if there is anything in the
8208 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
8209 input section(s) - some code size can be saved by not
8210 linking in the initialization code from libgcc if resp.
8211 sections are empty. */
8213 if (avr_need_copy_data_p
)
8214 fputs (".global __do_copy_data\n", asm_out_file
);
8216 if (avr_need_clear_bss_p
)
8217 fputs (".global __do_clear_bss\n", asm_out_file
);
8220 /* Choose the order in which to allocate hard registers for
8221 pseudo-registers local to a basic block.
8223 Store the desired register order in the array `reg_alloc_order'.
8224 Element 0 should be the register to allocate first; element 1, the
8225 next register; and so on. */
8228 order_regs_for_local_alloc (void)
8231 static const int order_0
[] = {
8239 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
8243 static const int order_1
[] = {
8251 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
8255 static const int order_2
[] = {
8264 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
8269 const int *order
= (TARGET_ORDER_1
? order_1
:
8270 TARGET_ORDER_2
? order_2
:
8272 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
8273 reg_alloc_order
[i
] = order
[i
];
8277 /* Implement `TARGET_REGISTER_MOVE_COST' */
8280 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
8281 reg_class_t from
, reg_class_t to
)
8283 return (from
== STACK_REG
? 6
8284 : to
== STACK_REG
? 12
8289 /* Implement `TARGET_MEMORY_MOVE_COST' */
8292 avr_memory_move_cost (enum machine_mode mode
,
8293 reg_class_t rclass ATTRIBUTE_UNUSED
,
8294 bool in ATTRIBUTE_UNUSED
)
8296 return (mode
== QImode
? 2
8297 : mode
== HImode
? 4
8298 : mode
== SImode
? 8
8299 : mode
== SFmode
? 8
8304 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8305 cost of an RTX operand given its context. X is the rtx of the
8306 operand, MODE is its mode, and OUTER is the rtx_code of this
8307 operand's parent operator. */
8310 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
8311 int opno
, bool speed
)
8313 enum rtx_code code
= GET_CODE (x
);
8325 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8332 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
8336 /* Worker function for AVR backend's rtx_cost function.
8337 X is rtx expression whose cost is to be calculated.
8338 Return true if the complete cost has been computed.
8339 Return false if subexpressions should be scanned.
8340 In either case, *TOTAL contains the cost result. */
8343 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
8344 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
8346 enum rtx_code code
= (enum rtx_code
) codearg
;
8347 enum machine_mode mode
= GET_MODE (x
);
8358 /* Immediate constants are as cheap as registers. */
8363 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8371 *total
= COSTS_N_INSNS (1);
8377 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
8383 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8391 *total
= COSTS_N_INSNS (1);
8397 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8401 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8402 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8406 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
8407 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8408 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8412 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
8413 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8414 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8422 && MULT
== GET_CODE (XEXP (x
, 0))
8423 && register_operand (XEXP (x
, 1), QImode
))
8426 *total
= COSTS_N_INSNS (speed
? 4 : 3);
8427 /* multiply-add with constant: will be split and load constant. */
8428 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8429 *total
= COSTS_N_INSNS (1) + *total
;
8432 *total
= COSTS_N_INSNS (1);
8433 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8434 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8439 && (MULT
== GET_CODE (XEXP (x
, 0))
8440 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
8441 && register_operand (XEXP (x
, 1), HImode
)
8442 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8443 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
8446 *total
= COSTS_N_INSNS (speed
? 5 : 4);
8447 /* multiply-add with constant: will be split and load constant. */
8448 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8449 *total
= COSTS_N_INSNS (1) + *total
;
8452 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8454 *total
= COSTS_N_INSNS (2);
8455 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8458 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8459 *total
= COSTS_N_INSNS (1);
8461 *total
= COSTS_N_INSNS (2);
8465 if (!CONST_INT_P (XEXP (x
, 1)))
8467 *total
= COSTS_N_INSNS (3);
8468 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8471 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8472 *total
= COSTS_N_INSNS (2);
8474 *total
= COSTS_N_INSNS (3);
8478 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8480 *total
= COSTS_N_INSNS (4);
8481 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8484 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8485 *total
= COSTS_N_INSNS (1);
8487 *total
= COSTS_N_INSNS (4);
8493 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8499 && register_operand (XEXP (x
, 0), QImode
)
8500 && MULT
== GET_CODE (XEXP (x
, 1)))
8503 *total
= COSTS_N_INSNS (speed
? 4 : 3);
8504 /* multiply-sub with constant: will be split and load constant. */
8505 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
8506 *total
= COSTS_N_INSNS (1) + *total
;
8511 && register_operand (XEXP (x
, 0), HImode
)
8512 && (MULT
== GET_CODE (XEXP (x
, 1))
8513 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
8514 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
8515 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
8518 *total
= COSTS_N_INSNS (speed
? 5 : 4);
8519 /* multiply-sub with constant: will be split and load constant. */
8520 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
8521 *total
= COSTS_N_INSNS (1) + *total
;
8527 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8528 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8529 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8530 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8534 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8535 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8536 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8544 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
8546 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8554 rtx op0
= XEXP (x
, 0);
8555 rtx op1
= XEXP (x
, 1);
8556 enum rtx_code code0
= GET_CODE (op0
);
8557 enum rtx_code code1
= GET_CODE (op1
);
8558 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
8559 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
8562 && (u8_operand (op1
, HImode
)
8563 || s8_operand (op1
, HImode
)))
8565 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
8569 && register_operand (op1
, HImode
))
8571 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8574 else if (ex0
|| ex1
)
8576 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
8579 else if (register_operand (op0
, HImode
)
8580 && (u8_operand (op1
, HImode
)
8581 || s8_operand (op1
, HImode
)))
8583 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
8587 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
8590 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8597 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8607 /* Add some additional costs besides CALL like moves etc. */
8609 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
8613 /* Just a rough estimate. Even with -O2 we don't want bulky
8614 code expanded inline. */
8616 *total
= COSTS_N_INSNS (25);
8622 *total
= COSTS_N_INSNS (300);
8624 /* Add some additional costs besides CALL like moves etc. */
8625 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
8633 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8634 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8642 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8644 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
8645 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8646 /* For div/mod with const-int divisor we have at least the cost of
8647 loading the divisor. */
8648 if (CONST_INT_P (XEXP (x
, 1)))
8649 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8650 /* Add some overall penaly for clobbering and moving around registers */
8651 *total
+= COSTS_N_INSNS (2);
8658 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
8659 *total
= COSTS_N_INSNS (1);
8664 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
8665 *total
= COSTS_N_INSNS (3);
8670 if (CONST_INT_P (XEXP (x
, 1)))
8671 switch (INTVAL (XEXP (x
, 1)))
8675 *total
= COSTS_N_INSNS (5);
8678 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
8686 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8693 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8695 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8696 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8701 val
= INTVAL (XEXP (x
, 1));
8703 *total
= COSTS_N_INSNS (3);
8704 else if (val
>= 0 && val
<= 7)
8705 *total
= COSTS_N_INSNS (val
);
8707 *total
= COSTS_N_INSNS (1);
8714 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
8715 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
8716 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
8718 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
8723 if (const1_rtx
== (XEXP (x
, 1))
8724 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
8726 *total
= COSTS_N_INSNS (2);
8730 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8732 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8733 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8737 switch (INTVAL (XEXP (x
, 1)))
8744 *total
= COSTS_N_INSNS (2);
8747 *total
= COSTS_N_INSNS (3);
8753 *total
= COSTS_N_INSNS (4);
8758 *total
= COSTS_N_INSNS (5);
8761 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8764 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8767 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
8770 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8771 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8777 if (!CONST_INT_P (XEXP (x
, 1)))
8779 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8782 switch (INTVAL (XEXP (x
, 1)))
8790 *total
= COSTS_N_INSNS (3);
8793 *total
= COSTS_N_INSNS (5);
8796 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8802 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8804 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8805 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8809 switch (INTVAL (XEXP (x
, 1)))
8815 *total
= COSTS_N_INSNS (3);
8820 *total
= COSTS_N_INSNS (4);
8823 *total
= COSTS_N_INSNS (6);
8826 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8829 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8830 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8838 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8845 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8847 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8848 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8853 val
= INTVAL (XEXP (x
, 1));
8855 *total
= COSTS_N_INSNS (4);
8857 *total
= COSTS_N_INSNS (2);
8858 else if (val
>= 0 && val
<= 7)
8859 *total
= COSTS_N_INSNS (val
);
8861 *total
= COSTS_N_INSNS (1);
8866 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8868 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8869 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8873 switch (INTVAL (XEXP (x
, 1)))
8879 *total
= COSTS_N_INSNS (2);
8882 *total
= COSTS_N_INSNS (3);
8888 *total
= COSTS_N_INSNS (4);
8892 *total
= COSTS_N_INSNS (5);
8895 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8898 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8902 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8905 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8906 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8912 if (!CONST_INT_P (XEXP (x
, 1)))
8914 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8917 switch (INTVAL (XEXP (x
, 1)))
8923 *total
= COSTS_N_INSNS (3);
8927 *total
= COSTS_N_INSNS (5);
8930 *total
= COSTS_N_INSNS (4);
8933 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8939 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8941 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8942 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8946 switch (INTVAL (XEXP (x
, 1)))
8952 *total
= COSTS_N_INSNS (4);
8957 *total
= COSTS_N_INSNS (6);
8960 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8963 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
8966 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8967 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8975 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8982 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8984 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8985 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8990 val
= INTVAL (XEXP (x
, 1));
8992 *total
= COSTS_N_INSNS (3);
8993 else if (val
>= 0 && val
<= 7)
8994 *total
= COSTS_N_INSNS (val
);
8996 *total
= COSTS_N_INSNS (1);
9001 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9003 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9004 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9008 switch (INTVAL (XEXP (x
, 1)))
9015 *total
= COSTS_N_INSNS (2);
9018 *total
= COSTS_N_INSNS (3);
9023 *total
= COSTS_N_INSNS (4);
9027 *total
= COSTS_N_INSNS (5);
9033 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9036 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9040 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9043 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9044 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9050 if (!CONST_INT_P (XEXP (x
, 1)))
9052 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9055 switch (INTVAL (XEXP (x
, 1)))
9063 *total
= COSTS_N_INSNS (3);
9066 *total
= COSTS_N_INSNS (5);
9069 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9075 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9077 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9078 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9082 switch (INTVAL (XEXP (x
, 1)))
9088 *total
= COSTS_N_INSNS (4);
9091 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9096 *total
= COSTS_N_INSNS (4);
9099 *total
= COSTS_N_INSNS (6);
9102 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9103 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9111 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9115 switch (GET_MODE (XEXP (x
, 0)))
9118 *total
= COSTS_N_INSNS (1);
9119 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9120 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9124 *total
= COSTS_N_INSNS (2);
9125 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9126 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9127 else if (INTVAL (XEXP (x
, 1)) != 0)
9128 *total
+= COSTS_N_INSNS (1);
9132 *total
= COSTS_N_INSNS (3);
9133 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
9134 *total
+= COSTS_N_INSNS (2);
9138 *total
= COSTS_N_INSNS (4);
9139 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9140 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9141 else if (INTVAL (XEXP (x
, 1)) != 0)
9142 *total
+= COSTS_N_INSNS (3);
9148 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9153 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
9154 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
9155 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
9157 if (QImode
== mode
|| HImode
== mode
)
9159 *total
= COSTS_N_INSNS (2);
9172 /* Implement `TARGET_RTX_COSTS'. */
9175 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
9176 int opno
, int *total
, bool speed
)
9178 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
9179 opno
, total
, speed
);
9181 if (avr_log
.rtx_costs
)
9183 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9184 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
9191 /* Implement `TARGET_ADDRESS_COST'. */
9194 avr_address_cost (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
,
9195 addr_space_t as ATTRIBUTE_UNUSED
,
9196 bool speed ATTRIBUTE_UNUSED
)
9200 if (GET_CODE (x
) == PLUS
9201 && CONST_INT_P (XEXP (x
, 1))
9202 && (REG_P (XEXP (x
, 0))
9203 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
9205 if (INTVAL (XEXP (x
, 1)) >= 61)
9208 else if (CONSTANT_ADDRESS_P (x
))
9211 && io_address_operand (x
, QImode
))
9215 if (avr_log
.address_cost
)
9216 avr_edump ("\n%?: %d = %r\n", cost
, x
);
9221 /* Test for extra memory constraint 'Q'.
9222 It's a memory address based on Y or Z pointer with valid displacement. */
9225 extra_constraint_Q (rtx x
)
9229 if (GET_CODE (XEXP (x
,0)) == PLUS
9230 && REG_P (XEXP (XEXP (x
,0), 0))
9231 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
9232 && (INTVAL (XEXP (XEXP (x
,0), 1))
9233 <= MAX_LD_OFFSET (GET_MODE (x
))))
9235 rtx xx
= XEXP (XEXP (x
,0), 0);
9236 int regno
= REGNO (xx
);
9238 ok
= (/* allocate pseudos */
9239 regno
>= FIRST_PSEUDO_REGISTER
9240 /* strictly check */
9241 || regno
== REG_Z
|| regno
== REG_Y
9242 /* XXX frame & arg pointer checks */
9243 || xx
== frame_pointer_rtx
9244 || xx
== arg_pointer_rtx
);
9246 if (avr_log
.constraints
)
9247 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9248 ok
, reload_completed
, reload_in_progress
, x
);
9254 /* Convert condition code CONDITION to the valid AVR condition code. */
9257 avr_normalize_condition (RTX_CODE condition
)
9274 /* Helper function for `avr_reorg'. */
9277 avr_compare_pattern (rtx insn
)
9279 rtx pattern
= single_set (insn
);
9282 && NONJUMP_INSN_P (insn
)
9283 && SET_DEST (pattern
) == cc0_rtx
9284 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
9286 enum machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
9287 enum machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
9289 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9290 They must not be swapped, thus skip them. */
9292 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
9293 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
9300 /* Helper function for `avr_reorg'. */
9302 /* Expansion of switch/case decision trees leads to code like
9304 cc0 = compare (Reg, Num)
9308 cc0 = compare (Reg, Num)
9312 The second comparison is superfluous and can be deleted.
9313 The second jump condition can be transformed from a
9314 "difficult" one to a "simple" one because "cc0 > 0" and
9315 "cc0 >= 0" will have the same effect here.
9317 This function relies on the way switch/case is being expaned
9318 as binary decision tree. For example code see PR 49903.
9320 Return TRUE if optimization performed.
9321 Return FALSE if nothing changed.
9323 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9325 We don't want to do this in text peephole because it is
9326 tedious to work out jump offsets there and the second comparison
9327 might have been transormed by `avr_reorg'.
9329 RTL peephole won't do because peephole2 does not scan across
9333 avr_reorg_remove_redundant_compare (rtx insn1
)
9335 rtx comp1
, ifelse1
, xcond1
, branch1
;
9336 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
9338 rtx jump
, target
, cond
;
9340 /* Look out for: compare1 - branch1 - compare2 - branch2 */
9342 branch1
= next_nonnote_nondebug_insn (insn1
);
9343 if (!branch1
|| !JUMP_P (branch1
))
9346 insn2
= next_nonnote_nondebug_insn (branch1
);
9347 if (!insn2
|| !avr_compare_pattern (insn2
))
9350 branch2
= next_nonnote_nondebug_insn (insn2
);
9351 if (!branch2
|| !JUMP_P (branch2
))
9354 comp1
= avr_compare_pattern (insn1
);
9355 comp2
= avr_compare_pattern (insn2
);
9356 xcond1
= single_set (branch1
);
9357 xcond2
= single_set (branch2
);
9359 if (!comp1
|| !comp2
9360 || !rtx_equal_p (comp1
, comp2
)
9361 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
9362 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
9363 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
9364 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
9369 comp1
= SET_SRC (comp1
);
9370 ifelse1
= SET_SRC (xcond1
);
9371 ifelse2
= SET_SRC (xcond2
);
9373 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
9375 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
9376 || !REG_P (XEXP (comp1
, 0))
9377 || !CONST_INT_P (XEXP (comp1
, 1))
9378 || XEXP (ifelse1
, 2) != pc_rtx
9379 || XEXP (ifelse2
, 2) != pc_rtx
9380 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
9381 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
9382 || !COMPARISON_P (XEXP (ifelse2
, 0))
9383 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
9384 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
9385 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
9386 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
9391 /* We filtered the insn sequence to look like
9397 (if_then_else (eq (cc0)
9406 (if_then_else (CODE (cc0)
9412 code
= GET_CODE (XEXP (ifelse2
, 0));
9414 /* Map GT/GTU to GE/GEU which is easier for AVR.
9415 The first two instructions compare/branch on EQ
9416 so we may replace the difficult
9418 if (x == VAL) goto L1;
9419 if (x > VAL) goto L2;
9423 if (x == VAL) goto L1;
9424 if (x >= VAL) goto L2;
9426 Similarly, replace LE/LEU by LT/LTU. */
9437 code
= avr_normalize_condition (code
);
9444 /* Wrap the branches into UNSPECs so they won't be changed or
9445 optimized in the remainder. */
9447 target
= XEXP (XEXP (ifelse1
, 1), 0);
9448 cond
= XEXP (ifelse1
, 0);
9449 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
9451 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
9453 target
= XEXP (XEXP (ifelse2
, 1), 0);
9454 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
9455 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
9457 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
9459 /* The comparisons in insn1 and insn2 are exactly the same;
9460 insn2 is superfluous so delete it. */
9462 delete_insn (insn2
);
9463 delete_insn (branch1
);
9464 delete_insn (branch2
);
9470 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9471 /* Optimize conditional jumps. */
9476 rtx insn
= get_insns();
9478 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
9480 rtx pattern
= avr_compare_pattern (insn
);
9486 && avr_reorg_remove_redundant_compare (insn
))
9491 if (compare_diff_p (insn
))
9493 /* Now we work under compare insn with difficult branch. */
9495 rtx next
= next_real_insn (insn
);
9496 rtx pat
= PATTERN (next
);
9498 pattern
= SET_SRC (pattern
);
9500 if (true_regnum (XEXP (pattern
, 0)) >= 0
9501 && true_regnum (XEXP (pattern
, 1)) >= 0)
9503 rtx x
= XEXP (pattern
, 0);
9504 rtx src
= SET_SRC (pat
);
9505 rtx t
= XEXP (src
,0);
9506 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
9507 XEXP (pattern
, 0) = XEXP (pattern
, 1);
9508 XEXP (pattern
, 1) = x
;
9509 INSN_CODE (next
) = -1;
9511 else if (true_regnum (XEXP (pattern
, 0)) >= 0
9512 && XEXP (pattern
, 1) == const0_rtx
)
9514 /* This is a tst insn, we can reverse it. */
9515 rtx src
= SET_SRC (pat
);
9516 rtx t
= XEXP (src
,0);
9518 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
9519 XEXP (pattern
, 1) = XEXP (pattern
, 0);
9520 XEXP (pattern
, 0) = const0_rtx
;
9521 INSN_CODE (next
) = -1;
9522 INSN_CODE (insn
) = -1;
9524 else if (true_regnum (XEXP (pattern
, 0)) >= 0
9525 && CONST_INT_P (XEXP (pattern
, 1)))
9527 rtx x
= XEXP (pattern
, 1);
9528 rtx src
= SET_SRC (pat
);
9529 rtx t
= XEXP (src
,0);
9530 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
9532 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
9534 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
9535 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
9536 INSN_CODE (next
) = -1;
9537 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.
   On AVR the 8-bit return value (and the LSB of wider values) lives
   in R24; wider values extend downwards from R25:R24.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
9552 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
9555 avr_function_value_regno_p (const unsigned int regno
)
9557 return (regno
== avr_ret_register ());
9560 /* Create an RTX representing the place where a
9561 library function returns a value of mode MODE. */
9564 avr_libcall_value (enum machine_mode mode
,
9565 const_rtx func ATTRIBUTE_UNUSED
)
9567 int offs
= GET_MODE_SIZE (mode
);
9570 offs
= (offs
+ 1) & ~1;
9572 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
9575 /* Create an RTX representing the place where a
9576 function returns a value of data type VALTYPE. */
9579 avr_function_value (const_tree type
,
9580 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
9581 bool outgoing ATTRIBUTE_UNUSED
)
9585 if (TYPE_MODE (type
) != BLKmode
)
9586 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
9588 offs
= int_size_in_bytes (type
);
9591 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
9592 offs
= GET_MODE_SIZE (SImode
);
9593 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
9594 offs
= GET_MODE_SIZE (DImode
);
9596 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
9600 test_hard_reg_class (enum reg_class rclass
, rtx x
)
9602 int regno
= true_regnum (x
);
9606 if (TEST_HARD_REG_CLASS (rclass
, regno
))
9613 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
9614 and thus is suitable to be skipped by CPSE, SBRC, etc. */
9617 avr_2word_insn_p (rtx insn
)
9619 if (avr_current_device
->errata_skip
9621 || 2 != get_attr_length (insn
))
9626 switch (INSN_CODE (insn
))
9631 case CODE_FOR_movqi_insn
:
9632 case CODE_FOR_movuqq_insn
:
9633 case CODE_FOR_movqq_insn
:
9635 rtx set
= single_set (insn
);
9636 rtx src
= SET_SRC (set
);
9637 rtx dest
= SET_DEST (set
);
9639 /* Factor out LDS and STS from movqi_insn. */
9642 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
9644 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
9646 else if (REG_P (dest
)
9649 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
9655 case CODE_FOR_call_insn
:
9656 case CODE_FOR_call_value_insn
:
9663 jump_over_one_insn_p (rtx insn
, rtx dest
)
9665 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
9668 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
9669 int dest_addr
= INSN_ADDRESSES (uid
);
9670 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
9672 return (jump_offset
== 1
9673 || (jump_offset
== 2
9674 && avr_2word_insn_p (next_active_insn (insn
))));
9677 /* Returns 1 if a value of mode MODE can be stored starting with hard
9678 register number REGNO. On the enhanced core, anything larger than
9679 1 byte must start in even numbered register for "movw" to work
9680 (this way we don't have to check for odd registers everywhere). */
9683 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
9685 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9686 Disallowing QI et al. in these regs might lead to code like
9687 (set (subreg:QI (reg:HI 28) n) ...)
9688 which will result in wrong code because reload does not
9689 handle SUBREGs of hard regsisters like this.
9690 This could be fixed in reload. However, it appears
9691 that fixing reload is not wanted by reload people. */
9693 /* Any GENERAL_REGS register can hold 8-bit values. */
9695 if (GET_MODE_SIZE (mode
) == 1)
9698 /* FIXME: Ideally, the following test is not needed.
9699 However, it turned out that it can reduce the number
9700 of spill fails. AVR and it's poor endowment with
9701 address registers is extreme stress test for reload. */
9703 if (GET_MODE_SIZE (mode
) >= 4
9707 /* All modes larger than 8 bits should start in an even register. */
9709 return !(regno
& 1);
9713 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
9716 avr_hard_regno_call_part_clobbered (unsigned regno
, enum machine_mode mode
)
9718 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
9719 represent valid hard registers like, e.g. HI:29. Returning TRUE
9720 for such registers can lead to performance degradation as mentioned
9721 in PR53595. Thus, report invalid hard registers as FALSE. */
9723 if (!avr_hard_regno_mode_ok (regno
, mode
))
9726 /* Return true if any of the following boundaries is crossed:
9727 17/18, 27/28 and 29/30. */
9729 return ((regno
< 18 && regno
+ GET_MODE_SIZE (mode
) > 18)
9730 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
9731 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
9735 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
9738 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
9739 addr_space_t as
, RTX_CODE outer_code
,
9740 RTX_CODE index_code ATTRIBUTE_UNUSED
)
9742 if (!ADDR_SPACE_GENERIC_P (as
))
9744 return POINTER_Z_REGS
;
9748 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
9750 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
9754 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9757 avr_regno_mode_code_ok_for_base_p (int regno
,
9758 enum machine_mode mode ATTRIBUTE_UNUSED
,
9759 addr_space_t as ATTRIBUTE_UNUSED
,
9760 RTX_CODE outer_code
,
9761 RTX_CODE index_code ATTRIBUTE_UNUSED
)
9765 if (!ADDR_SPACE_GENERIC_P (as
))
9767 if (regno
< FIRST_PSEUDO_REGISTER
9775 regno
= reg_renumber
[regno
];
9786 if (regno
< FIRST_PSEUDO_REGISTER
9790 || regno
== ARG_POINTER_REGNUM
))
9794 else if (reg_renumber
)
9796 regno
= reg_renumber
[regno
];
9801 || regno
== ARG_POINTER_REGNUM
)
9808 && PLUS
== outer_code
9818 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9819 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9820 CLOBBER_REG is a QI clobber register or NULL_RTX.
9821 LEN == NULL: output instructions.
9822 LEN != NULL: set *LEN to the length of the instruction sequence
9823 (in words) printed with LEN = NULL.
9824 If CLEAR_P is true, OP[0] had been cleard to Zero already.
9825 If CLEAR_P is false, nothing is known about OP[0].
9827 The effect on cc0 is as follows:
9829 Load 0 to any register except ZERO_REG : NONE
9830 Load ld register with any value : NONE
9831 Anything else: : CLOBBER */
9834 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
9840 int clobber_val
= 1234;
9841 bool cooked_clobber_p
= false;
9843 enum machine_mode mode
= GET_MODE (dest
);
9844 int n
, n_bytes
= GET_MODE_SIZE (mode
);
9846 gcc_assert (REG_P (dest
)
9847 && CONSTANT_P (src
));
9852 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9853 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9855 if (REGNO (dest
) < 16
9856 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
9858 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
9861 /* We might need a clobber reg but don't have one. Look at the value to
9862 be loaded more closely. A clobber is only needed if it is a symbol
9863 or contains a byte that is neither 0, -1 or a power of 2. */
9865 if (NULL_RTX
== clobber_reg
9866 && !test_hard_reg_class (LD_REGS
, dest
)
9867 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
9868 || !avr_popcount_each_byte (src
, n_bytes
,
9869 (1 << 0) | (1 << 1) | (1 << 8))))
9871 /* We have no clobber register but need one. Cook one up.
9872 That's cheaper than loading from constant pool. */
9874 cooked_clobber_p
= true;
9875 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
9876 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
9879 /* Now start filling DEST from LSB to MSB. */
9881 for (n
= 0; n
< n_bytes
; n
++)
9884 bool done_byte
= false;
9888 /* Crop the n-th destination byte. */
9890 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
9891 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
9893 if (!CONST_INT_P (src
)
9894 && !CONST_FIXED_P (src
)
9895 && !CONST_DOUBLE_P (src
))
9897 static const char* const asm_code
[][2] =
9899 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
9900 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
9901 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
9902 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
9907 xop
[2] = clobber_reg
;
9909 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
9914 /* Crop the n-th source byte. */
9916 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
9917 ival
[n
] = INTVAL (xval
);
9919 /* Look if we can reuse the low word by means of MOVW. */
9925 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
9926 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
9928 if (INTVAL (lo16
) == INTVAL (hi16
))
9930 if (0 != INTVAL (lo16
)
9933 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
9940 /* Don't use CLR so that cc0 is set as expected. */
9945 avr_asm_len (ldreg_p
? "ldi %0,0"
9946 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
9947 : "mov %0,__zero_reg__",
9952 if (clobber_val
== ival
[n
]
9953 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
9958 /* LD_REGS can use LDI to move a constant value */
9964 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
9968 /* Try to reuse value already loaded in some lower byte. */
9970 for (j
= 0; j
< n
; j
++)
9971 if (ival
[j
] == ival
[n
])
9976 avr_asm_len ("mov %0,%1", xop
, len
, 1);
9984 /* Need no clobber reg for -1: Use CLR/DEC */
9989 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9991 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
9994 else if (1 == ival
[n
])
9997 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9999 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
10003 /* Use T flag or INC to manage powers of 2 if we have
10006 if (NULL_RTX
== clobber_reg
10007 && single_one_operand (xval
, QImode
))
10010 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
10012 gcc_assert (constm1_rtx
!= xop
[1]);
10017 avr_asm_len ("set", xop
, len
, 1);
10021 avr_asm_len ("clr %0", xop
, len
, 1);
10023 avr_asm_len ("bld %0,%1", xop
, len
, 1);
10027 /* We actually need the LD_REGS clobber reg. */
10029 gcc_assert (NULL_RTX
!= clobber_reg
);
10033 xop
[2] = clobber_reg
;
10034 clobber_val
= ival
[n
];
10036 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10037 "mov %0,%2", xop
, len
, 2);
10040 /* If we cooked up a clobber reg above, restore it. */
10042 if (cooked_clobber_p
)
10044 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
10049 /* Reload the constant OP[1] into the HI register OP[0].
10050 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10051 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10052 need a clobber reg or have to cook one up.
10054 PLEN == NULL: Output instructions.
10055 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10056 by the insns printed.
10061 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
10063 output_reload_in_const (op
, clobber_reg
, plen
, false);
10068 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10069 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10070 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10071 need a clobber reg or have to cook one up.
10073 LEN == NULL: Output instructions.
10075 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10076 by the insns printed.
10081 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
10084 && !test_hard_reg_class (LD_REGS
, op
[0])
10085 && (CONST_INT_P (op
[1])
10086 || CONST_FIXED_P (op
[1])
10087 || CONST_DOUBLE_P (op
[1])))
10089 int len_clr
, len_noclr
;
10091 /* In some cases it is better to clear the destination beforehand, e.g.
10093 CLR R2 CLR R3 MOVW R4,R2 INC R2
10097 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10099 We find it too tedious to work that out in the print function.
10100 Instead, we call the print function twice to get the lengths of
10101 both methods and use the shortest one. */
10103 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
10104 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
10106 if (len_noclr
- len_clr
== 4)
10108 /* Default needs 4 CLR instructions: clear register beforehand. */
10110 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10111 "mov %B0,__zero_reg__" CR_TAB
10112 "movw %C0,%A0", &op
[0], len
, 3);
10114 output_reload_in_const (op
, clobber_reg
, len
, true);
10123 /* Default: destination not pre-cleared. */
10125 output_reload_in_const (op
, clobber_reg
, len
, false);
10130 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
10132 output_reload_in_const (op
, clobber_reg
, len
, false);
10138 avr_output_addr_vec_elt (FILE *stream
, int value
)
10140 if (AVR_HAVE_JMP_CALL
)
10141 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
10143 fprintf (stream
, "\trjmp .L%d\n", value
);
10146 /* Returns true if SCRATCH are safe to be allocated as a scratch
10147 registers (for a define_peephole2) in the current function. */
10150 avr_hard_regno_scratch_ok (unsigned int regno
)
10152 /* Interrupt functions can only use registers that have already been saved
10153 by the prologue, even if they would normally be call-clobbered. */
10155 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10156 && !df_regs_ever_live_p (regno
))
10159 /* Don't allow hard registers that might be part of the frame pointer.
10160 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10161 and don't care for a frame pointer that spans more than one register. */
10163 if ((!reload_completed
|| frame_pointer_needed
)
10164 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
10172 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10175 avr_hard_regno_rename_ok (unsigned int old_reg
,
10176 unsigned int new_reg
)
10178 /* Interrupt functions can only use registers that have already been
10179 saved by the prologue, even if they would normally be
10182 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10183 && !df_regs_ever_live_p (new_reg
))
10186 /* Don't allow hard registers that might be part of the frame pointer.
10187 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10188 and don't care for a frame pointer that spans more than one register. */
10190 if ((!reload_completed
|| frame_pointer_needed
)
10191 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
10192 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
10200 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10201 or memory location in the I/O space (QImode only).
10203 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10204 Operand 1: register operand to test, or CONST_INT memory address.
10205 Operand 2: bit number.
10206 Operand 3: label to jump to if the test is true. */
10209 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
10211 enum rtx_code comp
= GET_CODE (operands
[0]);
10212 bool long_jump
= get_attr_length (insn
) >= 4;
10213 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
10217 else if (comp
== LT
)
10221 comp
= reverse_condition (comp
);
10223 switch (GET_CODE (operands
[1]))
10230 if (low_io_address_operand (operands
[1], QImode
))
10233 output_asm_insn ("sbis %i1,%2", operands
);
10235 output_asm_insn ("sbic %i1,%2", operands
);
10239 output_asm_insn ("in __tmp_reg__,%i1", operands
);
10241 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
10243 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
10246 break; /* CONST_INT */
10251 output_asm_insn ("sbrs %T1%T2", operands
);
10253 output_asm_insn ("sbrc %T1%T2", operands
);
10259 return ("rjmp .+4" CR_TAB
10268 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
10271 avr_asm_out_ctor (rtx symbol
, int priority
)
10273 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
10274 default_ctor_section_asm_out_constructor (symbol
, priority
);
10277 /* Worker function for TARGET_ASM_DESTRUCTOR. */
10280 avr_asm_out_dtor (rtx symbol
, int priority
)
10282 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
10283 default_dtor_section_asm_out_destructor (symbol
, priority
);
10286 /* Worker function for TARGET_RETURN_IN_MEMORY. */
10289 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
10291 if (TYPE_MODE (type
) == BLKmode
)
10293 HOST_WIDE_INT size
= int_size_in_bytes (type
);
10294 return (size
== -1 || size
> 8);
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
10317 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10319 static enum machine_mode
10320 avr_addr_space_address_mode (addr_space_t as
)
10322 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
10326 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10328 static enum machine_mode
10329 avr_addr_space_pointer_mode (addr_space_t as
)
10331 return avr_addr_space_address_mode (as
);
10335 /* Helper for following function. */
10338 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
10345 return REGNO (reg
) == REG_Z
;
10348 /* Avoid combine to propagate hard regs. */
10350 if (can_create_pseudo_p()
10351 && REGNO (reg
) < REG_Z
)
10360 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
10363 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
10364 bool strict
, addr_space_t as
)
10373 case ADDR_SPACE_GENERIC
:
10374 return avr_legitimate_address_p (mode
, x
, strict
);
10376 case ADDR_SPACE_FLASH
:
10377 case ADDR_SPACE_FLASH1
:
10378 case ADDR_SPACE_FLASH2
:
10379 case ADDR_SPACE_FLASH3
:
10380 case ADDR_SPACE_FLASH4
:
10381 case ADDR_SPACE_FLASH5
:
10383 switch (GET_CODE (x
))
10386 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
10390 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
10399 case ADDR_SPACE_MEMX
:
10402 && can_create_pseudo_p());
10404 if (LO_SUM
== GET_CODE (x
))
10406 rtx hi
= XEXP (x
, 0);
10407 rtx lo
= XEXP (x
, 1);
10410 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
10412 && REGNO (lo
) == REG_Z
);
10418 if (avr_log
.legitimate_address_p
)
10420 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
10421 "reload_completed=%d reload_in_progress=%d %s:",
10422 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
10423 reg_renumber
? "(reg_renumber)" : "");
10425 if (GET_CODE (x
) == PLUS
10426 && REG_P (XEXP (x
, 0))
10427 && CONST_INT_P (XEXP (x
, 1))
10428 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
10431 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
10432 true_regnum (XEXP (x
, 0)));
10435 avr_edump ("\n%r\n", x
);
10442 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10445 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
10446 enum machine_mode mode
, addr_space_t as
)
10448 if (ADDR_SPACE_GENERIC_P (as
))
10449 return avr_legitimize_address (x
, old_x
, mode
);
10451 if (avr_log
.legitimize_address
)
10453 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
10460 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
10463 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
10465 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
10466 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
10468 if (avr_log
.progmem
)
10469 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
10470 src
, type_from
, type_to
);
10472 /* Up-casting from 16-bit to 24-bit pointer. */
10474 if (as_from
!= ADDR_SPACE_MEMX
10475 && as_to
== ADDR_SPACE_MEMX
)
10479 rtx reg
= gen_reg_rtx (PSImode
);
10481 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
10482 sym
= XEXP (sym
, 0);
10484 /* Look at symbol flags: avr_encode_section_info set the flags
10485 also if attribute progmem was seen so that we get the right
10486 promotion for, e.g. PSTR-like strings that reside in generic space
10487 but are located in flash. In that case we patch the incoming
10490 if (SYMBOL_REF
== GET_CODE (sym
)
10491 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
10493 as_from
= ADDR_SPACE_FLASH
;
10496 /* Linearize memory: RAM has bit 23 set. */
10498 msb
= ADDR_SPACE_GENERIC_P (as_from
)
10500 : avr_addrspace
[as_from
].segment
;
10502 src
= force_reg (Pmode
, src
);
10504 emit_insn (msb
== 0
10505 ? gen_zero_extendhipsi2 (reg
, src
)
10506 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
10511 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
10513 if (as_from
== ADDR_SPACE_MEMX
10514 && as_to
!= ADDR_SPACE_MEMX
)
10516 rtx new_src
= gen_reg_rtx (Pmode
);
10518 src
= force_reg (PSImode
, src
);
10520 emit_move_insn (new_src
,
10521 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
10529 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
10532 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
10533 addr_space_t superset ATTRIBUTE_UNUSED
)
10535 /* Allow any kind of pointer mess. */
10541 /* Worker function for movmemhi expander.
10542 XOP[0] Destination as MEM:BLK
10544 XOP[2] # Bytes to copy
10546 Return TRUE if the expansion is accomplished.
10547 Return FALSE if the operand compination is not supported. */
10550 avr_emit_movmemhi (rtx
*xop
)
10552 HOST_WIDE_INT count
;
10553 enum machine_mode loop_mode
;
10554 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
10555 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
10556 rtx a_hi8
= NULL_RTX
;
10558 if (avr_mem_flash_p (xop
[0]))
10561 if (!CONST_INT_P (xop
[2]))
10564 count
= INTVAL (xop
[2]);
10568 a_src
= XEXP (xop
[1], 0);
10569 a_dest
= XEXP (xop
[0], 0);
10571 if (PSImode
== GET_MODE (a_src
))
10573 gcc_assert (as
== ADDR_SPACE_MEMX
);
10575 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
10576 loop_reg
= gen_rtx_REG (loop_mode
, 24);
10577 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
10579 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
10580 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
10584 int segment
= avr_addrspace
[as
].segment
;
10587 && avr_current_device
->n_flash
> 1)
10589 a_hi8
= GEN_INT (segment
);
10590 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
10592 else if (!ADDR_SPACE_GENERIC_P (as
))
10594 as
= ADDR_SPACE_FLASH
;
10599 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
10600 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
10603 xas
= GEN_INT (as
);
10605 /* FIXME: Register allocator might come up with spill fails if it is left
10606 on its own. Thus, we allocate the pointer registers by hand:
10608 X = destination address */
10610 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
10611 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
10613 /* FIXME: Register allocator does a bad job and might spill address
10614 register(s) inside the loop leading to additional move instruction
10615 to/from stack which could clobber tmp_reg. Thus, do *not* emit
10616 load and store as separate insns. Instead, we perform the copy
10617 by means of one monolithic insn. */
10619 gcc_assert (TMP_REGNO
== LPM_REGNO
);
10621 if (as
!= ADDR_SPACE_MEMX
)
10623 /* Load instruction ([E]LPM or LD) is known at compile time:
10624 Do the copy-loop inline. */
10626 rtx (*fun
) (rtx
, rtx
, rtx
)
10627 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
10629 insn
= fun (xas
, loop_reg
, loop_reg
);
10633 rtx (*fun
) (rtx
, rtx
)
10634 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
10636 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
10638 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
10641 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
10648 /* Print assembler for movmem_qi, movmem_hi insns...
10650 $1, $2 : Loop register
10652 X : Destination address
10656 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
10658 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
10659 enum machine_mode loop_mode
= GET_MODE (op
[1]);
10660 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
10668 xop
[2] = tmp_reg_rtx
;
10672 avr_asm_len ("0:", xop
, plen
, 0);
10674 /* Load with post-increment */
10681 case ADDR_SPACE_GENERIC
:
10683 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
10686 case ADDR_SPACE_FLASH
:
10689 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
10691 avr_asm_len ("lpm" CR_TAB
10692 "adiw r30,1", xop
, plen
, 2);
10695 case ADDR_SPACE_FLASH1
:
10696 case ADDR_SPACE_FLASH2
:
10697 case ADDR_SPACE_FLASH3
:
10698 case ADDR_SPACE_FLASH4
:
10699 case ADDR_SPACE_FLASH5
:
10701 if (AVR_HAVE_ELPMX
)
10702 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
10704 avr_asm_len ("elpm" CR_TAB
10705 "adiw r30,1", xop
, plen
, 2);
10709 /* Store with post-increment */
10711 avr_asm_len ("st X+,%2", xop
, plen
, 1);
10713 /* Decrement loop-counter and set Z-flag */
10715 if (QImode
== loop_mode
)
10717 avr_asm_len ("dec %1", xop
, plen
, 1);
10721 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
10725 avr_asm_len ("subi %A1,1" CR_TAB
10726 "sbci %B1,0", xop
, plen
, 2);
10729 /* Loop until zero */
10731 return avr_asm_len ("brne 0b", xop
, plen
, 1);
10736 /* Helper for __builtin_avr_delay_cycles */
10739 avr_mem_clobber (void)
10741 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
10742 MEM_VOLATILE_P (mem
) = 1;
10747 avr_expand_delay_cycles (rtx operands0
)
10749 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
10750 unsigned HOST_WIDE_INT cycles_used
;
10751 unsigned HOST_WIDE_INT loop_count
;
10753 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
10755 loop_count
= ((cycles
- 9) / 6) + 1;
10756 cycles_used
= ((loop_count
- 1) * 6) + 9;
10757 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
10758 avr_mem_clobber()));
10759 cycles
-= cycles_used
;
10762 if (IN_RANGE (cycles
, 262145, 83886081))
10764 loop_count
= ((cycles
- 7) / 5) + 1;
10765 if (loop_count
> 0xFFFFFF)
10766 loop_count
= 0xFFFFFF;
10767 cycles_used
= ((loop_count
- 1) * 5) + 7;
10768 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
10769 avr_mem_clobber()));
10770 cycles
-= cycles_used
;
10773 if (IN_RANGE (cycles
, 768, 262144))
10775 loop_count
= ((cycles
- 5) / 4) + 1;
10776 if (loop_count
> 0xFFFF)
10777 loop_count
= 0xFFFF;
10778 cycles_used
= ((loop_count
- 1) * 4) + 5;
10779 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
10780 avr_mem_clobber()));
10781 cycles
-= cycles_used
;
10784 if (IN_RANGE (cycles
, 6, 767))
10786 loop_count
= cycles
/ 3;
10787 if (loop_count
> 255)
10789 cycles_used
= loop_count
* 3;
10790 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
10791 avr_mem_clobber()));
10792 cycles
-= cycles_used
;
10795 while (cycles
>= 2)
10797 emit_insn (gen_nopv (GEN_INT(2)));
10803 emit_insn (gen_nopv (GEN_INT(1)));
10809 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
10812 avr_double_int_push_digit (double_int val
, int base
,
10813 unsigned HOST_WIDE_INT digit
)
10816 ? val
.llshift (32, 64)
10817 : val
* double_int::from_uhwi (base
);
10819 return val
+ double_int::from_uhwi (digit
);
10823 /* Compute the image of x under f, i.e. perform x --> f(x) */
10826 avr_map (double_int f
, int x
)
10828 return 0xf & f
.lrshift (4*x
, 64).to_uhwi ();
10832 /* Return some metrics of map A. */
10836 /* Number of fixed points in { 0 ... 7 } */
10839 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10842 /* Mask representing the fixed points in { 0 ... 7 } */
10843 MAP_MASK_FIXED_0_7
,
10845 /* Size of the preimage of { 0 ... 7 } */
10848 /* Mask that represents the preimage of { f } */
10849 MAP_MASK_PREIMAGE_F
10853 avr_map_metric (double_int a
, int mode
)
10855 unsigned i
, metric
= 0;
10857 for (i
= 0; i
< 8; i
++)
10859 unsigned ai
= avr_map (a
, i
);
10861 if (mode
== MAP_FIXED_0_7
)
10863 else if (mode
== MAP_NONFIXED_0_7
)
10864 metric
+= ai
< 8 && ai
!= i
;
10865 else if (mode
== MAP_MASK_FIXED_0_7
)
10866 metric
|= ((unsigned) (ai
== i
)) << i
;
10867 else if (mode
== MAP_PREIMAGE_0_7
)
10869 else if (mode
== MAP_MASK_PREIMAGE_F
)
10870 metric
|= ((unsigned) (ai
== 0xf)) << i
;
10879 /* Return true if IVAL has a 0xf in its hexadecimal representation
10880 and false, otherwise. Only nibbles 0..7 are taken into account.
10881 Used as constraint helper for C0f and Cxf. */
10884 avr_has_nibble_0xf (rtx ival
)
10886 return 0 != avr_map_metric (rtx_to_double_int (ival
), MAP_MASK_PREIMAGE_F
);
10890 /* We have a set of bits that are mapped by a function F.
10891 Try to decompose F by means of a second function G so that
10897 cost (F o G^-1) + cost (G) < cost (F)
10899 Example: Suppose builtin insert_bits supplies us with the map
10900 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10901 nibble of the result, we can just as well rotate the bits before inserting
10902 them and use the map 0x7654ffff which is cheaper than the original map.
10903 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
10907 /* tree code of binary function G */
10908 enum tree_code code
;
10910 /* The constant second argument of G */
10913 /* G^-1, the inverse of G (*, arg) */
10916 /* The cost of appplying G (*, arg) */
10919 /* The composition F o G^-1 (*, arg) for some function F */
10922 /* For debug purpose only */
10926 static const avr_map_op_t avr_map_op
[] =
10928 { LROTATE_EXPR
, 0, 0x76543210, 0, { 0, 0 }, "id" },
10929 { LROTATE_EXPR
, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10930 { LROTATE_EXPR
, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10931 { LROTATE_EXPR
, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10932 { LROTATE_EXPR
, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10933 { LROTATE_EXPR
, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10934 { LROTATE_EXPR
, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10935 { LROTATE_EXPR
, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10936 { RSHIFT_EXPR
, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10937 { RSHIFT_EXPR
, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10938 { RSHIFT_EXPR
, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10939 { RSHIFT_EXPR
, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10940 { RSHIFT_EXPR
, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10941 { LSHIFT_EXPR
, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10942 { LSHIFT_EXPR
, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.
   F is an insert-bits map, *G one entry of avr_map_op[], and
   VAL_CONST_P tells whether the value operand of the built-in is a
   compile-time constant.  */

static avr_map_op_t
avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
  /* Nonzero iff F actually reads bits of the value operand, i.e. the
     map contains at least one 0xf nibble.  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  /* Result starts as a copy of *G; .map and .cost are recomputed below.  */
  avr_map_op_t f_ginv = *g;
  double_int ginv = double_int::from_uhwi (g->ginv);

  /* Step 1: Computing F o G^-1 */

  /* Compose nibble-wise, from the most significant nibble down, so that
     pushing digits builds the map in the right order.  */
  for (i = 7; i >= 0; i--)
      int x = avr_map (f, i);

      /* Map the nibble through G^-1.  */
      x = avr_map (ginv, x);

      /* The bit is no element of the image of G: no avail (cost = -1) */

      f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);

  /* Step 2: Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a: Compute cost of F o G^-1 */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map.to_uhwi (), SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      /* Called with non-NULL plen, so this only computes the length.  */
      avr_out_insert_bits (xop, &f_ginv.cost);

      /* Account for loading a constant value into a register.  */
      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;

  /* Step 2b: Add cost of G */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
                      is different to its source position.
   PLEN, if non-NULL, accumulates the instruction count instead of
   emitting assembler text (see avr_asm_len).  */

avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
  /* T-flag contains this bit of the source, i.e. of XOP[1].
     -1 means the T flag does not yet hold any source bit.  */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
        int bit_src = avr_map (map, bit_dest);

        /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))

        if (t_bit_src != bit_src)
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
/* Output worker for the `insert_bits' insn.
   PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[1]: The mapping composed of nibbles.  If nibble no. N is
          0:   Bit N of result is copied from bit OP[2].0
          ...
          7:   Bit N of result is copied from bit OP[2].7
          0xf: Bit N of result is copied from bit OP[3].N
   OP[2]: Bits to be inserted
   OP[3]: Target value  */

avr_out_insert_bits (rtx *op, int *plen)
  double_int map = rtx_to_double_int (op[1]);
  unsigned mask_fixed;
  bool fixp_p = true;

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  /* Echo the map in the assembler output to ease debugging.  */
  else if (flag_print_asm_name)
    fprintf (asm_out_file,
             ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
             map.to_uhwi () & GET_MODE_MASK (SImode));

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* Initializing the result via AND/XOR mask logic pays off only
         if it saves more than 3 instructions over moving each fixed
         bit individually.  */
      if (fixp_p && n_fix - n_nofix > 3)
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);

      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
          avr_asm_len ("mov %0,%1", xop, plen, 1);

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);
/* IDs for all the AVR builtins.  The enumerators are generated by
   expanding the DEF_BUILTIN X-macro once per entry of builtins.def.  */

enum avr_builtin_id
#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
#include "builtins.def"
/* Descriptor of one AVR built-in function; GTY(()) marks it for the
   garbage-collector / PCH machinery.  */
struct GTY(()) avr_builtin_description
  /* icode of the insn that implements this built-in.  */
  enum insn_code icode;
/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID].  Both are generated from the same builtins.def via the
   DEF_BUILTIN X-macro, so the order matches by construction.  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE)      \
  { (enum insn_code) ICODE, NAME, N_ARGS, NULL_TREE },
#include "builtins.def"
11192 /* Implement `TARGET_BUILTIN_DECL'. */
11195 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
11197 if (id
< AVR_BUILTIN_COUNT
)
11198 return avr_bdesc
[id
].fndecl
;
11200 return error_mark_node
;
11205 avr_init_builtin_int24 (void)
11207 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
11208 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
11210 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
11211 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  First build the
   function-type nodes needed by builtins.def, then register one
   built-in per DEF_BUILTIN entry, and finally the __int24 types.  */

avr_init_builtins (void)
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                unsigned_char_type_node,
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,

  /* Type "const __memx void*": a const void in the 24-bit __memx
     address space, used by the __builtin_avr_flash_segment family.  */
  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,

  /* Register each built-in and record its decl in avr_bdesc[] so that
     avr_builtin_decl can look it up by ID.  */
#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE)                       \
  gcc_assert (ID < AVR_BUILTIN_COUNT);                                  \
  avr_bdesc[ID].fndecl                                                  \
    = add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
#include "builtins.def"

  avr_init_builtin_int24 ();
/* Subroutine of avr_expand_builtin to expand vanilla builtins
   with non-void result and 1 ... 3 arguments.
   ICODE is the insn that implements the built-in, EXP the CALL_EXPR and
   TARGET a suggested rtx for the result (may be replaced).  */

avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
  int n, n_args = call_expr_nargs (exp);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  /* Make sure TARGET is something the insn's output predicate accepts.  */
  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
      target = gen_reg_rtx (tmode);

  /* Expand each argument and coerce it into the mode/predicate the
     insn expects for the corresponding input operand.  */
  for (n = 0; n < n_args; n++)
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      enum machine_mode opmode = GET_MODE (op);
      enum machine_mode mode = insn_data[icode].operand[n+1].mode;

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
          op = gen_lowpart (HImode, op);

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      if (!insn_data[icode].operand[n+1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

    /* Generate the insn pattern with the number of operands expanded.  */
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

  if (pat == NULL_RTX)
/* Implement `TARGET_EXPAND_BUILTIN'.  */
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];

  gcc_assert (id < AVR_BUILTIN_COUNT);

    /* Built-ins that need special treatment before the vanilla expand.  */

    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));

    case AVR_BUILTIN_DELAY_CYCLES:
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        /* The cycle count must be known at compile time.  */
        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);

          avr_expand_delay_cycles (op0);

    case AVR_BUILTIN_INSERT_BITS:
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        /* The map (1st argument) must be a compile-time constant.  */
        if (!CONST_INT_P (op0))
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
      emit_insn ((GEN_FCN (d->icode)) (target));

  return avr_default_expand_builtin (d->icode, exp, target);
/* Implement `TARGET_FOLD_BUILTIN'.  */
/* Fold calls to AVR built-ins at the tree level where possible:
   __builtin_avr_swap becomes a rotate, and __builtin_avr_insert_bits
   is simplified or cheapened depending on which arguments are
   compile-time constants.  ARG points to the call's arguments.  */

avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

    case AVR_BUILTIN_SWAP:
        /* Nibble swap is just a rotate by 4.  */
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));

    case AVR_BUILTIN_INSERT_BITS:
        tree tbits = arg[1];
        tree tval = arg[2];
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        bool changed = false;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

        map = tree_to_double_int (arg[0]);
        tmap = double_int_to_tree (map_type, map);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
            /* Similar for the bits to be inserted.  If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);

        if (TREE_CODE (tbits) == INTEGER_CST)
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            /* Derive the two masks from the map nibbles.  */
            for (i = 0; i < 8; i++)
                int mi = avr_map (map, i);

                if (bits & (1 << mi))   mask_ior |= (1 << i);
                else                    mask_and &= ~(1 << i);

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));

          /* Arguments were simplified above: rebuild the call.  */
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (bits ^ val) & mask ^ val merges bits into val under mask.  */
            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);

        /* Try decomposing map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %X\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        /* Probe every candidate operation G and keep the cheapest
           valid decomposition.  */
        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
            = avr_map_decompose (map, avr_map_op + i,
                                 TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)

        if (avr_log.builtin)

        if (best_g.arg == 0)
          /* No optimization found */

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = double_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
/* Initialize the GCC target structure.  Each hook is first #undef'ed
   (they carry default definitions from target-def.h) and then pointed
   at the AVR implementation or data.  */

/* Assembler output: directives, file prologue/epilogue.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Calling convention and return values.  */

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Attributes and sections.  */

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Costs and machine-dependent passes.  */

#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Built-in functions (see above).  */

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef  TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

/* Named address spaces (__flash, __memx, ...).  */

#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

/* Operand printing for output templates.  */

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

struct gcc_target targetm = TARGET_INITIALIZER;
11723 #include "gt-avr.h"