1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2014 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
35 #include "print-tree.h"
37 #include "stor-layout.h"
38 #include "stringpool.h"
41 #include "c-family/c-common.h"
42 #include "diagnostic-core.h"
48 #include "langhooks.h"
51 #include "target-def.h"
56 /* Maximal allowed offset for an address in the LD command */
57 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
59 /* Return true if STR starts with PREFIX and false, otherwise. */
60 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
62 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
63 address space where data is to be located.
64 As the only non-generic address spaces are all located in flash,
65 this can be used to test if data shall go into some .progmem* section.
66 This must be the rightmost field of machine dependent section flags. */
67 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
69 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
70 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
72 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
73 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
74 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
76 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
77 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
80 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
81 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
82 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
83 / SYMBOL_FLAG_MACH_DEP)
85 /* Known address spaces. The order must be the same as in the respective
86 enum from avr.h (or designated initialized must be used). */
87 const avr_addrspace_t avr_addrspace
[ADDR_SPACE_COUNT
] =
89 { ADDR_SPACE_RAM
, 0, 2, "", 0, NULL
},
90 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0, ".progmem.data" },
91 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1, ".progmem1.data" },
92 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2, ".progmem2.data" },
93 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3, ".progmem3.data" },
94 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4, ".progmem4.data" },
95 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5, ".progmem5.data" },
96 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0, ".progmemx.data" },
100 /* Holding RAM addresses of some SFRs used by the compiler and that
101 are unique over all devices in an architecture like 'avr4'. */
105 /* SREG: The processor status */
108 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
114 /* RAMPZ: The high byte of 24-bit address used with ELPM */
117 /* SP: The stack pointer and its low and high byte */
122 static avr_addr_t avr_addr
;
125 /* Prototypes for local helper functions. */
127 static const char* out_movqi_r_mr (rtx_insn
*, rtx
[], int*);
128 static const char* out_movhi_r_mr (rtx_insn
*, rtx
[], int*);
129 static const char* out_movsi_r_mr (rtx_insn
*, rtx
[], int*);
130 static const char* out_movqi_mr_r (rtx_insn
*, rtx
[], int*);
131 static const char* out_movhi_mr_r (rtx_insn
*, rtx
[], int*);
132 static const char* out_movsi_mr_r (rtx_insn
*, rtx
[], int*);
134 static int get_sequence_length (rtx_insn
*insns
);
135 static int sequent_regs_live (void);
136 static const char *ptrreg_to_str (int);
137 static const char *cond_string (enum rtx_code
);
138 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
139 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
141 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
142 static struct machine_function
* avr_init_machine_status (void);
145 /* Prototypes for hook implementors if needed before their implementation. */
147 static bool avr_rtx_costs (rtx
, int, int, int, int*, bool);
150 /* Allocate registers from r25 to r8 for parameters for function calls. */
151 #define FIRST_CUM_REG 26
153 /* Implicit target register of LPM instruction (R0) */
154 extern GTY(()) rtx lpm_reg_rtx
;
157 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
158 extern GTY(()) rtx lpm_addr_reg_rtx
;
159 rtx lpm_addr_reg_rtx
;
161 /* Temporary register RTX (reg:QI TMP_REGNO) */
162 extern GTY(()) rtx tmp_reg_rtx
;
165 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
166 extern GTY(()) rtx zero_reg_rtx
;
169 /* RTXs for all general purpose registers as QImode */
170 extern GTY(()) rtx all_regs_rtx
[32];
171 rtx all_regs_rtx
[32];
173 /* SREG, the processor status */
174 extern GTY(()) rtx sreg_rtx
;
177 /* RAMP* special function registers */
178 extern GTY(()) rtx rampd_rtx
;
179 extern GTY(()) rtx rampx_rtx
;
180 extern GTY(()) rtx rampy_rtx
;
181 extern GTY(()) rtx rampz_rtx
;
187 /* RTX containing the strings "" and "e", respectively */
188 static GTY(()) rtx xstring_empty
;
189 static GTY(()) rtx xstring_e
;
191 /* Current architecture. */
192 const avr_arch_t
*avr_current_arch
;
194 /* Current device. */
195 const avr_mcu_t
*avr_current_device
;
197 /* Section to put switch tables in. */
198 static GTY(()) section
*progmem_swtable_section
;
200 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
201 or to address space __flash* or __memx. Only used as singletons inside
202 avr_asm_select_section, but it must not be local there because of GTY. */
203 static GTY(()) section
*progmem_section
[ADDR_SPACE_COUNT
];
205 /* Condition for insns/expanders from avr-dimode.md. */
206 bool avr_have_dimode
= true;
208 /* To track if code will use .bss and/or .data. */
209 bool avr_need_clear_bss_p
= false;
210 bool avr_need_copy_data_p
= false;
213 /* Transform UP into lowercase and write the result to LO.
214 You must provide enough space for LO. Return LO. */
217 avr_tolower (char *lo
, const char *up
)
221 for (; *up
; up
++, lo
++)
230 /* Custom function to count number of set bits. */
233 avr_popcount (unsigned int val
)
247 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
248 Return true if the least significant N_BYTES bytes of XVAL all have a
249 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
250 of integers which contains an integer N iff bit N of POP_MASK is set. */
253 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
257 enum machine_mode mode
= GET_MODE (xval
);
259 if (VOIDmode
== mode
)
262 for (i
= 0; i
< n_bytes
; i
++)
264 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
265 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
267 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
275 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
276 the bit representation of X by "casting" it to CONST_INT. */
279 avr_to_int_mode (rtx x
)
281 enum machine_mode mode
= GET_MODE (x
);
283 return VOIDmode
== mode
285 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
289 /* Implement `TARGET_OPTION_OVERRIDE'. */
292 avr_option_override (void)
294 /* Disable -fdelete-null-pointer-checks option for AVR target.
295 This option compiler assumes that dereferencing of a null pointer
296 would halt the program. For AVR this assumption is not true and
297 programs can safely dereference null pointers. Changes made by this
298 option may not work properly for AVR. So disable this option. */
300 flag_delete_null_pointer_checks
= 0;
302 /* caller-save.c looks for call-clobbered hard registers that are assigned
303 to pseudos that cross calls and tries so save-restore them around calls
304 in order to reduce the number of stack slots needed.
306 This might lead to situations where reload is no more able to cope
307 with the challenge of AVR's very few address registers and fails to
308 perform the requested spills. */
311 flag_caller_saves
= 0;
313 /* Unwind tables currently require a frame pointer for correctness,
314 see toplev.c:process_options(). */
316 if ((flag_unwind_tables
317 || flag_non_call_exceptions
318 || flag_asynchronous_unwind_tables
)
319 && !ACCUMULATE_OUTGOING_ARGS
)
321 flag_omit_frame_pointer
= 0;
325 warning (OPT_fpic
, "-fpic is not supported");
327 warning (OPT_fPIC
, "-fPIC is not supported");
329 warning (OPT_fpie
, "-fpie is not supported");
331 warning (OPT_fPIE
, "-fPIE is not supported");
333 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
334 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
336 /* RAM addresses of some SFRs common to all devices in respective arch. */
338 /* SREG: Status Register containing flags like I (global IRQ) */
339 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
341 /* RAMPZ: Address' high part when loading via ELPM */
342 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
344 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
345 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
346 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
347 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
349 /* SP: Stack Pointer (SP_H:SP_L) */
350 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
351 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
353 init_machine_status
= avr_init_machine_status
;
355 avr_log_set_avr_log();
358 /* Function to set up the backend function structure. */
360 static struct machine_function
*
361 avr_init_machine_status (void)
363 return ggc_cleared_alloc
<machine_function
> ();
367 /* Implement `INIT_EXPANDERS'. */
368 /* The function works like a singleton. */
371 avr_init_expanders (void)
375 for (regno
= 0; regno
< 32; regno
++)
376 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
378 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
379 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
380 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
382 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
384 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
385 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
386 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
387 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
388 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
390 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
391 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
395 /* Implement `REGNO_REG_CLASS'. */
396 /* Return register class for register R. */
399 avr_regno_reg_class (int r
)
401 static const enum reg_class reg_class_tab
[] =
405 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
406 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
407 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
408 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
410 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
411 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
413 ADDW_REGS
, ADDW_REGS
,
415 POINTER_X_REGS
, POINTER_X_REGS
,
417 POINTER_Y_REGS
, POINTER_Y_REGS
,
419 POINTER_Z_REGS
, POINTER_Z_REGS
,
425 return reg_class_tab
[r
];
431 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
434 avr_scalar_mode_supported_p (enum machine_mode mode
)
436 if (ALL_FIXED_POINT_MODE_P (mode
))
442 return default_scalar_mode_supported_p (mode
);
446 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
449 avr_decl_flash_p (tree decl
)
451 if (TREE_CODE (decl
) != VAR_DECL
452 || TREE_TYPE (decl
) == error_mark_node
)
457 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
461 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
462 address space and FALSE, otherwise. */
465 avr_decl_memx_p (tree decl
)
467 if (TREE_CODE (decl
) != VAR_DECL
468 || TREE_TYPE (decl
) == error_mark_node
)
473 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
477 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
480 avr_mem_flash_p (rtx x
)
483 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
487 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
488 address space and FALSE, otherwise. */
491 avr_mem_memx_p (rtx x
)
494 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
498 /* A helper for the subsequent function attribute used to dig for
499 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
502 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
504 if (FUNCTION_DECL
== TREE_CODE (func
))
506 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
511 func
= TREE_TYPE (func
);
514 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
515 || TREE_CODE (func
) == METHOD_TYPE
);
517 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
520 /* Return nonzero if FUNC is a naked function. */
523 avr_naked_function_p (tree func
)
525 return avr_lookup_function_attribute1 (func
, "naked");
528 /* Return nonzero if FUNC is an interrupt function as specified
529 by the "interrupt" attribute. */
532 avr_interrupt_function_p (tree func
)
534 return avr_lookup_function_attribute1 (func
, "interrupt");
537 /* Return nonzero if FUNC is a signal function as specified
538 by the "signal" attribute. */
541 avr_signal_function_p (tree func
)
543 return avr_lookup_function_attribute1 (func
, "signal");
546 /* Return nonzero if FUNC is an OS_task function. */
549 avr_OS_task_function_p (tree func
)
551 return avr_lookup_function_attribute1 (func
, "OS_task");
554 /* Return nonzero if FUNC is an OS_main function. */
557 avr_OS_main_function_p (tree func
)
559 return avr_lookup_function_attribute1 (func
, "OS_main");
563 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
564 /* Sanity cheching for above function attributes. */
567 avr_set_current_function (tree decl
)
572 if (decl
== NULL_TREE
573 || current_function_decl
== NULL_TREE
574 || current_function_decl
== error_mark_node
576 || cfun
->machine
->attributes_checked_p
)
579 loc
= DECL_SOURCE_LOCATION (decl
);
581 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
582 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
583 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
584 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
585 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
587 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
589 /* Too much attributes make no sense as they request conflicting features. */
591 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
592 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
593 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
594 " exclusive", "OS_task", "OS_main", isr
);
596 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
598 if (cfun
->machine
->is_naked
599 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
600 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
601 " no effect on %qs function", "OS_task", "OS_main", "naked");
603 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
605 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
606 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
609 name
= DECL_ASSEMBLER_NAME_SET_P (decl
)
610 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))
611 : IDENTIFIER_POINTER (DECL_NAME (decl
));
613 /* Skip a leading '*' that might still prefix the assembler name,
614 e.g. in non-LTO runs. */
616 name
= default_strip_name_encoding (name
);
618 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
619 using this when it switched from SIGNAL and INTERRUPT to ISR. */
621 if (cfun
->machine
->is_interrupt
)
622 cfun
->machine
->is_signal
= 0;
624 /* Interrupt handlers must be void __vector (void) functions. */
626 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
627 error_at (loc
, "%qs function cannot have arguments", isr
);
629 if (TREE_CODE (ret
) != VOID_TYPE
)
630 error_at (loc
, "%qs function cannot return a value", isr
);
632 /* If the function has the 'signal' or 'interrupt' attribute, ensure
633 that the name of the function is "__vector_NN" so as to catch
634 when the user misspells the vector name. */
636 if (!STR_PREFIX_P (name
, "__vector"))
637 warning_at (loc
, 0, "%qs appears to be a misspelled %s handler",
641 /* Don't print the above diagnostics more than once. */
643 cfun
->machine
->attributes_checked_p
= 1;
647 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
650 avr_accumulate_outgoing_args (void)
653 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
655 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
656 what offset is correct. In some cases it is relative to
657 virtual_outgoing_args_rtx and in others it is relative to
658 virtual_stack_vars_rtx. For example code see
659 gcc.c-torture/execute/built-in-setjmp.c
660 gcc.c-torture/execute/builtins/sprintf-chk.c */
662 return (TARGET_ACCUMULATE_OUTGOING_ARGS
663 && !(cfun
->calls_setjmp
664 || cfun
->has_nonlocal_label
));
668 /* Report contribution of accumulated outgoing arguments to stack size. */
671 avr_outgoing_args_size (void)
673 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
677 /* Implement `STARTING_FRAME_OFFSET'. */
678 /* This is the offset from the frame pointer register to the first stack slot
679 that contains a variable living in the frame. */
682 avr_starting_frame_offset (void)
684 return 1 + avr_outgoing_args_size ();
688 /* Return the number of hard registers to push/pop in the prologue/epilogue
689 of the current function, and optionally store these registers in SET. */
692 avr_regs_to_save (HARD_REG_SET
*set
)
695 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
698 CLEAR_HARD_REG_SET (*set
);
701 /* No need to save any registers if the function never returns or
702 has the "OS_task" or "OS_main" attribute. */
704 if (TREE_THIS_VOLATILE (current_function_decl
)
705 || cfun
->machine
->is_OS_task
706 || cfun
->machine
->is_OS_main
)
709 for (reg
= 0; reg
< 32; reg
++)
711 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
712 any global register variables. */
717 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
718 || (df_regs_ever_live_p (reg
)
719 && (int_or_sig_p
|| !call_used_regs
[reg
])
720 /* Don't record frame pointer registers here. They are treated
721 indivitually in prologue. */
722 && !(frame_pointer_needed
723 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
726 SET_HARD_REG_BIT (*set
, reg
);
734 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
737 avr_allocate_stack_slots_for_args (void)
739 return !cfun
->machine
->is_naked
;
743 /* Return true if register FROM can be eliminated via register TO. */
746 avr_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
748 return ((frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
749 || !frame_pointer_needed
);
753 /* Implement `TARGET_WARN_FUNC_RETURN'. */
756 avr_warn_func_return (tree decl
)
758 /* Naked functions are implemented entirely in assembly, including the
759 return sequence, so suppress warnings about this. */
761 return !avr_naked_function_p (decl
);
764 /* Compute offset between arg_pointer and frame_pointer. */
767 avr_initial_elimination_offset (int from
, int to
)
769 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
773 int offset
= frame_pointer_needed
? 2 : 0;
774 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
776 offset
+= avr_regs_to_save (NULL
);
777 return (get_frame_size () + avr_outgoing_args_size()
778 + avr_pc_size
+ 1 + offset
);
783 /* Helper for the function below. */
786 avr_adjust_type_node (tree
*node
, enum machine_mode mode
, int sat_p
)
788 *node
= make_node (FIXED_POINT_TYPE
);
789 TYPE_SATURATING (*node
) = sat_p
;
790 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
791 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
792 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
793 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
794 TYPE_ALIGN (*node
) = 8;
795 SET_TYPE_MODE (*node
, mode
);
801 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
804 avr_build_builtin_va_list (void)
806 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
807 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
808 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
809 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
810 to the long long accum modes instead of the desired [U]TAmode.
812 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
813 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
814 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
815 libgcc to detect IBIT and FBIT. */
817 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
818 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
819 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
820 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
822 unsigned_long_long_accum_type_node
= uta_type_node
;
823 long_long_accum_type_node
= ta_type_node
;
824 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
825 sat_long_long_accum_type_node
= sat_ta_type_node
;
827 /* Dispatch to the default handler. */
829 return std_build_builtin_va_list ();
833 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
834 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
835 frame pointer by +STARTING_FRAME_OFFSET.
836 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
837 avoids creating add/sub of offset in nonlocal goto and setjmp. */
840 avr_builtin_setjmp_frame_value (void)
842 rtx xval
= gen_reg_rtx (Pmode
);
843 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
844 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
849 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
850 This is return address of function. */
853 avr_return_addr_rtx (int count
, rtx tem
)
857 /* Can only return this function's return address. Others not supported. */
863 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
864 warning (0, "%<builtin_return_address%> contains only 2 bytes"
868 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
870 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
871 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
872 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
876 /* Return 1 if the function epilogue is just a single "ret". */
879 avr_simple_epilogue (void)
881 return (! frame_pointer_needed
882 && get_frame_size () == 0
883 && avr_outgoing_args_size() == 0
884 && avr_regs_to_save (NULL
) == 0
885 && ! cfun
->machine
->is_interrupt
886 && ! cfun
->machine
->is_signal
887 && ! cfun
->machine
->is_naked
888 && ! TREE_THIS_VOLATILE (current_function_decl
));
891 /* This function checks sequence of live registers. */
894 sequent_regs_live (void)
900 for (reg
= 0; reg
< 18; ++reg
)
904 /* Don't recognize sequences that contain global register
913 if (!call_used_regs
[reg
])
915 if (df_regs_ever_live_p (reg
))
925 if (!frame_pointer_needed
)
927 if (df_regs_ever_live_p (REG_Y
))
935 if (df_regs_ever_live_p (REG_Y
+1))
948 return (cur_seq
== live_seq
) ? live_seq
: 0;
951 /* Obtain the length sequence of insns. */
954 get_sequence_length (rtx_insn
*insns
)
959 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
960 length
+= get_attr_length (insn
);
966 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
969 avr_incoming_return_addr_rtx (void)
971 /* The return address is at the top of the stack. Note that the push
972 was via post-decrement, which means the actual address is off by one. */
973 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
976 /* Helper for expand_prologue. Emit a push of a byte register. */
979 emit_push_byte (unsigned regno
, bool frame_related_p
)
984 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
985 mem
= gen_frame_mem (QImode
, mem
);
986 reg
= gen_rtx_REG (QImode
, regno
);
988 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
990 RTX_FRAME_RELATED_P (insn
) = 1;
992 cfun
->machine
->stack_usage
++;
996 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
997 SFR is a MEM representing the memory location of the SFR.
998 If CLR_P then clear the SFR after the push using zero_reg. */
1001 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
1005 gcc_assert (MEM_P (sfr
));
1007 /* IN __tmp_reg__, IO(SFR) */
1008 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
1009 if (frame_related_p
)
1010 RTX_FRAME_RELATED_P (insn
) = 1;
1012 /* PUSH __tmp_reg__ */
1013 emit_push_byte (TMP_REGNO
, frame_related_p
);
1017 /* OUT IO(SFR), __zero_reg__ */
1018 insn
= emit_move_insn (sfr
, const0_rtx
);
1019 if (frame_related_p
)
1020 RTX_FRAME_RELATED_P (insn
) = 1;
1025 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
1028 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1029 int live_seq
= sequent_regs_live ();
1031 HOST_WIDE_INT size_max
1032 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1034 bool minimize
= (TARGET_CALL_PROLOGUES
1038 && !cfun
->machine
->is_OS_task
1039 && !cfun
->machine
->is_OS_main
);
1042 && (frame_pointer_needed
1043 || avr_outgoing_args_size() > 8
1044 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1048 int first_reg
, reg
, offset
;
1050 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1051 gen_int_mode (size
, HImode
));
1053 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1054 gen_int_mode (live_seq
+size
, HImode
));
1055 insn
= emit_insn (pattern
);
1056 RTX_FRAME_RELATED_P (insn
) = 1;
1058 /* Describe the effect of the unspec_volatile call to prologue_saves.
1059 Note that this formulation assumes that add_reg_note pushes the
1060 notes to the front. Thus we build them in the reverse order of
1061 how we want dwarf2out to process them. */
1063 /* The function does always set frame_pointer_rtx, but whether that
1064 is going to be permanent in the function is frame_pointer_needed. */
1066 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1067 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
1069 : stack_pointer_rtx
),
1070 plus_constant (Pmode
, stack_pointer_rtx
,
1071 -(size
+ live_seq
))));
1073 /* Note that live_seq always contains r28+r29, but the other
1074 registers to be saved are all below 18. */
1076 first_reg
= 18 - (live_seq
- 2);
1078 for (reg
= 29, offset
= -live_seq
+ 1;
1080 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1084 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1086 r
= gen_rtx_REG (QImode
, reg
);
1087 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1090 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1092 else /* !minimize */
1096 for (reg
= 0; reg
< 32; ++reg
)
1097 if (TEST_HARD_REG_BIT (set
, reg
))
1098 emit_push_byte (reg
, true);
1100 if (frame_pointer_needed
1101 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1103 /* Push frame pointer. Always be consistent about the
1104 ordering of pushes -- epilogue_restores expects the
1105 register pair to be pushed low byte first. */
1107 emit_push_byte (REG_Y
, true);
1108 emit_push_byte (REG_Y
+ 1, true);
1111 if (frame_pointer_needed
1114 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1115 RTX_FRAME_RELATED_P (insn
) = 1;
1120 /* Creating a frame can be done by direct manipulation of the
1121 stack or via the frame pointer. These two methods are:
1128 the optimum method depends on function type, stack and
1129 frame size. To avoid a complex logic, both methods are
1130 tested and shortest is selected.
1132 There is also the case where SIZE != 0 and no frame pointer is
1133 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1134 In that case, insn (*) is not needed in that case.
1135 We use the X register as scratch. This is save because in X
1137 In an interrupt routine, the case of SIZE != 0 together with
1138 !frame_pointer_needed can only occur if the function is not a
1139 leaf function and thus X has already been saved. */
1142 HOST_WIDE_INT size_cfa
= size
, neg_size
;
1143 rtx_insn
*fp_plus_insns
;
1146 gcc_assert (frame_pointer_needed
1150 fp
= my_fp
= (frame_pointer_needed
1152 : gen_rtx_REG (Pmode
, REG_X
));
1154 if (AVR_HAVE_8BIT_SP
)
1156 /* The high byte (r29) does not change:
1157 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1159 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1162 /* Cut down size and avoid size = 0 so that we don't run
1163 into ICE like PR52488 in the remainder. */
1165 if (size
> size_max
)
1167 /* Don't error so that insane code from newlib still compiles
1168 and does not break building newlib. As PR51345 is implemented
1169 now, there are multilib variants with -msp8.
1171 If user wants sanity checks he can use -Wstack-usage=
1174 For CFA we emit the original, non-saturated size so that
1175 the generic machinery is aware of the real stack usage and
1176 will print the above diagnostic as expected. */
1181 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1182 neg_size
= trunc_int_for_mode (-size
, GET_MODE (my_fp
));
1184 /************ Method 1: Adjust frame pointer ************/
1188 /* Normally, the dwarf2out frame-related-expr interpreter does
1189 not expect to have the CFA change once the frame pointer is
1190 set up. Thus, we avoid marking the move insn below and
1191 instead indicate that the entire operation is complete after
1192 the frame pointer subtraction is done. */
1194 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1195 if (frame_pointer_needed
)
1197 RTX_FRAME_RELATED_P (insn
) = 1;
1198 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1199 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1202 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1205 if (frame_pointer_needed
)
1207 RTX_FRAME_RELATED_P (insn
) = 1;
1208 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1209 gen_rtx_SET (VOIDmode
, fp
,
1210 plus_constant (Pmode
, fp
,
1214 /* Copy to stack pointer. Note that since we've already
1215 changed the CFA to the frame pointer this operation
1216 need not be annotated if frame pointer is needed.
1217 Always move through unspec, see PR50063.
1218 For meaning of irq_state see movhi_sp_r insn. */
1220 if (cfun
->machine
->is_interrupt
)
1223 if (TARGET_NO_INTERRUPTS
1224 || cfun
->machine
->is_signal
1225 || cfun
->machine
->is_OS_main
)
1228 if (AVR_HAVE_8BIT_SP
)
1231 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1232 fp
, GEN_INT (irq_state
)));
1233 if (!frame_pointer_needed
)
1235 RTX_FRAME_RELATED_P (insn
) = 1;
1236 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1237 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1238 plus_constant (Pmode
,
1243 fp_plus_insns
= get_insns ();
1246 /************ Method 2: Adjust Stack pointer ************/
1248 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1249 can only handle specific offsets. */
1251 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1253 rtx_insn
*sp_plus_insns
;
1257 insn
= emit_move_insn (stack_pointer_rtx
,
1258 plus_constant (Pmode
, stack_pointer_rtx
,
1260 RTX_FRAME_RELATED_P (insn
) = 1;
1261 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1262 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1263 plus_constant (Pmode
,
1266 if (frame_pointer_needed
)
1268 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1269 RTX_FRAME_RELATED_P (insn
) = 1;
1272 sp_plus_insns
= get_insns ();
1275 /************ Use shortest method ************/
1277 emit_insn (get_sequence_length (sp_plus_insns
)
1278 < get_sequence_length (fp_plus_insns
)
1284 emit_insn (fp_plus_insns
);
1287 cfun
->machine
->stack_usage
+= size_cfa
;
1288 } /* !minimize && size != 0 */
1293 /* Output function prologue. */
1296 avr_expand_prologue (void)
1301 size
= get_frame_size() + avr_outgoing_args_size();
1303 cfun
->machine
->stack_usage
= 0;
1305 /* Prologue: naked. */
1306 if (cfun
->machine
->is_naked
)
1311 avr_regs_to_save (&set
);
1313 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1315 /* Enable interrupts. */
1316 if (cfun
->machine
->is_interrupt
)
1317 emit_insn (gen_enable_interrupt ());
1319 /* Push zero reg. */
1320 emit_push_byte (ZERO_REGNO
, true);
1323 emit_push_byte (TMP_REGNO
, true);
1326 /* ??? There's no dwarf2 column reserved for SREG. */
1327 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1329 /* Clear zero reg. */
1330 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1332 /* Prevent any attempt to delete the setting of ZERO_REG! */
1333 emit_use (zero_reg_rtx
);
1335 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1336 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1339 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1342 && TEST_HARD_REG_BIT (set
, REG_X
)
1343 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1345 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1349 && (frame_pointer_needed
1350 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1351 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1353 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1357 && TEST_HARD_REG_BIT (set
, REG_Z
)
1358 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1360 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1362 } /* is_interrupt is_signal */
1364 avr_prologue_setup_frame (size
, set
);
1366 if (flag_stack_usage_info
)
1367 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1371 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1372 /* Output summary at end of function prologue. */
1375 avr_asm_function_end_prologue (FILE *file
)
1377 if (cfun
->machine
->is_naked
)
1379 fputs ("/* prologue: naked */\n", file
);
1383 if (cfun
->machine
->is_interrupt
)
1385 fputs ("/* prologue: Interrupt */\n", file
);
1387 else if (cfun
->machine
->is_signal
)
1389 fputs ("/* prologue: Signal */\n", file
);
1392 fputs ("/* prologue: function */\n", file
);
1395 if (ACCUMULATE_OUTGOING_ARGS
)
1396 fprintf (file
, "/* outgoing args size = %d */\n",
1397 avr_outgoing_args_size());
1399 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1401 fprintf (file
, "/* stack size = %d */\n",
1402 cfun
->machine
->stack_usage
);
1403 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1404 usage for offset so that SP + .L__stack_offset = return address. */
1405 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1409 /* Implement `EPILOGUE_USES'. */
1412 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1414 if (reload_completed
1416 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1421 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1424 emit_pop_byte (unsigned regno
)
1428 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1429 mem
= gen_frame_mem (QImode
, mem
);
1430 reg
= gen_rtx_REG (QImode
, regno
);
1432 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1435 /* Output RTL epilogue. */
1438 avr_expand_epilogue (bool sibcall_p
)
1445 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1447 size
= get_frame_size() + avr_outgoing_args_size();
1449 /* epilogue: naked */
1450 if (cfun
->machine
->is_naked
)
1452 gcc_assert (!sibcall_p
);
1454 emit_jump_insn (gen_return ());
1458 avr_regs_to_save (&set
);
1459 live_seq
= sequent_regs_live ();
1461 minimize
= (TARGET_CALL_PROLOGUES
1464 && !cfun
->machine
->is_OS_task
1465 && !cfun
->machine
->is_OS_main
);
1469 || frame_pointer_needed
1472 /* Get rid of frame. */
1474 if (!frame_pointer_needed
)
1476 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1481 emit_move_insn (frame_pointer_rtx
,
1482 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1485 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1491 /* Try two methods to adjust stack and select shortest. */
1495 rtx_insn
*fp_plus_insns
;
1496 HOST_WIDE_INT size_max
;
1498 gcc_assert (frame_pointer_needed
1502 fp
= my_fp
= (frame_pointer_needed
1504 : gen_rtx_REG (Pmode
, REG_X
));
1506 if (AVR_HAVE_8BIT_SP
)
1508 /* The high byte (r29) does not change:
1509 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1511 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1514 /* For rationale see comment in prologue generation. */
1516 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1517 if (size
> size_max
)
1519 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1521 /********** Method 1: Adjust fp register **********/
1525 if (!frame_pointer_needed
)
1526 emit_move_insn (fp
, stack_pointer_rtx
);
1528 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1530 /* Copy to stack pointer. */
1532 if (TARGET_NO_INTERRUPTS
)
1535 if (AVR_HAVE_8BIT_SP
)
1538 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1539 GEN_INT (irq_state
)));
1541 fp_plus_insns
= get_insns ();
1544 /********** Method 2: Adjust Stack pointer **********/
1546 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1548 rtx_insn
*sp_plus_insns
;
1552 emit_move_insn (stack_pointer_rtx
,
1553 plus_constant (Pmode
, stack_pointer_rtx
, size
));
1555 sp_plus_insns
= get_insns ();
1558 /************ Use shortest method ************/
1560 emit_insn (get_sequence_length (sp_plus_insns
)
1561 < get_sequence_length (fp_plus_insns
)
1566 emit_insn (fp_plus_insns
);
1569 if (frame_pointer_needed
1570 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1572 /* Restore previous frame_pointer. See avr_expand_prologue for
1573 rationale for not using pophi. */
1575 emit_pop_byte (REG_Y
+ 1);
1576 emit_pop_byte (REG_Y
);
1579 /* Restore used registers. */
1581 for (reg
= 31; reg
>= 0; --reg
)
1582 if (TEST_HARD_REG_BIT (set
, reg
))
1583 emit_pop_byte (reg
);
1587 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1588 The conditions to restore them must be tha same as in prologue. */
1591 && TEST_HARD_REG_BIT (set
, REG_Z
)
1592 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1594 emit_pop_byte (TMP_REGNO
);
1595 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1599 && (frame_pointer_needed
1600 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1601 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1603 emit_pop_byte (TMP_REGNO
);
1604 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1608 && TEST_HARD_REG_BIT (set
, REG_X
)
1609 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1611 emit_pop_byte (TMP_REGNO
);
1612 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1617 emit_pop_byte (TMP_REGNO
);
1618 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1621 /* Restore SREG using tmp_reg as scratch. */
1623 emit_pop_byte (TMP_REGNO
);
1624 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1626 /* Restore tmp REG. */
1627 emit_pop_byte (TMP_REGNO
);
1629 /* Restore zero REG. */
1630 emit_pop_byte (ZERO_REGNO
);
1634 emit_jump_insn (gen_return ());
1638 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
1641 avr_asm_function_begin_epilogue (FILE *file
)
1643 fprintf (file
, "/* epilogue start */\n");
1647 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1650 avr_cannot_modify_jumps_p (void)
1653 /* Naked Functions must not have any instructions after
1654 their epilogue, see PR42240 */
1656 if (reload_completed
1658 && cfun
->machine
->is_naked
)
1667 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1670 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
, addr_space_t as
)
1672 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1673 This hook just serves to hack around PR rtl-optimization/52543 by
1674 claiming that non-generic addresses were mode-dependent so that
1675 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1676 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1677 generic address space which is not true. */
1679 return !ADDR_SPACE_GENERIC_P (as
);
1683 /* Helper function for `avr_legitimate_address_p'. */
1686 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1687 RTX_CODE outer_code
, bool strict
)
1690 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1691 as
, outer_code
, UNKNOWN
)
1693 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1697 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1698 machine for a memory operand of mode MODE. */
1701 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1703 bool ok
= CONSTANT_ADDRESS_P (x
);
1705 switch (GET_CODE (x
))
1708 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1712 && GET_MODE_SIZE (mode
) > 4
1713 && REG_X
== REGNO (x
))
1721 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1722 GET_CODE (x
), strict
);
1727 rtx reg
= XEXP (x
, 0);
1728 rtx op1
= XEXP (x
, 1);
1731 && CONST_INT_P (op1
)
1732 && INTVAL (op1
) >= 0)
1734 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1739 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1742 if (reg
== frame_pointer_rtx
1743 || reg
== arg_pointer_rtx
)
1748 else if (frame_pointer_needed
1749 && reg
== frame_pointer_rtx
)
1761 if (avr_log
.legitimate_address_p
)
1763 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1764 "reload_completed=%d reload_in_progress=%d %s:",
1765 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1766 reg_renumber
? "(reg_renumber)" : "");
1768 if (GET_CODE (x
) == PLUS
1769 && REG_P (XEXP (x
, 0))
1770 && CONST_INT_P (XEXP (x
, 1))
1771 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1774 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1775 true_regnum (XEXP (x
, 0)));
1778 avr_edump ("\n%r\n", x
);
1785 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1786 now only a helper for avr_addr_space_legitimize_address. */
1787 /* Attempts to replace X with a valid
1788 memory address for an operand of mode MODE */
1791 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1793 bool big_offset_p
= false;
1797 if (GET_CODE (oldx
) == PLUS
1798 && REG_P (XEXP (oldx
, 0)))
1800 if (REG_P (XEXP (oldx
, 1)))
1801 x
= force_reg (GET_MODE (oldx
), oldx
);
1802 else if (CONST_INT_P (XEXP (oldx
, 1)))
1804 int offs
= INTVAL (XEXP (oldx
, 1));
1805 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1806 && offs
> MAX_LD_OFFSET (mode
))
1808 big_offset_p
= true;
1809 x
= force_reg (GET_MODE (oldx
), oldx
);
1814 if (avr_log
.legitimize_address
)
1816 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1819 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1826 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1827 /* This will allow register R26/27 to be used where it is no worse than normal
1828 base pointers R28/29 or R30/31. For example, if base offset is greater
1829 than 63 bytes or for R++ or --R addressing. */
1832 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1833 int opnum
, int type
, int addr_type
,
1834 int ind_levels ATTRIBUTE_UNUSED
,
1835 rtx (*mk_memloc
)(rtx
,int))
1839 if (avr_log
.legitimize_reload_address
)
1840 avr_edump ("\n%?:%m %r\n", mode
, x
);
1842 if (1 && (GET_CODE (x
) == POST_INC
1843 || GET_CODE (x
) == PRE_DEC
))
1845 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1846 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1847 opnum
, RELOAD_OTHER
);
1849 if (avr_log
.legitimize_reload_address
)
1850 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1851 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1856 if (GET_CODE (x
) == PLUS
1857 && REG_P (XEXP (x
, 0))
1858 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1859 && CONST_INT_P (XEXP (x
, 1))
1860 && INTVAL (XEXP (x
, 1)) >= 1)
1862 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1866 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1868 int regno
= REGNO (XEXP (x
, 0));
1869 rtx mem
= mk_memloc (x
, regno
);
1871 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1872 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1873 1, (enum reload_type
) addr_type
);
1875 if (avr_log
.legitimize_reload_address
)
1876 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1877 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1879 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1880 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1881 opnum
, (enum reload_type
) type
);
1883 if (avr_log
.legitimize_reload_address
)
1884 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1885 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1890 else if (! (frame_pointer_needed
1891 && XEXP (x
, 0) == frame_pointer_rtx
))
1893 push_reload (x
, NULL_RTX
, px
, NULL
,
1894 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1895 opnum
, (enum reload_type
) type
);
1897 if (avr_log
.legitimize_reload_address
)
1898 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1899 POINTER_REGS
, x
, NULL_RTX
);
1909 /* Implement `TARGET_SECONDARY_RELOAD' */
1912 avr_secondary_reload (bool in_p
, rtx x
,
1913 reg_class_t reload_class ATTRIBUTE_UNUSED
,
1914 enum machine_mode mode
, secondary_reload_info
*sri
)
1918 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1919 && ADDR_SPACE_MEMX
!= MEM_ADDR_SPACE (x
))
1921 /* For the non-generic 16-bit spaces we need a d-class scratch. */
1928 case QImode
: sri
->icode
= CODE_FOR_reload_inqi
; break;
1929 case QQmode
: sri
->icode
= CODE_FOR_reload_inqq
; break;
1930 case UQQmode
: sri
->icode
= CODE_FOR_reload_inuqq
; break;
1932 case HImode
: sri
->icode
= CODE_FOR_reload_inhi
; break;
1933 case HQmode
: sri
->icode
= CODE_FOR_reload_inhq
; break;
1934 case HAmode
: sri
->icode
= CODE_FOR_reload_inha
; break;
1935 case UHQmode
: sri
->icode
= CODE_FOR_reload_inuhq
; break;
1936 case UHAmode
: sri
->icode
= CODE_FOR_reload_inuha
; break;
1938 case PSImode
: sri
->icode
= CODE_FOR_reload_inpsi
; break;
1940 case SImode
: sri
->icode
= CODE_FOR_reload_insi
; break;
1941 case SFmode
: sri
->icode
= CODE_FOR_reload_insf
; break;
1942 case SQmode
: sri
->icode
= CODE_FOR_reload_insq
; break;
1943 case SAmode
: sri
->icode
= CODE_FOR_reload_insa
; break;
1944 case USQmode
: sri
->icode
= CODE_FOR_reload_inusq
; break;
1945 case USAmode
: sri
->icode
= CODE_FOR_reload_inusa
; break;
1953 /* Helper function to print assembler resp. track instruction
1954 sequence lengths. Always return "".
1957 Output assembler code from template TPL with operands supplied
1958 by OPERANDS. This is just forwarding to output_asm_insn.
1961 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1962 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1963 Don't output anything.
1967 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1971 output_asm_insn (tpl
, operands
);
1985 /* Return a pointer register name as a string. */
1988 ptrreg_to_str (int regno
)
1992 case REG_X
: return "X";
1993 case REG_Y
: return "Y";
1994 case REG_Z
: return "Z";
1996 output_operand_lossage ("address operand requires constraint for"
1997 " X, Y, or Z register");
2002 /* Return the condition name as a string.
2003 Used in conditional jump constructing */
2006 cond_string (enum rtx_code code
)
2015 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2020 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2036 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2037 /* Output ADDR to FILE as address. */
2040 avr_print_operand_address (FILE *file
, rtx addr
)
2042 switch (GET_CODE (addr
))
2045 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
2049 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2053 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2057 if (CONSTANT_ADDRESS_P (addr
)
2058 && text_segment_operand (addr
, VOIDmode
))
2061 if (GET_CODE (x
) == CONST
)
2063 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
2065 /* Assembler gs() will implant word address. Make offset
2066 a byte offset inside gs() for assembler. This is
2067 needed because the more logical (constant+gs(sym)) is not
2068 accepted by gas. For 128K and smaller devices this is ok.
2069 For large devices it will create a trampoline to offset
2070 from symbol which may not be what the user really wanted. */
2072 fprintf (file
, "gs(");
2073 output_addr_const (file
, XEXP (x
,0));
2074 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
2075 2 * INTVAL (XEXP (x
, 1)));
2077 if (warning (0, "pointer offset from symbol maybe incorrect"))
2079 output_addr_const (stderr
, addr
);
2080 fprintf(stderr
,"\n");
2085 fprintf (file
, "gs(");
2086 output_addr_const (file
, addr
);
2087 fprintf (file
, ")");
2091 output_addr_const (file
, addr
);
2096 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2099 avr_print_operand_punct_valid_p (unsigned char code
)
2101 return code
== '~' || code
== '!';
2105 /* Implement `TARGET_PRINT_OPERAND'. */
2106 /* Output X as assembler operand to file FILE.
2107 For a description of supported %-codes, see top of avr.md. */
2110 avr_print_operand (FILE *file
, rtx x
, int code
)
2114 if (code
>= 'A' && code
<= 'D')
2119 if (!AVR_HAVE_JMP_CALL
)
2122 else if (code
== '!')
2124 if (AVR_HAVE_EIJMP_EICALL
)
2127 else if (code
== 't'
2130 static int t_regno
= -1;
2131 static int t_nbits
= -1;
2133 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2135 t_regno
= REGNO (x
);
2136 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2138 else if (CONST_INT_P (x
) && t_regno
>= 0
2139 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2141 int bpos
= INTVAL (x
);
2143 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2145 fprintf (file
, ",%d", bpos
% 8);
2150 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2154 if (x
== zero_reg_rtx
)
2155 fprintf (file
, "__zero_reg__");
2156 else if (code
== 'r' && REGNO (x
) < 32)
2157 fprintf (file
, "%d", (int) REGNO (x
));
2159 fprintf (file
, reg_names
[REGNO (x
) + abcd
]);
2161 else if (CONST_INT_P (x
))
2163 HOST_WIDE_INT ival
= INTVAL (x
);
2166 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2167 else if (low_io_address_operand (x
, VOIDmode
)
2168 || high_io_address_operand (x
, VOIDmode
))
2170 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2171 fprintf (file
, "__RAMPZ__");
2172 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2173 fprintf (file
, "__RAMPY__");
2174 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2175 fprintf (file
, "__RAMPX__");
2176 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2177 fprintf (file
, "__RAMPD__");
2178 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2179 fprintf (file
, "__CCP__");
2180 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2181 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2182 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2185 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2186 ival
- avr_current_arch
->sfr_offset
);
2190 fatal_insn ("bad address, not an I/O address:", x
);
2194 rtx addr
= XEXP (x
, 0);
2198 if (!CONSTANT_P (addr
))
2199 fatal_insn ("bad address, not a constant:", addr
);
2200 /* Assembler template with m-code is data - not progmem section */
2201 if (text_segment_operand (addr
, VOIDmode
))
2202 if (warning (0, "accessing data memory with"
2203 " program memory address"))
2205 output_addr_const (stderr
, addr
);
2206 fprintf(stderr
,"\n");
2208 output_addr_const (file
, addr
);
2210 else if (code
== 'i')
2212 avr_print_operand (file
, addr
, 'i');
2214 else if (code
== 'o')
2216 if (GET_CODE (addr
) != PLUS
)
2217 fatal_insn ("bad address, not (reg+disp):", addr
);
2219 avr_print_operand (file
, XEXP (addr
, 1), 0);
2221 else if (code
== 'p' || code
== 'r')
2223 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2224 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2227 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2229 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2231 else if (GET_CODE (addr
) == PLUS
)
2233 avr_print_operand_address (file
, XEXP (addr
,0));
2234 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2235 fatal_insn ("internal compiler error. Bad address:"
2238 avr_print_operand (file
, XEXP (addr
,1), code
);
2241 avr_print_operand_address (file
, addr
);
2243 else if (code
== 'i')
2245 fatal_insn ("bad address, not an I/O address:", x
);
2247 else if (code
== 'x')
2249 /* Constant progmem address - like used in jmp or call */
2250 if (0 == text_segment_operand (x
, VOIDmode
))
2251 if (warning (0, "accessing program memory"
2252 " with data memory address"))
2254 output_addr_const (stderr
, x
);
2255 fprintf(stderr
,"\n");
2257 /* Use normal symbol for direct address no linker trampoline needed */
2258 output_addr_const (file
, x
);
2260 else if (CONST_FIXED_P (x
))
2262 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2264 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2266 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2268 else if (GET_CODE (x
) == CONST_DOUBLE
)
2272 if (GET_MODE (x
) != SFmode
)
2273 fatal_insn ("internal compiler error. Unknown mode:", x
);
2274 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2275 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2276 fprintf (file
, "0x%lx", val
);
2278 else if (GET_CODE (x
) == CONST_STRING
)
2279 fputs (XSTR (x
, 0), file
);
2280 else if (code
== 'j')
2281 fputs (cond_string (GET_CODE (x
)), file
);
2282 else if (code
== 'k')
2283 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2285 avr_print_operand_address (file
, x
);
2289 /* Worker function for `NOTICE_UPDATE_CC'. */
2290 /* Update the condition code in the INSN. */
2293 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx_insn
*insn
)
2296 enum attr_cc cc
= get_attr_cc (insn
);
2306 rtx
*op
= recog_data
.operand
;
2309 /* Extract insn's operands. */
2310 extract_constrain_insn_cached (insn
);
2318 avr_out_plus (insn
, op
, &len_dummy
, &icc
);
2319 cc
= (enum attr_cc
) icc
;
2324 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2325 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2326 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2328 /* Any other "r,rL" combination does not alter cc0. */
2332 } /* inner switch */
2336 } /* outer swicth */
2341 /* Special values like CC_OUT_PLUS from above have been
2342 mapped to "standard" CC_* values so we never come here. */
2348 /* Insn does not affect CC at all. */
2356 set
= single_set (insn
);
2360 cc_status
.flags
|= CC_NO_OVERFLOW
;
2361 cc_status
.value1
= SET_DEST (set
);
2366 /* Insn like INC, DEC, NEG that set Z,N,V. We currently don't make use
2367 of this combination, cf. also PR61055. */
2372 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2373 The V flag may or may not be known but that's ok because
2374 alter_cond will change tests to use EQ/NE. */
2375 set
= single_set (insn
);
2379 cc_status
.value1
= SET_DEST (set
);
2380 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2385 set
= single_set (insn
);
2388 cc_status
.value1
= SET_SRC (set
);
2392 /* Insn doesn't leave CC in a usable state. */
2398 /* Choose mode for jump insn:
2399 1 - relative jump in range -63 <= x <= 62 ;
2400 2 - relative jump in range -2046 <= x <= 2045 ;
2401 3 - absolute jump (only for ATmega[16]03). */
2404 avr_jump_mode (rtx x
, rtx_insn
*insn
)
2406 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2407 ? XEXP (x
, 0) : x
));
2408 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2409 int jump_distance
= cur_addr
- dest_addr
;
2411 if (-63 <= jump_distance
&& jump_distance
<= 62)
2413 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2415 else if (AVR_HAVE_JMP_CALL
)
2421 /* Return an AVR condition jump commands.
2422 X is a comparison RTX.
2423 LEN is a number returned by avr_jump_mode function.
2424 If REVERSE nonzero then condition code in X must be reversed. */
2427 ret_cond_branch (rtx x
, int len
, int reverse
)
2429 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2434 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2435 return (len
== 1 ? ("breq .+2" CR_TAB
2437 len
== 2 ? ("breq .+4" CR_TAB
2445 return (len
== 1 ? ("breq .+2" CR_TAB
2447 len
== 2 ? ("breq .+4" CR_TAB
2454 return (len
== 1 ? ("breq .+2" CR_TAB
2456 len
== 2 ? ("breq .+4" CR_TAB
2463 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2464 return (len
== 1 ? ("breq %0" CR_TAB
2466 len
== 2 ? ("breq .+2" CR_TAB
2473 return (len
== 1 ? ("breq %0" CR_TAB
2475 len
== 2 ? ("breq .+2" CR_TAB
2482 return (len
== 1 ? ("breq %0" CR_TAB
2484 len
== 2 ? ("breq .+2" CR_TAB
2498 return ("br%j1 .+2" CR_TAB
2501 return ("br%j1 .+4" CR_TAB
2512 return ("br%k1 .+2" CR_TAB
2515 return ("br%k1 .+4" CR_TAB
2524 /* Worker function for `FINAL_PRESCAN_INSN'. */
2525 /* Output insn cost for next insn. */
2528 avr_final_prescan_insn (rtx_insn
*insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2529 int num_operands ATTRIBUTE_UNUSED
)
2531 if (avr_log
.rtx_costs
)
2533 rtx set
= single_set (insn
);
2536 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2537 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2539 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2540 rtx_cost (PATTERN (insn
), INSN
, 0,
2541 optimize_insn_for_speed_p()));
2545 /* Return 0 if undefined, 1 if always true or always false. */
2548 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2550 unsigned int max
= (mode
== QImode
? 0xff :
2551 mode
== HImode
? 0xffff :
2552 mode
== PSImode
? 0xffffff :
2553 mode
== SImode
? 0xffffffff : 0);
2554 if (max
&& op
&& CONST_INT_P (x
))
2556 if (unsigned_condition (op
) != op
)
2559 if (max
!= (INTVAL (x
) & max
)
2560 && INTVAL (x
) != 0xff)
2567 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2568 /* Returns nonzero if REGNO is the number of a hard
2569 register in which function arguments are sometimes passed. */
2572 avr_function_arg_regno_p(int r
)
2574 return (r
>= 8 && r
<= 25);
2578 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2579 /* Initializing the variable cum for the state at the beginning
2580 of the argument list. */
2583 avr_init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2584 tree fndecl ATTRIBUTE_UNUSED
)
2587 cum
->regno
= FIRST_CUM_REG
;
2588 if (!libname
&& stdarg_p (fntype
))
2591 /* Assume the calle may be tail called */
2593 cfun
->machine
->sibcall_fails
= 0;
2596 /* Returns the number of registers to allocate for a function argument. */
2599 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2603 if (mode
== BLKmode
)
2604 size
= int_size_in_bytes (type
);
2606 size
= GET_MODE_SIZE (mode
);
2608 /* Align all function arguments to start in even-numbered registers.
2609 Odd-sized arguments leave holes above them. */
2611 return (size
+ 1) & ~1;
2615 /* Implement `TARGET_FUNCTION_ARG'. */
2616 /* Controls whether a function argument is passed
2617 in a register, and which register. */
2620 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2621 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2623 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2624 int bytes
= avr_num_arg_regs (mode
, type
);
2626 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2627 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2633 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2634 /* Update the summarizer variable CUM to advance past an argument
2635 in the argument list. */
2638 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2639 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2641 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2642 int bytes
= avr_num_arg_regs (mode
, type
);
2644 cum
->nregs
-= bytes
;
2645 cum
->regno
-= bytes
;
2647 /* A parameter is being passed in a call-saved register. As the original
2648 contents of these regs has to be restored before leaving the function,
2649 a function must not pass arguments in call-saved regs in order to get
2654 && !call_used_regs
[cum
->regno
])
2656 /* FIXME: We ship info on failing tail-call in struct machine_function.
2657 This uses internals of calls.c:expand_call() and the way args_so_far
2658 is used. targetm.function_ok_for_sibcall() needs to be extended to
2659 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2660 dependent so that such an extension is not wanted. */
2662 cfun
->machine
->sibcall_fails
= 1;
2665 /* Test if all registers needed by the ABI are actually available. If the
2666 user has fixed a GPR needed to pass an argument, an (implicit) function
2667 call will clobber that fixed register. See PR45099 for an example. */
2674 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2675 if (fixed_regs
[regno
])
2676 warning (0, "fixed register %s used to pass parameter to function",
2680 if (cum
->nregs
<= 0)
2683 cum
->regno
= FIRST_CUM_REG
;
2687 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2688 /* Decide whether we can make a sibling call to a function. DECL is the
2689 declaration of the function being targeted by the call and EXP is the
2690 CALL_EXPR representing the call. */
2693 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2697 /* Tail-calling must fail if callee-saved regs are used to pass
2698 function args. We must not tail-call when `epilogue_restores'
2699 is used. Unfortunately, we cannot tell at this point if that
2700 actually will happen or not, and we cannot step back from
2701 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2703 if (cfun
->machine
->sibcall_fails
2704 || TARGET_CALL_PROLOGUES
)
2709 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2713 decl_callee
= TREE_TYPE (decl_callee
);
2717 decl_callee
= fntype_callee
;
2719 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2720 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2722 decl_callee
= TREE_TYPE (decl_callee
);
2726 /* Ensure that caller and callee have compatible epilogues */
2728 if (cfun
->machine
->is_interrupt
2729 || cfun
->machine
->is_signal
2730 || cfun
->machine
->is_naked
2731 || avr_naked_function_p (decl_callee
)
2732 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2733 || (avr_OS_task_function_p (decl_callee
)
2734 != cfun
->machine
->is_OS_task
)
2735 || (avr_OS_main_function_p (decl_callee
)
2736 != cfun
->machine
->is_OS_main
))
2744 /***********************************************************************
2745 Functions for outputting various mov's for a various modes
2746 ************************************************************************/
2748 /* Return true if a value of mode MODE is read from flash by
2749 __load_* function from libgcc. */
2752 avr_load_libgcc_p (rtx op
)
2754 enum machine_mode mode
= GET_MODE (op
);
2755 int n_bytes
= GET_MODE_SIZE (mode
);
2759 && avr_mem_flash_p (op
));
2762 /* Return true if a value of mode MODE is read by __xload_* function. */
2765 avr_xload_libgcc_p (enum machine_mode mode
)
2767 int n_bytes
= GET_MODE_SIZE (mode
);
2770 || avr_current_device
->n_flash
> 1);
2774 /* Fixme: This is a hack because secondary reloads don't works as expected.
2776 Find an unused d-register to be used as scratch in INSN.
2777 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2778 is a register, skip all possible return values that overlap EXCLUDE.
2779 The policy for the returned register is similar to that of
2780 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2783 Return a QImode d-register or NULL_RTX if nothing found. */
2786 avr_find_unused_d_reg (rtx_insn
*insn
, rtx exclude
)
2789 bool isr_p
= (avr_interrupt_function_p (current_function_decl
)
2790 || avr_signal_function_p (current_function_decl
));
2792 for (regno
= 16; regno
< 32; regno
++)
2794 rtx reg
= all_regs_rtx
[regno
];
2797 && reg_overlap_mentioned_p (exclude
, reg
))
2798 || fixed_regs
[regno
])
2803 /* Try non-live register */
2805 if (!df_regs_ever_live_p (regno
)
2806 && (TREE_THIS_VOLATILE (current_function_decl
)
2807 || cfun
->machine
->is_OS_task
2808 || cfun
->machine
->is_OS_main
2809 || (!isr_p
&& call_used_regs
[regno
])))
2814 /* Any live register can be used if it is unused after.
2815 Prologue/epilogue will care for it as needed. */
2817 if (df_regs_ever_live_p (regno
)
2818 && reg_unused_after (insn
, reg
))
2828 /* Helper function for the next function in the case where only restricted
2829 version of LPM instruction is available. */
2832 avr_out_lpm_no_lpmx (rtx_insn
*insn
, rtx
*xop
, int *plen
)
2836 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2839 regno_dest
= REGNO (dest
);
2841 /* The implicit target register of LPM. */
2842 xop
[3] = lpm_reg_rtx
;
2844 switch (GET_CODE (addr
))
2851 gcc_assert (REG_Z
== REGNO (addr
));
2859 avr_asm_len ("%4lpm", xop
, plen
, 1);
2861 if (regno_dest
!= LPM_REGNO
)
2862 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2867 if (REGNO (dest
) == REG_Z
)
2868 return avr_asm_len ("%4lpm" CR_TAB
2873 "pop %A0", xop
, plen
, 6);
2875 avr_asm_len ("%4lpm" CR_TAB
2879 "mov %B0,%3", xop
, plen
, 5);
2881 if (!reg_unused_after (insn
, addr
))
2882 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2891 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
2894 if (regno_dest
== LPM_REGNO
)
2895 avr_asm_len ("%4lpm" CR_TAB
2896 "adiw %2,1", xop
, plen
, 2);
2898 avr_asm_len ("%4lpm" CR_TAB
2900 "adiw %2,1", xop
, plen
, 3);
2903 avr_asm_len ("%4lpm" CR_TAB
2905 "adiw %2,1", xop
, plen
, 3);
2908 avr_asm_len ("%4lpm" CR_TAB
2910 "adiw %2,1", xop
, plen
, 3);
2913 avr_asm_len ("%4lpm" CR_TAB
2915 "adiw %2,1", xop
, plen
, 3);
2917 break; /* POST_INC */
2919 } /* switch CODE (addr) */
2925 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2926 OP[1] in AS1 to register OP[0].
2927 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2931 avr_out_lpm (rtx_insn
*insn
, rtx
*op
, int *plen
)
2935 rtx src
= SET_SRC (single_set (insn
));
2937 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2940 addr_space_t as
= MEM_ADDR_SPACE (src
);
2947 warning (0, "writing to address space %qs not supported",
2948 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2953 addr
= XEXP (src
, 0);
2954 code
= GET_CODE (addr
);
2956 gcc_assert (REG_P (dest
));
2957 gcc_assert (REG
== code
|| POST_INC
== code
);
2961 xop
[2] = lpm_addr_reg_rtx
;
2962 xop
[4] = xstring_empty
;
2963 xop
[5] = tmp_reg_rtx
;
2964 xop
[6] = XEXP (rampz_rtx
, 0);
2966 segment
= avr_addrspace
[as
].segment
;
2968 /* Set RAMPZ as needed. */
2972 xop
[4] = GEN_INT (segment
);
2973 xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
);
2975 if (xop
[3] != NULL_RTX
)
2977 avr_asm_len ("ldi %3,%4" CR_TAB
2978 "out %i6,%3", xop
, plen
, 2);
2980 else if (segment
== 1)
2982 avr_asm_len ("clr %5" CR_TAB
2984 "out %i6,%5", xop
, plen
, 3);
2988 avr_asm_len ("mov %5,%2" CR_TAB
2991 "mov %2,%5", xop
, plen
, 4);
2996 if (!AVR_HAVE_ELPMX
)
2997 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2999 else if (!AVR_HAVE_LPMX
)
3001 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
3004 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
3006 switch (GET_CODE (addr
))
3013 gcc_assert (REG_Z
== REGNO (addr
));
3021 return avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
3024 if (REGNO (dest
) == REG_Z
)
3025 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3026 "%4lpm %B0,%a2" CR_TAB
3027 "mov %A0,%5", xop
, plen
, 3);
3030 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3031 "%4lpm %B0,%a2", xop
, plen
, 2);
3033 if (!reg_unused_after (insn
, addr
))
3034 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3041 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3042 "%4lpm %B0,%a2+" CR_TAB
3043 "%4lpm %C0,%a2", xop
, plen
, 3);
3045 if (!reg_unused_after (insn
, addr
))
3046 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
3052 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3053 "%4lpm %B0,%a2+", xop
, plen
, 2);
3055 if (REGNO (dest
) == REG_Z
- 2)
3056 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3057 "%4lpm %C0,%a2" CR_TAB
3058 "mov %D0,%5", xop
, plen
, 3);
3061 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3062 "%4lpm %D0,%a2", xop
, plen
, 2);
3064 if (!reg_unused_after (insn
, addr
))
3065 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
3075 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
3078 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
3079 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
3080 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
3081 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
3083 break; /* POST_INC */
3085 } /* switch CODE (addr) */
3087 if (xop
[4] == xstring_e
&& AVR_HAVE_RAMPD
)
3089 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3091 xop
[0] = zero_reg_rtx
;
3092 avr_asm_len ("out %i6,%0", xop
, plen
, 1);
3099 /* Worker function for xload_8 insn. */
3102 avr_out_xload (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
3108 xop
[2] = lpm_addr_reg_rtx
;
3109 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
3111 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, -1);
3113 avr_asm_len ("sbrc %1,7" CR_TAB
3114 "ld %3,%a2", xop
, plen
, 2);
3116 if (REGNO (xop
[0]) != REGNO (xop
[3]))
3117 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
3124 output_movqi (rtx_insn
*insn
, rtx operands
[], int *plen
)
3126 rtx dest
= operands
[0];
3127 rtx src
= operands
[1];
3129 if (avr_mem_flash_p (src
)
3130 || avr_mem_flash_p (dest
))
3132 return avr_out_lpm (insn
, operands
, plen
);
3135 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest
)));
3139 if (REG_P (src
)) /* mov r,r */
3141 if (test_hard_reg_class (STACK_REG
, dest
))
3142 return avr_asm_len ("out %0,%1", operands
, plen
, -1);
3143 else if (test_hard_reg_class (STACK_REG
, src
))
3144 return avr_asm_len ("in %0,%1", operands
, plen
, -1);
3146 return avr_asm_len ("mov %0,%1", operands
, plen
, -1);
3148 else if (CONSTANT_P (src
))
3150 output_reload_in_const (operands
, NULL_RTX
, plen
, false);
3153 else if (MEM_P (src
))
3154 return out_movqi_r_mr (insn
, operands
, plen
); /* mov r,m */
3156 else if (MEM_P (dest
))
3161 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3163 return out_movqi_mr_r (insn
, xop
, plen
);
3171 output_movhi (rtx_insn
*insn
, rtx xop
[], int *plen
)
3176 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
3178 if (avr_mem_flash_p (src
)
3179 || avr_mem_flash_p (dest
))
3181 return avr_out_lpm (insn
, xop
, plen
);
3184 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest
)));
3188 if (REG_P (src
)) /* mov r,r */
3190 if (test_hard_reg_class (STACK_REG
, dest
))
3192 if (AVR_HAVE_8BIT_SP
)
3193 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
3196 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3197 "out __SP_H__,%B1", xop
, plen
, -2);
3199 /* Use simple load of SP if no interrupts are used. */
3201 return TARGET_NO_INTERRUPTS
3202 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3203 "out __SP_L__,%A1", xop
, plen
, -2)
3204 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3206 "out __SP_H__,%B1" CR_TAB
3207 "out __SREG__,__tmp_reg__" CR_TAB
3208 "out __SP_L__,%A1", xop
, plen
, -5);
3210 else if (test_hard_reg_class (STACK_REG
, src
))
3212 return !AVR_HAVE_SPH
3213 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3214 "clr %B0", xop
, plen
, -2)
3216 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3217 "in %B0,__SP_H__", xop
, plen
, -2);
3220 return AVR_HAVE_MOVW
3221 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
3223 : avr_asm_len ("mov %A0,%A1" CR_TAB
3224 "mov %B0,%B1", xop
, plen
, -2);
3226 else if (CONSTANT_P (src
))
3228 return output_reload_inhi (xop
, NULL
, plen
);
3230 else if (MEM_P (src
))
3232 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
3235 else if (MEM_P (dest
))
3240 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3242 return out_movhi_mr_r (insn
, xop
, plen
);
3245 fatal_insn ("invalid insn:", insn
);
3251 out_movqi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
3255 rtx x
= XEXP (src
, 0);
3257 if (CONSTANT_ADDRESS_P (x
))
3259 return optimize
> 0 && io_address_operand (x
, QImode
)
3260 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3261 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
3263 else if (GET_CODE (x
) == PLUS
3264 && REG_P (XEXP (x
, 0))
3265 && CONST_INT_P (XEXP (x
, 1)))
3267 /* memory access by reg+disp */
3269 int disp
= INTVAL (XEXP (x
, 1));
3271 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3273 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3274 fatal_insn ("incorrect insn:",insn
);
3276 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3277 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3278 "ldd %0,Y+63" CR_TAB
3279 "sbiw r28,%o1-63", op
, plen
, -3);
3281 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3282 "sbci r29,hi8(-%o1)" CR_TAB
3284 "subi r28,lo8(%o1)" CR_TAB
3285 "sbci r29,hi8(%o1)", op
, plen
, -5);
3287 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3289 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3290 it but I have this situation with extremal optimizing options. */
3292 avr_asm_len ("adiw r26,%o1" CR_TAB
3293 "ld %0,X", op
, plen
, -2);
3295 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3296 && !reg_unused_after (insn
, XEXP (x
,0)))
3298 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3304 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3307 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
3311 out_movhi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
3315 rtx base
= XEXP (src
, 0);
3316 int reg_dest
= true_regnum (dest
);
3317 int reg_base
= true_regnum (base
);
3318 /* "volatile" forces reading low byte first, even if less efficient,
3319 for correct operation with 16-bit I/O registers. */
3320 int mem_volatile_p
= MEM_VOLATILE_P (src
);
3324 if (reg_dest
== reg_base
) /* R = (R) */
3325 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3327 "mov %A0,__tmp_reg__", op
, plen
, -3);
3329 if (reg_base
!= REG_X
)
3330 return avr_asm_len ("ld %A0,%1" CR_TAB
3331 "ldd %B0,%1+1", op
, plen
, -2);
3333 avr_asm_len ("ld %A0,X+" CR_TAB
3334 "ld %B0,X", op
, plen
, -2);
3336 if (!reg_unused_after (insn
, base
))
3337 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3341 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3343 int disp
= INTVAL (XEXP (base
, 1));
3344 int reg_base
= true_regnum (XEXP (base
, 0));
3346 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3348 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3349 fatal_insn ("incorrect insn:",insn
);
3351 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3352 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3353 "ldd %A0,Y+62" CR_TAB
3354 "ldd %B0,Y+63" CR_TAB
3355 "sbiw r28,%o1-62", op
, plen
, -4)
3357 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3358 "sbci r29,hi8(-%o1)" CR_TAB
3360 "ldd %B0,Y+1" CR_TAB
3361 "subi r28,lo8(%o1)" CR_TAB
3362 "sbci r29,hi8(%o1)", op
, plen
, -6);
3365 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3366 it but I have this situation with extremal
3367 optimization options. */
3369 if (reg_base
== REG_X
)
3370 return reg_base
== reg_dest
3371 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3372 "ld __tmp_reg__,X+" CR_TAB
3374 "mov %A0,__tmp_reg__", op
, plen
, -4)
3376 : avr_asm_len ("adiw r26,%o1" CR_TAB
3379 "sbiw r26,%o1+1", op
, plen
, -4);
3381 return reg_base
== reg_dest
3382 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3383 "ldd %B0,%B1" CR_TAB
3384 "mov %A0,__tmp_reg__", op
, plen
, -3)
3386 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3387 "ldd %B0,%B1", op
, plen
, -2);
3389 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3391 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3392 fatal_insn ("incorrect insn:", insn
);
3394 if (!mem_volatile_p
)
3395 return avr_asm_len ("ld %B0,%1" CR_TAB
3396 "ld %A0,%1", op
, plen
, -2);
3398 return REGNO (XEXP (base
, 0)) == REG_X
3399 ? avr_asm_len ("sbiw r26,2" CR_TAB
3402 "sbiw r26,1", op
, plen
, -4)
3404 : avr_asm_len ("sbiw %r1,2" CR_TAB
3406 "ldd %B0,%p1+1", op
, plen
, -3);
3408 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3410 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3411 fatal_insn ("incorrect insn:", insn
);
3413 return avr_asm_len ("ld %A0,%1" CR_TAB
3414 "ld %B0,%1", op
, plen
, -2);
3416 else if (CONSTANT_ADDRESS_P (base
))
3418 return optimize
> 0 && io_address_operand (base
, HImode
)
3419 ? avr_asm_len ("in %A0,%i1" CR_TAB
3420 "in %B0,%i1+1", op
, plen
, -2)
3422 : avr_asm_len ("lds %A0,%m1" CR_TAB
3423 "lds %B0,%m1+1", op
, plen
, -4);
3426 fatal_insn ("unknown move insn:",insn
);
3431 out_movsi_r_mr (rtx_insn
*insn
, rtx op
[], int *l
)
3435 rtx base
= XEXP (src
, 0);
3436 int reg_dest
= true_regnum (dest
);
3437 int reg_base
= true_regnum (base
);
3445 if (reg_base
== REG_X
) /* (R26) */
3447 if (reg_dest
== REG_X
)
3448 /* "ld r26,-X" is undefined */
3449 return *l
=7, ("adiw r26,3" CR_TAB
3452 "ld __tmp_reg__,-X" CR_TAB
3455 "mov r27,__tmp_reg__");
3456 else if (reg_dest
== REG_X
- 2)
3457 return *l
=5, ("ld %A0,X+" CR_TAB
3459 "ld __tmp_reg__,X+" CR_TAB
3461 "mov %C0,__tmp_reg__");
3462 else if (reg_unused_after (insn
, base
))
3463 return *l
=4, ("ld %A0,X+" CR_TAB
3468 return *l
=5, ("ld %A0,X+" CR_TAB
3476 if (reg_dest
== reg_base
)
3477 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3478 "ldd %C0,%1+2" CR_TAB
3479 "ldd __tmp_reg__,%1+1" CR_TAB
3481 "mov %B0,__tmp_reg__");
3482 else if (reg_base
== reg_dest
+ 2)
3483 return *l
=5, ("ld %A0,%1" CR_TAB
3484 "ldd %B0,%1+1" CR_TAB
3485 "ldd __tmp_reg__,%1+2" CR_TAB
3486 "ldd %D0,%1+3" CR_TAB
3487 "mov %C0,__tmp_reg__");
3489 return *l
=4, ("ld %A0,%1" CR_TAB
3490 "ldd %B0,%1+1" CR_TAB
3491 "ldd %C0,%1+2" CR_TAB
3495 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3497 int disp
= INTVAL (XEXP (base
, 1));
3499 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3501 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3502 fatal_insn ("incorrect insn:",insn
);
3504 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3505 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3506 "ldd %A0,Y+60" CR_TAB
3507 "ldd %B0,Y+61" CR_TAB
3508 "ldd %C0,Y+62" CR_TAB
3509 "ldd %D0,Y+63" CR_TAB
3512 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3513 "sbci r29,hi8(-%o1)" CR_TAB
3515 "ldd %B0,Y+1" CR_TAB
3516 "ldd %C0,Y+2" CR_TAB
3517 "ldd %D0,Y+3" CR_TAB
3518 "subi r28,lo8(%o1)" CR_TAB
3519 "sbci r29,hi8(%o1)");
3522 reg_base
= true_regnum (XEXP (base
, 0));
3523 if (reg_base
== REG_X
)
3526 if (reg_dest
== REG_X
)
3529 /* "ld r26,-X" is undefined */
3530 return ("adiw r26,%o1+3" CR_TAB
3533 "ld __tmp_reg__,-X" CR_TAB
3536 "mov r27,__tmp_reg__");
3539 if (reg_dest
== REG_X
- 2)
3540 return ("adiw r26,%o1" CR_TAB
3543 "ld __tmp_reg__,X+" CR_TAB
3545 "mov r26,__tmp_reg__");
3547 return ("adiw r26,%o1" CR_TAB
3554 if (reg_dest
== reg_base
)
3555 return *l
=5, ("ldd %D0,%D1" CR_TAB
3556 "ldd %C0,%C1" CR_TAB
3557 "ldd __tmp_reg__,%B1" CR_TAB
3558 "ldd %A0,%A1" CR_TAB
3559 "mov %B0,__tmp_reg__");
3560 else if (reg_dest
== reg_base
- 2)
3561 return *l
=5, ("ldd %A0,%A1" CR_TAB
3562 "ldd %B0,%B1" CR_TAB
3563 "ldd __tmp_reg__,%C1" CR_TAB
3564 "ldd %D0,%D1" CR_TAB
3565 "mov %C0,__tmp_reg__");
3566 return *l
=4, ("ldd %A0,%A1" CR_TAB
3567 "ldd %B0,%B1" CR_TAB
3568 "ldd %C0,%C1" CR_TAB
3571 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3572 return *l
=4, ("ld %D0,%1" CR_TAB
3576 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3577 return *l
=4, ("ld %A0,%1" CR_TAB
3581 else if (CONSTANT_ADDRESS_P (base
))
3582 return *l
=8, ("lds %A0,%m1" CR_TAB
3583 "lds %B0,%m1+1" CR_TAB
3584 "lds %C0,%m1+2" CR_TAB
3587 fatal_insn ("unknown move insn:",insn
);
3592 out_movsi_mr_r (rtx_insn
*insn
, rtx op
[], int *l
)
3596 rtx base
= XEXP (dest
, 0);
3597 int reg_base
= true_regnum (base
);
3598 int reg_src
= true_regnum (src
);
3604 if (CONSTANT_ADDRESS_P (base
))
3605 return *l
=8,("sts %m0,%A1" CR_TAB
3606 "sts %m0+1,%B1" CR_TAB
3607 "sts %m0+2,%C1" CR_TAB
3609 if (reg_base
> 0) /* (r) */
3611 if (reg_base
== REG_X
) /* (R26) */
3613 if (reg_src
== REG_X
)
3615 /* "st X+,r26" is undefined */
3616 if (reg_unused_after (insn
, base
))
3617 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3620 "st X+,__tmp_reg__" CR_TAB
3624 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3627 "st X+,__tmp_reg__" CR_TAB
3632 else if (reg_base
== reg_src
+ 2)
3634 if (reg_unused_after (insn
, base
))
3635 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3636 "mov __tmp_reg__,%D1" CR_TAB
3639 "st %0+,__zero_reg__" CR_TAB
3640 "st %0,__tmp_reg__" CR_TAB
3641 "clr __zero_reg__");
3643 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3644 "mov __tmp_reg__,%D1" CR_TAB
3647 "st %0+,__zero_reg__" CR_TAB
3648 "st %0,__tmp_reg__" CR_TAB
3649 "clr __zero_reg__" CR_TAB
3652 return *l
=5, ("st %0+,%A1" CR_TAB
3659 return *l
=4, ("st %0,%A1" CR_TAB
3660 "std %0+1,%B1" CR_TAB
3661 "std %0+2,%C1" CR_TAB
3664 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3666 int disp
= INTVAL (XEXP (base
, 1));
3667 reg_base
= REGNO (XEXP (base
, 0));
3668 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3670 if (reg_base
!= REG_Y
)
3671 fatal_insn ("incorrect insn:",insn
);
3673 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3674 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3675 "std Y+60,%A1" CR_TAB
3676 "std Y+61,%B1" CR_TAB
3677 "std Y+62,%C1" CR_TAB
3678 "std Y+63,%D1" CR_TAB
3681 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3682 "sbci r29,hi8(-%o0)" CR_TAB
3684 "std Y+1,%B1" CR_TAB
3685 "std Y+2,%C1" CR_TAB
3686 "std Y+3,%D1" CR_TAB
3687 "subi r28,lo8(%o0)" CR_TAB
3688 "sbci r29,hi8(%o0)");
3690 if (reg_base
== REG_X
)
3693 if (reg_src
== REG_X
)
3696 return ("mov __tmp_reg__,r26" CR_TAB
3697 "mov __zero_reg__,r27" CR_TAB
3698 "adiw r26,%o0" CR_TAB
3699 "st X+,__tmp_reg__" CR_TAB
3700 "st X+,__zero_reg__" CR_TAB
3703 "clr __zero_reg__" CR_TAB
3706 else if (reg_src
== REG_X
- 2)
3709 return ("mov __tmp_reg__,r26" CR_TAB
3710 "mov __zero_reg__,r27" CR_TAB
3711 "adiw r26,%o0" CR_TAB
3714 "st X+,__tmp_reg__" CR_TAB
3715 "st X,__zero_reg__" CR_TAB
3716 "clr __zero_reg__" CR_TAB
3720 return ("adiw r26,%o0" CR_TAB
3727 return *l
=4, ("std %A0,%A1" CR_TAB
3728 "std %B0,%B1" CR_TAB
3729 "std %C0,%C1" CR_TAB
3732 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3733 return *l
=4, ("st %0,%D1" CR_TAB
3737 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3738 return *l
=4, ("st %0,%A1" CR_TAB
3742 fatal_insn ("unknown move insn:",insn
);
3747 output_movsisf (rtx_insn
*insn
, rtx operands
[], int *l
)
3750 rtx dest
= operands
[0];
3751 rtx src
= operands
[1];
3754 if (avr_mem_flash_p (src
)
3755 || avr_mem_flash_p (dest
))
3757 return avr_out_lpm (insn
, operands
, real_l
);
3763 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest
)));
3766 if (REG_P (src
)) /* mov r,r */
3768 if (true_regnum (dest
) > true_regnum (src
))
3773 return ("movw %C0,%C1" CR_TAB
3777 return ("mov %D0,%D1" CR_TAB
3778 "mov %C0,%C1" CR_TAB
3779 "mov %B0,%B1" CR_TAB
3787 return ("movw %A0,%A1" CR_TAB
3791 return ("mov %A0,%A1" CR_TAB
3792 "mov %B0,%B1" CR_TAB
3793 "mov %C0,%C1" CR_TAB
3797 else if (CONSTANT_P (src
))
3799 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3801 else if (MEM_P (src
))
3802 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3804 else if (MEM_P (dest
))
3808 if (src
== CONST0_RTX (GET_MODE (dest
)))
3809 operands
[1] = zero_reg_rtx
;
3811 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3814 output_asm_insn (templ
, operands
);
3819 fatal_insn ("invalid insn:", insn
);
3824 /* Handle loads of 24-bit types from memory to register. */
3827 avr_out_load_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
3831 rtx base
= XEXP (src
, 0);
3832 int reg_dest
= true_regnum (dest
);
3833 int reg_base
= true_regnum (base
);
3837 if (reg_base
== REG_X
) /* (R26) */
3839 if (reg_dest
== REG_X
)
3840 /* "ld r26,-X" is undefined */
3841 return avr_asm_len ("adiw r26,2" CR_TAB
3843 "ld __tmp_reg__,-X" CR_TAB
3846 "mov r27,__tmp_reg__", op
, plen
, -6);
3849 avr_asm_len ("ld %A0,X+" CR_TAB
3851 "ld %C0,X", op
, plen
, -3);
3853 if (reg_dest
!= REG_X
- 2
3854 && !reg_unused_after (insn
, base
))
3856 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3862 else /* reg_base != REG_X */
3864 if (reg_dest
== reg_base
)
3865 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3866 "ldd __tmp_reg__,%1+1" CR_TAB
3868 "mov %B0,__tmp_reg__", op
, plen
, -4);
3870 return avr_asm_len ("ld %A0,%1" CR_TAB
3871 "ldd %B0,%1+1" CR_TAB
3872 "ldd %C0,%1+2", op
, plen
, -3);
3875 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3877 int disp
= INTVAL (XEXP (base
, 1));
3879 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3881 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3882 fatal_insn ("incorrect insn:",insn
);
3884 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3885 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3886 "ldd %A0,Y+61" CR_TAB
3887 "ldd %B0,Y+62" CR_TAB
3888 "ldd %C0,Y+63" CR_TAB
3889 "sbiw r28,%o1-61", op
, plen
, -5);
3891 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3892 "sbci r29,hi8(-%o1)" CR_TAB
3894 "ldd %B0,Y+1" CR_TAB
3895 "ldd %C0,Y+2" CR_TAB
3896 "subi r28,lo8(%o1)" CR_TAB
3897 "sbci r29,hi8(%o1)", op
, plen
, -7);
3900 reg_base
= true_regnum (XEXP (base
, 0));
3901 if (reg_base
== REG_X
)
3904 if (reg_dest
== REG_X
)
3906 /* "ld r26,-X" is undefined */
3907 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3909 "ld __tmp_reg__,-X" CR_TAB
3912 "mov r27,__tmp_reg__", op
, plen
, -6);
3915 avr_asm_len ("adiw r26,%o1" CR_TAB
3918 "ld %C0,X", op
, plen
, -4);
3920 if (reg_dest
!= REG_W
3921 && !reg_unused_after (insn
, XEXP (base
, 0)))
3922 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3927 if (reg_dest
== reg_base
)
3928 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3929 "ldd __tmp_reg__,%B1" CR_TAB
3930 "ldd %A0,%A1" CR_TAB
3931 "mov %B0,__tmp_reg__", op
, plen
, -4);
3933 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3934 "ldd %B0,%B1" CR_TAB
3935 "ldd %C0,%C1", op
, plen
, -3);
3937 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3938 return avr_asm_len ("ld %C0,%1" CR_TAB
3940 "ld %A0,%1", op
, plen
, -3);
3941 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3942 return avr_asm_len ("ld %A0,%1" CR_TAB
3944 "ld %C0,%1", op
, plen
, -3);
3946 else if (CONSTANT_ADDRESS_P (base
))
3947 return avr_asm_len ("lds %A0,%m1" CR_TAB
3948 "lds %B0,%m1+1" CR_TAB
3949 "lds %C0,%m1+2", op
, plen
, -6);
3951 fatal_insn ("unknown move insn:",insn
);
3955 /* Handle store of 24-bit type from register or zero to memory. */
3958 avr_out_store_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
3962 rtx base
= XEXP (dest
, 0);
3963 int reg_base
= true_regnum (base
);
3965 if (CONSTANT_ADDRESS_P (base
))
3966 return avr_asm_len ("sts %m0,%A1" CR_TAB
3967 "sts %m0+1,%B1" CR_TAB
3968 "sts %m0+2,%C1", op
, plen
, -6);
3970 if (reg_base
> 0) /* (r) */
3972 if (reg_base
== REG_X
) /* (R26) */
3974 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3976 avr_asm_len ("st %0+,%A1" CR_TAB
3978 "st %0,%C1", op
, plen
, -3);
3980 if (!reg_unused_after (insn
, base
))
3981 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3986 return avr_asm_len ("st %0,%A1" CR_TAB
3987 "std %0+1,%B1" CR_TAB
3988 "std %0+2,%C1", op
, plen
, -3);
3990 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3992 int disp
= INTVAL (XEXP (base
, 1));
3993 reg_base
= REGNO (XEXP (base
, 0));
3995 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3997 if (reg_base
!= REG_Y
)
3998 fatal_insn ("incorrect insn:",insn
);
4000 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4001 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
4002 "std Y+61,%A1" CR_TAB
4003 "std Y+62,%B1" CR_TAB
4004 "std Y+63,%C1" CR_TAB
4005 "sbiw r28,%o0-61", op
, plen
, -5);
4007 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4008 "sbci r29,hi8(-%o0)" CR_TAB
4010 "std Y+1,%B1" CR_TAB
4011 "std Y+2,%C1" CR_TAB
4012 "subi r28,lo8(%o0)" CR_TAB
4013 "sbci r29,hi8(%o0)", op
, plen
, -7);
4015 if (reg_base
== REG_X
)
4018 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
4020 avr_asm_len ("adiw r26,%o0" CR_TAB
4023 "st X,%C1", op
, plen
, -4);
4025 if (!reg_unused_after (insn
, XEXP (base
, 0)))
4026 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
4031 return avr_asm_len ("std %A0,%A1" CR_TAB
4032 "std %B0,%B1" CR_TAB
4033 "std %C0,%C1", op
, plen
, -3);
4035 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4036 return avr_asm_len ("st %0,%C1" CR_TAB
4038 "st %0,%A1", op
, plen
, -3);
4039 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4040 return avr_asm_len ("st %0,%A1" CR_TAB
4042 "st %0,%C1", op
, plen
, -3);
4044 fatal_insn ("unknown move insn:",insn
);
4049 /* Move around 24-bit stuff. */
4052 avr_out_movpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4057 if (avr_mem_flash_p (src
)
4058 || avr_mem_flash_p (dest
))
4060 return avr_out_lpm (insn
, op
, plen
);
4063 if (register_operand (dest
, VOIDmode
))
4065 if (register_operand (src
, VOIDmode
)) /* mov r,r */
4067 if (true_regnum (dest
) > true_regnum (src
))
4069 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
4072 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
4074 return avr_asm_len ("mov %B0,%B1" CR_TAB
4075 "mov %A0,%A1", op
, plen
, 2);
4080 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
4082 avr_asm_len ("mov %A0,%A1" CR_TAB
4083 "mov %B0,%B1", op
, plen
, -2);
4085 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
4088 else if (CONSTANT_P (src
))
4090 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
4092 else if (MEM_P (src
))
4093 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
4095 else if (MEM_P (dest
))
4100 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
4102 return avr_out_store_psi (insn
, xop
, plen
);
4105 fatal_insn ("invalid insn:", insn
);
4111 out_movqi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
4115 rtx x
= XEXP (dest
, 0);
4117 if (CONSTANT_ADDRESS_P (x
))
4119 return optimize
> 0 && io_address_operand (x
, QImode
)
4120 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
4121 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
4123 else if (GET_CODE (x
) == PLUS
4124 && REG_P (XEXP (x
, 0))
4125 && CONST_INT_P (XEXP (x
, 1)))
4127 /* memory access by reg+disp */
4129 int disp
= INTVAL (XEXP (x
, 1));
4131 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
4133 if (REGNO (XEXP (x
, 0)) != REG_Y
)
4134 fatal_insn ("incorrect insn:",insn
);
4136 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4137 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4138 "std Y+63,%1" CR_TAB
4139 "sbiw r28,%o0-63", op
, plen
, -3);
4141 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4142 "sbci r29,hi8(-%o0)" CR_TAB
4144 "subi r28,lo8(%o0)" CR_TAB
4145 "sbci r29,hi8(%o0)", op
, plen
, -5);
4147 else if (REGNO (XEXP (x
,0)) == REG_X
)
4149 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
4151 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4152 "adiw r26,%o0" CR_TAB
4153 "st X,__tmp_reg__", op
, plen
, -3);
4157 avr_asm_len ("adiw r26,%o0" CR_TAB
4158 "st X,%1", op
, plen
, -2);
4161 if (!reg_unused_after (insn
, XEXP (x
,0)))
4162 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
4167 return avr_asm_len ("std %0,%1", op
, plen
, -1);
4170 return avr_asm_len ("st %0,%1", op
, plen
, -1);
4174 /* Helper for the next function for XMEGA. It does the same
4175 but with low byte first. */
4178 avr_out_movhi_mr_r_xmega (rtx_insn
*insn
, rtx op
[], int *plen
)
4182 rtx base
= XEXP (dest
, 0);
4183 int reg_base
= true_regnum (base
);
4184 int reg_src
= true_regnum (src
);
4186 /* "volatile" forces writing low byte first, even if less efficient,
4187 for correct operation with 16-bit I/O registers like SP. */
4188 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
4190 if (CONSTANT_ADDRESS_P (base
))
4191 return optimize
> 0 && io_address_operand (base
, HImode
)
4192 ? avr_asm_len ("out %i0,%A1" CR_TAB
4193 "out %i0+1,%B1", op
, plen
, -2)
4195 : avr_asm_len ("sts %m0,%A1" CR_TAB
4196 "sts %m0+1,%B1", op
, plen
, -4);
4200 if (reg_base
!= REG_X
)
4201 return avr_asm_len ("st %0,%A1" CR_TAB
4202 "std %0+1,%B1", op
, plen
, -2);
4204 if (reg_src
== REG_X
)
4205 /* "st X+,r26" and "st -X,r26" are undefined. */
4206 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4209 "st X,__tmp_reg__", op
, plen
, -4);
4211 avr_asm_len ("st X+,%A1" CR_TAB
4212 "st X,%B1", op
, plen
, -2);
4214 return reg_unused_after (insn
, base
)
4216 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
4218 else if (GET_CODE (base
) == PLUS
)
4220 int disp
= INTVAL (XEXP (base
, 1));
4221 reg_base
= REGNO (XEXP (base
, 0));
4222 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4224 if (reg_base
!= REG_Y
)
4225 fatal_insn ("incorrect insn:",insn
);
4227 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4228 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4229 "std Y+62,%A1" CR_TAB
4230 "std Y+63,%B1" CR_TAB
4231 "sbiw r28,%o0-62", op
, plen
, -4)
4233 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4234 "sbci r29,hi8(-%o0)" CR_TAB
4236 "std Y+1,%B1" CR_TAB
4237 "subi r28,lo8(%o0)" CR_TAB
4238 "sbci r29,hi8(%o0)", op
, plen
, -6);
4241 if (reg_base
!= REG_X
)
4242 return avr_asm_len ("std %A0,%A1" CR_TAB
4243 "std %B0,%B1", op
, plen
, -2);
4245 return reg_src
== REG_X
4246 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4247 "mov __zero_reg__,r27" CR_TAB
4248 "adiw r26,%o0" CR_TAB
4249 "st X+,__tmp_reg__" CR_TAB
4250 "st X,__zero_reg__" CR_TAB
4251 "clr __zero_reg__" CR_TAB
4252 "sbiw r26,%o0+1", op
, plen
, -7)
4254 : avr_asm_len ("adiw r26,%o0" CR_TAB
4257 "sbiw r26,%o0+1", op
, plen
, -4);
4259 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4261 if (!mem_volatile_p
)
4262 return avr_asm_len ("st %0,%B1" CR_TAB
4263 "st %0,%A1", op
, plen
, -2);
4265 return REGNO (XEXP (base
, 0)) == REG_X
4266 ? avr_asm_len ("sbiw r26,2" CR_TAB
4269 "sbiw r26,1", op
, plen
, -4)
4271 : avr_asm_len ("sbiw %r0,2" CR_TAB
4273 "std %p0+1,%B1", op
, plen
, -3);
4275 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4277 return avr_asm_len ("st %0,%A1" CR_TAB
4278 "st %0,%B1", op
, plen
, -2);
4281 fatal_insn ("unknown move insn:",insn
);
4287 out_movhi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
4291 rtx base
= XEXP (dest
, 0);
4292 int reg_base
= true_regnum (base
);
4293 int reg_src
= true_regnum (src
);
4296 /* "volatile" forces writing high-byte first (no-xmega) resp.
4297 low-byte first (xmega) even if less efficient, for correct
4298 operation with 16-bit I/O registers like. */
4301 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
4303 mem_volatile_p
= MEM_VOLATILE_P (dest
);
4305 if (CONSTANT_ADDRESS_P (base
))
4306 return optimize
> 0 && io_address_operand (base
, HImode
)
4307 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4308 "out %i0,%A1", op
, plen
, -2)
4310 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4311 "sts %m0,%A1", op
, plen
, -4);
4315 if (reg_base
!= REG_X
)
4316 return avr_asm_len ("std %0+1,%B1" CR_TAB
4317 "st %0,%A1", op
, plen
, -2);
4319 if (reg_src
== REG_X
)
4320 /* "st X+,r26" and "st -X,r26" are undefined. */
4321 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4322 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4325 "st X,__tmp_reg__", op
, plen
, -4)
4327 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4329 "st X,__tmp_reg__" CR_TAB
4331 "st X,r26", op
, plen
, -5);
4333 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
4334 ? avr_asm_len ("st X+,%A1" CR_TAB
4335 "st X,%B1", op
, plen
, -2)
4336 : avr_asm_len ("adiw r26,1" CR_TAB
4338 "st -X,%A1", op
, plen
, -3);
4340 else if (GET_CODE (base
) == PLUS
)
4342 int disp
= INTVAL (XEXP (base
, 1));
4343 reg_base
= REGNO (XEXP (base
, 0));
4344 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4346 if (reg_base
!= REG_Y
)
4347 fatal_insn ("incorrect insn:",insn
);
4349 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4350 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4351 "std Y+63,%B1" CR_TAB
4352 "std Y+62,%A1" CR_TAB
4353 "sbiw r28,%o0-62", op
, plen
, -4)
4355 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4356 "sbci r29,hi8(-%o0)" CR_TAB
4357 "std Y+1,%B1" CR_TAB
4359 "subi r28,lo8(%o0)" CR_TAB
4360 "sbci r29,hi8(%o0)", op
, plen
, -6);
4363 if (reg_base
!= REG_X
)
4364 return avr_asm_len ("std %B0,%B1" CR_TAB
4365 "std %A0,%A1", op
, plen
, -2);
4367 return reg_src
== REG_X
4368 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4369 "mov __zero_reg__,r27" CR_TAB
4370 "adiw r26,%o0+1" CR_TAB
4371 "st X,__zero_reg__" CR_TAB
4372 "st -X,__tmp_reg__" CR_TAB
4373 "clr __zero_reg__" CR_TAB
4374 "sbiw r26,%o0", op
, plen
, -7)
4376 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4379 "sbiw r26,%o0", op
, plen
, -4);
4381 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4383 return avr_asm_len ("st %0,%B1" CR_TAB
4384 "st %0,%A1", op
, plen
, -2);
4386 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4388 if (!mem_volatile_p
)
4389 return avr_asm_len ("st %0,%A1" CR_TAB
4390 "st %0,%B1", op
, plen
, -2);
4392 return REGNO (XEXP (base
, 0)) == REG_X
4393 ? avr_asm_len ("adiw r26,1" CR_TAB
4396 "adiw r26,2", op
, plen
, -4)
4398 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4400 "adiw %r0,2", op
, plen
, -3);
4402 fatal_insn ("unknown move insn:",insn
);
4406 /* Return 1 if frame pointer for current function required. */
4409 avr_frame_pointer_required_p (void)
4411 return (cfun
->calls_alloca
4412 || cfun
->calls_setjmp
4413 || cfun
->has_nonlocal_label
4414 || crtl
->args
.info
.nregs
== 0
4415 || get_frame_size () > 0);
4418 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4421 compare_condition (rtx_insn
*insn
)
4423 rtx_insn
*next
= next_real_insn (insn
);
4425 if (next
&& JUMP_P (next
))
4427 rtx pat
= PATTERN (next
);
4428 rtx src
= SET_SRC (pat
);
4430 if (IF_THEN_ELSE
== GET_CODE (src
))
4431 return GET_CODE (XEXP (src
, 0));
4438 /* Returns true iff INSN is a tst insn that only tests the sign. */
4441 compare_sign_p (rtx_insn
*insn
)
4443 RTX_CODE cond
= compare_condition (insn
);
4444 return (cond
== GE
|| cond
== LT
);
4448 /* Returns true iff the next insn is a JUMP_INSN with a condition
4449 that needs to be swapped (GT, GTU, LE, LEU). */
4452 compare_diff_p (rtx_insn
*insn
)
4454 RTX_CODE cond
= compare_condition (insn
);
4455 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4458 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4461 compare_eq_p (rtx_insn
*insn
)
4463 RTX_CODE cond
= compare_condition (insn
);
4464 return (cond
== EQ
|| cond
== NE
);
4468 /* Output compare instruction
4470 compare (XOP[0], XOP[1])
4472 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
4473 XOP[2] is an 8-bit scratch register as needed.
4475 PLEN == NULL: Output instructions.
4476 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4477 Don't output anything. */
4480 avr_out_compare (rtx_insn
*insn
, rtx
*xop
, int *plen
)
4482 /* Register to compare and value to compare against. */
4486 /* MODE of the comparison. */
4487 enum machine_mode mode
;
4489 /* Number of bytes to operate on. */
4490 int i
, n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
4492 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4493 int clobber_val
= -1;
4495 /* Map fixed mode operands to integer operands with the same binary
4496 representation. They are easier to handle in the remainder. */
4498 if (CONST_FIXED_P (xval
))
4500 xreg
= avr_to_int_mode (xop
[0]);
4501 xval
= avr_to_int_mode (xop
[1]);
4504 mode
= GET_MODE (xreg
);
4506 gcc_assert (REG_P (xreg
));
4507 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4508 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4513 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4514 against 0 by ORing the bytes. This is one instruction shorter.
4515 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4516 and therefore don't use this. */
4518 if (!test_hard_reg_class (LD_REGS
, xreg
)
4519 && compare_eq_p (insn
)
4520 && reg_unused_after (insn
, xreg
))
4522 if (xval
== const1_rtx
)
4524 avr_asm_len ("dec %A0" CR_TAB
4525 "or %A0,%B0", xop
, plen
, 2);
4528 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4531 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4535 else if (xval
== constm1_rtx
)
4538 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4541 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4543 return avr_asm_len ("and %A0,%B0" CR_TAB
4544 "com %A0", xop
, plen
, 2);
4548 for (i
= 0; i
< n_bytes
; i
++)
4550 /* We compare byte-wise. */
4551 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4552 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4554 /* 8-bit value to compare with this byte. */
4555 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4557 /* Registers R16..R31 can operate with immediate. */
4558 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4561 xop
[1] = gen_int_mode (val8
, QImode
);
4563 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4566 && test_hard_reg_class (ADDW_REGS
, reg8
))
4568 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4570 if (IN_RANGE (val16
, 0, 63)
4572 || reg_unused_after (insn
, xreg
)))
4574 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4580 && IN_RANGE (val16
, -63, -1)
4581 && compare_eq_p (insn
)
4582 && reg_unused_after (insn
, xreg
))
4584 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4588 /* Comparing against 0 is easy. */
4593 ? "cp %0,__zero_reg__"
4594 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4598 /* Upper registers can compare and subtract-with-carry immediates.
4599 Notice that compare instructions do the same as respective subtract
4600 instruction; the only difference is that comparisons don't write
4601 the result back to the target register. */
4607 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4610 else if (reg_unused_after (insn
, xreg
))
4612 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4617 /* Must load the value into the scratch register. */
4619 gcc_assert (REG_P (xop
[2]));
4621 if (clobber_val
!= (int) val8
)
4622 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4623 clobber_val
= (int) val8
;
4627 : "cpc %0,%2", xop
, plen
, 1);
4634 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4637 avr_out_compare64 (rtx_insn
*insn
, rtx
*op
, int *plen
)
4641 xop
[0] = gen_rtx_REG (DImode
, 18);
4645 return avr_out_compare (insn
, xop
, plen
);
4648 /* Output test instruction for HImode. */
4651 avr_out_tsthi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4653 if (compare_sign_p (insn
))
4655 avr_asm_len ("tst %B0", op
, plen
, -1);
4657 else if (reg_unused_after (insn
, op
[0])
4658 && compare_eq_p (insn
))
4660 /* Faster than sbiw if we can clobber the operand. */
4661 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4665 avr_out_compare (insn
, op
, plen
);
4672 /* Output test instruction for PSImode. */
4675 avr_out_tstpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4677 if (compare_sign_p (insn
))
4679 avr_asm_len ("tst %C0", op
, plen
, -1);
4681 else if (reg_unused_after (insn
, op
[0])
4682 && compare_eq_p (insn
))
4684 /* Faster than sbiw if we can clobber the operand. */
4685 avr_asm_len ("or %A0,%B0" CR_TAB
4686 "or %A0,%C0", op
, plen
, -2);
4690 avr_out_compare (insn
, op
, plen
);
4697 /* Output test instruction for SImode. */
4700 avr_out_tstsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4702 if (compare_sign_p (insn
))
4704 avr_asm_len ("tst %D0", op
, plen
, -1);
4706 else if (reg_unused_after (insn
, op
[0])
4707 && compare_eq_p (insn
))
4709 /* Faster than sbiw if we can clobber the operand. */
4710 avr_asm_len ("or %A0,%B0" CR_TAB
4712 "or %A0,%D0", op
, plen
, -3);
4716 avr_out_compare (insn
, op
, plen
);
4723 /* Generate asm equivalent for various shifts. This only handles cases
4724 that are not already carefully hand-optimized in ?sh??i3_out.
4726 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4727 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4728 OPERANDS[3] is a QImode scratch register from LD regs if
4729 available and SCRATCH, otherwise (no scratch available)
4731 TEMPL is an assembler template that shifts by one position.
4732 T_LEN is the length of this template. */
4735 out_shift_with_cnt (const char *templ
, rtx_insn
*insn
, rtx operands
[],
4736 int *plen
, int t_len
)
4738 bool second_label
= true;
4739 bool saved_in_tmp
= false;
4740 bool use_zero_reg
= false;
4743 op
[0] = operands
[0];
4744 op
[1] = operands
[1];
4745 op
[2] = operands
[2];
4746 op
[3] = operands
[3];
4751 if (CONST_INT_P (operands
[2]))
4753 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4754 && REG_P (operands
[3]));
4755 int count
= INTVAL (operands
[2]);
4756 int max_len
= 10; /* If larger than this, always use a loop. */
4761 if (count
< 8 && !scratch
)
4762 use_zero_reg
= true;
4765 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4767 if (t_len
* count
<= max_len
)
4769 /* Output shifts inline with no loop - faster. */
4772 avr_asm_len (templ
, op
, plen
, t_len
);
4779 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4781 else if (use_zero_reg
)
4783 /* Hack to save one word: use __zero_reg__ as loop counter.
4784 Set one bit, then shift in a loop until it is 0 again. */
4786 op
[3] = zero_reg_rtx
;
4788 avr_asm_len ("set" CR_TAB
4789 "bld %3,%2-1", op
, plen
, 2);
4793 /* No scratch register available, use one from LD_REGS (saved in
4794 __tmp_reg__) that doesn't overlap with registers to shift. */
4796 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4797 op
[4] = tmp_reg_rtx
;
4798 saved_in_tmp
= true;
4800 avr_asm_len ("mov %4,%3" CR_TAB
4801 "ldi %3,%2", op
, plen
, 2);
4804 second_label
= false;
4806 else if (MEM_P (op
[2]))
4810 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4813 out_movqi_r_mr (insn
, op_mov
, plen
);
4815 else if (register_operand (op
[2], QImode
))
4819 if (!reg_unused_after (insn
, op
[2])
4820 || reg_overlap_mentioned_p (op
[0], op
[2]))
4822 op
[3] = tmp_reg_rtx
;
4823 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4827 fatal_insn ("bad shift insn:", insn
);
4830 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4832 avr_asm_len ("1:", op
, plen
, 0);
4833 avr_asm_len (templ
, op
, plen
, t_len
);
4836 avr_asm_len ("2:", op
, plen
, 0);
4838 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4839 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4842 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4846 /* 8bit shift left ((char)x << i) */
4849 ashlqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
4851 if (GET_CODE (operands
[2]) == CONST_INT
)
4858 switch (INTVAL (operands
[2]))
4861 if (INTVAL (operands
[2]) < 8)
4873 return ("lsl %0" CR_TAB
4878 return ("lsl %0" CR_TAB
4883 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4886 return ("swap %0" CR_TAB
4890 return ("lsl %0" CR_TAB
4896 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4899 return ("swap %0" CR_TAB
4904 return ("lsl %0" CR_TAB
4911 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4914 return ("swap %0" CR_TAB
4920 return ("lsl %0" CR_TAB
4929 return ("ror %0" CR_TAB
4934 else if (CONSTANT_P (operands
[2]))
4935 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4937 out_shift_with_cnt ("lsl %0",
4938 insn
, operands
, len
, 1);
4943 /* 16bit shift left ((short)x << i) */
4946 ashlhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
4948 if (GET_CODE (operands
[2]) == CONST_INT
)
4950 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4951 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4958 switch (INTVAL (operands
[2]))
4961 if (INTVAL (operands
[2]) < 16)
4965 return ("clr %B0" CR_TAB
4969 if (optimize_size
&& scratch
)
4974 return ("swap %A0" CR_TAB
4976 "andi %B0,0xf0" CR_TAB
4977 "eor %B0,%A0" CR_TAB
4978 "andi %A0,0xf0" CR_TAB
4984 return ("swap %A0" CR_TAB
4986 "ldi %3,0xf0" CR_TAB
4988 "eor %B0,%A0" CR_TAB
4992 break; /* optimize_size ? 6 : 8 */
4996 break; /* scratch ? 5 : 6 */
5000 return ("lsl %A0" CR_TAB
5004 "andi %B0,0xf0" CR_TAB
5005 "eor %B0,%A0" CR_TAB
5006 "andi %A0,0xf0" CR_TAB
5012 return ("lsl %A0" CR_TAB
5016 "ldi %3,0xf0" CR_TAB
5018 "eor %B0,%A0" CR_TAB
5026 break; /* scratch ? 5 : 6 */
5028 return ("clr __tmp_reg__" CR_TAB
5031 "ror __tmp_reg__" CR_TAB
5034 "ror __tmp_reg__" CR_TAB
5035 "mov %B0,%A0" CR_TAB
5036 "mov %A0,__tmp_reg__");
5040 return ("lsr %B0" CR_TAB
5041 "mov %B0,%A0" CR_TAB
5047 return *len
= 2, ("mov %B0,%A1" CR_TAB
5052 return ("mov %B0,%A0" CR_TAB
5058 return ("mov %B0,%A0" CR_TAB
5065 return ("mov %B0,%A0" CR_TAB
5075 return ("mov %B0,%A0" CR_TAB
5083 return ("mov %B0,%A0" CR_TAB
5086 "ldi %3,0xf0" CR_TAB
5090 return ("mov %B0,%A0" CR_TAB
5101 return ("mov %B0,%A0" CR_TAB
5107 if (AVR_HAVE_MUL
&& scratch
)
5110 return ("ldi %3,0x20" CR_TAB
5114 "clr __zero_reg__");
5116 if (optimize_size
&& scratch
)
5121 return ("mov %B0,%A0" CR_TAB
5125 "ldi %3,0xe0" CR_TAB
5131 return ("set" CR_TAB
5136 "clr __zero_reg__");
5139 return ("mov %B0,%A0" CR_TAB
5148 if (AVR_HAVE_MUL
&& ldi_ok
)
5151 return ("ldi %B0,0x40" CR_TAB
5152 "mul %A0,%B0" CR_TAB
5155 "clr __zero_reg__");
5157 if (AVR_HAVE_MUL
&& scratch
)
5160 return ("ldi %3,0x40" CR_TAB
5164 "clr __zero_reg__");
5166 if (optimize_size
&& ldi_ok
)
5169 return ("mov %B0,%A0" CR_TAB
5170 "ldi %A0,6" "\n1:\t"
5175 if (optimize_size
&& scratch
)
5178 return ("clr %B0" CR_TAB
5187 return ("clr %B0" CR_TAB
5194 out_shift_with_cnt ("lsl %A0" CR_TAB
5195 "rol %B0", insn
, operands
, len
, 2);
5200 /* 24-bit shift left */
5203 avr_out_ashlpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
5208 if (CONST_INT_P (op
[2]))
5210 switch (INTVAL (op
[2]))
5213 if (INTVAL (op
[2]) < 24)
5216 return avr_asm_len ("clr %A0" CR_TAB
5218 "clr %C0", op
, plen
, 3);
5222 int reg0
= REGNO (op
[0]);
5223 int reg1
= REGNO (op
[1]);
5226 return avr_asm_len ("mov %C0,%B1" CR_TAB
5227 "mov %B0,%A1" CR_TAB
5228 "clr %A0", op
, plen
, 3);
5230 return avr_asm_len ("clr %A0" CR_TAB
5231 "mov %B0,%A1" CR_TAB
5232 "mov %C0,%B1", op
, plen
, 3);
5237 int reg0
= REGNO (op
[0]);
5238 int reg1
= REGNO (op
[1]);
5240 if (reg0
+ 2 != reg1
)
5241 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
5243 return avr_asm_len ("clr %B0" CR_TAB
5244 "clr %A0", op
, plen
, 2);
5248 return avr_asm_len ("clr %C0" CR_TAB
5252 "clr %A0", op
, plen
, 5);
5256 out_shift_with_cnt ("lsl %A0" CR_TAB
5258 "rol %C0", insn
, op
, plen
, 3);
5263 /* 32bit shift left ((long)x << i) */
5266 ashlsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5268 if (GET_CODE (operands
[2]) == CONST_INT
)
5276 switch (INTVAL (operands
[2]))
5279 if (INTVAL (operands
[2]) < 32)
5283 return *len
= 3, ("clr %D0" CR_TAB
5287 return ("clr %D0" CR_TAB
5294 int reg0
= true_regnum (operands
[0]);
5295 int reg1
= true_regnum (operands
[1]);
5298 return ("mov %D0,%C1" CR_TAB
5299 "mov %C0,%B1" CR_TAB
5300 "mov %B0,%A1" CR_TAB
5303 return ("clr %A0" CR_TAB
5304 "mov %B0,%A1" CR_TAB
5305 "mov %C0,%B1" CR_TAB
5311 int reg0
= true_regnum (operands
[0]);
5312 int reg1
= true_regnum (operands
[1]);
5313 if (reg0
+ 2 == reg1
)
5314 return *len
= 2, ("clr %B0" CR_TAB
5317 return *len
= 3, ("movw %C0,%A1" CR_TAB
5321 return *len
= 4, ("mov %C0,%A1" CR_TAB
5322 "mov %D0,%B1" CR_TAB
5329 return ("mov %D0,%A1" CR_TAB
5336 return ("clr %D0" CR_TAB
5345 out_shift_with_cnt ("lsl %A0" CR_TAB
5348 "rol %D0", insn
, operands
, len
, 4);
5352 /* 8bit arithmetic shift right ((signed char)x >> i) */
5355 ashrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5357 if (GET_CODE (operands
[2]) == CONST_INT
)
5364 switch (INTVAL (operands
[2]))
5372 return ("asr %0" CR_TAB
5377 return ("asr %0" CR_TAB
5383 return ("asr %0" CR_TAB
5390 return ("asr %0" CR_TAB
5398 return ("bst %0,6" CR_TAB
5404 if (INTVAL (operands
[2]) < 8)
5411 return ("lsl %0" CR_TAB
5415 else if (CONSTANT_P (operands
[2]))
5416 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5418 out_shift_with_cnt ("asr %0",
5419 insn
, operands
, len
, 1);
5424 /* 16bit arithmetic shift right ((signed short)x >> i) */
5427 ashrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5429 if (GET_CODE (operands
[2]) == CONST_INT
)
5431 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5432 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5439 switch (INTVAL (operands
[2]))
5443 /* XXX try to optimize this too? */
5448 break; /* scratch ? 5 : 6 */
5450 return ("mov __tmp_reg__,%A0" CR_TAB
5451 "mov %A0,%B0" CR_TAB
5452 "lsl __tmp_reg__" CR_TAB
5454 "sbc %B0,%B0" CR_TAB
5455 "lsl __tmp_reg__" CR_TAB
5461 return ("lsl %A0" CR_TAB
5462 "mov %A0,%B0" CR_TAB
5468 int reg0
= true_regnum (operands
[0]);
5469 int reg1
= true_regnum (operands
[1]);
5472 return *len
= 3, ("mov %A0,%B0" CR_TAB
5476 return *len
= 4, ("mov %A0,%B1" CR_TAB
5484 return ("mov %A0,%B0" CR_TAB
5486 "sbc %B0,%B0" CR_TAB
5491 return ("mov %A0,%B0" CR_TAB
5493 "sbc %B0,%B0" CR_TAB
5498 if (AVR_HAVE_MUL
&& ldi_ok
)
5501 return ("ldi %A0,0x20" CR_TAB
5502 "muls %B0,%A0" CR_TAB
5504 "sbc %B0,%B0" CR_TAB
5505 "clr __zero_reg__");
5507 if (optimize_size
&& scratch
)
5510 return ("mov %A0,%B0" CR_TAB
5512 "sbc %B0,%B0" CR_TAB
5518 if (AVR_HAVE_MUL
&& ldi_ok
)
5521 return ("ldi %A0,0x10" CR_TAB
5522 "muls %B0,%A0" CR_TAB
5524 "sbc %B0,%B0" CR_TAB
5525 "clr __zero_reg__");
5527 if (optimize_size
&& scratch
)
5530 return ("mov %A0,%B0" CR_TAB
5532 "sbc %B0,%B0" CR_TAB
5539 if (AVR_HAVE_MUL
&& ldi_ok
)
5542 return ("ldi %A0,0x08" CR_TAB
5543 "muls %B0,%A0" CR_TAB
5545 "sbc %B0,%B0" CR_TAB
5546 "clr __zero_reg__");
5549 break; /* scratch ? 5 : 7 */
5551 return ("mov %A0,%B0" CR_TAB
5553 "sbc %B0,%B0" CR_TAB
5562 return ("lsl %B0" CR_TAB
5563 "sbc %A0,%A0" CR_TAB
5565 "mov %B0,%A0" CR_TAB
5569 if (INTVAL (operands
[2]) < 16)
5575 return *len
= 3, ("lsl %B0" CR_TAB
5576 "sbc %A0,%A0" CR_TAB
5581 out_shift_with_cnt ("asr %B0" CR_TAB
5582 "ror %A0", insn
, operands
, len
, 2);
5587 /* 24-bit arithmetic shift right */
5590 avr_out_ashrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
5592 int dest
= REGNO (op
[0]);
5593 int src
= REGNO (op
[1]);
5595 if (CONST_INT_P (op
[2]))
5600 switch (INTVAL (op
[2]))
5604 return avr_asm_len ("mov %A0,%B1" CR_TAB
5605 "mov %B0,%C1" CR_TAB
5608 "dec %C0", op
, plen
, 5);
5610 return avr_asm_len ("clr %C0" CR_TAB
5613 "mov %B0,%C1" CR_TAB
5614 "mov %A0,%B1", op
, plen
, 5);
5617 if (dest
!= src
+ 2)
5618 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5620 return avr_asm_len ("clr %B0" CR_TAB
5623 "mov %C0,%B0", op
, plen
, 4);
5626 if (INTVAL (op
[2]) < 24)
5632 return avr_asm_len ("lsl %C0" CR_TAB
5633 "sbc %A0,%A0" CR_TAB
5634 "mov %B0,%A0" CR_TAB
5635 "mov %C0,%A0", op
, plen
, 4);
5639 out_shift_with_cnt ("asr %C0" CR_TAB
5641 "ror %A0", insn
, op
, plen
, 3);
5646 /* 32-bit arithmetic shift right ((signed long)x >> i) */
5649 ashrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5651 if (GET_CODE (operands
[2]) == CONST_INT
)
5659 switch (INTVAL (operands
[2]))
5663 int reg0
= true_regnum (operands
[0]);
5664 int reg1
= true_regnum (operands
[1]);
5667 return ("mov %A0,%B1" CR_TAB
5668 "mov %B0,%C1" CR_TAB
5669 "mov %C0,%D1" CR_TAB
5674 return ("clr %D0" CR_TAB
5677 "mov %C0,%D1" CR_TAB
5678 "mov %B0,%C1" CR_TAB
5684 int reg0
= true_regnum (operands
[0]);
5685 int reg1
= true_regnum (operands
[1]);
5687 if (reg0
== reg1
+ 2)
5688 return *len
= 4, ("clr %D0" CR_TAB
5693 return *len
= 5, ("movw %A0,%C1" CR_TAB
5699 return *len
= 6, ("mov %B0,%D1" CR_TAB
5700 "mov %A0,%C1" CR_TAB
5708 return *len
= 6, ("mov %A0,%D1" CR_TAB
5712 "mov %B0,%D0" CR_TAB
5716 if (INTVAL (operands
[2]) < 32)
5723 return *len
= 4, ("lsl %D0" CR_TAB
5724 "sbc %A0,%A0" CR_TAB
5725 "mov %B0,%A0" CR_TAB
5728 return *len
= 5, ("lsl %D0" CR_TAB
5729 "sbc %A0,%A0" CR_TAB
5730 "mov %B0,%A0" CR_TAB
5731 "mov %C0,%A0" CR_TAB
5736 out_shift_with_cnt ("asr %D0" CR_TAB
5739 "ror %A0", insn
, operands
, len
, 4);
5743 /* 8-bit logic shift right ((unsigned char)x >> i) */
5746 lshrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5748 if (GET_CODE (operands
[2]) == CONST_INT
)
5755 switch (INTVAL (operands
[2]))
5758 if (INTVAL (operands
[2]) < 8)
5770 return ("lsr %0" CR_TAB
5774 return ("lsr %0" CR_TAB
5779 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5782 return ("swap %0" CR_TAB
5786 return ("lsr %0" CR_TAB
5792 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5795 return ("swap %0" CR_TAB
5800 return ("lsr %0" CR_TAB
5807 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5810 return ("swap %0" CR_TAB
5816 return ("lsr %0" CR_TAB
5825 return ("rol %0" CR_TAB
5830 else if (CONSTANT_P (operands
[2]))
5831 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5833 out_shift_with_cnt ("lsr %0",
5834 insn
, operands
, len
, 1);
5838 /* 16-bit logic shift right ((unsigned short)x >> i) */
5841 lshrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5843 if (GET_CODE (operands
[2]) == CONST_INT
)
5845 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5846 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5853 switch (INTVAL (operands
[2]))
5856 if (INTVAL (operands
[2]) < 16)
5860 return ("clr %B0" CR_TAB
5864 if (optimize_size
&& scratch
)
5869 return ("swap %B0" CR_TAB
5871 "andi %A0,0x0f" CR_TAB
5872 "eor %A0,%B0" CR_TAB
5873 "andi %B0,0x0f" CR_TAB
5879 return ("swap %B0" CR_TAB
5881 "ldi %3,0x0f" CR_TAB
5883 "eor %A0,%B0" CR_TAB
5887 break; /* optimize_size ? 6 : 8 */
5891 break; /* scratch ? 5 : 6 */
5895 return ("lsr %B0" CR_TAB
5899 "andi %A0,0x0f" CR_TAB
5900 "eor %A0,%B0" CR_TAB
5901 "andi %B0,0x0f" CR_TAB
5907 return ("lsr %B0" CR_TAB
5911 "ldi %3,0x0f" CR_TAB
5913 "eor %A0,%B0" CR_TAB
5921 break; /* scratch ? 5 : 6 */
5923 return ("clr __tmp_reg__" CR_TAB
5926 "rol __tmp_reg__" CR_TAB
5929 "rol __tmp_reg__" CR_TAB
5930 "mov %A0,%B0" CR_TAB
5931 "mov %B0,__tmp_reg__");
5935 return ("lsl %A0" CR_TAB
5936 "mov %A0,%B0" CR_TAB
5938 "sbc %B0,%B0" CR_TAB
5942 return *len
= 2, ("mov %A0,%B1" CR_TAB
5947 return ("mov %A0,%B0" CR_TAB
5953 return ("mov %A0,%B0" CR_TAB
5960 return ("mov %A0,%B0" CR_TAB
5970 return ("mov %A0,%B0" CR_TAB
5978 return ("mov %A0,%B0" CR_TAB
5981 "ldi %3,0x0f" CR_TAB
5985 return ("mov %A0,%B0" CR_TAB
5996 return ("mov %A0,%B0" CR_TAB
6002 if (AVR_HAVE_MUL
&& scratch
)
6005 return ("ldi %3,0x08" CR_TAB
6009 "clr __zero_reg__");
6011 if (optimize_size
&& scratch
)
6016 return ("mov %A0,%B0" CR_TAB
6020 "ldi %3,0x07" CR_TAB
6026 return ("set" CR_TAB
6031 "clr __zero_reg__");
6034 return ("mov %A0,%B0" CR_TAB
6043 if (AVR_HAVE_MUL
&& ldi_ok
)
6046 return ("ldi %A0,0x04" CR_TAB
6047 "mul %B0,%A0" CR_TAB
6050 "clr __zero_reg__");
6052 if (AVR_HAVE_MUL
&& scratch
)
6055 return ("ldi %3,0x04" CR_TAB
6059 "clr __zero_reg__");
6061 if (optimize_size
&& ldi_ok
)
6064 return ("mov %A0,%B0" CR_TAB
6065 "ldi %B0,6" "\n1:\t"
6070 if (optimize_size
&& scratch
)
6073 return ("clr %A0" CR_TAB
6082 return ("clr %A0" CR_TAB
6089 out_shift_with_cnt ("lsr %B0" CR_TAB
6090 "ror %A0", insn
, operands
, len
, 2);
6095 /* 24-bit logic shift right */
6098 avr_out_lshrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6100 int dest
= REGNO (op
[0]);
6101 int src
= REGNO (op
[1]);
6103 if (CONST_INT_P (op
[2]))
6108 switch (INTVAL (op
[2]))
6112 return avr_asm_len ("mov %A0,%B1" CR_TAB
6113 "mov %B0,%C1" CR_TAB
6114 "clr %C0", op
, plen
, 3);
6116 return avr_asm_len ("clr %C0" CR_TAB
6117 "mov %B0,%C1" CR_TAB
6118 "mov %A0,%B1", op
, plen
, 3);
6121 if (dest
!= src
+ 2)
6122 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
6124 return avr_asm_len ("clr %B0" CR_TAB
6125 "clr %C0", op
, plen
, 2);
6128 if (INTVAL (op
[2]) < 24)
6134 return avr_asm_len ("clr %A0" CR_TAB
6138 "clr %C0", op
, plen
, 5);
6142 out_shift_with_cnt ("lsr %C0" CR_TAB
6144 "ror %A0", insn
, op
, plen
, 3);
6149 /* 32-bit logic shift right ((unsigned int)x >> i) */
6152 lshrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6154 if (GET_CODE (operands
[2]) == CONST_INT
)
6162 switch (INTVAL (operands
[2]))
6165 if (INTVAL (operands
[2]) < 32)
6169 return *len
= 3, ("clr %D0" CR_TAB
6173 return ("clr %D0" CR_TAB
6180 int reg0
= true_regnum (operands
[0]);
6181 int reg1
= true_regnum (operands
[1]);
6184 return ("mov %A0,%B1" CR_TAB
6185 "mov %B0,%C1" CR_TAB
6186 "mov %C0,%D1" CR_TAB
6189 return ("clr %D0" CR_TAB
6190 "mov %C0,%D1" CR_TAB
6191 "mov %B0,%C1" CR_TAB
6197 int reg0
= true_regnum (operands
[0]);
6198 int reg1
= true_regnum (operands
[1]);
6200 if (reg0
== reg1
+ 2)
6201 return *len
= 2, ("clr %C0" CR_TAB
6204 return *len
= 3, ("movw %A0,%C1" CR_TAB
6208 return *len
= 4, ("mov %B0,%D1" CR_TAB
6209 "mov %A0,%C1" CR_TAB
6215 return *len
= 4, ("mov %A0,%D1" CR_TAB
6222 return ("clr %A0" CR_TAB
6231 out_shift_with_cnt ("lsr %D0" CR_TAB
6234 "ror %A0", insn
, operands
, len
, 4);
6239 /* Output addition of register XOP[0] and compile time constant XOP[2].
6240 CODE == PLUS: perform addition by using ADD instructions or
6241 CODE == MINUS: perform addition by using SUB instructions:
6243 XOP[0] = XOP[0] + XOP[2]
6245 Or perform addition/subtraction with register XOP[2] depending on CODE:
6247 XOP[0] = XOP[0] +/- XOP[2]
6249 If PLEN == NULL, print assembler instructions to perform the operation;
6250 otherwise, set *PLEN to the length of the instruction sequence (in words)
6251 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
6252 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
6254 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6255 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6256 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
6257 the subtrahend in the original insn, provided it is a compile time constant.
6258 In all other cases, SIGN is 0.
6260 If OUT_LABEL is true, print the final 0: label which is needed for
6261 saturated addition / subtraction. The only case where OUT_LABEL = false
6262 is useful is for saturated addition / subtraction performed during
6263 fixed-point rounding, cf. `avr_out_round'. */
6266 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
6267 enum rtx_code code_sat
, int sign
, bool out_label
)
6269 /* MODE of the operation. */
6270 enum machine_mode mode
= GET_MODE (xop
[0]);
6272 /* INT_MODE of the same size. */
6273 enum machine_mode imode
= int_mode_for_mode (mode
);
6275 /* Number of bytes to operate on. */
6276 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6278 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6279 int clobber_val
= -1;
6281 /* op[0]: 8-bit destination register
6282 op[1]: 8-bit const int
6283 op[2]: 8-bit scratch register */
6286 /* Started the operation? Before starting the operation we may skip
6287 adding 0. This is no more true after the operation started because
6288 carry must be taken into account. */
6289 bool started
= false;
6291 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6294 /* Output a BRVC instruction. Only needed with saturation. */
6295 bool out_brvc
= true;
6302 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_CLOBBER
;
6304 for (i
= 0; i
< n_bytes
; i
++)
6306 /* We operate byte-wise on the destination. */
6307 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6308 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6311 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
6314 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
6318 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6320 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
6329 /* Except in the case of ADIW with 16-bit register (see below)
6330 addition does not set cc0 in a usable way. */
6332 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
6334 if (CONST_FIXED_P (xval
))
6335 xval
= avr_to_int_mode (xval
);
6337 /* Adding/Subtracting zero is a no-op. */
6339 if (xval
== const0_rtx
)
6346 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
6350 if (SS_PLUS
== code_sat
&& MINUS
== code
6352 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
6353 & GET_MODE_MASK (QImode
)))
6355 /* We compute x + 0x80 by means of SUB instructions. We negated the
6356 constant subtrahend above and are left with x - (-128) so that we
6357 need something like SUBI r,128 which does not exist because SUBI sets
6358 V according to the sign of the subtrahend. Notice the only case
6359 where this must be done is when NEG overflowed in case [2s] because
6360 the V computation needs the right sign of the subtrahend. */
6362 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6364 avr_asm_len ("subi %0,128" CR_TAB
6365 "brmi 0f", &msb
, plen
, 2);
6371 for (i
= 0; i
< n_bytes
; i
++)
6373 /* We operate byte-wise on the destination. */
6374 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6375 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
6377 /* 8-bit value to operate with this byte. */
6378 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6380 /* Registers R16..R31 can operate with immediate. */
6381 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6384 op
[1] = gen_int_mode (val8
, QImode
);
6386 /* To get usable cc0 no low-bytes must have been skipped. */
6394 && test_hard_reg_class (ADDW_REGS
, reg8
))
6396 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
6397 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
6399 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6400 i.e. operate word-wise. */
6407 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
6410 if (n_bytes
== 2 && PLUS
== code
)
6422 avr_asm_len (code
== PLUS
6423 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6427 else if ((val8
== 1 || val8
== 0xff)
6428 && UNKNOWN
== code_sat
6430 && i
== n_bytes
- 1)
6432 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
6442 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
6444 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
6446 /* This belongs to the x + 0x80 corner case. The code with
6447 ADD instruction is not smaller, thus make this case
6448 expensive so that the caller won't pick it. */
6454 if (clobber_val
!= (int) val8
)
6455 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6456 clobber_val
= (int) val8
;
6458 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
6465 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
6468 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6470 if (clobber_val
!= (int) val8
)
6471 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6472 clobber_val
= (int) val8
;
6474 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
6486 } /* for all sub-bytes */
6490 if (UNKNOWN
== code_sat
)
6493 *pcc
= (int) CC_CLOBBER
;
6495 /* Vanilla addition/subtraction is done. We are left with saturation.
6497 We have to compute A = A <op> B where A is a register and
6498 B is a register or a non-zero compile time constant CONST.
6499 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6500 B stands for the original operand $2 in INSN. In the case of B = CONST,
6501 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6503 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6507 operation | code | sat if | b is | sat value | case
6508 -----------------+-------+----------+--------------+-----------+-------
6509 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6510 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6511 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6512 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6516 operation | code | sat if | b is | sat value | case
6517 -----------------+-------+----------+--------------+-----------+-------
6518 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6519 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6520 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6521 - as a + (-b) | add | V == 1 | const | s- | [4s]
6523 s+ = b < 0 ? -0x80 : 0x7f
6524 s- = b < 0 ? 0x7f : -0x80
6526 The cases a - b actually perform a - (-(-b)) if B is CONST.
6529 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6531 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
6534 bool need_copy
= true;
6535 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
6546 avr_asm_len ("brvc 0f", op
, plen
, 1);
6548 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6553 avr_asm_len ("ldi %0,0x7f" CR_TAB
6554 "adc %0,__zero_reg__", op
, plen
, 2);
6556 avr_asm_len ("ldi %0,0x7f" CR_TAB
6557 "ldi %1,0xff" CR_TAB
6558 "adc %1,__zero_reg__" CR_TAB
6559 "adc %0,__zero_reg__", op
, plen
, 4);
6561 else if (sign
== 0 && PLUS
== code
)
6565 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6568 avr_asm_len ("ldi %0,0x80" CR_TAB
6570 "dec %0", op
, plen
, 3);
6572 avr_asm_len ("ldi %0,0x80" CR_TAB
6575 "sbci %0,0", op
, plen
, 4);
6577 else if (sign
== 0 && MINUS
== code
)
6581 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6584 avr_asm_len ("ldi %0,0x7f" CR_TAB
6586 "inc %0", op
, plen
, 3);
6588 avr_asm_len ("ldi %0,0x7f" CR_TAB
6591 "sbci %0,-1", op
, plen
, 4);
6593 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
6595 /* [1s,const,B < 0] [2s,B < 0] */
6596 /* [3s,const,B > 0] [4s,B > 0] */
6600 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6604 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
6605 if (n_bytes
> 1 && need_copy
)
6606 avr_asm_len ("clr %1", op
, plen
, 1);
6608 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
6610 /* [1s,const,B > 0] [2s,B > 0] */
6611 /* [3s,const,B < 0] [4s,B < 0] */
6615 avr_asm_len ("sec" CR_TAB
6616 "%~call __sbc_8", op
, plen
, 1 + len_call
);
6620 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
6621 if (n_bytes
> 1 && need_copy
)
6622 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
6632 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
6637 avr_asm_len ("sec", op
, plen
, 1);
6638 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
6644 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
6645 avr_asm_len ("sec" CR_TAB
"sbc %0,%0", op
, plen
, 2);
6647 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
6650 break; /* US_PLUS */
6655 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
6659 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6663 avr_asm_len ("clr %0", op
, plen
, 1);
6668 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6669 Now copy the right value to the LSBs. */
6671 if (need_copy
&& n_bytes
> 1)
6673 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
6675 avr_asm_len ("mov %1,%0", op
, plen
, 1);
6681 avr_asm_len ("movw %0,%1", op
, plen
, 1);
6683 avr_asm_len ("mov %A0,%1" CR_TAB
6684 "mov %B0,%1", op
, plen
, 2);
6687 else if (n_bytes
> 2)
6690 avr_asm_len ("mov %A0,%1" CR_TAB
6691 "mov %B0,%1", op
, plen
, 2);
6695 if (need_copy
&& n_bytes
== 8)
6698 avr_asm_len ("movw %r0+2,%0" CR_TAB
6699 "movw %r0+4,%0", xop
, plen
, 2);
6701 avr_asm_len ("mov %r0+2,%0" CR_TAB
6702 "mov %r0+3,%0" CR_TAB
6703 "mov %r0+4,%0" CR_TAB
6704 "mov %r0+5,%0", xop
, plen
, 4);
6708 avr_asm_len ("0:", op
, plen
, 0);
6712 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6713 is ont a compile-time constant:
6715 XOP[0] = XOP[0] +/- XOP[2]
6717 This is a helper for the function below. The only insns that need this
6718 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
6721 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
6723 enum machine_mode mode
= GET_MODE (xop
[0]);
6725 /* Only pointer modes want to add symbols. */
6727 gcc_assert (mode
== HImode
|| mode
== PSImode
);
6729 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6731 avr_asm_len (PLUS
== code
6732 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
6733 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
6736 if (PSImode
== mode
)
6737 avr_asm_len (PLUS
== code
6738 ? "sbci %C0,hlo8(-(%2))"
6739 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
6744 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6746 INSN is a single_set insn or an insn pattern with a binary operation as
6747 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6749 XOP are the operands of INSN. In the case of 64-bit operations with
6750 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6751 The non-saturating insns up to 32 bits may or may not supply a "d" class
6754 If PLEN == NULL output the instructions.
6755 If PLEN != NULL set *PLEN to the length of the sequence in words.
6757 PCC is a pointer to store the instructions' effect on cc0.
6760 PLEN and PCC default to NULL.
6762 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
6767 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
, bool out_label
)
6769 int cc_plus
, cc_minus
, cc_dummy
;
6770 int len_plus
, len_minus
;
6772 rtx xpattern
= INSN_P (insn
) ? single_set (insn
) : insn
;
6773 rtx xdest
= SET_DEST (xpattern
);
6774 enum machine_mode mode
= GET_MODE (xdest
);
6775 enum machine_mode imode
= int_mode_for_mode (mode
);
6776 int n_bytes
= GET_MODE_SIZE (mode
);
6777 enum rtx_code code_sat
= GET_CODE (SET_SRC (xpattern
));
6779 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
6785 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6787 if (PLUS
== code_sat
|| MINUS
== code_sat
)
6790 if (n_bytes
<= 4 && REG_P (xop
[2]))
6792 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
, 0, out_label
);
6798 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
6799 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
6800 op
[2] = avr_to_int_mode (xop
[0]);
6805 && !CONST_INT_P (xop
[2])
6806 && !CONST_FIXED_P (xop
[2]))
6808 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
6811 op
[0] = avr_to_int_mode (xop
[0]);
6812 op
[1] = avr_to_int_mode (xop
[1]);
6813 op
[2] = avr_to_int_mode (xop
[2]);
6816 /* Saturations and 64-bit operations don't have a clobber operand.
6817 For the other cases, the caller will provide a proper XOP[3]. */
6819 xpattern
= INSN_P (insn
) ? PATTERN (insn
) : insn
;
6820 op
[3] = PARALLEL
== GET_CODE (xpattern
) ? xop
[3] : NULL_RTX
;
6822 /* Saturation will need the sign of the original operand. */
6824 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
6825 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
6827 /* If we subtract and the subtrahend is a constant, then negate it
6828 so that avr_out_plus_1 can be used. */
6831 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
6833 /* Work out the shortest sequence. */
6835 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_minus
, code_sat
, sign
, out_label
);
6836 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_plus
, code_sat
, sign
, out_label
);
6840 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
6841 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
6843 else if (len_minus
<= len_plus
)
6844 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
, out_label
);
6846 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
, out_label
);
6852 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6853 time constant XOP[2]:
6855 XOP[0] = XOP[0] <op> XOP[2]
6857 and return "". If PLEN == NULL, print assembler instructions to perform the
6858 operation; otherwise, set *PLEN to the length of the instruction sequence
6859 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6860 register or SCRATCH if no clobber register is needed for the operation.
6861 INSN is an INSN_P or a pattern of an insn. */
6864 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6866 /* CODE and MODE of the operation. */
6867 rtx xpattern
= INSN_P (insn
) ? single_set (insn
) : insn
;
6868 enum rtx_code code
= GET_CODE (SET_SRC (xpattern
));
6869 enum machine_mode mode
= GET_MODE (xop
[0]);
6871 /* Number of bytes to operate on. */
6872 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6874 /* Value of T-flag (0 or 1) or -1 if unknow. */
6877 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6878 int clobber_val
= -1;
6880 /* op[0]: 8-bit destination register
6881 op[1]: 8-bit const int
6882 op[2]: 8-bit clobber register or SCRATCH
6883 op[3]: 8-bit register containing 0xff or NULL_RTX */
6892 for (i
= 0; i
< n_bytes
; i
++)
6894 /* We operate byte-wise on the destination. */
6895 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6896 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6898 /* 8-bit value to operate with this byte. */
6899 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6901 /* Number of bits set in the current byte of the constant. */
6902 int pop8
= avr_popcount (val8
);
6904 /* Registers R16..R31 can operate with immediate. */
6905 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6908 op
[1] = GEN_INT (val8
);
6917 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6921 avr_asm_len ("set", op
, plen
, 1);
6924 op
[1] = GEN_INT (exact_log2 (val8
));
6925 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6929 if (op
[3] != NULL_RTX
)
6930 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6932 avr_asm_len ("clr %0" CR_TAB
6933 "dec %0", op
, plen
, 2);
6939 if (clobber_val
!= (int) val8
)
6940 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6941 clobber_val
= (int) val8
;
6943 avr_asm_len ("or %0,%2", op
, plen
, 1);
6953 avr_asm_len ("clr %0", op
, plen
, 1);
6955 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6959 avr_asm_len ("clt", op
, plen
, 1);
6962 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6963 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6967 if (clobber_val
!= (int) val8
)
6968 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6969 clobber_val
= (int) val8
;
6971 avr_asm_len ("and %0,%2", op
, plen
, 1);
6981 avr_asm_len ("com %0", op
, plen
, 1);
6982 else if (ld_reg_p
&& val8
== (1 << 7))
6983 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6986 if (clobber_val
!= (int) val8
)
6987 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6988 clobber_val
= (int) val8
;
6990 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6996 /* Unknown rtx_code */
6999 } /* for all sub-bytes */
7005 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7006 PLEN != NULL: Set *PLEN to the length of that sequence.
7010 avr_out_addto_sp (rtx
*op
, int *plen
)
7012 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
7013 int addend
= INTVAL (op
[0]);
7020 if (flag_verbose_asm
|| flag_print_asm_name
)
7021 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
7023 while (addend
<= -pc_len
)
7026 avr_asm_len ("rcall .", op
, plen
, 1);
7029 while (addend
++ < 0)
7030 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
7032 else if (addend
> 0)
7034 if (flag_verbose_asm
|| flag_print_asm_name
)
7035 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
7037 while (addend
-- > 0)
7038 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
7045 /* Outputs instructions needed for fixed point type conversion.
7046 This includes converting between any fixed point type, as well
7047 as converting to any integer type. Conversion between integer
7048 types is not supported.
7050 Converting signed fractional types requires a bit shift if converting
7051 to or from any unsigned fractional type because the decimal place is
7052 shifted by 1 bit. When the destination is a signed fractional, the sign
7053 is stored in either the carry or T bit. */
7056 avr_out_fract (rtx_insn
*insn
, rtx operands
[], bool intsigned
, int *plen
)
7060 RTX_CODE shift
= UNKNOWN
;
7061 bool sign_in_carry
= false;
7062 bool msb_in_carry
= false;
7063 bool lsb_in_tmp_reg
= false;
7064 bool lsb_in_carry
= false;
7065 bool frac_rounded
= false;
7066 const char *code_ashift
= "lsl %0";
7069 #define MAY_CLOBBER(RR) \
7070 /* Shorthand used below. */ \
7072 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7073 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7074 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7075 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7079 /* bytes : Length of operand in bytes.
7080 ibyte : Length of integral part in bytes.
7081 fbyte, fbit : Length of fractional part in bytes, bits. */
7084 unsigned fbit
, bytes
, ibyte
, fbyte
;
7085 unsigned regno
, regno_msb
;
7086 } dest
, src
, *val
[2] = { &dest
, &src
};
7091 /* Step 0: Determine information on source and destination operand we
7092 ====== will need in the remainder. */
7094 for (i
= 0; i
< sizeof (val
) / sizeof (*val
); i
++)
7096 enum machine_mode mode
;
7098 xop
[i
] = operands
[i
];
7100 mode
= GET_MODE (xop
[i
]);
7102 val
[i
]->bytes
= GET_MODE_SIZE (mode
);
7103 val
[i
]->regno
= REGNO (xop
[i
]);
7104 val
[i
]->regno_msb
= REGNO (xop
[i
]) + val
[i
]->bytes
- 1;
7106 if (SCALAR_INT_MODE_P (mode
))
7108 val
[i
]->sbit
= intsigned
;
7111 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
7113 val
[i
]->sbit
= SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
7114 val
[i
]->fbit
= GET_MODE_FBIT (mode
);
7117 fatal_insn ("unsupported fixed-point conversion", insn
);
7119 val
[i
]->fbyte
= (1 + val
[i
]->fbit
) / BITS_PER_UNIT
;
7120 val
[i
]->ibyte
= val
[i
]->bytes
- val
[i
]->fbyte
;
7123 // Byte offset of the decimal point taking into account different place
7124 // of the decimal point in input and output and different register numbers
7125 // of input and output.
7126 int offset
= dest
.regno
- src
.regno
+ dest
.fbyte
- src
.fbyte
;
7128 // Number of destination bytes that will come from sign / zero extension.
7129 int sign_bytes
= (dest
.ibyte
- src
.ibyte
) * (dest
.ibyte
> src
.ibyte
);
7131 // Number of bytes at the low end to be filled with zeros.
7132 int zero_bytes
= (dest
.fbyte
- src
.fbyte
) * (dest
.fbyte
> src
.fbyte
);
7134 // Do we have a 16-Bit register that is cleared?
7135 rtx clrw
= NULL_RTX
;
7137 bool sign_extend
= src
.sbit
&& sign_bytes
;
7139 if (0 == dest
.fbit
% 8 && 7 == src
.fbit
% 8)
7141 else if (7 == dest
.fbit
% 8 && 0 == src
.fbit
% 8)
7143 else if (dest
.fbit
% 8 == src
.fbit
% 8)
7148 /* If we need to round the fraction part, we might need to save/round it
7149 before clobbering any of it in Step 1. Also, we might to want to do
7150 the rounding now to make use of LD_REGS. */
7151 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7152 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
7153 && !TARGET_FRACT_CONV_TRUNC
)
7157 (offset
? dest
.regno_msb
- sign_bytes
: dest
.regno
+ zero_bytes
- 1)
7158 && dest
.regno
- offset
-1 >= dest
.regno
);
7159 unsigned s0
= dest
.regno
- offset
-1;
7160 bool use_src
= true;
7162 unsigned copied_msb
= src
.regno_msb
;
7163 bool have_carry
= false;
7165 if (src
.ibyte
> dest
.ibyte
)
7166 copied_msb
-= src
.ibyte
- dest
.ibyte
;
7168 for (sn
= s0
; sn
<= copied_msb
; sn
++)
7169 if (!IN_RANGE (sn
, dest
.regno
, dest
.regno_msb
)
7170 && !reg_unused_after (insn
, all_regs_rtx
[sn
]))
7172 if (use_src
&& TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
))
7174 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7175 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7179 if (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], sn
))
7180 avr_asm_len ("cpi %0,1", &all_regs_rtx
[sn
], plen
, 1);
7182 avr_asm_len ("sec" CR_TAB
"cpc %0,__zero_reg__",
7183 &all_regs_rtx
[sn
], plen
, 2);
7187 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7188 avr_asm_len (have_carry
? "sbci %0,128" : "subi %0,129",
7189 &all_regs_rtx
[s0
], plen
, 1);
7190 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
7191 avr_asm_len ("sbci %0,255", &all_regs_rtx
[sn
], plen
, 1);
7192 avr_asm_len ("\n0:", NULL
, plen
, 0);
7193 frac_rounded
= true;
7195 else if (use_src
&& overlap
)
7197 avr_asm_len ("clr __tmp_reg__" CR_TAB
7198 "sbrc %1,0" CR_TAB
"dec __tmp_reg__", xop
, plen
, 1);
7202 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
7206 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
7208 avr_asm_len ("clt" CR_TAB
"bld __tmp_reg__,7" CR_TAB
7209 "adc %0,__tmp_reg__",
7210 &all_regs_rtx
[s0
], plen
, 1);
7212 avr_asm_len ("lsr __tmp_reg" CR_TAB
"add %0,__tmp_reg__",
7213 &all_regs_rtx
[s0
], plen
, 2);
7214 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
7215 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7216 frac_rounded
= true;
7221 = (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
)
7222 && (IN_RANGE (s0
, dest
.regno
, dest
.regno_msb
)
7223 || reg_unused_after (insn
, all_regs_rtx
[s0
])));
7224 xop
[2] = all_regs_rtx
[s0
];
7225 unsigned sn
= src
.regno
;
7226 if (!use_src
|| sn
== s0
)
7227 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
7228 /* We need to consider to-be-discarded bits
7229 if the value is negative. */
7232 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7233 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7234 /* Test to-be-discarded bytes for any nozero bits.
7235 ??? Could use OR or SBIW to test two registers at once. */
7237 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7239 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7240 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
7242 avr_asm_len ("breq 0f" CR_TAB
7243 "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",
7246 avr_asm_len ("breq 0f" CR_TAB
7247 "set" CR_TAB
"bld __tmp_reg__,0\n0:",
7250 lsb_in_tmp_reg
= true;
7254 /* Step 1: Clear bytes at the low end and copy payload bits from source
7255 ====== to destination. */
7257 int step
= offset
< 0 ? 1 : -1;
7258 unsigned d0
= offset
< 0 ? dest
.regno
: dest
.regno_msb
;
7260 // We cleared at least that number of registers.
7263 for (; d0
>= dest
.regno
&& d0
<= dest
.regno_msb
; d0
+= step
)
7265 // Next regno of destination is needed for MOVW
7266 unsigned d1
= d0
+ step
;
7268 // Current and next regno of source
7269 signed s0
= d0
- offset
;
7270 signed s1
= s0
+ step
;
7272 // Must current resp. next regno be CLRed? This applies to the low
7273 // bytes of the destination that have no associated source bytes.
7274 bool clr0
= s0
< (signed) src
.regno
;
7275 bool clr1
= s1
< (signed) src
.regno
&& d1
>= dest
.regno
;
7277 // First gather what code to emit (if any) and additional step to
7278 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7279 // is the source rtx for the current loop iteration.
7280 const char *code
= NULL
;
7285 if (AVR_HAVE_MOVW
&& clr1
&& clrw
)
7287 xop
[2] = all_regs_rtx
[d0
& ~1];
7289 code
= "movw %2,%3";
7294 xop
[2] = all_regs_rtx
[d0
];
7299 && d0
% 2 == (step
> 0))
7301 clrw
= all_regs_rtx
[d0
& ~1];
7305 else if (offset
&& s0
<= (signed) src
.regno_msb
)
7307 int movw
= AVR_HAVE_MOVW
&& offset
% 2 == 0
7308 && d0
% 2 == (offset
> 0)
7309 && d1
<= dest
.regno_msb
&& d1
>= dest
.regno
7310 && s1
<= (signed) src
.regno_msb
&& s1
>= (signed) src
.regno
;
7312 xop
[2] = all_regs_rtx
[d0
& ~movw
];
7313 xop
[3] = all_regs_rtx
[s0
& ~movw
];
7314 code
= movw
? "movw %2,%3" : "mov %2,%3";
7315 stepw
= step
* movw
;
7320 if (sign_extend
&& shift
!= ASHIFT
&& !sign_in_carry
7321 && (d0
== src
.regno_msb
|| d0
+ stepw
== src
.regno_msb
))
7323 /* We are going to override the sign bit. If we sign-extend,
7324 store the sign in the Carry flag. This is not needed if
7325 the destination will be ASHIFT is the remainder because
7326 the ASHIFT will set Carry without extra instruction. */
7328 avr_asm_len ("lsl %0", &all_regs_rtx
[src
.regno_msb
], plen
, 1);
7329 sign_in_carry
= true;
7332 unsigned src_msb
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
7334 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
7335 && src
.ibyte
> dest
.ibyte
7336 && (d0
== src_msb
|| d0
+ stepw
== src_msb
))
7338 /* We are going to override the MSB. If we shift right,
7339 store the MSB in the Carry flag. This is only needed if
7340 we don't sign-extend becaue with sign-extension the MSB
7341 (the sign) will be produced by the sign extension. */
7343 avr_asm_len ("lsr %0", &all_regs_rtx
[src_msb
], plen
, 1);
7344 msb_in_carry
= true;
7347 unsigned src_lsb
= dest
.regno
- offset
-1;
7349 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
7351 && (d0
== src_lsb
|| d0
+ stepw
== src_lsb
))
7353 /* We are going to override the new LSB; store it into carry. */
7355 avr_asm_len ("lsl %0", &all_regs_rtx
[src_lsb
], plen
, 1);
7356 code_ashift
= "rol %0";
7357 lsb_in_carry
= true;
7360 avr_asm_len (code
, xop
, plen
, 1);
7365 /* Step 2: Shift destination left by 1 bit position. This might be needed
7366 ====== for signed input and unsigned output. */
7368 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
)
7370 unsigned s0
= dest
.regno
- offset
-1;
7372 /* n1169 4.1.4 says:
7373 "Conversions from a fixed-point to an integer type round toward zero."
7374 Hence, converting a fract type to integer only gives a non-zero result
7376 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7377 && SCALAR_FRACT_MODE_P (GET_MODE (xop
[1]))
7378 && !TARGET_FRACT_CONV_TRUNC
)
7380 gcc_assert (s0
== src
.regno_msb
);
7381 /* Check if the input is -1. We do that by checking if negating
7382 the input causes an integer overflow. */
7383 unsigned sn
= src
.regno
;
7384 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
7386 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
7388 /* Overflow goes with set carry. Clear carry otherwise. */
7389 avr_asm_len ("brvs 0f" CR_TAB
"clc\n0:", NULL
, plen
, 2);
7391 /* Likewise, when converting from accumulator types to integer, we
7392 need to round up negative values. */
7393 else if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7394 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
7395 && !TARGET_FRACT_CONV_TRUNC
7398 bool have_carry
= false;
7400 xop
[2] = all_regs_rtx
[s0
];
7401 if (!lsb_in_tmp_reg
&& !MAY_CLOBBER (s0
))
7402 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
7403 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7404 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7405 if (!lsb_in_tmp_reg
)
7407 unsigned sn
= src
.regno
;
7410 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
],
7415 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
], plen
, 1);
7416 lsb_in_tmp_reg
= !MAY_CLOBBER (s0
);
7418 /* Add in C and the rounding value 127. */
7419 /* If the destination msb is a sign byte, and in LD_REGS,
7420 grab it as a temporary. */
7422 && TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
],
7425 xop
[3] = all_regs_rtx
[dest
.regno_msb
];
7426 avr_asm_len ("ldi %3,127", xop
, plen
, 1);
7427 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
? "adc __tmp_reg__,%3"
7428 : have_carry
? "adc %2,%3"
7429 : lsb_in_tmp_reg
? "add __tmp_reg__,%3"
7435 /* Fall back to use __zero_reg__ as a temporary. */
7436 avr_asm_len ("dec __zero_reg__", NULL
, plen
, 1);
7438 avr_asm_len ("clt" CR_TAB
"bld __zero_reg__,7", NULL
, plen
, 2);
7440 avr_asm_len ("lsr __zero_reg__", NULL
, plen
, 1);
7441 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
7442 ? "adc __tmp_reg__,__zero_reg__"
7443 : have_carry
? "adc %2,__zero_reg__"
7444 : lsb_in_tmp_reg
? "add __tmp_reg__,__zero_reg__"
7445 : "add %2,__zero_reg__"),
7447 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL
, plen
, 1);
7449 for (d0
= dest
.regno
+ zero_bytes
;
7450 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
7451 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[d0
], plen
, 1);
7452 avr_asm_len (lsb_in_tmp_reg
7453 ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",
7456 else if (MAY_CLOBBER (s0
))
7457 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
7459 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7460 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7462 code_ashift
= "rol %0";
7463 lsb_in_carry
= true;
7466 if (shift
== ASHIFT
)
7468 for (d0
= dest
.regno
+ zero_bytes
;
7469 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
7471 avr_asm_len (code_ashift
, &all_regs_rtx
[d0
], plen
, 1);
7472 code_ashift
= "rol %0";
7475 lsb_in_carry
= false;
7476 sign_in_carry
= true;
7479 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7480 ======= it in sign-extension below. */
7482 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
7483 && src
.ibyte
> dest
.ibyte
)
7485 unsigned s0
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
7487 if (MAY_CLOBBER (s0
))
7488 avr_asm_len ("lsr %0", &all_regs_rtx
[s0
], plen
, 1);
7490 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7491 "lsr __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7493 msb_in_carry
= true;
7496 /* Step 3: Sign-extend or zero-extend the destination as needed.
7499 if (sign_extend
&& !sign_in_carry
)
7501 unsigned s0
= src
.regno_msb
;
7503 if (MAY_CLOBBER (s0
))
7504 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
7506 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7507 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7509 sign_in_carry
= true;
7512 gcc_assert (sign_in_carry
+ msb_in_carry
+ lsb_in_carry
<= 1);
7514 unsigned copies
= 0;
7515 rtx movw
= sign_extend
? NULL_RTX
: clrw
;
7517 for (d0
= dest
.regno_msb
- sign_bytes
+ 1; d0
<= dest
.regno_msb
; d0
++)
7519 if (AVR_HAVE_MOVW
&& movw
7520 && d0
% 2 == 0 && d0
+ 1 <= dest
.regno_msb
)
7522 xop
[2] = all_regs_rtx
[d0
];
7524 avr_asm_len ("movw %2,%3", xop
, plen
, 1);
7529 avr_asm_len (sign_extend
? "sbc %0,%0" : "clr %0",
7530 &all_regs_rtx
[d0
], plen
, 1);
7532 if (++copies
>= 2 && !movw
&& d0
% 2 == 1)
7533 movw
= all_regs_rtx
[d0
-1];
7538 /* Step 4: Right shift the destination. This might be needed for
7539 ====== conversions from unsigned to signed. */
7541 if (shift
== ASHIFTRT
)
7543 const char *code_ashiftrt
= "lsr %0";
7545 if (sign_extend
|| msb_in_carry
)
7546 code_ashiftrt
= "ror %0";
7548 if (src
.sbit
&& src
.ibyte
== dest
.ibyte
)
7549 code_ashiftrt
= "asr %0";
7551 for (d0
= dest
.regno_msb
- sign_bytes
;
7552 d0
>= dest
.regno
+ zero_bytes
- 1 && d0
>= dest
.regno
; d0
--)
7554 avr_asm_len (code_ashiftrt
, &all_regs_rtx
[d0
], plen
, 1);
7555 code_ashiftrt
= "ror %0";
7565 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
7566 XOP[2] is the rounding point, a CONST_INT. The function prints the
7567 instruction sequence if PLEN = NULL and computes the length in words
7568 of the sequence if PLEN != NULL. Most of this function deals with
7569 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
7572 avr_out_round (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*xop
, int *plen
)
7574 enum machine_mode mode
= GET_MODE (xop
[0]);
7575 enum machine_mode imode
= int_mode_for_mode (mode
);
7576 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
7577 int fbit
= (int) GET_MODE_FBIT (mode
);
7578 double_int i_add
= double_int_zero
.set_bit (fbit
-1 - INTVAL (xop
[2]));
7579 wide_int wi_add
= wi::set_bit_in_zero (fbit
-1 - INTVAL (xop
[2]),
7580 GET_MODE_PRECISION (imode
));
7581 // Lengths of PLUS and AND parts.
7582 int len_add
= 0, *plen_add
= plen
? &len_add
: NULL
;
7583 int len_and
= 0, *plen_and
= plen
? &len_and
: NULL
;
7585 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
7586 // the saturated addition so that we can emit the "rjmp 1f" before the
7589 rtx xadd
= const_fixed_from_double_int (i_add
, mode
);
7590 rtx xpattern
, xsrc
, op
[4];
7592 xsrc
= SIGNED_FIXED_POINT_MODE_P (mode
)
7593 ? gen_rtx_SS_PLUS (mode
, xop
[1], xadd
)
7594 : gen_rtx_US_PLUS (mode
, xop
[1], xadd
);
7595 xpattern
= gen_rtx_SET (VOIDmode
, xop
[0], xsrc
);
7600 avr_out_plus (xpattern
, op
, plen_add
, NULL
, false /* Don't print "0:" */);
7602 avr_asm_len ("rjmp 1f" CR_TAB
7603 "0:", NULL
, plen_add
, 1);
7605 // Keep all bits from RP and higher: ... 2^(-RP)
7606 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
7607 // Rounding point ^^^^^^^
7608 // Added above ^^^^^^^^^
7609 rtx xreg
= simplify_gen_subreg (imode
, xop
[0], mode
, 0);
7610 rtx xmask
= immed_wide_int_const (-wi_add
- wi_add
, imode
);
7612 xpattern
= gen_rtx_SET (VOIDmode
, xreg
, gen_rtx_AND (imode
, xreg
, xmask
));
7617 op
[3] = gen_rtx_SCRATCH (QImode
);
7618 avr_out_bitop (xpattern
, op
, plen_and
);
7619 avr_asm_len ("1:", NULL
, plen
, 0);
7622 *plen
= len_add
+ len_and
;
7628 /* Create RTL split patterns for byte sized rotate expressions. This
7629 produces a series of move instructions and considers overlap situations.
7630 Overlapping non-HImode operands need a scratch register. */
7633 avr_rotate_bytes (rtx operands
[])
7636 enum machine_mode mode
= GET_MODE (operands
[0]);
7637 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
7638 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
7639 int num
= INTVAL (operands
[2]);
7640 rtx scratch
= operands
[3];
7641 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
7642 Word move if no scratch is needed, otherwise use size of scratch. */
7643 enum machine_mode move_mode
= QImode
;
7644 int move_size
, offset
, size
;
7648 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
7651 move_mode
= GET_MODE (scratch
);
7653 /* Force DI rotate to use QI moves since other DI moves are currently split
7654 into QI moves so forward propagation works better. */
7657 /* Make scratch smaller if needed. */
7658 if (SCRATCH
!= GET_CODE (scratch
)
7659 && HImode
== GET_MODE (scratch
)
7660 && QImode
== move_mode
)
7661 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
7663 move_size
= GET_MODE_SIZE (move_mode
);
7664 /* Number of bytes/words to rotate. */
7665 offset
= (num
>> 3) / move_size
;
7666 /* Number of moves needed. */
7667 size
= GET_MODE_SIZE (mode
) / move_size
;
7668 /* Himode byte swap is special case to avoid a scratch register. */
7669 if (mode
== HImode
&& same_reg
)
7671 /* HImode byte swap, using xor. This is as quick as using scratch. */
7673 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
7674 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
7675 if (!rtx_equal_p (dst
, src
))
7677 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
7678 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
7679 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
7684 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
7685 /* Create linked list of moves to determine move order. */
7689 } move
[MAX_SIZE
+ 8];
7692 gcc_assert (size
<= MAX_SIZE
);
7693 /* Generate list of subreg moves. */
7694 for (i
= 0; i
< size
; i
++)
7697 int to
= (from
+ offset
) % size
;
7698 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
7699 mode
, from
* move_size
);
7700 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
7701 mode
, to
* move_size
);
7704 /* Mark dependence where a dst of one move is the src of another move.
7705 The first move is a conflict as it must wait until second is
7706 performed. We ignore moves to self - we catch this later. */
7708 for (i
= 0; i
< size
; i
++)
7709 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
7710 for (j
= 0; j
< size
; j
++)
7711 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
7713 /* The dst of move i is the src of move j. */
7720 /* Go through move list and perform non-conflicting moves. As each
7721 non-overlapping move is made, it may remove other conflicts
7722 so the process is repeated until no conflicts remain. */
7727 /* Emit move where dst is not also a src or we have used that
7729 for (i
= 0; i
< size
; i
++)
7730 if (move
[i
].src
!= NULL_RTX
)
7732 if (move
[i
].links
== -1
7733 || move
[move
[i
].links
].src
== NULL_RTX
)
7736 /* Ignore NOP moves to self. */
7737 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
7738 emit_move_insn (move
[i
].dst
, move
[i
].src
);
7740 /* Remove conflict from list. */
7741 move
[i
].src
= NULL_RTX
;
7747 /* Check for deadlock. This is when no moves occurred and we have
7748 at least one blocked move. */
7749 if (moves
== 0 && blocked
!= -1)
7751 /* Need to use scratch register to break deadlock.
7752 Add move to put dst of blocked move into scratch.
7753 When this move occurs, it will break chain deadlock.
7754 The scratch register is substituted for real move. */
7756 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
7758 move
[size
].src
= move
[blocked
].dst
;
7759 move
[size
].dst
= scratch
;
7760 /* Scratch move is never blocked. */
7761 move
[size
].links
= -1;
7762 /* Make sure we have valid link. */
7763 gcc_assert (move
[blocked
].links
!= -1);
7764 /* Replace src of blocking move with scratch reg. */
7765 move
[move
[blocked
].links
].src
= scratch
;
7766 /* Make dependent on scratch move occurring. */
7767 move
[blocked
].links
= size
;
7771 while (blocked
!= -1);
7777 /* Worker function for `ADJUST_INSN_LENGTH'. */
7778 /* Modifies the length assigned to instruction INSN
7779 LEN is the initially computed length of the insn. */
7782 avr_adjust_insn_length (rtx_insn
*insn
, int len
)
7784 rtx
*op
= recog_data
.operand
;
7785 enum attr_adjust_len adjust_len
;
7787 /* Some complex insns don't need length adjustment and therefore
7788 the length need not/must not be adjusted for these insns.
7789 It is easier to state this in an insn attribute "adjust_len" than
7790 to clutter up code here... */
7792 if (JUMP_TABLE_DATA_P (insn
) || recog_memoized (insn
) == -1)
7797 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7799 adjust_len
= get_attr_adjust_len (insn
);
7801 if (adjust_len
== ADJUST_LEN_NO
)
7803 /* Nothing to adjust: The length from attribute "length" is fine.
7804 This is the default. */
7809 /* Extract insn's operands. */
7811 extract_constrain_insn_cached (insn
);
7813 /* Dispatch to right function. */
7817 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
7818 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
7819 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
7821 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
7823 case ADJUST_LEN_PLUS
: avr_out_plus (insn
, op
, &len
); break;
7824 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
7826 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
7827 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
7828 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
7829 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
7830 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
7831 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
7832 case ADJUST_LEN_LPM
: avr_out_lpm (insn
, op
, &len
); break;
7834 case ADJUST_LEN_SFRACT
: avr_out_fract (insn
, op
, true, &len
); break;
7835 case ADJUST_LEN_UFRACT
: avr_out_fract (insn
, op
, false, &len
); break;
7836 case ADJUST_LEN_ROUND
: avr_out_round (insn
, op
, &len
); break;
7838 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
7839 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
7840 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
7841 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
7842 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
7844 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
7845 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
7846 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
7848 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
7849 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
7850 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
7852 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
7853 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
7854 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
7856 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
7857 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
7858 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
7860 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
7862 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
7871 /* Return nonzero if register REG dead after INSN. */
7874 reg_unused_after (rtx_insn
*insn
, rtx reg
)
7876 return (dead_or_set_p (insn
, reg
)
7877 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
7880 /* Return nonzero if REG is not used after INSN.
7881 We assume REG is a reload reg, and therefore does
7882 not live past labels. It may live past calls or jumps though. */
7885 _reg_unused_after (rtx_insn
*insn
, rtx reg
)
7890 /* If the reg is set by this instruction, then it is safe for our
7891 case. Disregard the case where this is a store to memory, since
7892 we are checking a register used in the store address. */
7893 set
= single_set (insn
);
7894 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
7895 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7898 while ((insn
= NEXT_INSN (insn
)))
7901 code
= GET_CODE (insn
);
7904 /* If this is a label that existed before reload, then the register
7905 if dead here. However, if this is a label added by reorg, then
7906 the register may still be live here. We can't tell the difference,
7907 so we just ignore labels completely. */
7908 if (code
== CODE_LABEL
)
7916 if (code
== JUMP_INSN
)
7919 /* If this is a sequence, we must handle them all at once.
7920 We could have for instance a call that sets the target register,
7921 and an insn in a delay slot that uses the register. In this case,
7922 we must return 0. */
7923 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
7928 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
7930 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
7931 rtx set
= single_set (this_insn
);
7933 if (CALL_P (this_insn
))
7935 else if (JUMP_P (this_insn
))
7937 if (INSN_ANNULLED_BRANCH_P (this_insn
))
7942 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7944 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7946 if (GET_CODE (SET_DEST (set
)) != MEM
)
7952 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
7957 else if (code
== JUMP_INSN
)
7961 if (code
== CALL_INSN
)
7964 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
7965 if (GET_CODE (XEXP (tem
, 0)) == USE
7966 && REG_P (XEXP (XEXP (tem
, 0), 0))
7967 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
7969 if (call_used_regs
[REGNO (reg
)])
7973 set
= single_set (insn
);
7975 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7977 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7978 return GET_CODE (SET_DEST (set
)) != MEM
;
7979 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
7986 /* Implement `TARGET_ASM_INTEGER'. */
7987 /* Target hook for assembling integer objects. The AVR version needs
7988 special handling for references to certain labels. */
7991 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
7993 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
7994 && text_segment_operand (x
, VOIDmode
))
7996 fputs ("\t.word\tgs(", asm_out_file
);
7997 output_addr_const (asm_out_file
, x
);
7998 fputs (")\n", asm_out_file
);
8002 else if (GET_MODE (x
) == PSImode
)
8004 /* This needs binutils 2.23+, see PR binutils/13503 */
8006 fputs ("\t.byte\tlo8(", asm_out_file
);
8007 output_addr_const (asm_out_file
, x
);
8008 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8010 fputs ("\t.byte\thi8(", asm_out_file
);
8011 output_addr_const (asm_out_file
, x
);
8012 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8014 fputs ("\t.byte\thh8(", asm_out_file
);
8015 output_addr_const (asm_out_file
, x
);
8016 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8020 else if (CONST_FIXED_P (x
))
8024 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8026 for (n
= 0; n
< size
; n
++)
8028 rtx xn
= simplify_gen_subreg (QImode
, x
, GET_MODE (x
), n
);
8029 default_assemble_integer (xn
, 1, aligned_p
);
8035 return default_assemble_integer (x
, size
, aligned_p
);
8039 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8040 /* Return value is nonzero if pseudos that have been
8041 assigned to registers of class CLASS would likely be spilled
8042 because registers of CLASS are needed for spill registers. */
8045 avr_class_likely_spilled_p (reg_class_t c
)
8047 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
8051 /* Valid attributes:
8052 progmem - Put data to program memory.
8053 signal - Make a function to be hardware interrupt.
8054 After function prologue interrupts remain disabled.
8055 interrupt - Make a function to be hardware interrupt. Before function
8056 prologue interrupts are enabled by means of SEI.
8057 naked - Don't generate function prologue/epilogue and RET
8060 /* Handle a "progmem" attribute; arguments as in
8061 struct attribute_spec.handler. */
8064 avr_handle_progmem_attribute (tree
*node
, tree name
,
8065 tree args ATTRIBUTE_UNUSED
,
8066 int flags ATTRIBUTE_UNUSED
,
8071 if (TREE_CODE (*node
) == TYPE_DECL
)
8073 /* This is really a decl attribute, not a type attribute,
8074 but try to handle it for GCC 3.0 backwards compatibility. */
8076 tree type
= TREE_TYPE (*node
);
8077 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
8078 tree newtype
= build_type_attribute_variant (type
, attr
);
8080 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
8081 TREE_TYPE (*node
) = newtype
;
8082 *no_add_attrs
= true;
8084 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
8086 *no_add_attrs
= false;
8090 warning (OPT_Wattributes
, "%qE attribute ignored",
8092 *no_add_attrs
= true;
8099 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8100 struct attribute_spec.handler. */
8103 avr_handle_fndecl_attribute (tree
*node
, tree name
,
8104 tree args ATTRIBUTE_UNUSED
,
8105 int flags ATTRIBUTE_UNUSED
,
8108 if (TREE_CODE (*node
) != FUNCTION_DECL
)
8110 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8112 *no_add_attrs
= true;
8119 avr_handle_fntype_attribute (tree
*node
, tree name
,
8120 tree args ATTRIBUTE_UNUSED
,
8121 int flags ATTRIBUTE_UNUSED
,
8124 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
8126 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8128 *no_add_attrs
= true;
8135 /* AVR attributes. */
8136 static const struct attribute_spec
8137 avr_attribute_table
[] =
8139 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
8140 affects_type_identity } */
8141 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
8143 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
8145 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
8147 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8149 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8151 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8153 { NULL
, 0, 0, false, false, false, NULL
, false }
8157 /* Look if DECL shall be placed in program memory space by
8158 means of attribute `progmem' or some address-space qualifier.
8159 Return non-zero if DECL is data that must end up in Flash and
8160 zero if the data lives in RAM (.bss, .data, .rodata, ...).
8162 Return 2 if DECL is located in 24-bit flash address-space
8163 Return 1 if DECL is located in 16-bit flash address-space
8164 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
8165 Return 0 otherwise */
8168 avr_progmem_p (tree decl
, tree attributes
)
8172 if (TREE_CODE (decl
) != VAR_DECL
)
8175 if (avr_decl_memx_p (decl
))
8178 if (avr_decl_flash_p (decl
))
8182 != lookup_attribute ("progmem", attributes
))
8189 while (TREE_CODE (a
) == ARRAY_TYPE
);
8191 if (a
== error_mark_node
)
8194 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
8201 /* Scan type TYP for pointer references to address space ASn.
8202 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
8203 the AS are also declared to be CONST.
8204 Otherwise, return the respective address space, i.e. a value != 0. */
8207 avr_nonconst_pointer_addrspace (tree typ
)
8209 while (ARRAY_TYPE
== TREE_CODE (typ
))
8210 typ
= TREE_TYPE (typ
);
8212 if (POINTER_TYPE_P (typ
))
8215 tree target
= TREE_TYPE (typ
);
8217 /* Pointer to function: Test the function's return type. */
8219 if (FUNCTION_TYPE
== TREE_CODE (target
))
8220 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
8222 /* "Ordinary" pointers... */
8224 while (TREE_CODE (target
) == ARRAY_TYPE
)
8225 target
= TREE_TYPE (target
);
8227 /* Pointers to non-generic address space must be const.
8228 Refuse address spaces outside the device's flash. */
8230 as
= TYPE_ADDR_SPACE (target
);
8232 if (!ADDR_SPACE_GENERIC_P (as
)
8233 && (!TYPE_READONLY (target
)
8234 || avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
))
8239 /* Scan pointer's target type. */
8241 return avr_nonconst_pointer_addrspace (target
);
8244 return ADDR_SPACE_GENERIC
;
8248 /* Sanity check NODE so that all pointers targeting non-generic address spaces
8249 go along with CONST qualifier. Writing to these address spaces should
8250 be detected and complained about as early as possible. */
8253 avr_pgm_check_var_decl (tree node
)
8255 const char *reason
= NULL
;
8257 addr_space_t as
= ADDR_SPACE_GENERIC
;
8259 gcc_assert (as
== 0);
8261 if (avr_log
.progmem
)
8262 avr_edump ("%?: %t\n", node
);
8264 switch (TREE_CODE (node
))
8270 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8271 reason
= "variable";
8275 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8276 reason
= "function parameter";
8280 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8281 reason
= "structure field";
8285 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
8287 reason
= "return type of function";
8291 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
8298 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
8301 error ("%qT uses address space %qs beyond flash of %qs",
8302 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
8304 error ("%s %q+D uses address space %qs beyond flash of %qs",
8305 reason
, node
, avr_addrspace
[as
].name
,
8306 avr_current_device
->name
);
8311 error ("pointer targeting address space %qs must be const in %qT",
8312 avr_addrspace
[as
].name
, node
);
8314 error ("pointer targeting address space %qs must be const"
8316 avr_addrspace
[as
].name
, reason
, node
);
8320 return reason
== NULL
;
8324 /* Add the section attribute if the variable is in progmem. */
8327 avr_insert_attributes (tree node
, tree
*attributes
)
8329 avr_pgm_check_var_decl (node
);
8331 if (TREE_CODE (node
) == VAR_DECL
8332 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
8333 && avr_progmem_p (node
, *attributes
))
8338 /* For C++, we have to peel arrays in order to get correct
8339 determination of readonlyness. */
8342 node0
= TREE_TYPE (node0
);
8343 while (TREE_CODE (node0
) == ARRAY_TYPE
);
8345 if (error_mark_node
== node0
)
8348 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
8350 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
8352 error ("variable %q+D located in address space %qs"
8353 " beyond flash of %qs",
8354 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
8357 if (!TYPE_READONLY (node0
)
8358 && !TREE_READONLY (node
))
8360 const char *reason
= "__attribute__((progmem))";
8362 if (!ADDR_SPACE_GENERIC_P (as
))
8363 reason
= avr_addrspace
[as
].name
;
8365 if (avr_log
.progmem
)
8366 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
8368 error ("variable %q+D must be const in order to be put into"
8369 " read-only section by means of %qs", node
, reason
);
8375 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8376 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8377 /* Track need of __do_clear_bss. */
8380 avr_asm_output_aligned_decl_common (FILE * stream
,
8381 const_tree decl ATTRIBUTE_UNUSED
,
8383 unsigned HOST_WIDE_INT size
,
8384 unsigned int align
, bool local_p
)
8386 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8387 There is no need to trigger __do_clear_bss code for them. */
8389 if (!STR_PREFIX_P (name
, "__gnu_lto"))
8390 avr_need_clear_bss_p
= true;
8393 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
8395 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
8399 /* Unnamed section callback for data_section
8400 to track need of __do_copy_data. */
8403 avr_output_data_section_asm_op (const void *data
)
8405 avr_need_copy_data_p
= true;
8407 /* Dispatch to default. */
8408 output_section_asm_op (data
);
8412 /* Unnamed section callback for bss_section
8413 to track need of __do_clear_bss. */
8416 avr_output_bss_section_asm_op (const void *data
)
8418 avr_need_clear_bss_p
= true;
8420 /* Dispatch to default. */
8421 output_section_asm_op (data
);
8425 /* Unnamed section callback for progmem*.data sections. */
8428 avr_output_progmem_section_asm_op (const void *data
)
8430 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
8431 (const char*) data
);
8435 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8438 avr_asm_init_sections (void)
8440 /* Set up a section for jump tables. Alignment is handled by
8441 ASM_OUTPUT_BEFORE_CASE_LABEL. */
8443 if (AVR_HAVE_JMP_CALL
)
8445 progmem_swtable_section
8446 = get_unnamed_section (0, output_section_asm_op
,
8447 "\t.section\t.progmem.gcc_sw_table"
8448 ",\"a\",@progbits");
8452 progmem_swtable_section
8453 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
8454 "\t.section\t.progmem.gcc_sw_table"
8455 ",\"ax\",@progbits");
8458 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8459 resp. `avr_need_copy_data_p'. */
8461 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
8462 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
8463 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
8467 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8470 avr_asm_function_rodata_section (tree decl
)
8472 /* If a function is unused and optimized out by -ffunction-sections
8473 and --gc-sections, ensure that the same will happen for its jump
8474 tables by putting them into individual sections. */
8479 /* Get the frodata section from the default function in varasm.c
8480 but treat function-associated data-like jump tables as code
8481 rather than as user defined data. AVR has no constant pools. */
8483 int fdata
= flag_data_sections
;
8485 flag_data_sections
= flag_function_sections
;
8486 frodata
= default_function_rodata_section (decl
);
8487 flag_data_sections
= fdata
;
8488 flags
= frodata
->common
.flags
;
8491 if (frodata
!= readonly_data_section
8492 && flags
& SECTION_NAMED
)
8494 /* Adjust section flags and replace section name prefix. */
8498 static const char* const prefix
[] =
8500 ".rodata", ".progmem.gcc_sw_table",
8501 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8504 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
8506 const char * old_prefix
= prefix
[i
];
8507 const char * new_prefix
= prefix
[i
+1];
8508 const char * name
= frodata
->named
.name
;
8510 if (STR_PREFIX_P (name
, old_prefix
))
8512 const char *rname
= ACONCAT ((new_prefix
,
8513 name
+ strlen (old_prefix
), NULL
));
8514 flags
&= ~SECTION_CODE
;
8515 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
8517 return get_section (rname
, flags
, frodata
->named
.decl
);
8522 return progmem_swtable_section
;
8526 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8527 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8530 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
8532 if (flags
& AVR_SECTION_PROGMEM
)
8534 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
8535 const char *old_prefix
= ".rodata";
8536 const char *new_prefix
= avr_addrspace
[as
].section_name
;
8538 if (STR_PREFIX_P (name
, old_prefix
))
8540 const char *sname
= ACONCAT ((new_prefix
,
8541 name
+ strlen (old_prefix
), NULL
));
8542 default_elf_asm_named_section (sname
, flags
, decl
);
8546 default_elf_asm_named_section (new_prefix
, flags
, decl
);
8550 if (!avr_need_copy_data_p
)
8551 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
8552 || STR_PREFIX_P (name
, ".rodata")
8553 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
8555 if (!avr_need_clear_bss_p
)
8556 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
8558 default_elf_asm_named_section (name
, flags
, decl
);
8562 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8565 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
8567 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
8569 if (STR_PREFIX_P (name
, ".noinit"))
8571 if (decl
&& TREE_CODE (decl
) == VAR_DECL
8572 && DECL_INITIAL (decl
) == NULL_TREE
)
8573 flags
|= SECTION_BSS
; /* @nobits */
8575 warning (0, "only uninitialized variables can be placed in the "
8579 if (decl
&& DECL_P (decl
)
8580 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8582 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8584 /* Attribute progmem puts data in generic address space.
8585 Set section flags as if it was in __flash to get the right
8586 section prefix in the remainder. */
8588 if (ADDR_SPACE_GENERIC_P (as
))
8589 as
= ADDR_SPACE_FLASH
;
8591 flags
|= as
* SECTION_MACH_DEP
;
8592 flags
&= ~SECTION_WRITE
;
8593 flags
&= ~SECTION_BSS
;
8600 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8603 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
8605 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8606 readily available, see PR34734. So we postpone the warning
8607 about uninitialized data in program memory section until here. */
8610 && decl
&& DECL_P (decl
)
8611 && NULL_TREE
== DECL_INITIAL (decl
)
8612 && !DECL_EXTERNAL (decl
)
8613 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8615 warning (OPT_Wuninitialized
,
8616 "uninitialized variable %q+D put into "
8617 "program memory area", decl
);
8620 default_encode_section_info (decl
, rtl
, new_decl_p
);
8622 if (decl
&& DECL_P (decl
)
8623 && TREE_CODE (decl
) != FUNCTION_DECL
8625 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
8627 rtx sym
= XEXP (rtl
, 0);
8628 tree type
= TREE_TYPE (decl
);
8629 if (type
== error_mark_node
)
8631 addr_space_t as
= TYPE_ADDR_SPACE (type
);
8633 /* PSTR strings are in generic space but located in flash:
8634 patch address space. */
8636 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8637 as
= ADDR_SPACE_FLASH
;
8639 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
8644 /* Implement `TARGET_ASM_SELECT_SECTION' */
8647 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
8649 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
8651 if (decl
&& DECL_P (decl
)
8652 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8654 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8656 /* __progmem__ goes in generic space but shall be allocated to
8659 if (ADDR_SPACE_GENERIC_P (as
))
8660 as
= ADDR_SPACE_FLASH
;
8662 if (sect
->common
.flags
& SECTION_NAMED
)
8664 const char * name
= sect
->named
.name
;
8665 const char * old_prefix
= ".rodata";
8666 const char * new_prefix
= avr_addrspace
[as
].section_name
;
8668 if (STR_PREFIX_P (name
, old_prefix
))
8670 const char *sname
= ACONCAT ((new_prefix
,
8671 name
+ strlen (old_prefix
), NULL
));
8672 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
8676 if (!progmem_section
[as
])
8679 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
8680 avr_addrspace
[as
].section_name
);
8683 return progmem_section
[as
];
8689 /* Implement `TARGET_ASM_FILE_START'. */
8690 /* Outputs some text at the start of each assembler file. */
8693 avr_file_start (void)
8695 int sfr_offset
= avr_current_arch
->sfr_offset
;
8697 if (avr_current_arch
->asm_only
)
8698 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
8700 default_file_start ();
8702 /* Print I/O addresses of some SFRs used with IN and OUT. */
8705 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
8707 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
8708 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
8710 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
8712 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
8714 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
8716 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
8718 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
8719 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
8720 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
8724 /* Implement `TARGET_ASM_FILE_END'. */
8725 /* Outputs to the stdio stream FILE some
8726 appropriate text to go at the end of an assembler file. */
8731 /* Output these only if there is anything in the
8732 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8733 input section(s) - some code size can be saved by not
8734 linking in the initialization code from libgcc if resp.
8735 sections are empty, see PR18145. */
8737 if (avr_need_copy_data_p
)
8738 fputs (".global __do_copy_data\n", asm_out_file
);
8740 if (avr_need_clear_bss_p
)
8741 fputs (".global __do_clear_bss\n", asm_out_file
);
8745 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8746 /* Choose the order in which to allocate hard registers for
8747 pseudo-registers local to a basic block.
8749 Store the desired register order in the array `reg_alloc_order'.
8750 Element 0 should be the register to allocate first; element 1, the
8751 next register; and so on. */
8754 avr_adjust_reg_alloc_order (void)
8757 static const int order_0
[] =
8760 18, 19, 20, 21, 22, 23,
8763 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8767 static const int order_1
[] =
8769 18, 19, 20, 21, 22, 23, 24, 25,
8772 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8776 static const int order_2
[] =
8778 25, 24, 23, 22, 21, 20, 19, 18,
8781 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8786 const int *order
= (TARGET_ORDER_1
? order_1
:
8787 TARGET_ORDER_2
? order_2
:
8789 for (i
= 0; i
< ARRAY_SIZE (order_0
); ++i
)
8790 reg_alloc_order
[i
] = order
[i
];
8794 /* Implement `TARGET_REGISTER_MOVE_COST' */
8797 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
8798 reg_class_t from
, reg_class_t to
)
8800 return (from
== STACK_REG
? 6
8801 : to
== STACK_REG
? 12
8806 /* Implement `TARGET_MEMORY_MOVE_COST' */
8809 avr_memory_move_cost (enum machine_mode mode
,
8810 reg_class_t rclass ATTRIBUTE_UNUSED
,
8811 bool in ATTRIBUTE_UNUSED
)
8813 return (mode
== QImode
? 2
8814 : mode
== HImode
? 4
8815 : mode
== SImode
? 8
8816 : mode
== SFmode
? 8
8821 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8822 cost of an RTX operand given its context. X is the rtx of the
8823 operand, MODE is its mode, and OUTER is the rtx_code of this
8824 operand's parent operator. */
8827 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
8828 int opno
, bool speed
)
8830 enum rtx_code code
= GET_CODE (x
);
8842 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8849 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
8853 /* Worker function for AVR backend's rtx_cost function.
8854 X is rtx expression whose cost is to be calculated.
8855 Return true if the complete cost has been computed.
8856 Return false if subexpressions should be scanned.
8857 In either case, *TOTAL contains the cost result. */
8860 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
8861 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
8863 enum rtx_code code
= (enum rtx_code
) codearg
;
8864 enum machine_mode mode
= GET_MODE (x
);
8875 /* Immediate constants are as cheap as registers. */
8880 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8888 *total
= COSTS_N_INSNS (1);
8894 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
8900 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8908 *total
= COSTS_N_INSNS (1);
8914 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8918 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8919 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8923 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
8924 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8925 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8929 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
8930 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8931 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8939 && MULT
== GET_CODE (XEXP (x
, 0))
8940 && register_operand (XEXP (x
, 1), QImode
))
8943 *total
= COSTS_N_INSNS (speed
? 4 : 3);
8944 /* multiply-add with constant: will be split and load constant. */
8945 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8946 *total
= COSTS_N_INSNS (1) + *total
;
8949 *total
= COSTS_N_INSNS (1);
8950 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8951 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8956 && (MULT
== GET_CODE (XEXP (x
, 0))
8957 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
8958 && register_operand (XEXP (x
, 1), HImode
)
8959 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8960 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
8963 *total
= COSTS_N_INSNS (speed
? 5 : 4);
8964 /* multiply-add with constant: will be split and load constant. */
8965 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8966 *total
= COSTS_N_INSNS (1) + *total
;
8969 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8971 *total
= COSTS_N_INSNS (2);
8972 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8975 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8976 *total
= COSTS_N_INSNS (1);
8978 *total
= COSTS_N_INSNS (2);
8982 if (!CONST_INT_P (XEXP (x
, 1)))
8984 *total
= COSTS_N_INSNS (3);
8985 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8988 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8989 *total
= COSTS_N_INSNS (2);
8991 *total
= COSTS_N_INSNS (3);
8995 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8997 *total
= COSTS_N_INSNS (4);
8998 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9001 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
9002 *total
= COSTS_N_INSNS (1);
9004 *total
= COSTS_N_INSNS (4);
9010 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9016 && register_operand (XEXP (x
, 0), QImode
)
9017 && MULT
== GET_CODE (XEXP (x
, 1)))
9020 *total
= COSTS_N_INSNS (speed
? 4 : 3);
9021 /* multiply-sub with constant: will be split and load constant. */
9022 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
9023 *total
= COSTS_N_INSNS (1) + *total
;
9028 && register_operand (XEXP (x
, 0), HImode
)
9029 && (MULT
== GET_CODE (XEXP (x
, 1))
9030 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
9031 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
9032 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
9035 *total
= COSTS_N_INSNS (speed
? 5 : 4);
9036 /* multiply-sub with constant: will be split and load constant. */
9037 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
9038 *total
= COSTS_N_INSNS (1) + *total
;
9044 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9045 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9046 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9047 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9051 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9052 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9053 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9061 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
9063 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9071 rtx op0
= XEXP (x
, 0);
9072 rtx op1
= XEXP (x
, 1);
9073 enum rtx_code code0
= GET_CODE (op0
);
9074 enum rtx_code code1
= GET_CODE (op1
);
9075 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
9076 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
9079 && (u8_operand (op1
, HImode
)
9080 || s8_operand (op1
, HImode
)))
9082 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
9086 && register_operand (op1
, HImode
))
9088 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9091 else if (ex0
|| ex1
)
9093 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
9096 else if (register_operand (op0
, HImode
)
9097 && (u8_operand (op1
, HImode
)
9098 || s8_operand (op1
, HImode
)))
9100 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
9104 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
9107 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9114 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9124 /* Add some additional costs besides CALL like moves etc. */
9126 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
9130 /* Just a rough estimate. Even with -O2 we don't want bulky
9131 code expanded inline. */
9133 *total
= COSTS_N_INSNS (25);
9139 *total
= COSTS_N_INSNS (300);
9141 /* Add some additional costs besides CALL like moves etc. */
9142 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
9150 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9151 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9159 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9161 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
9162 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9163 /* For div/mod with const-int divisor we have at least the cost of
9164 loading the divisor. */
9165 if (CONST_INT_P (XEXP (x
, 1)))
9166 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9167 /* Add some overall penaly for clobbering and moving around registers */
9168 *total
+= COSTS_N_INSNS (2);
9175 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
9176 *total
= COSTS_N_INSNS (1);
9181 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
9182 *total
= COSTS_N_INSNS (3);
9187 if (CONST_INT_P (XEXP (x
, 1)))
9188 switch (INTVAL (XEXP (x
, 1)))
9192 *total
= COSTS_N_INSNS (5);
9195 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
9203 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9210 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9212 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9213 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9218 val
= INTVAL (XEXP (x
, 1));
9220 *total
= COSTS_N_INSNS (3);
9221 else if (val
>= 0 && val
<= 7)
9222 *total
= COSTS_N_INSNS (val
);
9224 *total
= COSTS_N_INSNS (1);
9231 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
9232 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
9233 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
9235 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
9240 if (const1_rtx
== (XEXP (x
, 1))
9241 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
9243 *total
= COSTS_N_INSNS (2);
9247 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9249 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9250 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9254 switch (INTVAL (XEXP (x
, 1)))
9261 *total
= COSTS_N_INSNS (2);
9264 *total
= COSTS_N_INSNS (3);
9270 *total
= COSTS_N_INSNS (4);
9275 *total
= COSTS_N_INSNS (5);
9278 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9281 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9284 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
9287 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9288 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9294 if (!CONST_INT_P (XEXP (x
, 1)))
9296 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9299 switch (INTVAL (XEXP (x
, 1)))
9307 *total
= COSTS_N_INSNS (3);
9310 *total
= COSTS_N_INSNS (5);
9313 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9319 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9321 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9322 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9326 switch (INTVAL (XEXP (x
, 1)))
9332 *total
= COSTS_N_INSNS (3);
9337 *total
= COSTS_N_INSNS (4);
9340 *total
= COSTS_N_INSNS (6);
9343 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9346 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9347 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9355 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9362 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9364 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9365 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9370 val
= INTVAL (XEXP (x
, 1));
9372 *total
= COSTS_N_INSNS (4);
9374 *total
= COSTS_N_INSNS (2);
9375 else if (val
>= 0 && val
<= 7)
9376 *total
= COSTS_N_INSNS (val
);
9378 *total
= COSTS_N_INSNS (1);
9383 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9385 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9386 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9390 switch (INTVAL (XEXP (x
, 1)))
9396 *total
= COSTS_N_INSNS (2);
9399 *total
= COSTS_N_INSNS (3);
9405 *total
= COSTS_N_INSNS (4);
9409 *total
= COSTS_N_INSNS (5);
9412 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9415 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9419 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9422 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9423 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9429 if (!CONST_INT_P (XEXP (x
, 1)))
9431 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9434 switch (INTVAL (XEXP (x
, 1)))
9440 *total
= COSTS_N_INSNS (3);
9444 *total
= COSTS_N_INSNS (5);
9447 *total
= COSTS_N_INSNS (4);
9450 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9456 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9458 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9459 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9463 switch (INTVAL (XEXP (x
, 1)))
9469 *total
= COSTS_N_INSNS (4);
9474 *total
= COSTS_N_INSNS (6);
9477 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9480 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
9483 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9484 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9492 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9499 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9501 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9502 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9507 val
= INTVAL (XEXP (x
, 1));
9509 *total
= COSTS_N_INSNS (3);
9510 else if (val
>= 0 && val
<= 7)
9511 *total
= COSTS_N_INSNS (val
);
9513 *total
= COSTS_N_INSNS (1);
9518 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9520 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9521 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9525 switch (INTVAL (XEXP (x
, 1)))
9532 *total
= COSTS_N_INSNS (2);
9535 *total
= COSTS_N_INSNS (3);
9540 *total
= COSTS_N_INSNS (4);
9544 *total
= COSTS_N_INSNS (5);
9550 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9553 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9557 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9560 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9561 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9567 if (!CONST_INT_P (XEXP (x
, 1)))
9569 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9572 switch (INTVAL (XEXP (x
, 1)))
9580 *total
= COSTS_N_INSNS (3);
9583 *total
= COSTS_N_INSNS (5);
9586 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9592 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9594 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9595 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9599 switch (INTVAL (XEXP (x
, 1)))
9605 *total
= COSTS_N_INSNS (4);
9608 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9613 *total
= COSTS_N_INSNS (4);
9616 *total
= COSTS_N_INSNS (6);
9619 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9620 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9628 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9632 switch (GET_MODE (XEXP (x
, 0)))
9635 *total
= COSTS_N_INSNS (1);
9636 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9637 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9641 *total
= COSTS_N_INSNS (2);
9642 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9643 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9644 else if (INTVAL (XEXP (x
, 1)) != 0)
9645 *total
+= COSTS_N_INSNS (1);
9649 *total
= COSTS_N_INSNS (3);
9650 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
9651 *total
+= COSTS_N_INSNS (2);
9655 *total
= COSTS_N_INSNS (4);
9656 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9657 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9658 else if (INTVAL (XEXP (x
, 1)) != 0)
9659 *total
+= COSTS_N_INSNS (3);
9665 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9670 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
9671 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
9672 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
9674 if (QImode
== mode
|| HImode
== mode
)
9676 *total
= COSTS_N_INSNS (2);
9689 /* Implement `TARGET_RTX_COSTS'. */
9692 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
9693 int opno
, int *total
, bool speed
)
9695 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
9696 opno
, total
, speed
);
9698 if (avr_log
.rtx_costs
)
9700 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9701 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
9708 /* Implement `TARGET_ADDRESS_COST'. */
9711 avr_address_cost (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
,
9712 addr_space_t as ATTRIBUTE_UNUSED
,
9713 bool speed ATTRIBUTE_UNUSED
)
9717 if (GET_CODE (x
) == PLUS
9718 && CONST_INT_P (XEXP (x
, 1))
9719 && (REG_P (XEXP (x
, 0))
9720 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
9722 if (INTVAL (XEXP (x
, 1)) >= 61)
9725 else if (CONSTANT_ADDRESS_P (x
))
9728 && io_address_operand (x
, QImode
))
9732 if (avr_log
.address_cost
)
9733 avr_edump ("\n%?: %d = %r\n", cost
, x
);
9738 /* Test for extra memory constraint 'Q'.
9739 It's a memory address based on Y or Z pointer with valid displacement. */
9742 extra_constraint_Q (rtx x
)
9746 if (GET_CODE (XEXP (x
,0)) == PLUS
9747 && REG_P (XEXP (XEXP (x
,0), 0))
9748 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
9749 && (INTVAL (XEXP (XEXP (x
,0), 1))
9750 <= MAX_LD_OFFSET (GET_MODE (x
))))
9752 rtx xx
= XEXP (XEXP (x
,0), 0);
9753 int regno
= REGNO (xx
);
9755 ok
= (/* allocate pseudos */
9756 regno
>= FIRST_PSEUDO_REGISTER
9757 /* strictly check */
9758 || regno
== REG_Z
|| regno
== REG_Y
9759 /* XXX frame & arg pointer checks */
9760 || xx
== frame_pointer_rtx
9761 || xx
== arg_pointer_rtx
);
9763 if (avr_log
.constraints
)
9764 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9765 ok
, reload_completed
, reload_in_progress
, x
);
9771 /* Convert condition code CONDITION to the valid AVR condition code. */
9774 avr_normalize_condition (RTX_CODE condition
)
9791 /* Helper function for `avr_reorg'. */
9794 avr_compare_pattern (rtx_insn
*insn
)
9796 rtx pattern
= single_set (insn
);
9799 && NONJUMP_INSN_P (insn
)
9800 && SET_DEST (pattern
) == cc0_rtx
9801 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
9803 enum machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
9804 enum machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
9806 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9807 They must not be swapped, thus skip them. */
9809 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
9810 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
9817 /* Helper function for `avr_reorg'. */
9819 /* Expansion of switch/case decision trees leads to code like
9821 cc0 = compare (Reg, Num)
9825 cc0 = compare (Reg, Num)
9829 The second comparison is superfluous and can be deleted.
9830 The second jump condition can be transformed from a
9831 "difficult" one to a "simple" one because "cc0 > 0" and
9832 "cc0 >= 0" will have the same effect here.
9834 This function relies on the way switch/case is being expaned
9835 as binary decision tree. For example code see PR 49903.
9837 Return TRUE if optimization performed.
9838 Return FALSE if nothing changed.
9840 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9842 We don't want to do this in text peephole because it is
9843 tedious to work out jump offsets there and the second comparison
9844 might have been transormed by `avr_reorg'.
9846 RTL peephole won't do because peephole2 does not scan across
9850 avr_reorg_remove_redundant_compare (rtx_insn
*insn1
)
9852 rtx comp1
, ifelse1
, xcond1
;
9854 rtx comp2
, ifelse2
, xcond2
;
9855 rtx_insn
*branch2
, *insn2
;
9860 /* Look out for: compare1 - branch1 - compare2 - branch2 */
9862 branch1
= next_nonnote_nondebug_insn (insn1
);
9863 if (!branch1
|| !JUMP_P (branch1
))
9866 insn2
= next_nonnote_nondebug_insn (branch1
);
9867 if (!insn2
|| !avr_compare_pattern (insn2
))
9870 branch2
= next_nonnote_nondebug_insn (insn2
);
9871 if (!branch2
|| !JUMP_P (branch2
))
9874 comp1
= avr_compare_pattern (insn1
);
9875 comp2
= avr_compare_pattern (insn2
);
9876 xcond1
= single_set (branch1
);
9877 xcond2
= single_set (branch2
);
9879 if (!comp1
|| !comp2
9880 || !rtx_equal_p (comp1
, comp2
)
9881 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
9882 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
9883 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
9884 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
9889 comp1
= SET_SRC (comp1
);
9890 ifelse1
= SET_SRC (xcond1
);
9891 ifelse2
= SET_SRC (xcond2
);
9893 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
9895 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
9896 || !REG_P (XEXP (comp1
, 0))
9897 || !CONST_INT_P (XEXP (comp1
, 1))
9898 || XEXP (ifelse1
, 2) != pc_rtx
9899 || XEXP (ifelse2
, 2) != pc_rtx
9900 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
9901 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
9902 || !COMPARISON_P (XEXP (ifelse2
, 0))
9903 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
9904 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
9905 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
9906 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
9911 /* We filtered the insn sequence to look like
9917 (if_then_else (eq (cc0)
9926 (if_then_else (CODE (cc0)
9932 code
= GET_CODE (XEXP (ifelse2
, 0));
9934 /* Map GT/GTU to GE/GEU which is easier for AVR.
9935 The first two instructions compare/branch on EQ
9936 so we may replace the difficult
9938 if (x == VAL) goto L1;
9939 if (x > VAL) goto L2;
9943 if (x == VAL) goto L1;
9944 if (x >= VAL) goto L2;
9946 Similarly, replace LE/LEU by LT/LTU. */
9957 code
= avr_normalize_condition (code
);
9964 /* Wrap the branches into UNSPECs so they won't be changed or
9965 optimized in the remainder. */
9967 target
= XEXP (XEXP (ifelse1
, 1), 0);
9968 cond
= XEXP (ifelse1
, 0);
9969 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
9971 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
9973 target
= XEXP (XEXP (ifelse2
, 1), 0);
9974 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
9975 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
9977 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
9979 /* The comparisons in insn1 and insn2 are exactly the same;
9980 insn2 is superfluous so delete it. */
9982 delete_insn (insn2
);
9983 delete_insn (branch1
);
9984 delete_insn (branch2
);
9990 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9991 /* Optimize conditional jumps. */
9996 rtx_insn
*insn
= get_insns();
9998 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
10000 rtx pattern
= avr_compare_pattern (insn
);
10006 && avr_reorg_remove_redundant_compare (insn
))
10011 if (compare_diff_p (insn
))
10013 /* Now we work under compare insn with difficult branch. */
10015 rtx next
= next_real_insn (insn
);
10016 rtx pat
= PATTERN (next
);
10018 pattern
= SET_SRC (pattern
);
10020 if (true_regnum (XEXP (pattern
, 0)) >= 0
10021 && true_regnum (XEXP (pattern
, 1)) >= 0)
10023 rtx x
= XEXP (pattern
, 0);
10024 rtx src
= SET_SRC (pat
);
10025 rtx t
= XEXP (src
,0);
10026 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
10027 XEXP (pattern
, 0) = XEXP (pattern
, 1);
10028 XEXP (pattern
, 1) = x
;
10029 INSN_CODE (next
) = -1;
10031 else if (true_regnum (XEXP (pattern
, 0)) >= 0
10032 && XEXP (pattern
, 1) == const0_rtx
)
10034 /* This is a tst insn, we can reverse it. */
10035 rtx src
= SET_SRC (pat
);
10036 rtx t
= XEXP (src
,0);
10038 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
10039 XEXP (pattern
, 1) = XEXP (pattern
, 0);
10040 XEXP (pattern
, 0) = const0_rtx
;
10041 INSN_CODE (next
) = -1;
10042 INSN_CODE (insn
) = -1;
10044 else if (true_regnum (XEXP (pattern
, 0)) >= 0
10045 && CONST_INT_P (XEXP (pattern
, 1)))
10047 rtx x
= XEXP (pattern
, 1);
10048 rtx src
= SET_SRC (pat
);
10049 rtx t
= XEXP (src
,0);
10050 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
10052 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
10054 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
10055 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
10056 INSN_CODE (next
) = -1;
10057 INSN_CODE (insn
) = -1;
10064 /* Returns register number for function return value.*/
10066 static inline unsigned int
10067 avr_ret_register (void)
10073 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
10076 avr_function_value_regno_p (const unsigned int regno
)
10078 return (regno
== avr_ret_register ());
10082 /* Implement `TARGET_LIBCALL_VALUE'. */
10083 /* Create an RTX representing the place where a
10084 library function returns a value of mode MODE. */
10087 avr_libcall_value (enum machine_mode mode
,
10088 const_rtx func ATTRIBUTE_UNUSED
)
10090 int offs
= GET_MODE_SIZE (mode
);
10093 offs
= (offs
+ 1) & ~1;
10095 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
10099 /* Implement `TARGET_FUNCTION_VALUE'. */
10100 /* Create an RTX representing the place where a
10101 function returns a value of data type VALTYPE. */
10104 avr_function_value (const_tree type
,
10105 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
10106 bool outgoing ATTRIBUTE_UNUSED
)
10110 if (TYPE_MODE (type
) != BLKmode
)
10111 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
10113 offs
= int_size_in_bytes (type
);
10116 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
10117 offs
= GET_MODE_SIZE (SImode
);
10118 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
10119 offs
= GET_MODE_SIZE (DImode
);
10121 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
10125 test_hard_reg_class (enum reg_class rclass
, rtx x
)
10127 int regno
= true_regnum (x
);
10131 if (TEST_HARD_REG_CLASS (rclass
, regno
))
10138 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
10139 and thus is suitable to be skipped by CPSE, SBRC, etc. */
10142 avr_2word_insn_p (rtx_insn
*insn
)
10144 if ((avr_current_device
->dev_attribute
& AVR_ERRATA_SKIP
)
10146 || 2 != get_attr_length (insn
))
10151 switch (INSN_CODE (insn
))
10156 case CODE_FOR_movqi_insn
:
10157 case CODE_FOR_movuqq_insn
:
10158 case CODE_FOR_movqq_insn
:
10160 rtx set
= single_set (insn
);
10161 rtx src
= SET_SRC (set
);
10162 rtx dest
= SET_DEST (set
);
10164 /* Factor out LDS and STS from movqi_insn. */
10167 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
10169 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
10171 else if (REG_P (dest
)
10174 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
10180 case CODE_FOR_call_insn
:
10181 case CODE_FOR_call_value_insn
:
10188 jump_over_one_insn_p (rtx_insn
*insn
, rtx dest
)
10190 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
10193 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
10194 int dest_addr
= INSN_ADDRESSES (uid
);
10195 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
10197 return (jump_offset
== 1
10198 || (jump_offset
== 2
10199 && avr_2word_insn_p (next_active_insn (insn
))));
10203 /* Worker function for `HARD_REGNO_MODE_OK'. */
10204 /* Returns 1 if a value of mode MODE can be stored starting with hard
10205 register number REGNO. On the enhanced core, anything larger than
10206 1 byte must start in even numbered register for "movw" to work
10207 (this way we don't have to check for odd registers everywhere). */
10210 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
10212 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
10213 Disallowing QI et al. in these regs might lead to code like
10214 (set (subreg:QI (reg:HI 28) n) ...)
10215 which will result in wrong code because reload does not
10216 handle SUBREGs of hard regsisters like this.
10217 This could be fixed in reload. However, it appears
10218 that fixing reload is not wanted by reload people. */
10220 /* Any GENERAL_REGS register can hold 8-bit values. */
10222 if (GET_MODE_SIZE (mode
) == 1)
10225 /* FIXME: Ideally, the following test is not needed.
10226 However, it turned out that it can reduce the number
10227 of spill fails. AVR and it's poor endowment with
10228 address registers is extreme stress test for reload. */
10230 if (GET_MODE_SIZE (mode
) >= 4
10234 /* All modes larger than 8 bits should start in an even register. */
10236 return !(regno
& 1);
10240 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
10243 avr_hard_regno_call_part_clobbered (unsigned regno
, enum machine_mode mode
)
10245 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10246 represent valid hard registers like, e.g. HI:29. Returning TRUE
10247 for such registers can lead to performance degradation as mentioned
10248 in PR53595. Thus, report invalid hard registers as FALSE. */
10250 if (!avr_hard_regno_mode_ok (regno
, mode
))
10253 /* Return true if any of the following boundaries is crossed:
10254 17/18, 27/28 and 29/30. */
10256 return ((regno
< 18 && regno
+ GET_MODE_SIZE (mode
) > 18)
10257 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
10258 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
10262 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
10265 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
10266 addr_space_t as
, RTX_CODE outer_code
,
10267 RTX_CODE index_code ATTRIBUTE_UNUSED
)
10269 if (!ADDR_SPACE_GENERIC_P (as
))
10271 return POINTER_Z_REGS
;
10275 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
10277 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
10281 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
10284 avr_regno_mode_code_ok_for_base_p (int regno
,
10285 enum machine_mode mode ATTRIBUTE_UNUSED
,
10286 addr_space_t as ATTRIBUTE_UNUSED
,
10287 RTX_CODE outer_code
,
10288 RTX_CODE index_code ATTRIBUTE_UNUSED
)
10292 if (!ADDR_SPACE_GENERIC_P (as
))
10294 if (regno
< FIRST_PSEUDO_REGISTER
10302 regno
= reg_renumber
[regno
];
10304 if (regno
== REG_Z
)
10313 if (regno
< FIRST_PSEUDO_REGISTER
10317 || regno
== ARG_POINTER_REGNUM
))
10321 else if (reg_renumber
)
10323 regno
= reg_renumber
[regno
];
10328 || regno
== ARG_POINTER_REGNUM
)
10335 && PLUS
== outer_code
10345 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
10346 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10347 CLOBBER_REG is a QI clobber register or NULL_RTX.
10348 LEN == NULL: output instructions.
10349 LEN != NULL: set *LEN to the length of the instruction sequence
10350 (in words) printed with LEN = NULL.
10351 If CLEAR_P is true, OP[0] had been cleard to Zero already.
10352 If CLEAR_P is false, nothing is known about OP[0].
10354 The effect on cc0 is as follows:
10356 Load 0 to any register except ZERO_REG : NONE
10357 Load ld register with any value : NONE
10358 Anything else: : CLOBBER */
10361 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
10365 rtx xval
, xdest
[4];
10367 int clobber_val
= 1234;
10368 bool cooked_clobber_p
= false;
10369 bool set_p
= false;
10370 enum machine_mode mode
= GET_MODE (dest
);
10371 int n
, n_bytes
= GET_MODE_SIZE (mode
);
10373 gcc_assert (REG_P (dest
)
10374 && CONSTANT_P (src
));
10379 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
10380 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
10382 if (REGNO (dest
) < 16
10383 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
10385 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
10388 /* We might need a clobber reg but don't have one. Look at the value to
10389 be loaded more closely. A clobber is only needed if it is a symbol
10390 or contains a byte that is neither 0, -1 or a power of 2. */
10392 if (NULL_RTX
== clobber_reg
10393 && !test_hard_reg_class (LD_REGS
, dest
)
10394 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
10395 || !avr_popcount_each_byte (src
, n_bytes
,
10396 (1 << 0) | (1 << 1) | (1 << 8))))
10398 /* We have no clobber register but need one. Cook one up.
10399 That's cheaper than loading from constant pool. */
10401 cooked_clobber_p
= true;
10402 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
10403 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
10406 /* Now start filling DEST from LSB to MSB. */
10408 for (n
= 0; n
< n_bytes
; n
++)
10411 bool done_byte
= false;
10415 /* Crop the n-th destination byte. */
10417 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
10418 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
10420 if (!CONST_INT_P (src
)
10421 && !CONST_FIXED_P (src
)
10422 && !CONST_DOUBLE_P (src
))
10424 static const char* const asm_code
[][2] =
10426 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
10427 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
10428 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
10429 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
10434 xop
[2] = clobber_reg
;
10436 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
10441 /* Crop the n-th source byte. */
10443 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
10444 ival
[n
] = INTVAL (xval
);
10446 /* Look if we can reuse the low word by means of MOVW. */
10452 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
10453 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
10455 if (INTVAL (lo16
) == INTVAL (hi16
))
10457 if (0 != INTVAL (lo16
)
10460 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
10467 /* Don't use CLR so that cc0 is set as expected. */
10472 avr_asm_len (ldreg_p
? "ldi %0,0"
10473 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
10474 : "mov %0,__zero_reg__",
10475 &xdest
[n
], len
, 1);
10479 if (clobber_val
== ival
[n
]
10480 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
10485 /* LD_REGS can use LDI to move a constant value */
10491 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
10495 /* Try to reuse value already loaded in some lower byte. */
10497 for (j
= 0; j
< n
; j
++)
10498 if (ival
[j
] == ival
[n
])
10503 avr_asm_len ("mov %0,%1", xop
, len
, 1);
10511 /* Need no clobber reg for -1: Use CLR/DEC */
10516 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10518 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
10521 else if (1 == ival
[n
])
10524 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10526 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
10530 /* Use T flag or INC to manage powers of 2 if we have
10533 if (NULL_RTX
== clobber_reg
10534 && single_one_operand (xval
, QImode
))
10537 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
10539 gcc_assert (constm1_rtx
!= xop
[1]);
10544 avr_asm_len ("set", xop
, len
, 1);
10548 avr_asm_len ("clr %0", xop
, len
, 1);
10550 avr_asm_len ("bld %0,%1", xop
, len
, 1);
10554 /* We actually need the LD_REGS clobber reg. */
10556 gcc_assert (NULL_RTX
!= clobber_reg
);
10560 xop
[2] = clobber_reg
;
10561 clobber_val
= ival
[n
];
10563 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10564 "mov %0,%2", xop
, len
, 2);
10567 /* If we cooked up a clobber reg above, restore it. */
10569 if (cooked_clobber_p
)
10571 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
10576 /* Reload the constant OP[1] into the HI register OP[0].
10577 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10578 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10579 need a clobber reg or have to cook one up.
10581 PLEN == NULL: Output instructions.
10582 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10583 by the insns printed.
10588 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
10590 output_reload_in_const (op
, clobber_reg
, plen
, false);
10595 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10596 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10597 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10598 need a clobber reg or have to cook one up.
10600 LEN == NULL: Output instructions.
10602 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10603 by the insns printed.
10608 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
10611 && !test_hard_reg_class (LD_REGS
, op
[0])
10612 && (CONST_INT_P (op
[1])
10613 || CONST_FIXED_P (op
[1])
10614 || CONST_DOUBLE_P (op
[1])))
10616 int len_clr
, len_noclr
;
10618 /* In some cases it is better to clear the destination beforehand, e.g.
10620 CLR R2 CLR R3 MOVW R4,R2 INC R2
10624 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10626 We find it too tedious to work that out in the print function.
10627 Instead, we call the print function twice to get the lengths of
10628 both methods and use the shortest one. */
10630 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
10631 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
10633 if (len_noclr
- len_clr
== 4)
10635 /* Default needs 4 CLR instructions: clear register beforehand. */
10637 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10638 "mov %B0,__zero_reg__" CR_TAB
10639 "movw %C0,%A0", &op
[0], len
, 3);
10641 output_reload_in_const (op
, clobber_reg
, len
, true);
10650 /* Default: destination not pre-cleared. */
10652 output_reload_in_const (op
, clobber_reg
, len
, false);
10657 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
10659 output_reload_in_const (op
, clobber_reg
, len
, false);
10664 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10667 avr_output_addr_vec_elt (FILE *stream
, int value
)
10669 if (AVR_HAVE_JMP_CALL
)
10670 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
10672 fprintf (stream
, "\trjmp .L%d\n", value
);
10676 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10677 /* Returns true if SCRATCH are safe to be allocated as a scratch
10678 registers (for a define_peephole2) in the current function. */
10681 avr_hard_regno_scratch_ok (unsigned int regno
)
10683 /* Interrupt functions can only use registers that have already been saved
10684 by the prologue, even if they would normally be call-clobbered. */
10686 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10687 && !df_regs_ever_live_p (regno
))
10690 /* Don't allow hard registers that might be part of the frame pointer.
10691 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10692 and don't care for a frame pointer that spans more than one register. */
10694 if ((!reload_completed
|| frame_pointer_needed
)
10695 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
10704 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10705 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10708 avr_hard_regno_rename_ok (unsigned int old_reg
,
10709 unsigned int new_reg
)
10711 /* Interrupt functions can only use registers that have already been
10712 saved by the prologue, even if they would normally be
10715 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10716 && !df_regs_ever_live_p (new_reg
))
10719 /* Don't allow hard registers that might be part of the frame pointer.
10720 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10721 and don't care for a frame pointer that spans more than one register. */
10723 if ((!reload_completed
|| frame_pointer_needed
)
10724 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
10725 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
10733 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10734 or memory location in the I/O space (QImode only).
10736 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10737 Operand 1: register operand to test, or CONST_INT memory address.
10738 Operand 2: bit number.
10739 Operand 3: label to jump to if the test is true. */
10742 avr_out_sbxx_branch (rtx_insn
*insn
, rtx operands
[])
10744 enum rtx_code comp
= GET_CODE (operands
[0]);
10745 bool long_jump
= get_attr_length (insn
) >= 4;
10746 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
10750 else if (comp
== LT
)
10754 comp
= reverse_condition (comp
);
10756 switch (GET_CODE (operands
[1]))
10763 if (low_io_address_operand (operands
[1], QImode
))
10766 output_asm_insn ("sbis %i1,%2", operands
);
10768 output_asm_insn ("sbic %i1,%2", operands
);
10772 output_asm_insn ("in __tmp_reg__,%i1", operands
);
10774 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
10776 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
10779 break; /* CONST_INT */
10784 output_asm_insn ("sbrs %T1%T2", operands
);
10786 output_asm_insn ("sbrc %T1%T2", operands
);
10792 return ("rjmp .+4" CR_TAB
10801 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
10804 avr_asm_out_ctor (rtx symbol
, int priority
)
10806 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
10807 default_ctor_section_asm_out_constructor (symbol
, priority
);
10811 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
10814 avr_asm_out_dtor (rtx symbol
, int priority
)
10816 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
10817 default_dtor_section_asm_out_destructor (symbol
, priority
);
10821 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
10824 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
10826 if (TYPE_MODE (type
) == BLKmode
)
10828 HOST_WIDE_INT size
= int_size_in_bytes (type
);
10829 return (size
== -1 || size
> 8);
10836 /* Implement `CASE_VALUES_THRESHOLD'. */
10837 /* Supply the default for --param case-values-threshold=0 */
10839 static unsigned int
10840 avr_case_values_threshold (void)
10842 /* The exact break-even point between a jump table and an if-else tree
10843 depends on several factors not available here like, e.g. if 8-bit
10844 comparisons can be used in the if-else tree or not, on the
10845 range of the case values, if the case value can be reused, on the
10846 register allocation, etc. '7' appears to be a good choice. */
10852 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10854 static enum machine_mode
10855 avr_addr_space_address_mode (addr_space_t as
)
10857 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
10861 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10863 static enum machine_mode
10864 avr_addr_space_pointer_mode (addr_space_t as
)
10866 return avr_addr_space_address_mode (as
);
10870 /* Helper for following function. */
10873 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
10875 gcc_assert (REG_P (reg
));
10879 return REGNO (reg
) == REG_Z
;
10882 /* Avoid combine to propagate hard regs. */
10884 if (can_create_pseudo_p()
10885 && REGNO (reg
) < REG_Z
)
10894 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
10897 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
10898 bool strict
, addr_space_t as
)
10907 case ADDR_SPACE_GENERIC
:
10908 return avr_legitimate_address_p (mode
, x
, strict
);
10910 case ADDR_SPACE_FLASH
:
10911 case ADDR_SPACE_FLASH1
:
10912 case ADDR_SPACE_FLASH2
:
10913 case ADDR_SPACE_FLASH3
:
10914 case ADDR_SPACE_FLASH4
:
10915 case ADDR_SPACE_FLASH5
:
10917 switch (GET_CODE (x
))
10920 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
10924 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
10933 case ADDR_SPACE_MEMX
:
10936 && can_create_pseudo_p());
10938 if (LO_SUM
== GET_CODE (x
))
10940 rtx hi
= XEXP (x
, 0);
10941 rtx lo
= XEXP (x
, 1);
10944 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
10946 && REGNO (lo
) == REG_Z
);
10952 if (avr_log
.legitimate_address_p
)
10954 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
10955 "reload_completed=%d reload_in_progress=%d %s:",
10956 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
10957 reg_renumber
? "(reg_renumber)" : "");
10959 if (GET_CODE (x
) == PLUS
10960 && REG_P (XEXP (x
, 0))
10961 && CONST_INT_P (XEXP (x
, 1))
10962 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
10965 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
10966 true_regnum (XEXP (x
, 0)));
10969 avr_edump ("\n%r\n", x
);
10976 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10979 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
10980 enum machine_mode mode
, addr_space_t as
)
10982 if (ADDR_SPACE_GENERIC_P (as
))
10983 return avr_legitimize_address (x
, old_x
, mode
);
10985 if (avr_log
.legitimize_address
)
10987 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
10994 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
10997 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
10999 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
11000 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
11002 if (avr_log
.progmem
)
11003 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
11004 src
, type_from
, type_to
);
11006 /* Up-casting from 16-bit to 24-bit pointer. */
11008 if (as_from
!= ADDR_SPACE_MEMX
11009 && as_to
== ADDR_SPACE_MEMX
)
11013 rtx reg
= gen_reg_rtx (PSImode
);
11015 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
11016 sym
= XEXP (sym
, 0);
11018 /* Look at symbol flags: avr_encode_section_info set the flags
11019 also if attribute progmem was seen so that we get the right
11020 promotion for, e.g. PSTR-like strings that reside in generic space
11021 but are located in flash. In that case we patch the incoming
11024 if (SYMBOL_REF
== GET_CODE (sym
)
11025 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
11027 as_from
= ADDR_SPACE_FLASH
;
11030 /* Linearize memory: RAM has bit 23 set. */
11032 msb
= ADDR_SPACE_GENERIC_P (as_from
)
11034 : avr_addrspace
[as_from
].segment
;
11036 src
= force_reg (Pmode
, src
);
11038 emit_insn (msb
== 0
11039 ? gen_zero_extendhipsi2 (reg
, src
)
11040 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
11045 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
11047 if (as_from
== ADDR_SPACE_MEMX
11048 && as_to
!= ADDR_SPACE_MEMX
)
11050 rtx new_src
= gen_reg_rtx (Pmode
);
11052 src
= force_reg (PSImode
, src
);
11054 emit_move_insn (new_src
,
11055 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
11063 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
11066 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
11067 addr_space_t superset ATTRIBUTE_UNUSED
)
11069 /* Allow any kind of pointer mess. */
11075 /* Implement `TARGET_CONVERT_TO_TYPE'. */
11078 avr_convert_to_type (tree type
, tree expr
)
11080 /* Print a diagnose for pointer conversion that changes the address
11081 space of the pointer target to a non-enclosing address space,
11082 provided -Waddr-space-convert is on.
11084 FIXME: Filter out cases where the target object is known to
11085 be located in the right memory, like in
11087 (const __flash*) PSTR ("text")
11089 Also try to distinguish between explicit casts requested by
11090 the user and implicit casts like
11092 void f (const __flash char*);
11094 void g (const char *p)
11096 f ((const __flash*) p);
11099 under the assumption that an explicit casts means that the user
11100 knows what he is doing, e.g. interface with PSTR or old style
11101 code with progmem and pgm_read_xxx.
11104 if (avr_warn_addr_space_convert
11105 && expr
!= error_mark_node
11106 && POINTER_TYPE_P (type
)
11107 && POINTER_TYPE_P (TREE_TYPE (expr
)))
11109 addr_space_t as_old
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr
)));
11110 addr_space_t as_new
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
11112 if (avr_log
.progmem
)
11113 avr_edump ("%?: type = %t\nexpr = %t\n\n", type
, expr
);
11115 if (as_new
!= ADDR_SPACE_MEMX
11116 && as_new
!= as_old
)
11118 location_t loc
= EXPR_LOCATION (expr
);
11119 const char *name_old
= avr_addrspace
[as_old
].name
;
11120 const char *name_new
= avr_addrspace
[as_new
].name
;
11122 warning (OPT_Waddr_space_convert
,
11123 "conversion from address space %qs to address space %qs",
11124 ADDR_SPACE_GENERIC_P (as_old
) ? "generic" : name_old
,
11125 ADDR_SPACE_GENERIC_P (as_new
) ? "generic" : name_new
);
11127 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, expr
);
11135 /* Worker function for movmemhi expander.
11136 XOP[0] Destination as MEM:BLK
11138 XOP[2] # Bytes to copy
11140 Return TRUE if the expansion is accomplished.
11141 Return FALSE if the operand compination is not supported. */
11144 avr_emit_movmemhi (rtx
*xop
)
11146 HOST_WIDE_INT count
;
11147 enum machine_mode loop_mode
;
11148 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
11149 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
11150 rtx a_hi8
= NULL_RTX
;
11152 if (avr_mem_flash_p (xop
[0]))
11155 if (!CONST_INT_P (xop
[2]))
11158 count
= INTVAL (xop
[2]);
11162 a_src
= XEXP (xop
[1], 0);
11163 a_dest
= XEXP (xop
[0], 0);
11165 if (PSImode
== GET_MODE (a_src
))
11167 gcc_assert (as
== ADDR_SPACE_MEMX
);
11169 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
11170 loop_reg
= gen_rtx_REG (loop_mode
, 24);
11171 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
11173 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
11174 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
11178 int segment
= avr_addrspace
[as
].segment
;
11181 && avr_current_device
->n_flash
> 1)
11183 a_hi8
= GEN_INT (segment
);
11184 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
11186 else if (!ADDR_SPACE_GENERIC_P (as
))
11188 as
= ADDR_SPACE_FLASH
;
11193 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
11194 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
11197 xas
= GEN_INT (as
);
11199 /* FIXME: Register allocator might come up with spill fails if it is left
11200 on its own. Thus, we allocate the pointer registers by hand:
11202 X = destination address */
11204 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
11205 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
11207 /* FIXME: Register allocator does a bad job and might spill address
11208 register(s) inside the loop leading to additional move instruction
11209 to/from stack which could clobber tmp_reg. Thus, do *not* emit
11210 load and store as separate insns. Instead, we perform the copy
11211 by means of one monolithic insn. */
11213 gcc_assert (TMP_REGNO
== LPM_REGNO
);
11215 if (as
!= ADDR_SPACE_MEMX
)
11217 /* Load instruction ([E]LPM or LD) is known at compile time:
11218 Do the copy-loop inline. */
11220 rtx (*fun
) (rtx
, rtx
, rtx
)
11221 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
11223 insn
= fun (xas
, loop_reg
, loop_reg
);
11227 rtx (*fun
) (rtx
, rtx
)
11228 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
11230 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
11232 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
11235 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
11242 /* Print assembler for movmem_qi, movmem_hi insns...
11244 $1, $2 : Loop register
11246 X : Destination address
11250 avr_out_movmem (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
11252 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
11253 enum machine_mode loop_mode
= GET_MODE (op
[1]);
11254 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
11262 xop
[2] = tmp_reg_rtx
;
11266 avr_asm_len ("0:", xop
, plen
, 0);
11268 /* Load with post-increment */
11275 case ADDR_SPACE_GENERIC
:
11277 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
11280 case ADDR_SPACE_FLASH
:
11283 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
11285 avr_asm_len ("lpm" CR_TAB
11286 "adiw r30,1", xop
, plen
, 2);
11289 case ADDR_SPACE_FLASH1
:
11290 case ADDR_SPACE_FLASH2
:
11291 case ADDR_SPACE_FLASH3
:
11292 case ADDR_SPACE_FLASH4
:
11293 case ADDR_SPACE_FLASH5
:
11295 if (AVR_HAVE_ELPMX
)
11296 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
11298 avr_asm_len ("elpm" CR_TAB
11299 "adiw r30,1", xop
, plen
, 2);
11303 /* Store with post-increment */
11305 avr_asm_len ("st X+,%2", xop
, plen
, 1);
11307 /* Decrement loop-counter and set Z-flag */
11309 if (QImode
== loop_mode
)
11311 avr_asm_len ("dec %1", xop
, plen
, 1);
11315 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
11319 avr_asm_len ("subi %A1,1" CR_TAB
11320 "sbci %B1,0", xop
, plen
, 2);
11323 /* Loop until zero */
11325 return avr_asm_len ("brne 0b", xop
, plen
, 1);
11330 /* Helper for __builtin_avr_delay_cycles */
11333 avr_mem_clobber (void)
11335 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
11336 MEM_VOLATILE_P (mem
) = 1;
11341 avr_expand_delay_cycles (rtx operands0
)
11343 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
11344 unsigned HOST_WIDE_INT cycles_used
;
11345 unsigned HOST_WIDE_INT loop_count
;
11347 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
11349 loop_count
= ((cycles
- 9) / 6) + 1;
11350 cycles_used
= ((loop_count
- 1) * 6) + 9;
11351 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
11352 avr_mem_clobber()));
11353 cycles
-= cycles_used
;
11356 if (IN_RANGE (cycles
, 262145, 83886081))
11358 loop_count
= ((cycles
- 7) / 5) + 1;
11359 if (loop_count
> 0xFFFFFF)
11360 loop_count
= 0xFFFFFF;
11361 cycles_used
= ((loop_count
- 1) * 5) + 7;
11362 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
11363 avr_mem_clobber()));
11364 cycles
-= cycles_used
;
11367 if (IN_RANGE (cycles
, 768, 262144))
11369 loop_count
= ((cycles
- 5) / 4) + 1;
11370 if (loop_count
> 0xFFFF)
11371 loop_count
= 0xFFFF;
11372 cycles_used
= ((loop_count
- 1) * 4) + 5;
11373 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
11374 avr_mem_clobber()));
11375 cycles
-= cycles_used
;
11378 if (IN_RANGE (cycles
, 6, 767))
11380 loop_count
= cycles
/ 3;
11381 if (loop_count
> 255)
11383 cycles_used
= loop_count
* 3;
11384 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
11385 avr_mem_clobber()));
11386 cycles
-= cycles_used
;
11389 while (cycles
>= 2)
11391 emit_insn (gen_nopv (GEN_INT(2)));
11397 emit_insn (gen_nopv (GEN_INT(1)));
11403 /* Compute the image of x under f, i.e. perform x --> f(x) */
11406 avr_map (unsigned int f
, int x
)
11408 return x
< 8 ? (f
>> (4 * x
)) & 0xf : 0;
11412 /* Return some metrics of map A. */
11416 /* Number of fixed points in { 0 ... 7 } */
11419 /* Size of preimage of non-fixed points in { 0 ... 7 } */
11422 /* Mask representing the fixed points in { 0 ... 7 } */
11423 MAP_MASK_FIXED_0_7
,
11425 /* Size of the preimage of { 0 ... 7 } */
11428 /* Mask that represents the preimage of { f } */
11429 MAP_MASK_PREIMAGE_F
11433 avr_map_metric (unsigned int a
, int mode
)
11435 unsigned i
, metric
= 0;
11437 for (i
= 0; i
< 8; i
++)
11439 unsigned ai
= avr_map (a
, i
);
11441 if (mode
== MAP_FIXED_0_7
)
11443 else if (mode
== MAP_NONFIXED_0_7
)
11444 metric
+= ai
< 8 && ai
!= i
;
11445 else if (mode
== MAP_MASK_FIXED_0_7
)
11446 metric
|= ((unsigned) (ai
== i
)) << i
;
11447 else if (mode
== MAP_PREIMAGE_0_7
)
11449 else if (mode
== MAP_MASK_PREIMAGE_F
)
11450 metric
|= ((unsigned) (ai
== 0xf)) << i
;
11459 /* Return true if IVAL has a 0xf in its hexadecimal representation
11460 and false, otherwise. Only nibbles 0..7 are taken into account.
11461 Used as constraint helper for C0f and Cxf. */
11464 avr_has_nibble_0xf (rtx ival
)
11466 unsigned int map
= UINTVAL (ival
) & GET_MODE_MASK (SImode
);
11467 return 0 != avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
11471 /* We have a set of bits that are mapped by a function F.
11472 Try to decompose F by means of a second function G so that
11478 cost (F o G^-1) + cost (G) < cost (F)
11480 Example: Suppose builtin insert_bits supplies us with the map
11481 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
11482 nibble of the result, we can just as well rotate the bits before inserting
11483 them and use the map 0x7654ffff which is cheaper than the original map.
11484 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
11488 /* tree code of binary function G */
11489 enum tree_code code
;
11491 /* The constant second argument of G */
11494 /* G^-1, the inverse of G (*, arg) */
11497 /* The cost of appplying G (*, arg) */
11500 /* The composition F o G^-1 (*, arg) for some function F */
11503 /* For debug purpose only */
11507 static const avr_map_op_t avr_map_op
[] =
11509 { LROTATE_EXPR
, 0, 0x76543210, 0, 0, "id" },
11510 { LROTATE_EXPR
, 1, 0x07654321, 2, 0, "<<<" },
11511 { LROTATE_EXPR
, 2, 0x10765432, 4, 0, "<<<" },
11512 { LROTATE_EXPR
, 3, 0x21076543, 4, 0, "<<<" },
11513 { LROTATE_EXPR
, 4, 0x32107654, 1, 0, "<<<" },
11514 { LROTATE_EXPR
, 5, 0x43210765, 3, 0, "<<<" },
11515 { LROTATE_EXPR
, 6, 0x54321076, 5, 0, "<<<" },
11516 { LROTATE_EXPR
, 7, 0x65432107, 3, 0, "<<<" },
11517 { RSHIFT_EXPR
, 1, 0x6543210c, 1, 0, ">>" },
11518 { RSHIFT_EXPR
, 1, 0x7543210c, 1, 0, ">>" },
11519 { RSHIFT_EXPR
, 2, 0x543210cc, 2, 0, ">>" },
11520 { RSHIFT_EXPR
, 2, 0x643210cc, 2, 0, ">>" },
11521 { RSHIFT_EXPR
, 2, 0x743210cc, 2, 0, ">>" },
11522 { LSHIFT_EXPR
, 1, 0xc7654321, 1, 0, "<<" },
11523 { LSHIFT_EXPR
, 2, 0xcc765432, 2, 0, "<<" }
11527 /* Try to decompose F as F = (F o G^-1) o G as described above.
11528 The result is a struct representing F o G^-1 and G.
11529 If result.cost < 0 then such a decomposition does not exist. */
11531 static avr_map_op_t
11532 avr_map_decompose (unsigned int f
, const avr_map_op_t
*g
, bool val_const_p
)
11535 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
11536 avr_map_op_t f_ginv
= *g
;
11537 unsigned int ginv
= g
->ginv
;
11541 /* Step 1: Computing F o G^-1 */
11543 for (i
= 7; i
>= 0; i
--)
11545 int x
= avr_map (f
, i
);
11549 x
= avr_map (ginv
, x
);
11551 /* The bit is no element of the image of G: no avail (cost = -1) */
11557 f_ginv
.map
= (f_ginv
.map
<< 4) + x
;
11560 /* Step 2: Compute the cost of the operations.
11561 The overall cost of doing an operation prior to the insertion is
11562 the cost of the insertion plus the cost of the operation. */
11564 /* Step 2a: Compute cost of F o G^-1 */
11566 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
11568 /* The mapping consists only of fixed points and can be folded
11569 to AND/OR logic in the remainder. Reasonable cost is 3. */
11571 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
11577 /* Get the cost of the insn by calling the output worker with some
11578 fake values. Mimic effect of reloading xop[3]: Unused operands
11579 are mapped to 0 and used operands are reloaded to xop[0]. */
11581 xop
[0] = all_regs_rtx
[24];
11582 xop
[1] = gen_int_mode (f_ginv
.map
, SImode
);
11583 xop
[2] = all_regs_rtx
[25];
11584 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
11586 avr_out_insert_bits (xop
, &f_ginv
.cost
);
11588 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
11591 /* Step 2b: Add cost of G */
11593 f_ginv
.cost
+= g
->cost
;
11595 if (avr_log
.builtin
)
11596 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
11602 /* Insert bits from XOP[1] into XOP[0] according to MAP.
11603 XOP[0] and XOP[1] don't overlap.
11604 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11605 If FIXP_P = false: Just move the bit if its position in the destination
11606 is different to its source position. */
11609 avr_move_bits (rtx
*xop
, unsigned int map
, bool fixp_p
, int *plen
)
11613 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11614 int t_bit_src
= -1;
11616 /* We order the operations according to the requested source bit b. */
11618 for (b
= 0; b
< 8; b
++)
11619 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
11621 int bit_src
= avr_map (map
, bit_dest
);
11625 /* Same position: No need to copy as requested by FIXP_P. */
11626 || (bit_dest
== bit_src
&& !fixp_p
))
11629 if (t_bit_src
!= bit_src
)
11631 /* Source bit is not yet in T: Store it to T. */
11633 t_bit_src
= bit_src
;
11635 xop
[3] = GEN_INT (bit_src
);
11636 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
11639 /* Load destination bit with T. */
11641 xop
[3] = GEN_INT (bit_dest
);
11642 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
11647 /* PLEN == 0: Print assembler code for `insert_bits'.
11648 PLEN != 0: Compute code length in bytes.
11651 OP[1]: The mapping composed of nibbles. If nibble no. N is
11652 0: Bit N of result is copied from bit OP[2].0
11654 7: Bit N of result is copied from bit OP[2].7
11655 0xf: Bit N of result is copied from bit OP[3].N
11656 OP[2]: Bits to be inserted
11657 OP[3]: Target value */
11660 avr_out_insert_bits (rtx
*op
, int *plen
)
11662 unsigned int map
= UINTVAL (op
[1]) & GET_MODE_MASK (SImode
);
11663 unsigned mask_fixed
;
11664 bool fixp_p
= true;
11671 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
11675 else if (flag_print_asm_name
)
11676 fprintf (asm_out_file
, ASM_COMMENT_START
"map = 0x%08x\n", map
);
11678 /* If MAP has fixed points it might be better to initialize the result
11679 with the bits to be inserted instead of moving all bits by hand. */
11681 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
11683 if (REGNO (xop
[0]) == REGNO (xop
[1]))
11685 /* Avoid early-clobber conflicts */
11687 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
11688 xop
[1] = tmp_reg_rtx
;
11692 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
11694 /* XOP[2] is used and reloaded to XOP[0] already */
11696 int n_fix
= 0, n_nofix
= 0;
11698 gcc_assert (REG_P (xop
[2]));
11700 /* Get the code size of the bit insertions; once with all bits
11701 moved and once with fixed points omitted. */
11703 avr_move_bits (xop
, map
, true, &n_fix
);
11704 avr_move_bits (xop
, map
, false, &n_nofix
);
11706 if (fixp_p
&& n_fix
- n_nofix
> 3)
11708 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
11710 avr_asm_len ("eor %0,%1" CR_TAB
11711 "andi %0,%3" CR_TAB
11712 "eor %0,%1", xop
, plen
, 3);
11718 /* XOP[2] is unused */
11720 if (fixp_p
&& mask_fixed
)
11722 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
11727 /* Move/insert remaining bits. */
11729 avr_move_bits (xop
, map
, fixp_p
, plen
);
11735 /* IDs for all the AVR builtins. */
11737 enum avr_builtin_id
11739 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11740 AVR_BUILTIN_ ## NAME,
11741 #include "builtins.def"
11747 struct GTY(()) avr_builtin_description
11749 enum insn_code icode
;
11755 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11756 that a built-in's ID can be used to access the built-in by means of
11759 static GTY(()) struct avr_builtin_description
11760 avr_bdesc
[AVR_BUILTIN_COUNT
] =
11762 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
11763 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
11764 #include "builtins.def"
11769 /* Implement `TARGET_BUILTIN_DECL'. */
11772 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
11774 if (id
< AVR_BUILTIN_COUNT
)
11775 return avr_bdesc
[id
].fndecl
;
11777 return error_mark_node
;
11782 avr_init_builtin_int24 (void)
11784 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
11785 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
11787 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
11788 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
11792 /* Implement `TARGET_INIT_BUILTINS' */
11793 /* Set up all builtin functions for this target. */
11796 avr_init_builtins (void)
11798 tree void_ftype_void
11799 = build_function_type_list (void_type_node
, NULL_TREE
);
11800 tree uchar_ftype_uchar
11801 = build_function_type_list (unsigned_char_type_node
,
11802 unsigned_char_type_node
,
11804 tree uint_ftype_uchar_uchar
11805 = build_function_type_list (unsigned_type_node
,
11806 unsigned_char_type_node
,
11807 unsigned_char_type_node
,
11809 tree int_ftype_char_char
11810 = build_function_type_list (integer_type_node
,
11814 tree int_ftype_char_uchar
11815 = build_function_type_list (integer_type_node
,
11817 unsigned_char_type_node
,
11819 tree void_ftype_ulong
11820 = build_function_type_list (void_type_node
,
11821 long_unsigned_type_node
,
11824 tree uchar_ftype_ulong_uchar_uchar
11825 = build_function_type_list (unsigned_char_type_node
,
11826 long_unsigned_type_node
,
11827 unsigned_char_type_node
,
11828 unsigned_char_type_node
,
11831 tree const_memx_void_node
11832 = build_qualified_type (void_type_node
,
11834 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
11836 tree const_memx_ptr_type_node
11837 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
11839 tree char_ftype_const_memx_ptr
11840 = build_function_type_list (char_type_node
,
11841 const_memx_ptr_type_node
,
11845 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
11847 #define FX_FTYPE_FX(fx) \
11848 tree fx##r_ftype_##fx##r \
11849 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
11850 tree fx##k_ftype_##fx##k \
11851 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
11853 #define FX_FTYPE_FX_INT(fx) \
11854 tree fx##r_ftype_##fx##r_int \
11855 = build_function_type_list (node_##fx##r, node_##fx##r, \
11856 integer_type_node, NULL); \
11857 tree fx##k_ftype_##fx##k_int \
11858 = build_function_type_list (node_##fx##k, node_##fx##k, \
11859 integer_type_node, NULL)
11861 #define INT_FTYPE_FX(fx) \
11862 tree int_ftype_##fx##r \
11863 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
11864 tree int_ftype_##fx##k \
11865 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
11867 #define INTX_FTYPE_FX(fx) \
11868 tree int##fx##r_ftype_##fx##r \
11869 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
11870 tree int##fx##k_ftype_##fx##k \
11871 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
11873 #define FX_FTYPE_INTX(fx) \
11874 tree fx##r_ftype_int##fx##r \
11875 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
11876 tree fx##k_ftype_int##fx##k \
11877 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
11879 tree node_hr
= short_fract_type_node
;
11880 tree node_nr
= fract_type_node
;
11881 tree node_lr
= long_fract_type_node
;
11882 tree node_llr
= long_long_fract_type_node
;
11884 tree node_uhr
= unsigned_short_fract_type_node
;
11885 tree node_unr
= unsigned_fract_type_node
;
11886 tree node_ulr
= unsigned_long_fract_type_node
;
11887 tree node_ullr
= unsigned_long_long_fract_type_node
;
11889 tree node_hk
= short_accum_type_node
;
11890 tree node_nk
= accum_type_node
;
11891 tree node_lk
= long_accum_type_node
;
11892 tree node_llk
= long_long_accum_type_node
;
11894 tree node_uhk
= unsigned_short_accum_type_node
;
11895 tree node_unk
= unsigned_accum_type_node
;
11896 tree node_ulk
= unsigned_long_accum_type_node
;
11897 tree node_ullk
= unsigned_long_long_accum_type_node
;
11900 /* For absfx builtins. */
11907 /* For roundfx builtins. */
11909 FX_FTYPE_FX_INT (h
);
11910 FX_FTYPE_FX_INT (n
);
11911 FX_FTYPE_FX_INT (l
);
11912 FX_FTYPE_FX_INT (ll
);
11914 FX_FTYPE_FX_INT (uh
);
11915 FX_FTYPE_FX_INT (un
);
11916 FX_FTYPE_FX_INT (ul
);
11917 FX_FTYPE_FX_INT (ull
);
11919 /* For countlsfx builtins. */
11929 INT_FTYPE_FX (ull
);
11931 /* For bitsfx builtins. */
11936 INTX_FTYPE_FX (ll
);
11938 INTX_FTYPE_FX (uh
);
11939 INTX_FTYPE_FX (un
);
11940 INTX_FTYPE_FX (ul
);
11941 INTX_FTYPE_FX (ull
);
11943 /* For fxbits builtins. */
11948 FX_FTYPE_INTX (ll
);
11950 FX_FTYPE_INTX (uh
);
11951 FX_FTYPE_INTX (un
);
11952 FX_FTYPE_INTX (ul
);
11953 FX_FTYPE_INTX (ull
);
11956 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11958 int id = AVR_BUILTIN_ ## NAME; \
11959 const char *Name = "__builtin_avr_" #NAME; \
11960 char *name = (char*) alloca (1 + strlen (Name)); \
11962 gcc_assert (id < AVR_BUILTIN_COUNT); \
11963 avr_bdesc[id].fndecl \
11964 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
11965 BUILT_IN_MD, LIBNAME, NULL_TREE); \
11967 #include "builtins.def"
11970 avr_init_builtin_int24 ();
11974 /* Subroutine of avr_expand_builtin to expand vanilla builtins
11975 with non-void result and 1 ... 3 arguments. */
11978 avr_default_expand_builtin (enum insn_code icode
, tree exp
, rtx target
)
11981 int n
, n_args
= call_expr_nargs (exp
);
11982 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
11984 gcc_assert (n_args
>= 1 && n_args
<= 3);
11986 if (target
== NULL_RTX
11987 || GET_MODE (target
) != tmode
11988 || !insn_data
[icode
].operand
[0].predicate (target
, tmode
))
11990 target
= gen_reg_rtx (tmode
);
11993 for (n
= 0; n
< n_args
; n
++)
11995 tree arg
= CALL_EXPR_ARG (exp
, n
);
11996 rtx op
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
11997 enum machine_mode opmode
= GET_MODE (op
);
11998 enum machine_mode mode
= insn_data
[icode
].operand
[n
+1].mode
;
12000 if ((opmode
== SImode
|| opmode
== VOIDmode
) && mode
== HImode
)
12003 op
= gen_lowpart (HImode
, op
);
12006 /* In case the insn wants input operands in modes different from
12007 the result, abort. */
12009 gcc_assert (opmode
== mode
|| opmode
== VOIDmode
);
12011 if (!insn_data
[icode
].operand
[n
+1].predicate (op
, mode
))
12012 op
= copy_to_mode_reg (mode
, op
);
12019 case 1: pat
= GEN_FCN (icode
) (target
, xop
[0]); break;
12020 case 2: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1]); break;
12021 case 3: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1], xop
[2]); break;
12027 if (pat
== NULL_RTX
)
12036 /* Implement `TARGET_EXPAND_BUILTIN'. */
12037 /* Expand an expression EXP that calls a built-in function,
12038 with result going to TARGET if that's convenient
12039 (and in mode MODE if that's convenient).
12040 SUBTARGET may be used as the target for computing one of EXP's operands.
12041 IGNORE is nonzero if the value is to be ignored. */
12044 avr_expand_builtin (tree exp
, rtx target
,
12045 rtx subtarget ATTRIBUTE_UNUSED
,
12046 enum machine_mode mode ATTRIBUTE_UNUSED
,
12049 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
12050 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
12051 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
12052 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
12056 gcc_assert (id
< AVR_BUILTIN_COUNT
);
12060 case AVR_BUILTIN_NOP
:
12061 emit_insn (gen_nopv (GEN_INT(1)));
12064 case AVR_BUILTIN_DELAY_CYCLES
:
12066 arg0
= CALL_EXPR_ARG (exp
, 0);
12067 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
12069 if (!CONST_INT_P (op0
))
12070 error ("%s expects a compile time integer constant", bname
);
12072 avr_expand_delay_cycles (op0
);
12077 case AVR_BUILTIN_INSERT_BITS
:
12079 arg0
= CALL_EXPR_ARG (exp
, 0);
12080 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
12082 if (!CONST_INT_P (op0
))
12084 error ("%s expects a compile time long integer constant"
12085 " as first argument", bname
);
12092 case AVR_BUILTIN_ROUNDHR
: case AVR_BUILTIN_ROUNDUHR
:
12093 case AVR_BUILTIN_ROUNDR
: case AVR_BUILTIN_ROUNDUR
:
12094 case AVR_BUILTIN_ROUNDLR
: case AVR_BUILTIN_ROUNDULR
:
12095 case AVR_BUILTIN_ROUNDLLR
: case AVR_BUILTIN_ROUNDULLR
:
12097 case AVR_BUILTIN_ROUNDHK
: case AVR_BUILTIN_ROUNDUHK
:
12098 case AVR_BUILTIN_ROUNDK
: case AVR_BUILTIN_ROUNDUK
:
12099 case AVR_BUILTIN_ROUNDLK
: case AVR_BUILTIN_ROUNDULK
:
12100 case AVR_BUILTIN_ROUNDLLK
: case AVR_BUILTIN_ROUNDULLK
:
12102 /* Warn about odd rounding. Rounding points >= FBIT will have
12105 if (TREE_CODE (CALL_EXPR_ARG (exp
, 1)) != INTEGER_CST
)
12108 int rbit
= (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1));
12110 if (rbit
>= (int) GET_MODE_FBIT (mode
))
12112 warning (OPT_Wextra
, "rounding to %d bits has no effect for "
12113 "fixed-point value with %d fractional bits",
12114 rbit
, GET_MODE_FBIT (mode
));
12116 return expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
, mode
,
12119 else if (rbit
<= - (int) GET_MODE_IBIT (mode
))
12121 warning (0, "rounding result will always be 0");
12122 return CONST0_RTX (mode
);
12125 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
12127 TR 18037 only specifies results for RP > 0. However, the
12128 remaining cases of -IBIT < RP <= 0 can easily be supported
12129 without any additional overhead. */
12134 /* No fold found and no insn: Call support function from libgcc. */
12136 if (d
->icode
== CODE_FOR_nothing
12137 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp
)) != NULL_TREE
)
12139 return expand_call (exp
, target
, ignore
);
12142 /* No special treatment needed: vanilla expand. */
12144 gcc_assert (d
->icode
!= CODE_FOR_nothing
);
12145 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
12147 if (d
->n_args
== 0)
12149 emit_insn ((GEN_FCN (d
->icode
)) (target
));
12153 return avr_default_expand_builtin (d
->icode
, exp
, target
);
12157 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
12160 avr_fold_absfx (tree tval
)
12162 if (FIXED_CST
!= TREE_CODE (tval
))
12165 /* Our fixed-points have no padding: Use double_int payload directly. */
12167 FIXED_VALUE_TYPE fval
= TREE_FIXED_CST (tval
);
12168 unsigned int bits
= GET_MODE_BITSIZE (fval
.mode
);
12169 double_int ival
= fval
.data
.sext (bits
);
12171 if (!ival
.is_negative())
12174 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
12176 fval
.data
= (ival
== double_int::min_value (bits
, false).sext (bits
))
12177 ? double_int::max_value (bits
, false)
12180 return build_fixed (TREE_TYPE (tval
), fval
);
12184 /* Implement `TARGET_FOLD_BUILTIN'. */
12187 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
12188 bool ignore ATTRIBUTE_UNUSED
)
12190 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
12191 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
12201 case AVR_BUILTIN_SWAP
:
12203 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
12204 build_int_cst (val_type
, 4));
12207 case AVR_BUILTIN_ABSHR
:
12208 case AVR_BUILTIN_ABSR
:
12209 case AVR_BUILTIN_ABSLR
:
12210 case AVR_BUILTIN_ABSLLR
:
12212 case AVR_BUILTIN_ABSHK
:
12213 case AVR_BUILTIN_ABSK
:
12214 case AVR_BUILTIN_ABSLK
:
12215 case AVR_BUILTIN_ABSLLK
:
12216 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
12218 return avr_fold_absfx (arg
[0]);
12220 case AVR_BUILTIN_BITSHR
: case AVR_BUILTIN_HRBITS
:
12221 case AVR_BUILTIN_BITSHK
: case AVR_BUILTIN_HKBITS
:
12222 case AVR_BUILTIN_BITSUHR
: case AVR_BUILTIN_UHRBITS
:
12223 case AVR_BUILTIN_BITSUHK
: case AVR_BUILTIN_UHKBITS
:
12225 case AVR_BUILTIN_BITSR
: case AVR_BUILTIN_RBITS
:
12226 case AVR_BUILTIN_BITSK
: case AVR_BUILTIN_KBITS
:
12227 case AVR_BUILTIN_BITSUR
: case AVR_BUILTIN_URBITS
:
12228 case AVR_BUILTIN_BITSUK
: case AVR_BUILTIN_UKBITS
:
12230 case AVR_BUILTIN_BITSLR
: case AVR_BUILTIN_LRBITS
:
12231 case AVR_BUILTIN_BITSLK
: case AVR_BUILTIN_LKBITS
:
12232 case AVR_BUILTIN_BITSULR
: case AVR_BUILTIN_ULRBITS
:
12233 case AVR_BUILTIN_BITSULK
: case AVR_BUILTIN_ULKBITS
:
12235 case AVR_BUILTIN_BITSLLR
: case AVR_BUILTIN_LLRBITS
:
12236 case AVR_BUILTIN_BITSLLK
: case AVR_BUILTIN_LLKBITS
:
12237 case AVR_BUILTIN_BITSULLR
: case AVR_BUILTIN_ULLRBITS
:
12238 case AVR_BUILTIN_BITSULLK
: case AVR_BUILTIN_ULLKBITS
:
12240 gcc_assert (TYPE_PRECISION (val_type
)
12241 == TYPE_PRECISION (TREE_TYPE (arg
[0])));
12243 return build1 (VIEW_CONVERT_EXPR
, val_type
, arg
[0]);
12245 case AVR_BUILTIN_INSERT_BITS
:
12247 tree tbits
= arg
[1];
12248 tree tval
= arg
[2];
12250 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
12252 bool changed
= false;
12254 avr_map_op_t best_g
;
12256 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
12258 /* No constant as first argument: Don't fold this and run into
12259 error in avr_expand_builtin. */
12264 tmap
= wide_int_to_tree (map_type
, arg
[0]);
12265 map
= TREE_INT_CST_LOW (tmap
);
12267 if (TREE_CODE (tval
) != INTEGER_CST
12268 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
12270 /* There are no F in the map, i.e. 3rd operand is unused.
12271 Replace that argument with some constant to render
12272 respective input unused. */
12274 tval
= build_int_cst (val_type
, 0);
12278 if (TREE_CODE (tbits
) != INTEGER_CST
12279 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
12281 /* Similar for the bits to be inserted. If they are unused,
12282 we can just as well pass 0. */
12284 tbits
= build_int_cst (val_type
, 0);
12287 if (TREE_CODE (tbits
) == INTEGER_CST
)
12289 /* Inserting bits known at compile time is easy and can be
12290 performed by AND and OR with appropriate masks. */
12292 int bits
= TREE_INT_CST_LOW (tbits
);
12293 int mask_ior
= 0, mask_and
= 0xff;
12295 for (i
= 0; i
< 8; i
++)
12297 int mi
= avr_map (map
, i
);
12301 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
12302 else mask_and
&= ~(1 << i
);
12306 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
12307 build_int_cst (val_type
, mask_ior
));
12308 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
12309 build_int_cst (val_type
, mask_and
));
12313 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
12315 /* If bits don't change their position we can use vanilla logic
12316 to merge the two arguments. */
12318 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
12320 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
12321 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
12323 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
12324 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
12325 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
12328 /* Try to decomposing map to reduce overall cost. */
12330 if (avr_log
.builtin
)
12331 avr_edump ("\n%?: %x\n%?: ROL cost: ", map
);
12333 best_g
= avr_map_op
[0];
12334 best_g
.cost
= 1000;
12336 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
12339 = avr_map_decompose (map
, avr_map_op
+ i
,
12340 TREE_CODE (tval
) == INTEGER_CST
);
12342 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
12346 if (avr_log
.builtin
)
12349 if (best_g
.arg
== 0)
12350 /* No optimization found */
12353 /* Apply operation G to the 2nd argument. */
12355 if (avr_log
.builtin
)
12356 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
12357 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
12359 /* Do right-shifts arithmetically: They copy the MSB instead of
12360 shifting in a non-usable value (0) as with logic right-shift. */
12362 tbits
= fold_convert (signed_char_type_node
, tbits
);
12363 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
12364 build_int_cst (val_type
, best_g
.arg
));
12365 tbits
= fold_convert (val_type
, tbits
);
12367 /* Use map o G^-1 instead of original map to undo the effect of G. */
12369 tmap
= wide_int_to_tree (map_type
, best_g
.map
);
12371 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
12372 } /* AVR_BUILTIN_INSERT_BITS */
12380 /* Initialize the GCC target structure. */
12382 #undef TARGET_ASM_ALIGNED_HI_OP
12383 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
12384 #undef TARGET_ASM_ALIGNED_SI_OP
12385 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
12386 #undef TARGET_ASM_UNALIGNED_HI_OP
12387 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
12388 #undef TARGET_ASM_UNALIGNED_SI_OP
12389 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
12390 #undef TARGET_ASM_INTEGER
12391 #define TARGET_ASM_INTEGER avr_assemble_integer
12392 #undef TARGET_ASM_FILE_START
12393 #define TARGET_ASM_FILE_START avr_file_start
12394 #undef TARGET_ASM_FILE_END
12395 #define TARGET_ASM_FILE_END avr_file_end
12397 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
12398 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
12399 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
12400 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
12402 #undef TARGET_FUNCTION_VALUE
12403 #define TARGET_FUNCTION_VALUE avr_function_value
12404 #undef TARGET_LIBCALL_VALUE
12405 #define TARGET_LIBCALL_VALUE avr_libcall_value
12406 #undef TARGET_FUNCTION_VALUE_REGNO_P
12407 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
12409 #undef TARGET_ATTRIBUTE_TABLE
12410 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
12411 #undef TARGET_INSERT_ATTRIBUTES
12412 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
12413 #undef TARGET_SECTION_TYPE_FLAGS
12414 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
12416 #undef TARGET_ASM_NAMED_SECTION
12417 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
12418 #undef TARGET_ASM_INIT_SECTIONS
12419 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
12420 #undef TARGET_ENCODE_SECTION_INFO
12421 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
12422 #undef TARGET_ASM_SELECT_SECTION
12423 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
12425 #undef TARGET_REGISTER_MOVE_COST
12426 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
12427 #undef TARGET_MEMORY_MOVE_COST
12428 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
12429 #undef TARGET_RTX_COSTS
12430 #define TARGET_RTX_COSTS avr_rtx_costs
12431 #undef TARGET_ADDRESS_COST
12432 #define TARGET_ADDRESS_COST avr_address_cost
12433 #undef TARGET_MACHINE_DEPENDENT_REORG
12434 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
12435 #undef TARGET_FUNCTION_ARG
12436 #define TARGET_FUNCTION_ARG avr_function_arg
12437 #undef TARGET_FUNCTION_ARG_ADVANCE
12438 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
12440 #undef TARGET_SET_CURRENT_FUNCTION
12441 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
12443 #undef TARGET_RETURN_IN_MEMORY
12444 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
12446 #undef TARGET_STRICT_ARGUMENT_NAMING
12447 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
12449 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
12450 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
12452 #undef TARGET_HARD_REGNO_SCRATCH_OK
12453 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
12454 #undef TARGET_CASE_VALUES_THRESHOLD
12455 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
12457 #undef TARGET_FRAME_POINTER_REQUIRED
12458 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
12459 #undef TARGET_CAN_ELIMINATE
12460 #define TARGET_CAN_ELIMINATE avr_can_eliminate
12462 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
12463 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
12465 #undef TARGET_WARN_FUNC_RETURN
12466 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
12468 #undef TARGET_CLASS_LIKELY_SPILLED_P
12469 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
12471 #undef TARGET_OPTION_OVERRIDE
12472 #define TARGET_OPTION_OVERRIDE avr_option_override
12474 #undef TARGET_CANNOT_MODIFY_JUMPS_P
12475 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
12477 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
12478 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
12480 #undef TARGET_INIT_BUILTINS
12481 #define TARGET_INIT_BUILTINS avr_init_builtins
12483 #undef TARGET_BUILTIN_DECL
12484 #define TARGET_BUILTIN_DECL avr_builtin_decl
12486 #undef TARGET_EXPAND_BUILTIN
12487 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
12489 #undef TARGET_FOLD_BUILTIN
12490 #define TARGET_FOLD_BUILTIN avr_fold_builtin
12492 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
12493 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
12495 #undef TARGET_SCALAR_MODE_SUPPORTED_P
12496 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
12498 #undef TARGET_BUILD_BUILTIN_VA_LIST
12499 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
12501 #undef TARGET_FIXED_POINT_SUPPORTED_P
12502 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
12504 #undef TARGET_CONVERT_TO_TYPE
12505 #define TARGET_CONVERT_TO_TYPE avr_convert_to_type
12507 #undef TARGET_ADDR_SPACE_SUBSET_P
12508 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
12510 #undef TARGET_ADDR_SPACE_CONVERT
12511 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
12513 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
12514 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
12516 #undef TARGET_ADDR_SPACE_POINTER_MODE
12517 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
12519 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
12520 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
12521 avr_addr_space_legitimate_address_p
12523 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
12524 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
12526 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
12527 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
12529 #undef TARGET_SECONDARY_RELOAD
12530 #define TARGET_SECONDARY_RELOAD avr_secondary_reload
12532 #undef TARGET_PRINT_OPERAND
12533 #define TARGET_PRINT_OPERAND avr_print_operand
12534 #undef TARGET_PRINT_OPERAND_ADDRESS
12535 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
12536 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
12537 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
12539 struct gcc_target targetm
= TARGET_INITIALIZER
;
12542 #include "gt-avr.h"