/* Subroutines for insn-output.c for ATMEL AVR micro controllers
   Copyright (C) 1998-2015 Free Software Foundation, Inc.
   Contributed by Denis Chertykov (chertykov@gmail.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "double-int.h"
#include "fold-const.h"
#include "print-tree.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "statistics.h"
#include "fixed-value.h"
#include "c-family/c-common.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "target-def.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "tree-pass.h"
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed) GET_MODE_SIZE (MODE))
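/* Editorial example, not from the original source: for HImode (2 bytes)
   this yields 64 - 2 = 62, i.e. the largest displacement d such that
   LDD/STD can still reach the high byte at d+1 within their 0..63
   displacement range.  */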
/* Return true if STR starts with PREFIX and false, otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
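/* Editorial example, not from the original source.  Typical use is testing
   section or symbol names; note that PREFIX is evaluated twice, so it must
   be free of side effects:

       if (STR_PREFIX_P (name, "__vector"))
         ...   // NAME looks like an interrupt vector name
*/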
/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)

/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
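/* Editorial example, not from the original source: the SET/GET macros
   above form an encode/decode pair over the same 4-bit field, so for a
   symbol SYM and address space AS

       AVR_SYMBOL_SET_ADDR_SPACE (sym, ADDR_SPACE_FLASH1);
       gcc_assert (ADDR_SPACE_FLASH1 == AVR_SYMBOL_GET_ADDR_SPACE (sym));

   holds: multiplying by SYMBOL_FLAG_MACH_DEP shifts AS up into the field,
   dividing by it shifts the field back down.  */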
#define TINY_ADIW(REG1, REG2, I)                                \
  "subi " #REG1 ",lo8(-(" #I "))" CR_TAB                        \
  "sbci " #REG2 ",hi8(-(" #I "))"

#define TINY_SBIW(REG1, REG2, I)                                \
  "subi " #REG1 ",lo8((" #I "))" CR_TAB                         \
  "sbci " #REG2 ",hi8((" #I "))"

#define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
#define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
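/* Editorial note, not from the original source: TINY_ADIW / TINY_SBIW
   emulate the ADIW / SBIW instructions that AVR_TINY cores lack.  For
   example, TINY_ADIW (%A0, %B0, 4) expands to the assembler template

       "subi %A0,lo8(-(4))" CR_TAB "sbci %B0,hi8(-(4))"

   i.e. a 16-bit addition of 4 written as a subtraction of -4.  */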
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).  */

const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
{
  { ADDR_SPACE_RAM,    0, 2, "",         0, NULL },
  { ADDR_SPACE_FLASH,  1, 2, "__flash",  0, ".progmem.data" },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
  { ADDR_SPACE_MEMX,   1, 3, "__memx",   0, ".progmemx.data" },
};
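/* Editorial example, not from the original source: because the table is
   indexed by the address-space number, properties can be looked up
   directly, e.g. (assuming the avr_addrspace_t field names from avr.h)

       avr_addrspace[ADDR_SPACE_MEMX].pointer_size   // 3 bytes

   which is why the row order above must match the enum in avr.h.  */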
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

static avr_addr_t avr_addr;
/* Prototypes for local helper functions.  */

static const char* out_movqi_r_mr (rtx_insn *, rtx [], int*);
static const char* out_movhi_r_mr (rtx_insn *, rtx [], int*);
static const char* out_movsi_r_mr (rtx_insn *, rtx [], int*);
static const char* out_movqi_mr_r (rtx_insn *, rtx [], int*);
static const char* out_movhi_mr_r (rtx_insn *, rtx [], int*);
static const char* out_movsi_mr_r (rtx_insn *, rtx [], int*);

static int get_sequence_length (rtx_insn *insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (machine_mode, const_tree);
static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
                                 int, bool);
static void output_reload_in_const (rtx*, rtx, int*, bool);
static struct machine_function * avr_init_machine_status (void);


/* Prototypes for hook implementors if needed before their implementation.  */

static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
/* Allocate registers from r25 to r8 for parameters for function calls.  */
#define FIRST_CUM_REG 26

/* Last call saved register */
#define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)

/* Implicit target register of LPM instruction (R0) */
extern GTY(()) rtx lpm_reg_rtx;
rtx lpm_reg_rtx;

/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
extern GTY(()) rtx lpm_addr_reg_rtx;
rtx lpm_addr_reg_rtx;

/* Temporary register RTX (reg:QI TMP_REGNO) */
extern GTY(()) rtx tmp_reg_rtx;
rtx tmp_reg_rtx;

/* Zeroed register RTX (reg:QI ZERO_REGNO) */
extern GTY(()) rtx zero_reg_rtx;
rtx zero_reg_rtx;

/* RTXs for all general purpose registers as QImode */
extern GTY(()) rtx all_regs_rtx[32];
rtx all_regs_rtx[32];

/* SREG, the processor status */
extern GTY(()) rtx sreg_rtx;
rtx sreg_rtx;

/* RAMP* special function registers */
extern GTY(()) rtx rampd_rtx;
extern GTY(()) rtx rampx_rtx;
extern GTY(()) rtx rampy_rtx;
extern GTY(()) rtx rampz_rtx;
rtx rampd_rtx;
rtx rampx_rtx;
rtx rampy_rtx;
rtx rampz_rtx;

/* RTX containing the strings "" and "e", respectively */
static GTY(()) rtx xstring_empty;
static GTY(()) rtx xstring_e;

/* Current architecture.  */
const avr_arch_t *avr_arch;

/* Section to put switch tables in.  */
static GTY(()) section *progmem_swtable_section;

/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
   or to address space __flash* or __memx.  Only used as singletons inside
   avr_asm_select_section, but it must not be local there because of GTY.  */
static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];

/* Condition for insns/expanders from avr-dimode.md.  */
bool avr_have_dimode = true;

/* To track if code will use .bss and/or .data.  */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;
/* Transform UP into lowercase and write the result to LO.
   You must provide enough space for LO.  Return LO.  */

static char*
avr_tolower (char *lo, const char *up)
{
  char *lo0 = lo;

  for (; *up; up++, lo++)
    *lo = TOLOWER (*up);

  *lo = '\0';

  return lo0;
}
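/* Editorial example, not from the original source:

       char buf[8];
       avr_tolower (buf, "AVR5");   // buf now holds "avr5"
*/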
/* Custom function to count number of set bits.  */

static inline int
avr_popcount (unsigned int val)
{
  int pop = 0;

  /* Clear the lowest set bit on each iteration (Kernighan's method).  */
  while (val)
    {
      val &= val-1;
      pop++;
    }

  return pop;
}
/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  machine_mode mode = GET_MODE (xval);

  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
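/* Editorial example, not from the original source: POP_MASK is a set of
   admissible popcounts encoded as bits.  To check that each of the two
   low bytes of XVAL is either zero or a power of 2 (popcount 0 or 1):

       if (avr_popcount_each_byte (xval, 2, (1 << 0) | (1 << 1)))
         ...
*/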
/* Access some RTX as INT_MODE.  If X is a CONST_FIXED we can get
   the bit representation of X by "casting" it to CONST_INT.  */

rtx
avr_to_int_mode (rtx x)
{
  machine_mode mode = GET_MODE (x);

  return VOIDmode == mode
    ? x
    : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
}
static const pass_data avr_pass_data_recompute_notes =
{
  RTL_PASS,       // type
  "",             // name (will be patched)
  OPTGROUP_NONE,  // optinfo_flags
  TV_DF_SCAN,     // tv_id
  0,              // properties_required
  0,              // properties_provided
  0,              // properties_destroyed
  0,              // todo_flags_start
  TODO_df_finish | TODO_df_verify // todo_flags_finish
};


class avr_pass_recompute_notes : public rtl_opt_pass
{
public:
  avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
  {
    this->name = name;
  }

  virtual unsigned int execute (function *)
  {
    df_note_add_problem ();
    df_analyze ();

    return 0;
  }
}; // avr_pass_recompute_notes
static void
avr_register_passes (void)
{
  /* This avr-specific pass (re)computes insn notes, in particular REG_DEAD
     notes which are used by `avr.c::reg_unused_after' and branch offset
     computations.  These notes must be correct, i.e. there must be no
     dangling REG_DEAD notes; otherwise wrong code might result, cf. PR64331.

     DF needs (correct) CFG, hence right before free_cfg is the last
     opportunity to rectify notes.  */

  register_pass (new avr_pass_recompute_notes (g, "avr-notes-free-cfg"),
                 PASS_POS_INSERT_BEFORE, "*free_cfg", 1);
}
/* Set `avr_arch' as specified by `-mmcu='.
   Return true on success.  */

static bool
avr_set_core_architecture (void)
{
  /* Search for mcu core architecture.  */

  if (NULL == avr_mmcu)
    avr_mmcu = AVR_MMCU_DEFAULT;

  avr_arch = &avr_arch_types[0];

  for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
    {
      if (NULL == mcu->name)
        {
          /* Reached the end of `avr_mcu_types'.  This should actually never
             happen as options are provided by device-specs.  It could be a
             typo in a device-specs or calling the compiler proper directly
             with -mmcu=<device>.  */

          error ("unknown core architecture %qs specified with %qs",
                 avr_mmcu, "-mmcu=");
          avr_inform_core_architectures ();
          break;
        }
      else if (0 == strcmp (mcu->name, avr_mmcu)
               // Is this a proper architecture ?
               && NULL == mcu->macro)
        {
          avr_arch = &avr_arch_types[mcu->arch_id];
          avr_n_flash = mcu->n_flash;

          return true;
        }
    }

  return false;
}
/* Implement `TARGET_OPTION_OVERRIDE'.  */

static void
avr_option_override (void)
{
  /* Disable -fdelete-null-pointer-checks option for AVR target.
     With this option the compiler assumes that dereferencing a null
     pointer would halt the program.  For AVR this assumption is not true
     and programs can safely dereference null pointers.  Changes made by
     this option may not work properly for AVR.  So disable this option.  */

  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save and restore them around
     calls in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no longer able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (flag_pie == 1)
    warning (OPT_fpie, "-fpie is not supported");
  if (flag_pie == 2)
    warning (OPT_fPIE, "-fPIE is not supported");

  if (!avr_set_core_architecture())
    return;

  /* RAM addresses of some SFRs common to all devices in respective arch.  */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
  avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();

  /* Register some avr-specific pass(es).  There is no canonical place for
     pass registration.  This function is convenient.  */

  avr_register_passes ();
}
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton.  */

void
avr_init_expanders (void)
{
  int regno;

  for (regno = 0; regno < 32; regno++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx  = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx  = all_regs_rtx[AVR_TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  sreg_rtx  = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");

  /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
     to be used.  Disable the DImode insns/expanders on TINY then.  */

  if (AVR_TINY)
    avr_have_dimode = false;
}
/* Implement `REGNO_REG_CLASS'.  */
/* Return register class for register R.  */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, r27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}
/* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'.  */

static bool
avr_scalar_mode_supported_p (machine_mode mode)
{
  if (ALL_FIXED_POINT_MODE_P (mode))
    return true;

  if (PSImode == mode)
    return true;

  return default_scalar_mode_supported_p (mode);
}
/* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise.  */

bool
avr_decl_flash_p (tree decl)
{
  if (TREE_CODE (decl) != VAR_DECL
      || TREE_TYPE (decl) == error_mark_node)
    {
      return false;
    }

  return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
}


/* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
   address space and FALSE, otherwise.  */

bool
avr_decl_memx_p (tree decl)
{
  if (TREE_CODE (decl) != VAR_DECL
      || TREE_TYPE (decl) == error_mark_node)
    {
      return false;
    }

  return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
}


/* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise.  */

bool
avr_mem_flash_p (rtx x)
{
  return (MEM_P (x)
          && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
}


/* Return TRUE if X is a MEM rtx located in the 24-bit flash
   address space and FALSE, otherwise.  */

bool
avr_mem_memx_p (rtx x)
{
  return (MEM_P (x)
          && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
}
/* A helper for the subsequent function attribute used to dig for
   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE  */

static inline int
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}

/* Return nonzero if FUNC is a naked function.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes.  */

static void
avr_set_current_function (tree decl)
{
  location_t loc;
  const char *isr;

  if (decl == NULL_TREE
      || current_function_decl == NULL_TREE
      || current_function_decl == error_mark_node
      || ! cfun->machine
      || cfun->machine->attributes_checked_p)
    return;

  loc = DECL_SOURCE_LOCATION (decl);

  cfun->machine->is_naked = avr_naked_function_p (decl);
  cfun->machine->is_signal = avr_signal_function_p (decl);
  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);

  isr = cfun->machine->is_interrupt ? "interrupt" : "signal";

  /* Too many attributes make no sense as they request conflicting features.  */

  if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
      + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
    error_at (loc, "function attributes %qs, %qs and %qs are mutually"
              " exclusive", "OS_task", "OS_main", isr);

  /* 'naked' will hide effects of 'OS_task' and 'OS_main'.  */

  if (cfun->machine->is_naked
      && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
                " no effect on %qs function", "OS_task", "OS_main", "naked");

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
      tree ret = TREE_TYPE (TREE_TYPE (decl));
      const char *name;

      name = DECL_ASSEMBLER_NAME_SET_P (decl)
        ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
        : IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Skip a leading '*' that might still prefix the assembler name,
         e.g. in non-LTO runs.  */

      name = default_strip_name_encoding (name);

      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC started
         using this when it switched from SIGNAL and INTERRUPT to ISR.  */

      if (cfun->machine->is_interrupt)
        cfun->machine->is_signal = 0;

      /* Interrupt handlers must be  void __vector (void)  functions.  */

      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
        error_at (loc, "%qs function cannot have arguments", isr);

      if (TREE_CODE (ret) != VOID_TYPE)
        error_at (loc, "%qs function cannot return a value", isr);

      /* If the function has the 'signal' or 'interrupt' attribute, ensure
         that the name of the function is "__vector_NN" so as to catch
         when the user misspells the vector name.  */

      if (!STR_PREFIX_P (name, "__vector"))
        warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
                    name, isr);
    }

  /* Don't print the above diagnostics more than once.  */

  cfun->machine->attributes_checked_p = 1;
}
/* Implement `ACCUMULATE_OUTGOING_ARGS'.  */

int
avr_accumulate_outgoing_args (void)
{
  if (!cfun)
    return TARGET_ACCUMULATE_OUTGOING_ARGS;

  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
     what offset is correct.  In some cases it is relative to
     virtual_outgoing_args_rtx and in others it is relative to
     virtual_stack_vars_rtx.  For example code see
         gcc.c-torture/execute/built-in-setjmp.c
         gcc.c-torture/execute/builtins/sprintf-chk.c  */

  return (TARGET_ACCUMULATE_OUTGOING_ARGS
          && !(cfun->calls_setjmp
               || cfun->has_nonlocal_label));
}
/* Report contribution of accumulated outgoing arguments to stack size.  */

static inline int
avr_outgoing_args_size (void)
{
  return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
}


/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
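/* Editorial note, not from the original source: the "+ 1" reflects AVR's
   push semantics (PUSH stores the byte and post-decrements SP, so SP
   always points one byte below the last occupied slot), while
   avr_outgoing_args_size () accounts for the block reserved for outgoing
   arguments when ACCUMULATE_OUTGOING_ARGS is active.  */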
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS'  */

static bool
avr_allocate_stack_slots_for_args (void)
{
  return !cfun->machine->is_naked;
}


/* Return true if register FROM can be eliminated via register TO.  */

static bool
avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
          || !frame_pointer_needed);
}


/* Implement `TARGET_WARN_FUNC_RETURN'.  */

static bool
avr_warn_func_return (tree decl)
{
  /* Naked functions are implemented entirely in assembly, including the
     return sequence, so suppress warnings about this.  */

  return !avr_naked_function_p (decl);
}
/* Compute offset between arg_pointer and frame_pointer.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      int offset = frame_pointer_needed ? 2 : 0;
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return (get_frame_size () + avr_outgoing_args_size()
              + avr_pc_size + 1 + offset);
    }
}
/* Helper for the function below.  */

static void
avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
{
  *node = make_node (FIXED_POINT_TYPE);
  TYPE_SATURATING (*node) = sat_p;
  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
  TYPE_ALIGN (*node) = 8;
  SET_TYPE_MODE (*node, mode);

  layout_type (*node);
}
/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  rtx xval = gen_reg_rtx (Pmode);
  emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
                         gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
  return xval;
}
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is the return address of the function.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "%<builtin_return_address%> contains only 2 bytes"
               " of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
/* Return 1 if the function epilogue is just a single "ret".  */

int
avr_simple_epilogue (void)
{
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_outgoing_args_size() == 0
          && avr_regs_to_save (NULL) == 0
          && ! cfun->machine->is_interrupt
          && ! cfun->machine->is_signal
          && ! cfun->machine->is_naked
          && ! TREE_THIS_VOLATILE (current_function_decl));
}
/* This function checks sequence of live registers.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;
  int cur_seq = 0;

  for (reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }

  return (cur_seq == live_seq) ? live_seq : 0;
}
/* Obtain the length sequence of insns.  */

static int
get_sequence_length (rtx_insn *insns)
{
  rtx_insn *insn;
  int length;

  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);

  return length;
}
/* Implement `INCOMING_RETURN_ADDR_RTX'.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
}

/* Helper for expand_prologue.  Emit a push of a byte register.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg;
  rtx_insn *insn;

  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}
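/* Editorial note, not from the original source: the POST_DEC memory
   reference above models the PUSH instruction (store the byte, then
   decrement SP), which is also why avr_incoming_return_addr_rtx () has
   to add 1 to the stack pointer to address the return address.  */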
/* Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
   SFR is a MEM representing the memory location of the SFR.
   If CLR_P then clear the SFR after the push using zero_reg.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx_insn *insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (AVR_TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx_insn *insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main
                   && !AVR_TINY);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET ((frame_pointer_needed
                                  ? frame_pointer_rtx
                                  : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /*  Creating a frame can be done by direct manipulation of the
              stack or via the frame pointer.  These two methods are:
                  fp =  sp
                  fp -= size
                  sp =  fp
              or
                  sp -= size
                  fp =  sp    (*)
              the optimum method depends on function type, stack and
              frame size.  To avoid a complex logic, both methods are
              tested and shortest is selected.

              There is also the case where SIZE != 0 and no frame pointer is
              needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
              In that case, insn (*) is not needed.
              We use the X register as scratch.  This is safe because X
              is call-clobbered.
                 In an interrupt routine, the case of SIZE != 0 together with
              !frame_pointer_needed can only occur if the function is not a
              leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx_insn *fp_plus_insns;
          rtx fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, plus_constant (Pmode, fp,
                                                            -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx_insn *sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            emit_insn (fp_plus_insns);

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
/*  Output function prologue.  */

void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    return;

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (AVR_ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (AVR_TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
/* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'.  */
/* Output summary at end of function prologue.  */

static void
avr_asm_function_end_prologue (FILE *file)
{
  if (cfun->machine->is_naked)
    {
      fputs ("/* prologue: naked */\n", file);
    }
  else
    {
      if (cfun->machine->is_interrupt)
        {
          fputs ("/* prologue: Interrupt */\n", file);
        }
      else if (cfun->machine->is_signal)
        {
          fputs ("/* prologue: Signal */\n", file);
        }
      else
        fputs ("/* prologue: function */\n", file);
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    fprintf (file, "/* outgoing args size = %d */\n",
             avr_outgoing_args_size());

  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
           get_frame_size());
  fprintf (file, "/* stack size = %d */\n",
           cfun->machine->stack_usage);
  /* Create symbol stack offset here so all functions have it.  Add 1 to stack
     usage for offset so that SP + .L__stack_offset = return address.  */
  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
}
/* Implement `EPILOGUE_USES'.  */

int
avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
{
  if (reload_completed
      && cfun->machine
      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
    return 1;
  return 0;
}

/* Helper for avr_expand_epilogue.  Emit a pop of a byte register.  */

static void
emit_pop_byte (unsigned regno)
{
  rtx mem, reg;

  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  emit_insn (gen_rtx_SET (reg, mem));
}
/*  Output RTL epilogue.  */

void
avr_expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && !AVR_TINY);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /*  Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx_insn *fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /**********  Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /**********  Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx_insn *sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (AVR_TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (AVR_TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (AVR_ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}


/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */

static bool
avr_cannot_modify_jumps_p (void)
{
  /* Naked Functions must not have any instructions after
     their epilogue, see PR42240  */

  if (reload_completed
      && cfun->machine
      && cfun->machine->is_naked)
    {
      return true;
    }

  return false;
}
/* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'.  */

static bool
avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
{
  /* FIXME:  Non-generic addresses are not mode-dependent in themselves.
       This hook just serves to hack around PR rtl-optimization/52543 by
       claiming that non-generic addresses were mode-dependent so that
       lower-subreg.c will skip these addresses.  lower-subreg.c sets up fake
       RTXes to probe SET and MEM costs and assumes that MEM is always in the
       generic address space which is not true.  */

  return !ADDR_SPACE_GENERIC_P (as);
}
/* Helper function for `avr_legitimate_address_p'.  */

static inline bool
avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
                       RTX_CODE outer_code, bool strict)
{
  return (REG_P (reg)
          && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
                                                 as, outer_code, UNKNOWN)
              || (!strict
                  && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
}
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  */

static bool
avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (AVR_TINY
      && CONSTANT_ADDRESS_P (x))
    {
      /* avrtiny's load / store instructions only cover addresses 0..0xbf:
         IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf.  */

      ok = (CONST_INT_P (x)
            && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)));
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
   now only a helper for avr_addr_space_legitimize_address.  */
/* Attempts to replace X with a valid
   memory address for an operand of mode MODE  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
/* This will allow register R26/27 to be used where it is no worse than normal
   base pointers R28/29 or R30/31.  For example, if base offset is greater
   than 63 bytes or for R++ or --R addressing.  */

rtx
avr_legitimize_reload_address (rtx *px, machine_mode mode,
                               int opnum, int type, int addr_type,
                               int ind_levels ATTRIBUTE_UNUSED,
                               rtx (*mk_memloc)(rtx,int))
{
  rtx x = *px;

  if (avr_log.legitimize_reload_address)
    avr_edump ("\n%?:%m %r\n", mode, x);

  if (1 && (GET_CODE (x) == POST_INC
            || GET_CODE (x) == PRE_DEC))
    {
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
                   opnum, RELOAD_OTHER);

      if (avr_log.legitimize_reload_address)
        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));

      return x;
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) >= 1)
    {
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

      if (fit)
        {
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
            {
              int regno = REGNO (XEXP (x, 0));
              rtx mem = mk_memloc (x, regno);

              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
                           1, (enum reload_type) addr_type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);

              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, (enum reload_type) type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           BASE_POINTER_REGS, mem, NULL_RTX);

              return x;
            }
        }
      else if (! (frame_pointer_needed
                  && XEXP (x, 0) == frame_pointer_rtx))
        {
          push_reload (x, NULL_RTX, px, NULL,
                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                       opnum, (enum reload_type) type);

          if (avr_log.legitimize_reload_address)
            avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
                       POINTER_REGS, x, NULL_RTX);

          return x;
        }
    }

  return NULL_RTX;
}
/* Implement `TARGET_SECONDARY_RELOAD'  */

static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
                      reg_class_t reload_class ATTRIBUTE_UNUSED,
                      machine_mode mode, secondary_reload_info *sri)
{
  if (in_p
      && MEM_P (x)
      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
    {
      /* For the non-generic 16-bit spaces we need a d-class scratch.  */

      switch (mode)
        {
        default:
          gcc_unreachable();

        case QImode:  sri->icode = CODE_FOR_reload_inqi; break;
        case QQmode:  sri->icode = CODE_FOR_reload_inqq; break;
        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;

        case HImode:  sri->icode = CODE_FOR_reload_inhi; break;
        case HQmode:  sri->icode = CODE_FOR_reload_inhq; break;
        case HAmode:  sri->icode = CODE_FOR_reload_inha; break;
        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;

        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;

        case SImode:  sri->icode = CODE_FOR_reload_insi; break;
        case SFmode:  sri->icode = CODE_FOR_reload_insf; break;
        case SQmode:  sri->icode = CODE_FOR_reload_insq; break;
        case SAmode:  sri->icode = CODE_FOR_reload_insa; break;
        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
        }
    }

  return NO_REGS;
}
/* Helper function to print assembler resp. track instruction
   sequence lengths.  Always return "".

   If PLEN == NULL:
       Output assembler code from template TPL with operands supplied
       by OPERANDS.  This is just forwarding to output_asm_insn.

   If PLEN != NULL:
       If N_WORDS >= 0  Add N_WORDS to *PLEN.
       If N_WORDS < 0   Set *PLEN to -N_WORDS.
       Don't output anything.  */

static const char*
avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
{
  if (NULL == plen)
    {
      output_asm_insn (tpl, operands);
    }
  else
    {
      if (n_words < 0)
        *plen = -n_words;
      else
        *plen += n_words;
    }

  return "";
}
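/* Editorial example, not from the original source: the PLEN convention
   lets one worker both emit code and compute lengths.  A caller like

       avr_asm_len ("mov %A0,%A1" CR_TAB
                    "mov %B0,%B1", operands, plen, 2);

   outputs two instructions when PLEN == NULL and merely adds 2 to *PLEN
   otherwise.  */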
/* Return a pointer register name as a string.  */

static const char*
ptrreg_to_str (int regno)
{
  switch (regno)
    {
    case REG_X: return "X";
    case REG_Y: return "Y";
    case REG_Z: return "Z";
    default:
      output_operand_lossage ("address operand requires constraint for"
                              " X, Y, or Z register");
    }
  return NULL;
}

/* Return the condition name as a string.
   Used in conditional jump constructing  */

static const char*
cond_string (enum rtx_code code)
{
  switch (code)
    {
    case NE:
      return "ne";
    case EQ:
      return "eq";
    case GE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "pl";
      else
        return "ge";
    case LT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "mi";
      else
        return "lt";
    case GEU:
      return "sh";
    case LTU:
      return "lo";
    default:
      gcc_unreachable ();
    }

  return "";
}
/* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
/* Output ADDR to FILE as address.  */

static void
avr_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
            {
              /* Assembler gs() will implant word address.  Make offset
                 a byte offset inside gs() for assembler.  This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas.  For 128K and smaller devices this is ok.
                 For large devices it will create a trampoline to offset
                 from symbol which may not be what the user really wanted.  */

              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x,0));
              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
                       2 * INTVAL (XEXP (x, 1)));
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol may be incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf(stderr,"\n");
                  }
            }
          else
            {
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  return code == '~' || code == '!';
}
/* Implement `TARGET_PRINT_OPERAND'.  */
/* Output X as assembler operand to file FILE.
   For a description of supported %-codes, see top of avr.md.  */

static void
avr_print_operand (FILE *file, rtx x, int code)
{
  int abcd = 0, ef = 0, ij = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';
  else if (code == 'E' || code == 'F')
    ef = code - 'E';
  else if (code == 'I' || code == 'J')
    ij = code - 'I';

  if (code == '~')
    {
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (code == 'E' || code == 'F')
    {
      rtx op = XEXP(x, 0);
      fprintf (file, reg_names[REGNO (op) + ef]);
    }
  else if (code == 'I' || code == 'J')
    {
      rtx op = XEXP(XEXP(x, 0), 0);
      fprintf (file, reg_names[REGNO (op) + ij]);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else if (code == 'r' && REGNO (x) < 32)
        fprintf (file, "%d", (int) REGNO (x));
      else
        fprintf (file, reg_names[REGNO (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
          else
            {
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section  */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf(stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'b')
        {
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand_address (file, XEXP (addr, 0));
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            avr_print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
          else
            avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, XEXP (addr,0));
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error.  Bad address:"
                        ,addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr,1), code);
        }
      else
        avr_print_operand_address (file, addr);
    }
  else if (code == 'i')
    {
      if (GET_CODE (x) == SYMBOL_REF && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
        avr_print_operand_address
          (file, plus_constant (HImode, x, -avr_arch->sfr_offset));
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call  */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf(stderr,"\n");
          }
      /* Use normal symbol for direct address no linker trampoline needed  */
      output_addr_const (file, x);
    }
  else if (CONST_FIXED_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
      if (code != 0)
        output_operand_lossage ("Unsupported code '%c' for fixed-point:",
                                code);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
    }
  else if (GET_CODE (x) == CONST_DOUBLE)
    {
      long val;
      REAL_VALUE_TYPE rv;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error.  Unknown mode:", x);
      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, x);
}
/* Worker function for `NOTICE_UPDATE_CC'.  */
/* Update the condition code in the INSN.  */

void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:
            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;
            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_VZN:
      /* Insn like INC, DEC, NEG that set Z,N,V.  We currently don't make use
         of this combination, cf. also PR61055.  */
      CC_STATUS_INIT;
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
/* Choose mode for jump insn:
   1 - relative jump in range -63 <= x <= 62 ;
   2 - relative jump in range -2046 <= x <= 2045 ;
   3 - absolute jump (only for ATmega[16]03).  */

int
avr_jump_mode (rtx x, rtx_insn *insn)
{
  int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
                                            ? XEXP (x, 0) : x));
  int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
  int jump_distance = cur_addr - dest_addr;

  if (-63 <= jump_distance && jump_distance <= 62)
    return 1;
  else if (-2046 <= jump_distance && jump_distance <= 2045)
    return 2;
  else if (AVR_HAVE_JMP_CALL)
    return 3;

  return 2;
}
/* Return an AVR conditional jump command.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function.
   If REVERSE nonzero then condition code in X must be reversed.  */

const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return ("br%k1 %0");
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return ("br%j1 %0");
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
/* Worker function for `FINAL_PRESCAN_INSN'.  */
/* Output insn cost for next insn.  */

void
avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
                        int num_operands ATTRIBUTE_UNUSED)
{
  if (avr_log.rtx_costs)
    {
      rtx set = single_set (insn);

      if (set)
        fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
                 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
      else
        fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
                 rtx_cost (PATTERN (insn), INSN, 0,
                           optimize_insn_for_speed_p ()));
    }
}
/* Return 0 if undefined, 1 if always true or always false.  */

int
avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
{
  unsigned int max = (mode == QImode  ? 0xff :
                      mode == HImode  ? 0xffff :
                      mode == PSImode ? 0xffffff :
                      mode == SImode  ? 0xffffffff : 0);
  if (max && op && CONST_INT_P (x))
    {
      if (unsigned_condition (op) != op)
        max >>= 1;

      if (max != (INTVAL (x) & max)
          && INTVAL (x) != 0xff)
        return 1;
    }
  return 0;
}
/* Worker function for `FUNCTION_ARG_REGNO_P'.  */
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
avr_function_arg_regno_p (int r)
{
  return (AVR_TINY ? r >= 20 && r <= 25 : r >= 8 && r <= 25);
}
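/* In other words: on ordinary cores R8..R25 may carry arguments, while on
   reduced Tiny cores only R20..R25 are used for argument passing.  */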
/* Worker function for `INIT_CUMULATIVE_ARGS'.  */
/* Initializing the variable cum for the state at the beginning
   of the argument list.  */

void
avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                          tree fndecl ATTRIBUTE_UNUSED)
{
  cum->nregs = AVR_TINY ? 6 : 18;
  cum->regno = FIRST_CUM_REG;
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail called.  */

  cfun->machine->sibcall_fails = 0;
}
/* Returns the number of registers to allocate for a function argument.  */

static int
avr_num_arg_regs (machine_mode mode, const_tree type)
{
  int size;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  /* Align all function arguments to start in even-numbered registers.
     Odd-sized arguments leave holes above them.  */

  return (size + 1) & ~1;
}
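/* Rounding examples for the expression above (argument size in bytes ->
   registers consumed): 1 -> 2, 2 -> 2, 3 -> 4, 4 -> 4, 5 -> 6.  An
   odd-sized argument thus claims one register it never uses so that the
   next argument again starts in an even register.  */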
/* Implement `TARGET_FUNCTION_ARG'.  */
/* Controls whether a function argument is passed
   in a register, and which register.  */

static rtx
avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
                  const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  if (cum->nregs && bytes <= cum->nregs)
    return gen_rtx_REG (mode, cum->regno - bytes);

  return NULL_RTX;
}
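/* Worked example, assuming FIRST_CUM_REG is 26 as on the standard (non-Tiny)
   ABI: the first HImode argument is assigned 26 - 2 = 24, i.e. the register
   pair R25:R24, and a following SImode argument would start at R20.  */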
/* Implement `TARGET_FUNCTION_ARG_ADVANCE'.  */
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  */

static void
avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (cfun->machine->is_interrupt
      || cfun->machine->is_signal
      || cfun->machine->is_naked
      || avr_naked_function_p (decl_callee)
      /* FIXME: For OS_task and OS_main, this might be over-conservative.  */
      || (avr_OS_task_function_p (decl_callee)
          != cfun->machine->is_OS_task)
      || (avr_OS_main_function_p (decl_callee)
          != cfun->machine->is_OS_main))
    {
      return false;
    }

  return true;
}
/***********************************************************************
  Functions for outputting various mov's for various modes
************************************************************************/

/* Return true if a value of mode MODE is read from flash by
   __load_* function from libgcc.  */

bool
avr_load_libgcc_p (rtx op)
{
  machine_mode mode = GET_MODE (op);
  int n_bytes = GET_MODE_SIZE (mode);

  return (n_bytes > 2
          && !AVR_HAVE_LPMX
          && avr_mem_flash_p (op));
}

/* Return true if a value of mode MODE is read by __xload_* function.  */

bool
avr_xload_libgcc_p (machine_mode mode)
{
  int n_bytes = GET_MODE_SIZE (mode);

  return (n_bytes > 1
          || avr_n_flash > 1);
}
/* Fixme: This is a hack because secondary reloads don't work as expected.

   Find an unused d-register to be used as scratch in INSN.
   EXCLUDE is either NULL_RTX or some register.  In the case where EXCLUDE
   is a register, skip all possible return values that overlap EXCLUDE.
   The policy for the returned register is similar to that of
   `reg_unused_after', i.e. the returned register may overlap the SET_DEST
   of INSN.

   Return a QImode d-register or NULL_RTX if nothing found.  */

static rtx
avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
{
  int regno;
  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  for (regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  return NULL_RTX;
}
/* Helper function for the next function in the case where only restricted
   version of LPM instruction is available.  */

static const char*
avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          avr_asm_len ("%4lpm", xop, plen, 1);

          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm"      CR_TAB
                                "push %3"    CR_TAB
                                "adiw %2,1"  CR_TAB
                                "%4lpm"      CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm"      CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1"  CR_TAB
                       "%4lpm"      CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm"      CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
/* If PLEN == NULL: Output instructions to load a value from a memory location
   OP[1] in AS1 to register OP[0].
   If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
   Return "".  */

static const char*
avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
{
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);
        }
      else
        {
          avr_asm_len ("mov %5,%2"  CR_TAB
                       "ldi %2,%4"  CR_TAB
                       "out %i6,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len                   ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
/* Worker function for xload_8 insn.  */

const char*
avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);

  avr_asm_len ("sbrc %1,7" CR_TAB
               "ld %3,%a2", xop, plen, 2);

  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
const char*
output_movqi (rtx_insn *insn, rtx operands[], int *plen)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, plen);
    }

  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            return avr_asm_len ("out %0,%1", operands, plen, -1);
          else if (test_hard_reg_class (STACK_REG, src))
            return avr_asm_len ("in %0,%1", operands, plen, -1);

          return avr_asm_len ("mov %0,%1", operands, plen, -1);
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, plen, false);
          return "";
        }
      else if (MEM_P (src))
        return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
    }

  return "";
}
const char*
output_movhi (rtx_insn *insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              if (AVR_XMEGA)
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are used.  */

              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)
                : avr_asm_len ("in __tmp_reg__,__SREG__"  CR_TAB
                               "cli"                      CR_TAB
                               "out __SP_H__,%B1"         CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              return !AVR_HAVE_SPH
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)
                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)
            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        }
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
/* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */

static const char*
avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
               "ld %0,%b1" , op, plen, -3);

  if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
      && !reg_unused_after (insn, XEXP (x, 0)))
    avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);

  return "";
}
static const char*
out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      int n_words = AVR_TINY ? 1 : 2;
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -n_words);
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);

      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63"     CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y"            CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
              && !reg_unused_after (insn, XEXP (x, 0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */

static const char*
avr_out_movhi_r_mr_reg_no_disp_tiny (rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_dest == reg_base)         /* R = (R) */
    return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                        "ld %B0,%1"          CR_TAB
                        "mov %A0,__tmp_reg__", op, plen, -3);

  return avr_asm_len ("ld %A0,%1"             CR_TAB
                      TINY_ADIW (%E1, %F1, 1) CR_TAB
                      "ld %B0,%1"             CR_TAB
                      TINY_SBIW (%E1, %F1, 1), op, plen, -6);
}
/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */

static const char*
avr_out_movhi_r_mr_reg_disp_tiny (rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_base == reg_dest)
    {
      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                          "ld __tmp_reg__,%b1+"     CR_TAB
                          "ld %B0,%b1"              CR_TAB
                          "mov %A0,__tmp_reg__", op, plen, -5);
    }
  else
    {
      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                          "ld %A0,%b1+"             CR_TAB
                          "ld %B0,%b1"              CR_TAB
                          TINY_SBIW (%I1, %J1, %o1+1), op, plen, -6);
    }
}
/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */

static const char*
avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  int mem_volatile_p = 0;
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
    fatal_insn ("incorrect insn:", insn);

  if (!mem_volatile_p)
    return avr_asm_len ("ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -2);

  return avr_asm_len (TINY_SBIW (%I1, %J1, 2) CR_TAB
                      "ld %A0,%p1+"           CR_TAB
                      "ld %B0,%p1"            CR_TAB
                      TINY_SBIW (%I1, %J1, 1), op, plen, -6);
}
static const char*
out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);
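  /* Background note: on many AVR devices the hardware latches the high byte
     of a 16-bit I/O register when the low byte is accessed, so a volatile
     16-bit read must go low byte first to observe a consistent value.  */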
  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_no_disp_tiny (op, plen);

      if (reg_dest == reg_base)         /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1"          CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_disp_tiny (op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62"    CR_TAB
                           "ldd %B0,Y+63"    CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y"           CR_TAB
                           "ldd %B0,Y+1"        CR_TAB
                           "subi r28,lo8(%o1)"  CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1"      CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "ld %B0,X"          CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld %A0,X+"    CR_TAB
                         "ld %B0,X"     CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1"         CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);

      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "ld %A0,X+"  CR_TAB
                       "ld %B0,X"   CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2" CR_TAB
                       "ld %A0,%p1" CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      int n_words = AVR_TINY ? 2 : 4;
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
static const char*
avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_dest == reg_base)
    {
      /* "ld r26,-X" is undefined */
      return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
                      "ld %D0,%1"             CR_TAB
                      "ld %C0,-%1"            CR_TAB
                      "ld __tmp_reg__,-%1"    CR_TAB
                      TINY_SBIW (%E1, %F1, 1) CR_TAB
                      "ld %A0,%1"             CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      return *l = 5, ("ld %A0,%1+"         CR_TAB
                      "ld %B0,%1+"         CR_TAB
                      "ld __tmp_reg__,%1+" CR_TAB
                      "ld %D0,%1"          CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, base))
    {
      return *l = 4, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld %C0,%1+" CR_TAB
                      "ld %D0,%1");
    }
  else
    {
      return *l = 6, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld %C0,%1+" CR_TAB
                      "ld %D0,%1"  CR_TAB
                      TINY_SBIW (%E1, %F1, 3));
    }
}
static const char*
avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_dest == reg_base)
    {
      /* "ld r26,-X" is undefined */
      return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
                      "ld %D0,%b1"                CR_TAB
                      "ld %C0,-%b1"               CR_TAB
                      "ld __tmp_reg__,-%b1"       CR_TAB
                      TINY_SBIW (%I1, %J1, 1)     CR_TAB
                      "ld %A0,%b1"                CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+"             CR_TAB
                      "ld %B0,%b1+"             CR_TAB
                      "ld __tmp_reg__,%b1+"     CR_TAB
                      "ld %D0,%b1"              CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, XEXP (base, 0)))
    {
      return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+"             CR_TAB
                      "ld %B0,%b1+"             CR_TAB
                      "ld %C0,%b1+"             CR_TAB
                      "ld %D0,%b1");
    }
  else
    {
      return *l = 8, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+"             CR_TAB
                      "ld %B0,%b1+"             CR_TAB
                      "ld %C0,%b1+"             CR_TAB
                      "ld %D0,%b1"              CR_TAB
                      TINY_SBIW (%I1, %J1, %o1+3));
    }
}
static const char*
out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return *l=7, ("adiw r26,3"        CR_TAB
                          "ld r29,X"          CR_TAB
                          "ld r28,-X"         CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1"        CR_TAB
                          "ld r26,X"          CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            return *l=5, ("ld %A0,X+"          CR_TAB
                          "ld %B0,X+"          CR_TAB
                          "ld __tmp_reg__,X+"  CR_TAB
                          "ld %D0,X"           CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X"  CR_TAB
                          "sbiw r26,3");
        }
      else
        {
          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3"          CR_TAB
                          "ldd %C0,%1+2"          CR_TAB
                          "ldd __tmp_reg__,%1+1"  CR_TAB
                          "ld %A0,%1"             CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1"             CR_TAB
                          "ldd %B0,%1+1"          CR_TAB
                          "ldd __tmp_reg__,%1+2"  CR_TAB
                          "ldd %D0,%1+3"          CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1"    CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60"    CR_TAB
                            "ldd %B0,Y+61"    CR_TAB
                            "ldd %C0,Y+62"    CR_TAB
                            "ldd %D0,Y+63"    CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y"           CR_TAB
                          "ldd %B0,Y+1"        CR_TAB
                          "ldd %C0,Y+2"        CR_TAB
                          "ldd %D0,Y+3"        CR_TAB
                          "subi r28,lo8(%o1)"  CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3"    CR_TAB
                      "ld r29,X"          CR_TAB
                      "ld r28,-X"         CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1"        CR_TAB
                      "ld r26,X"          CR_TAB
                      "mov r27,__tmp_reg__");
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1"      CR_TAB
                    "ld r24,X+"         CR_TAB
                    "ld r25,X+"         CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X"          CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+"    CR_TAB
                  "ld %B0,X+"    CR_TAB
                  "ld %C0,X+"    CR_TAB
                  "ld %D0,X"     CR_TAB
                  "sbiw r26,%o1+3");
        }
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1"          CR_TAB
                      "ldd %C0,%C1"          CR_TAB
                      "ldd __tmp_reg__,%B1"  CR_TAB
                      "ldd %A0,%A1"          CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1"          CR_TAB
                      "ldd %B0,%B1"          CR_TAB
                      "ldd __tmp_reg__,%C1"  CR_TAB
                      "ldd %D0,%D1"          CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          *l = 4;
          return ("in %A0,%i1"   CR_TAB
                  "in %B0,%i1+1" CR_TAB
                  "in %C0,%i1+2" CR_TAB
                  "in %D0,%i1+3");
        }
      else
        {
          *l = AVR_TINY ? 4 : 8;
          return ("lds %A0,%m1"   CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");
        }
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
static const char*
avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      /* "ld r26,-X" is undefined */
      if (reg_unused_after (insn, base))
        {
          return *l = 7, ("mov __tmp_reg__, %B1"  CR_TAB
                          "st %0,%A1"             CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__"    CR_TAB
                          "st %0+,%C1"            CR_TAB
                          "st %0,%D1");
        }
      else
        {
          return *l = 9, ("mov __tmp_reg__, %B1"  CR_TAB
                          "st %0,%A1"             CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__"    CR_TAB
                          "st %0+,%C1"            CR_TAB
                          "st %0,%D1"             CR_TAB
                          TINY_SBIW (%E0, %F0, 3));
        }
    }
  else if (reg_base == reg_src + 2)
    {
      if (reg_unused_after (insn, base))
        return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1"  CR_TAB
                        "st %0+,%A1"           CR_TAB
                        "st %0+,%B1"           CR_TAB
                        "st %0+,__zero_reg__"  CR_TAB
                        "st %0,__tmp_reg__"    CR_TAB
                        "clr __zero_reg__");
      else
        return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1"  CR_TAB
                        "st %0+,%A1"           CR_TAB
                        "st %0+,%B1"           CR_TAB
                        "st %0+,__zero_reg__"  CR_TAB
                        "st %0,__tmp_reg__"    CR_TAB
                        "clr __zero_reg__"     CR_TAB
                        TINY_SBIW (%E0, %F0, 3));
    }

  return *l = 6, ("st %0+,%A1" CR_TAB
                  "st %0+,%B1" CR_TAB
                  "st %0+,%C1" CR_TAB
                  "st %0,%D1"  CR_TAB
                  TINY_SBIW (%E0, %F0, 3));
}
static const char*
avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      *l = 11;
      return ("mov __tmp_reg__,%A2"     CR_TAB
              "mov __zero_reg__,%B2"    CR_TAB
              TINY_ADIW (%I0, %J0, %o0) CR_TAB
              "st %b0+,__tmp_reg__"     CR_TAB
              "st %b0+,__zero_reg__"    CR_TAB
              "st %b0+,%C2"             CR_TAB
              "st %b0,%D2"              CR_TAB
              "clr __zero_reg__"        CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }
  else if (reg_src == reg_base - 2)
    {
      *l = 11;
      return ("mov __tmp_reg__,%C2"     CR_TAB
              "mov __zero_reg__,%D2"    CR_TAB
              TINY_ADIW (%I0, %J0, %o0) CR_TAB
              "st %b0+,%A0"             CR_TAB
              "st %b0+,%B0"             CR_TAB
              "st %b0+,__tmp_reg__"     CR_TAB
              "st %b0,__zero_reg__"     CR_TAB
              "clr __zero_reg__"        CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }
  *l = 8;
  return (TINY_ADIW (%I0, %J0, %o0) CR_TAB
          "st %b0+,%A1"             CR_TAB
          "st %b0+,%B1"             CR_TAB
          "st %b0+,%C1"             CR_TAB
          "st %b0,%D1"              CR_TAB
          TINY_SBIW (%I0, %J0, %o0+3));
}
static const char*
out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          return *l=4,("out %i0, %A1"  CR_TAB
                       "out %i0+1,%B1" CR_TAB
                       "out %i0+2,%C1" CR_TAB
                       "out %i0+3,%D1");
        }
      else
        {
          *l = AVR_TINY ? 4 : 8;
          return ("sts %m0,%A1"   CR_TAB
                  "sts %m0+1,%B1" CR_TAB
                  "sts %m0+2,%C1" CR_TAB
                  "sts %m0+3,%D1");
        }
    }

  if (reg_base > 0)                 /* (r) */
    {
      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26"            CR_TAB
                              "adiw r26,1"          CR_TAB
                              "st X+,__tmp_reg__"   CR_TAB
                              "st X+,r28"           CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26"            CR_TAB
                              "adiw r26,1"          CR_TAB
                              "st X+,__tmp_reg__"   CR_TAB
                              "st X+,r28"           CR_TAB
                              "st X,r29"            CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__"     CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1"  CR_TAB
                        "sbiw r26,3");
        }
      else
        return *l=4, ("st %0,%A1"    CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_disp_tiny (op, l);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1"    CR_TAB
                            "std Y+61,%B1"    CR_TAB
                            "std Y+62,%C1"    CR_TAB
                            "std Y+63,%D1"    CR_TAB
                            "sbiw r28,%o0-60");

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1"           CR_TAB
                          "std Y+1,%B1"        CR_TAB
                          "std Y+2,%C1"        CR_TAB
                          "std Y+3,%D1"        CR_TAB
                          "subi r28,lo8(%o0)"  CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              *l = 9;
              return ("mov __tmp_reg__,r26"  CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0"         CR_TAB
                      "st X+,__tmp_reg__"    CR_TAB
                      "st X+,__zero_reg__"   CR_TAB
                      "st X+,r28"            CR_TAB
                      "st X,r29"             CR_TAB
                      "clr __zero_reg__"     CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              *l = 9;
              return ("mov __tmp_reg__,r26"  CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0"         CR_TAB
                      "st X+,r24"            CR_TAB
                      "st X+,r25"            CR_TAB
                      "st X+,__tmp_reg__"    CR_TAB
                      "st X,__zero_reg__"    CR_TAB
                      "clr __zero_reg__"     CR_TAB
                      "sbiw r26,%o0+3");
            }
          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1"    CR_TAB
                  "st X+,%B1"    CR_TAB
                  "st X+,%C1"    CR_TAB
                  "st X,%D1"     CR_TAB
                  "sbiw r26,%o0+3");
        }
      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
const char*
output_movsisf (rtx_insn *insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (true_regnum (dest) > true_regnum (src))
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %C0,%C1" CR_TAB
                          "movw %A0,%A1");
                }
              *l = 4;
              return ("mov %D0,%D1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %A0,%A1");
            }
          else
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %A0,%A1" CR_TAB
                          "movw %C0,%C1");
                }
              *l = 4;
              return ("mov %A0,%A1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %D0,%D1");
            }
        }
      else if (CONSTANT_P (src))
        {
          return output_reload_insisf (operands, NULL_RTX, real_l);
        }
      else if (MEM_P (src))
        return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      const char *templ;

      if (src == CONST0_RTX (GET_MODE (dest)))
        operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
/* Handle loads of 24-bit types from memory to register.  */

static const char*
avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base == reg_dest)
    {
      return avr_asm_len (TINY_ADIW (%E1, %F1, 2) CR_TAB
                          "ld %C0,%1"             CR_TAB
                          "ld __tmp_reg__,-%1"    CR_TAB
                          TINY_SBIW (%E1, %F1, 1) CR_TAB
                          "ld %A0,%1"             CR_TAB
                          "mov %B0,__tmp_reg__", op, plen, -8);
    }
  else
    {
      avr_asm_len ("ld %A0,%1+" CR_TAB
                   "ld %B0,%1+" CR_TAB
                   "ld %C0,%1", op, plen, -3);

      if (reg_dest != reg_base - 2 &&
          !reg_unused_after (insn, base))
        {
          avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
        }

      return "";
    }
}
static const char*
avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  reg_base = true_regnum (XEXP (base, 0));
  if (reg_base == reg_dest)
    {
      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
                          "ld %C0,%b1"                CR_TAB
                          "ld __tmp_reg__,-%b1"       CR_TAB
                          TINY_SBIW (%I1, %J1, 1)     CR_TAB
                          "ld %A0,%b1"                CR_TAB
                          "mov %B0,__tmp_reg__", op, plen, -8);
    }
  else
    {
      avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                   "ld %A0,%b1+"             CR_TAB
                   "ld %B0,%b1+"             CR_TAB
                   "ld %C0,%b1", op, plen, -5);

      if (reg_dest != (reg_base - 2)
          && !reg_unused_after (insn, XEXP (base, 0)))
        avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);

      return "";
    }
}
static const char*
avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2"        CR_TAB
                                "ld r28,X"          CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1"        CR_TAB
                                "ld r26,X"          CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            return avr_asm_len ("ldd %C0,%1+2"          CR_TAB
                                "ldd __tmp_reg__,%1+1"  CR_TAB
                                "ld  %A0,%1"            CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld  %A0,%1"   CR_TAB
                                "ldd %B0,%1+1" CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_load_psi_reg_disp_tiny (insn, op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61"    CR_TAB
                                "ldd %B0,Y+62"    CR_TAB
                                "ldd %C0,Y+63"    CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld  %A0,Y"          CR_TAB
                              "ldd %B0,Y+1"        CR_TAB
                              "ldd %C0,Y+2"        CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2"    CR_TAB
                                  "ld r28,X"          CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1"        CR_TAB
                                  "ld r26,X"          CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+"    CR_TAB
                       "ld %B0,X+"    CR_TAB
                       "ld %C0,X", op, plen, -4);

          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1"          CR_TAB
                            "ldd __tmp_reg__,%B1"  CR_TAB
                            "ldd %A0,%A1"          CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);
  else if (CONSTANT_ADDRESS_P (base))
    {
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("lds %A0,%m1"   CR_TAB
                          "lds %B0,%m1+1" CR_TAB
                          "lds %C0,%m1+2", op, plen, -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
static const char*
avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      avr_asm_len ("st %0,%A1"             CR_TAB
                   "mov __tmp_reg__,%B1"   CR_TAB
                   TINY_ADIW (%E0, %F0, 1) CR_TAB /* st X+, r27 is undefined */
                   "st %0+,__tmp_reg__"    CR_TAB
                   "st %0,%C1", op, plen, -6);
    }
  else if (reg_src == reg_base - 2)
    {
      avr_asm_len ("st %0,%A1"             CR_TAB
                   "mov __tmp_reg__,%C1"   CR_TAB
                   TINY_ADIW (%E0, %F0, 1) CR_TAB
                   "st %0+,%B1"            CR_TAB
                   "st %0,__tmp_reg__", op, plen, 6);
    }
  else
    {
      avr_asm_len ("st %0+,%A1" CR_TAB
                   "st %0+,%B1" CR_TAB
                   "st %0,%C1", op, plen, -3);
    }

  if (!reg_unused_after (insn, base))
    avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);

  return "";
}
static const char*
avr_out_store_psi_reg_disp_tiny (rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    {
      return avr_asm_len ("mov __tmp_reg__,%A1"     CR_TAB
                          "mov __zero_reg__,%B1"    CR_TAB
                          TINY_ADIW (%I0, %J0, %o0) CR_TAB
                          "st %b0+,__tmp_reg__"     CR_TAB
                          "st %b0+,__zero_reg__"    CR_TAB
                          "st %b0,%C1"              CR_TAB
                          "clr __zero_reg__"        CR_TAB
                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -10);
    }
  else if (reg_src == reg_base - 2)
    {
      return avr_asm_len ("mov __tmp_reg__,%C1"     CR_TAB
                          TINY_ADIW (%I0, %J0, %o0) CR_TAB
                          "st %b0+,%A1"             CR_TAB
                          "st %b0+,%B1"             CR_TAB
                          "st %b0,__tmp_reg__"      CR_TAB
                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -8);
    }

  return avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                      "st %b0+,%A1"             CR_TAB
                      "st %b0+,%B1"             CR_TAB
                      "st %b0,%C1"              CR_TAB
                      TINY_SBIW (%I0, %J0, %o0+2), op, plen, -7);
}
/* Handle store of 24-bit type from register or zero to memory.  */

static const char*
avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    {
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("sts %m0,%A1"   CR_TAB
                          "sts %m0+1,%B1" CR_TAB
                          "sts %m0+2,%C1", op, plen, -n_words);
    }

  if (reg_base > 0)                 /* (r) */
    {
      if (AVR_TINY)
        return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X)        /* (R26) */
        {
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1" CR_TAB
                       "st %0+,%B1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

          return "";
        }
      else
        return avr_asm_len ("st %0,%A1"    CR_TAB
                            "std %0+1,%B1" CR_TAB
                            "std %0+2,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_store_psi_reg_disp_tiny (op, plen);

      reg_base = REGNO (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1"    CR_TAB
                                "std Y+62,%B1"    CR_TAB
                                "std Y+63,%C1"    CR_TAB
                                "sbiw r28,%o0-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%A1"           CR_TAB
                              "std Y+1,%B1"        CR_TAB
                              "std Y+2,%C1"        CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X+,%B1"    CR_TAB
                       "st X,%C1", op, plen, -4);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Move around 24-bit stuff.  */

const char *
avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, op, plen);
    }

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          if (true_regnum (dest) > true_regnum (src))
            {
              avr_asm_len ("mov %C0,%C1", op, plen, -1);

              if (AVR_HAVE_MOVW)
                return avr_asm_len ("movw %A0,%A1", op, plen, 1);
              else
                return avr_asm_len ("mov %B0,%B1" CR_TAB
                                    "mov %A0,%A1", op, plen, 2);
            }
          else
            {
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %A0,%A1", op, plen, -1);
              else
                avr_asm_len ("mov %A0,%A1" CR_TAB
                             "mov %B0,%B1", op, plen, -2);

              return avr_asm_len ("mov %C0,%C1", op, plen, 1);
            }
        }
      else if (CONSTANT_P (src))
        {
          return avr_out_reload_inpsi (op, NULL_RTX, plen);
        }
      else if (MEM_P (src))
        return avr_out_load_psi (insn, op, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return avr_out_store_psi (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);
  return "";
}
static const char*
avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
    {
      avr_asm_len ("mov __tmp_reg__,%1"      CR_TAB
                   TINY_ADIW (%I0, %J0, %o0) CR_TAB
                   "st %b0,__tmp_reg__", op, plen, -4);
    }
  else
    {
      avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                   "st %b0,%1" , op, plen, -3);
    }

  if (!reg_unused_after (insn, XEXP (x, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);

  return "";
}
static const char*
out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      int n_words = AVR_TINY ? 1 : 2;
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1"     CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1"            CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0"       CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x, 0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  */

static const char*
avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      int n_words = AVR_TINY ? 2 : 4;
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0,%A1" CR_TAB
                       "out %i0+1,%B1", op, plen, -2)

        : avr_asm_len ("sts %m0,%A1" CR_TAB
                       "sts %m0+1,%B1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26"            CR_TAB
                     "adiw r26,1"          CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1"           CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);

      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0"         CR_TAB
                       "st X+,__tmp_reg__"    CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X,%B1"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1"  CR_TAB
                       "st X,%B1"   CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2"  CR_TAB
                       "st %p0,%A1"  CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
static const char*
avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (reg_base == reg_src)
    {
      return !mem_volatile_p && reg_unused_after (insn, src)
        ? avr_asm_len ("mov __tmp_reg__,%B1"   CR_TAB
                       "st %0,%A1"             CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__", op, plen, -5)
        : avr_asm_len ("mov __tmp_reg__,%B1"   CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__"     CR_TAB
                       TINY_SBIW (%E0, %F0, 1) CR_TAB
                       "st %0, %A1", op, plen, -7);
    }

  return !mem_volatile_p && reg_unused_after (insn, base)
    ? avr_asm_len ("st %0+,%A1" CR_TAB
                   "st %0,%B1", op, plen, -2)
    : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
                   "st %0,%B1"             CR_TAB
                   "st -%0,%A1", op, plen, -4);
}
static const char*
avr_out_movhi_mr_r_reg_disp_tiny (rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  return reg_src == reg_base
    ? avr_asm_len ("mov __tmp_reg__,%A1"       CR_TAB
                   "mov __zero_reg__,%B1"      CR_TAB
                   TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                   "st %b0,__zero_reg__"       CR_TAB
                   "st -%b0,__tmp_reg__"       CR_TAB
                   "clr __zero_reg__"          CR_TAB
                   TINY_SBIW (%I0, %J0, %o0), op, plen, -9)

    : avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                   "st %b0,%B1"                CR_TAB
                   "st -%b0,%A1"               CR_TAB
                   TINY_SBIW (%I0, %J0, %o0), op, plen, -6);
}
static const char*
avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
{
  return avr_asm_len (TINY_ADIW (%I0, %J0, 1) CR_TAB
                      "st %p0,%B1"            CR_TAB
                      "st -%p0,%A1"           CR_TAB
                      TINY_ADIW (%I0, %J0, 2), op, plen, -6);
}
static const char*
out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      int n_words = AVR_TINY ? 2 : 4;
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                       "out %i0,%A1", op, plen, -2)

        : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                       "sts %m0,%A1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26"            CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__"    CR_TAB
                         "sbiw r26,1"          CR_TAB
                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1"   CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_disp_tiny (op, plen);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "st Y,%A1"           CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);

      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1"       CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "st -X,__tmp_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1"       CR_TAB
                       "st -X,%A1"      CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      if (AVR_TINY)
        return avr_out_movhi_mr_r_post_inc_tiny (op, plen);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1"   CR_TAB
                       "st -X,%A1"  CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1"    CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5213 /* Return 1 if frame pointer for current function required. */
5216 avr_frame_pointer_required_p (void)
5218 return (cfun
->calls_alloca
5219 || cfun
->calls_setjmp
5220 || cfun
->has_nonlocal_label
5221 || crtl
->args
.info
.nregs
== 0
5222 || get_frame_size () > 0);
5225 /* Returns the condition of compare insn INSN, or UNKNOWN. */
5228 compare_condition (rtx_insn
*insn
)
5230 rtx_insn
*next
= next_real_insn (insn
);
5232 if (next
&& JUMP_P (next
))
5234 rtx pat
= PATTERN (next
);
5235 rtx src
= SET_SRC (pat
);
5237 if (IF_THEN_ELSE
== GET_CODE (src
))
5238 return GET_CODE (XEXP (src
, 0));
5245 /* Returns true iff INSN is a tst insn that only tests the sign. */
5248 compare_sign_p (rtx_insn
*insn
)
5250 RTX_CODE cond
= compare_condition (insn
);
5251 return (cond
== GE
|| cond
== LT
);
5255 /* Returns true iff the next insn is a JUMP_INSN with a condition
5256 that needs to be swapped (GT, GTU, LE, LEU). */
5259 compare_diff_p (rtx_insn
*insn
)
5261 RTX_CODE cond
= compare_condition (insn
);
5262 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
5265 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
5268 compare_eq_p (rtx_insn
*insn
)
5270 RTX_CODE cond
= compare_condition (insn
);
5271 return (cond
== EQ
|| cond
== NE
);
5275 /* Output compare instruction
5277 compare (XOP[0], XOP[1])
5279 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
5280 XOP[2] is an 8-bit scratch register as needed.
5282 PLEN == NULL: Output instructions.
5283 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
5284 Don't output anything. */
5287 avr_out_compare (rtx_insn
*insn
, rtx
*xop
, int *plen
)
5289 /* Register to compare and value to compare against. */
5293 /* MODE of the comparison. */
5296 /* Number of bytes to operate on. */
5297 int i
, n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
5299 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
5300 int clobber_val
= -1;
5302 /* Map fixed mode operands to integer operands with the same binary
5303 representation. They are easier to handle in the remainder. */
5305 if (CONST_FIXED_P (xval
))
5307 xreg
= avr_to_int_mode (xop
[0]);
5308 xval
= avr_to_int_mode (xop
[1]);
5311 mode
= GET_MODE (xreg
);
5313 gcc_assert (REG_P (xreg
));
5314 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
5315 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
5320 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
5321 against 0 by ORing the bytes. This is one instruction shorter.
5322 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
5323 and therefore don't use this. */
5325 if (!test_hard_reg_class (LD_REGS
, xreg
)
5326 && compare_eq_p (insn
)
5327 && reg_unused_after (insn
, xreg
))
5329 if (xval
== const1_rtx
)
5331 avr_asm_len ("dec %A0" CR_TAB
5332 "or %A0,%B0", xop
, plen
, 2);
5335 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
5338 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
5342 else if (xval
== constm1_rtx
)
5345 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
5348 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
5350 return avr_asm_len ("and %A0,%B0" CR_TAB
5351 "com %A0", xop
, plen
, 2);
5355 for (i
= 0; i
< n_bytes
; i
++)
5357 /* We compare byte-wise. */
5358 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
5359 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
5361 /* 8-bit value to compare with this byte. */
5362 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
5364 /* Registers R16..R31 can operate with immediate. */
5365 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
5368 xop
[1] = gen_int_mode (val8
, QImode
);
5370 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
5373 && test_hard_reg_class (ADDW_REGS
, reg8
))
5375 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
5377 if (IN_RANGE (val16
, 0, 63)
5379 || reg_unused_after (insn
, xreg
)))
5382 avr_asm_len (TINY_SBIW (%A0
, %B0
, %1), xop
, plen
, 2);
5384 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
5391 && IN_RANGE (val16
, -63, -1)
5392 && compare_eq_p (insn
)
5393 && reg_unused_after (insn
, xreg
))
5396 ? avr_asm_len (TINY_ADIW (%A0
, %B0
, %n1
), xop
, plen
, 2)
5397 : avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
5401 /* Comparing against 0 is easy. */
5406 ? "cp %0,__zero_reg__"
5407 : "cpc %0,__zero_reg__", xop
, plen
, 1);
5411 /* Upper registers can compare and subtract-with-carry immediates.
5412 Notice that compare instructions do the same as respective subtract
5413 instruction; the only difference is that comparisons don't write
5414 the result back to the target register. */
5420 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
5423 else if (reg_unused_after (insn
, xreg
))
5425 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
5430 /* Must load the value into the scratch register. */
5432 gcc_assert (REG_P (xop
[2]));
5434 if (clobber_val
!= (int) val8
)
5435 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
5436 clobber_val
= (int) val8
;
5440 : "cpc %0,%2", xop
, plen
, 1);
5447 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
5450 avr_out_compare64 (rtx_insn
*insn
, rtx
*op
, int *plen
)
5454 xop
[0] = gen_rtx_REG (DImode
, 18);
5458 return avr_out_compare (insn
, xop
, plen
);
5461 /* Output test instruction for HImode. */
5464 avr_out_tsthi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5466 if (compare_sign_p (insn
))
5468 avr_asm_len ("tst %B0", op
, plen
, -1);
5470 else if (reg_unused_after (insn
, op
[0])
5471 && compare_eq_p (insn
))
5473 /* Faster than sbiw if we can clobber the operand. */
5474 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
5478 avr_out_compare (insn
, op
, plen
);
5485 /* Output test instruction for PSImode. */
5488 avr_out_tstpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5490 if (compare_sign_p (insn
))
5492 avr_asm_len ("tst %C0", op
, plen
, -1);
5494 else if (reg_unused_after (insn
, op
[0])
5495 && compare_eq_p (insn
))
5497 /* Faster than sbiw if we can clobber the operand. */
5498 avr_asm_len ("or %A0,%B0" CR_TAB
5499 "or %A0,%C0", op
, plen
, -2);
5503 avr_out_compare (insn
, op
, plen
);
5510 /* Output test instruction for SImode. */
5513 avr_out_tstsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5515 if (compare_sign_p (insn
))
5517 avr_asm_len ("tst %D0", op
, plen
, -1);
5519 else if (reg_unused_after (insn
, op
[0])
5520 && compare_eq_p (insn
))
5522 /* Faster than sbiw if we can clobber the operand. */
5523 avr_asm_len ("or %A0,%B0" CR_TAB
5525 "or %A0,%D0", op
, plen
, -3);
5529 avr_out_compare (insn
, op
, plen
);
5536 /* Generate asm equivalent for various shifts. This only handles cases
5537 that are not already carefully hand-optimized in ?sh??i3_out.
5539 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
5540 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
5541 OPERANDS[3] is a QImode scratch register from LD regs if
5542 available and SCRATCH, otherwise (no scratch available)
5544 TEMPL is an assembler template that shifts by one position.
5545 T_LEN is the length of this template. */
5548 out_shift_with_cnt (const char *templ
, rtx_insn
*insn
, rtx operands
[],
5549 int *plen
, int t_len
)
5551 bool second_label
= true;
5552 bool saved_in_tmp
= false;
5553 bool use_zero_reg
= false;
5556 op
[0] = operands
[0];
5557 op
[1] = operands
[1];
5558 op
[2] = operands
[2];
5559 op
[3] = operands
[3];
5564 if (CONST_INT_P (operands
[2]))
5566 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
5567 && REG_P (operands
[3]));
5568 int count
= INTVAL (operands
[2]);
5569 int max_len
= 10; /* If larger than this, always use a loop. */
5574 if (count
< 8 && !scratch
)
5575 use_zero_reg
= true;
5578 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
5580 if (t_len
* count
<= max_len
)
5582 /* Output shifts inline with no loop - faster. */
5585 avr_asm_len (templ
, op
, plen
, t_len
);
5592 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
5594 else if (use_zero_reg
)
5596 /* Hack to save one word: use __zero_reg__ as loop counter.
5597 Set one bit, then shift in a loop until it is 0 again. */
5599 op
[3] = zero_reg_rtx
;
5601 avr_asm_len ("set" CR_TAB
5602 "bld %3,%2-1", op
, plen
, 2);
5606 /* No scratch register available, use one from LD_REGS (saved in
5607 __tmp_reg__) that doesn't overlap with registers to shift. */
5609 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
5610 op
[4] = tmp_reg_rtx
;
5611 saved_in_tmp
= true;
5613 avr_asm_len ("mov %4,%3" CR_TAB
5614 "ldi %3,%2", op
, plen
, 2);
5617 second_label
= false;
5619 else if (MEM_P (op
[2]))
5623 op_mov
[0] = op
[3] = tmp_reg_rtx
;
5626 out_movqi_r_mr (insn
, op_mov
, plen
);
5628 else if (register_operand (op
[2], QImode
))
5632 if (!reg_unused_after (insn
, op
[2])
5633 || reg_overlap_mentioned_p (op
[0], op
[2]))
5635 op
[3] = tmp_reg_rtx
;
5636 avr_asm_len ("mov %3,%2", op
, plen
, 1);
5640 fatal_insn ("bad shift insn:", insn
);
5643 avr_asm_len ("rjmp 2f", op
, plen
, 1);
5645 avr_asm_len ("1:", op
, plen
, 0);
5646 avr_asm_len (templ
, op
, plen
, t_len
);
5649 avr_asm_len ("2:", op
, plen
, 0);
5651 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
5652 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
5655 avr_asm_len ("mov %3,%4", op
, plen
, 1);
5659 /* 8bit shift left ((char)x << i) */
5662 ashlqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5664 if (GET_CODE (operands
[2]) == CONST_INT
)
5671 switch (INTVAL (operands
[2]))
5674 if (INTVAL (operands
[2]) < 8)
5686 return ("lsl %0" CR_TAB
5691 return ("lsl %0" CR_TAB
5696 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5699 return ("swap %0" CR_TAB
5703 return ("lsl %0" CR_TAB
5709 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5712 return ("swap %0" CR_TAB
5717 return ("lsl %0" CR_TAB
5724 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5727 return ("swap %0" CR_TAB
5733 return ("lsl %0" CR_TAB
5742 return ("ror %0" CR_TAB
5747 else if (CONSTANT_P (operands
[2]))
5748 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5750 out_shift_with_cnt ("lsl %0",
5751 insn
, operands
, len
, 1);
5756 /* 16bit shift left ((short)x << i) */
5759 ashlhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5761 if (GET_CODE (operands
[2]) == CONST_INT
)
5763 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5764 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5771 switch (INTVAL (operands
[2]))
5774 if (INTVAL (operands
[2]) < 16)
5778 return ("clr %B0" CR_TAB
5782 if (optimize_size
&& scratch
)
5787 return ("swap %A0" CR_TAB
5789 "andi %B0,0xf0" CR_TAB
5790 "eor %B0,%A0" CR_TAB
5791 "andi %A0,0xf0" CR_TAB
5797 return ("swap %A0" CR_TAB
5799 "ldi %3,0xf0" CR_TAB
5801 "eor %B0,%A0" CR_TAB
5805 break; /* optimize_size ? 6 : 8 */
5809 break; /* scratch ? 5 : 6 */
5813 return ("lsl %A0" CR_TAB
5817 "andi %B0,0xf0" CR_TAB
5818 "eor %B0,%A0" CR_TAB
5819 "andi %A0,0xf0" CR_TAB
5825 return ("lsl %A0" CR_TAB
5829 "ldi %3,0xf0" CR_TAB
5831 "eor %B0,%A0" CR_TAB
5839 break; /* scratch ? 5 : 6 */
5841 return ("clr __tmp_reg__" CR_TAB
5844 "ror __tmp_reg__" CR_TAB
5847 "ror __tmp_reg__" CR_TAB
5848 "mov %B0,%A0" CR_TAB
5849 "mov %A0,__tmp_reg__");
5853 return ("lsr %B0" CR_TAB
5854 "mov %B0,%A0" CR_TAB
5860 return *len
= 2, ("mov %B0,%A1" CR_TAB
5865 return ("mov %B0,%A0" CR_TAB
5871 return ("mov %B0,%A0" CR_TAB
5878 return ("mov %B0,%A0" CR_TAB
5888 return ("mov %B0,%A0" CR_TAB
5896 return ("mov %B0,%A0" CR_TAB
5899 "ldi %3,0xf0" CR_TAB
5903 return ("mov %B0,%A0" CR_TAB
5914 return ("mov %B0,%A0" CR_TAB
5920 if (AVR_HAVE_MUL
&& scratch
)
5923 return ("ldi %3,0x20" CR_TAB
5927 "clr __zero_reg__");
5929 if (optimize_size
&& scratch
)
5934 return ("mov %B0,%A0" CR_TAB
5938 "ldi %3,0xe0" CR_TAB
5944 return ("set" CR_TAB
5949 "clr __zero_reg__");
5952 return ("mov %B0,%A0" CR_TAB
5961 if (AVR_HAVE_MUL
&& ldi_ok
)
5964 return ("ldi %B0,0x40" CR_TAB
5965 "mul %A0,%B0" CR_TAB
5968 "clr __zero_reg__");
5970 if (AVR_HAVE_MUL
&& scratch
)
5973 return ("ldi %3,0x40" CR_TAB
5977 "clr __zero_reg__");
5979 if (optimize_size
&& ldi_ok
)
5982 return ("mov %B0,%A0" CR_TAB
5983 "ldi %A0,6" "\n1:\t"
5988 if (optimize_size
&& scratch
)
5991 return ("clr %B0" CR_TAB
6000 return ("clr %B0" CR_TAB
6007 out_shift_with_cnt ("lsl %A0" CR_TAB
6008 "rol %B0", insn
, operands
, len
, 2);
6013 /* 24-bit shift left */
6016 avr_out_ashlpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6021 if (CONST_INT_P (op
[2]))
6023 switch (INTVAL (op
[2]))
6026 if (INTVAL (op
[2]) < 24)
6029 return avr_asm_len ("clr %A0" CR_TAB
6031 "clr %C0", op
, plen
, 3);
6035 int reg0
= REGNO (op
[0]);
6036 int reg1
= REGNO (op
[1]);
6039 return avr_asm_len ("mov %C0,%B1" CR_TAB
6040 "mov %B0,%A1" CR_TAB
6041 "clr %A0", op
, plen
, 3);
6043 return avr_asm_len ("clr %A0" CR_TAB
6044 "mov %B0,%A1" CR_TAB
6045 "mov %C0,%B1", op
, plen
, 3);
6050 int reg0
= REGNO (op
[0]);
6051 int reg1
= REGNO (op
[1]);
6053 if (reg0
+ 2 != reg1
)
6054 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
6056 return avr_asm_len ("clr %B0" CR_TAB
6057 "clr %A0", op
, plen
, 2);
6061 return avr_asm_len ("clr %C0" CR_TAB
6065 "clr %A0", op
, plen
, 5);
6069 out_shift_with_cnt ("lsl %A0" CR_TAB
6071 "rol %C0", insn
, op
, plen
, 3);
6076 /* 32bit shift left ((long)x << i) */
6079 ashlsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6081 if (GET_CODE (operands
[2]) == CONST_INT
)
6089 switch (INTVAL (operands
[2]))
6092 if (INTVAL (operands
[2]) < 32)
6096 return *len
= 3, ("clr %D0" CR_TAB
6100 return ("clr %D0" CR_TAB
6107 int reg0
= true_regnum (operands
[0]);
6108 int reg1
= true_regnum (operands
[1]);
6111 return ("mov %D0,%C1" CR_TAB
6112 "mov %C0,%B1" CR_TAB
6113 "mov %B0,%A1" CR_TAB
6116 return ("clr %A0" CR_TAB
6117 "mov %B0,%A1" CR_TAB
6118 "mov %C0,%B1" CR_TAB
6124 int reg0
= true_regnum (operands
[0]);
6125 int reg1
= true_regnum (operands
[1]);
6126 if (reg0
+ 2 == reg1
)
6127 return *len
= 2, ("clr %B0" CR_TAB
6130 return *len
= 3, ("movw %C0,%A1" CR_TAB
6134 return *len
= 4, ("mov %C0,%A1" CR_TAB
6135 "mov %D0,%B1" CR_TAB
6142 return ("mov %D0,%A1" CR_TAB
6149 return ("clr %D0" CR_TAB
6158 out_shift_with_cnt ("lsl %A0" CR_TAB
6161 "rol %D0", insn
, operands
, len
, 4);
6165 /* 8bit arithmetic shift right ((signed char)x >> i) */
6168 ashrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6170 if (GET_CODE (operands
[2]) == CONST_INT
)
6177 switch (INTVAL (operands
[2]))
6185 return ("asr %0" CR_TAB
6190 return ("asr %0" CR_TAB
6196 return ("asr %0" CR_TAB
6203 return ("asr %0" CR_TAB
6211 return ("bst %0,6" CR_TAB
6217 if (INTVAL (operands
[2]) < 8)
6224 return ("lsl %0" CR_TAB
6228 else if (CONSTANT_P (operands
[2]))
6229 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
6231 out_shift_with_cnt ("asr %0",
6232 insn
, operands
, len
, 1);
6237 /* 16bit arithmetic shift right ((signed short)x >> i) */
6240 ashrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6242 if (GET_CODE (operands
[2]) == CONST_INT
)
6244 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
6245 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
6252 switch (INTVAL (operands
[2]))
6256 /* XXX try to optimize this too? */
6261 break; /* scratch ? 5 : 6 */
6263 return ("mov __tmp_reg__,%A0" CR_TAB
6264 "mov %A0,%B0" CR_TAB
6265 "lsl __tmp_reg__" CR_TAB
6267 "sbc %B0,%B0" CR_TAB
6268 "lsl __tmp_reg__" CR_TAB
6274 return ("lsl %A0" CR_TAB
6275 "mov %A0,%B0" CR_TAB
6281 int reg0
= true_regnum (operands
[0]);
6282 int reg1
= true_regnum (operands
[1]);
6285 return *len
= 3, ("mov %A0,%B0" CR_TAB
6289 return *len
= 4, ("mov %A0,%B1" CR_TAB
6297 return ("mov %A0,%B0" CR_TAB
6299 "sbc %B0,%B0" CR_TAB
6304 return ("mov %A0,%B0" CR_TAB
6306 "sbc %B0,%B0" CR_TAB
6311 if (AVR_HAVE_MUL
&& ldi_ok
)
6314 return ("ldi %A0,0x20" CR_TAB
6315 "muls %B0,%A0" CR_TAB
6317 "sbc %B0,%B0" CR_TAB
6318 "clr __zero_reg__");
6320 if (optimize_size
&& scratch
)
6323 return ("mov %A0,%B0" CR_TAB
6325 "sbc %B0,%B0" CR_TAB
6331 if (AVR_HAVE_MUL
&& ldi_ok
)
6334 return ("ldi %A0,0x10" CR_TAB
6335 "muls %B0,%A0" CR_TAB
6337 "sbc %B0,%B0" CR_TAB
6338 "clr __zero_reg__");
6340 if (optimize_size
&& scratch
)
6343 return ("mov %A0,%B0" CR_TAB
6345 "sbc %B0,%B0" CR_TAB
6352 if (AVR_HAVE_MUL
&& ldi_ok
)
6355 return ("ldi %A0,0x08" CR_TAB
6356 "muls %B0,%A0" CR_TAB
6358 "sbc %B0,%B0" CR_TAB
6359 "clr __zero_reg__");
6362 break; /* scratch ? 5 : 7 */
6364 return ("mov %A0,%B0" CR_TAB
6366 "sbc %B0,%B0" CR_TAB
6375 return ("lsl %B0" CR_TAB
6376 "sbc %A0,%A0" CR_TAB
6378 "mov %B0,%A0" CR_TAB
6382 if (INTVAL (operands
[2]) < 16)
6388 return *len
= 3, ("lsl %B0" CR_TAB
6389 "sbc %A0,%A0" CR_TAB
6394 out_shift_with_cnt ("asr %B0" CR_TAB
6395 "ror %A0", insn
, operands
, len
, 2);
6400 /* 24-bit arithmetic shift right */
6403 avr_out_ashrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6405 int dest
= REGNO (op
[0]);
6406 int src
= REGNO (op
[1]);
6408 if (CONST_INT_P (op
[2]))
6413 switch (INTVAL (op
[2]))
6417 return avr_asm_len ("mov %A0,%B1" CR_TAB
6418 "mov %B0,%C1" CR_TAB
6421 "dec %C0", op
, plen
, 5);
6423 return avr_asm_len ("clr %C0" CR_TAB
6426 "mov %B0,%C1" CR_TAB
6427 "mov %A0,%B1", op
, plen
, 5);
6430 if (dest
!= src
+ 2)
6431 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
6433 return avr_asm_len ("clr %B0" CR_TAB
6436 "mov %C0,%B0", op
, plen
, 4);
6439 if (INTVAL (op
[2]) < 24)
6445 return avr_asm_len ("lsl %C0" CR_TAB
6446 "sbc %A0,%A0" CR_TAB
6447 "mov %B0,%A0" CR_TAB
6448 "mov %C0,%A0", op
, plen
, 4);
6452 out_shift_with_cnt ("asr %C0" CR_TAB
6454 "ror %A0", insn
, op
, plen
, 3);
6459 /* 32-bit arithmetic shift right ((signed long)x >> i) */
6462 ashrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6464 if (GET_CODE (operands
[2]) == CONST_INT
)
6472 switch (INTVAL (operands
[2]))
6476 int reg0
= true_regnum (operands
[0]);
6477 int reg1
= true_regnum (operands
[1]);
6480 return ("mov %A0,%B1" CR_TAB
6481 "mov %B0,%C1" CR_TAB
6482 "mov %C0,%D1" CR_TAB
6487 return ("clr %D0" CR_TAB
6490 "mov %C0,%D1" CR_TAB
6491 "mov %B0,%C1" CR_TAB
6497 int reg0
= true_regnum (operands
[0]);
6498 int reg1
= true_regnum (operands
[1]);
6500 if (reg0
== reg1
+ 2)
6501 return *len
= 4, ("clr %D0" CR_TAB
6506 return *len
= 5, ("movw %A0,%C1" CR_TAB
6512 return *len
= 6, ("mov %B0,%D1" CR_TAB
6513 "mov %A0,%C1" CR_TAB
6521 return *len
= 6, ("mov %A0,%D1" CR_TAB
6525 "mov %B0,%D0" CR_TAB
6529 if (INTVAL (operands
[2]) < 32)
6536 return *len
= 4, ("lsl %D0" CR_TAB
6537 "sbc %A0,%A0" CR_TAB
6538 "mov %B0,%A0" CR_TAB
6541 return *len
= 5, ("lsl %D0" CR_TAB
6542 "sbc %A0,%A0" CR_TAB
6543 "mov %B0,%A0" CR_TAB
6544 "mov %C0,%A0" CR_TAB
6549 out_shift_with_cnt ("asr %D0" CR_TAB
6552 "ror %A0", insn
, operands
, len
, 4);
6556 /* 8-bit logic shift right ((unsigned char)x >> i) */
6559 lshrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6561 if (GET_CODE (operands
[2]) == CONST_INT
)
6568 switch (INTVAL (operands
[2]))
6571 if (INTVAL (operands
[2]) < 8)
6583 return ("lsr %0" CR_TAB
6587 return ("lsr %0" CR_TAB
6592 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6595 return ("swap %0" CR_TAB
6599 return ("lsr %0" CR_TAB
6605 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6608 return ("swap %0" CR_TAB
6613 return ("lsr %0" CR_TAB
6620 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6623 return ("swap %0" CR_TAB
6629 return ("lsr %0" CR_TAB
6638 return ("rol %0" CR_TAB
6643 else if (CONSTANT_P (operands
[2]))
6644 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
6646 out_shift_with_cnt ("lsr %0",
6647 insn
, operands
, len
, 1);
6651 /* 16-bit logic shift right ((unsigned short)x >> i) */
6654 lshrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6656 if (GET_CODE (operands
[2]) == CONST_INT
)
6658 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
6659 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
6666 switch (INTVAL (operands
[2]))
6669 if (INTVAL (operands
[2]) < 16)
6673 return ("clr %B0" CR_TAB
6677 if (optimize_size
&& scratch
)
6682 return ("swap %B0" CR_TAB
6684 "andi %A0,0x0f" CR_TAB
6685 "eor %A0,%B0" CR_TAB
6686 "andi %B0,0x0f" CR_TAB
6692 return ("swap %B0" CR_TAB
6694 "ldi %3,0x0f" CR_TAB
6696 "eor %A0,%B0" CR_TAB
6700 break; /* optimize_size ? 6 : 8 */
6704 break; /* scratch ? 5 : 6 */
6708 return ("lsr %B0" CR_TAB
6712 "andi %A0,0x0f" CR_TAB
6713 "eor %A0,%B0" CR_TAB
6714 "andi %B0,0x0f" CR_TAB
6720 return ("lsr %B0" CR_TAB
6724 "ldi %3,0x0f" CR_TAB
6726 "eor %A0,%B0" CR_TAB
6734 break; /* scratch ? 5 : 6 */
6736 return ("clr __tmp_reg__" CR_TAB
6739 "rol __tmp_reg__" CR_TAB
6742 "rol __tmp_reg__" CR_TAB
6743 "mov %A0,%B0" CR_TAB
6744 "mov %B0,__tmp_reg__");
6748 return ("lsl %A0" CR_TAB
6749 "mov %A0,%B0" CR_TAB
6751 "sbc %B0,%B0" CR_TAB
6755 return *len
= 2, ("mov %A0,%B1" CR_TAB
6760 return ("mov %A0,%B0" CR_TAB
6766 return ("mov %A0,%B0" CR_TAB
6773 return ("mov %A0,%B0" CR_TAB
6783 return ("mov %A0,%B0" CR_TAB
6791 return ("mov %A0,%B0" CR_TAB
6794 "ldi %3,0x0f" CR_TAB
6798 return ("mov %A0,%B0" CR_TAB
6809 return ("mov %A0,%B0" CR_TAB
6815 if (AVR_HAVE_MUL
&& scratch
)
6818 return ("ldi %3,0x08" CR_TAB
6822 "clr __zero_reg__");
6824 if (optimize_size
&& scratch
)
6829 return ("mov %A0,%B0" CR_TAB
6833 "ldi %3,0x07" CR_TAB
6839 return ("set" CR_TAB
6844 "clr __zero_reg__");
6847 return ("mov %A0,%B0" CR_TAB
6856 if (AVR_HAVE_MUL
&& ldi_ok
)
6859 return ("ldi %A0,0x04" CR_TAB
6860 "mul %B0,%A0" CR_TAB
6863 "clr __zero_reg__");
6865 if (AVR_HAVE_MUL
&& scratch
)
6868 return ("ldi %3,0x04" CR_TAB
6872 "clr __zero_reg__");
6874 if (optimize_size
&& ldi_ok
)
6877 return ("mov %A0,%B0" CR_TAB
6878 "ldi %B0,6" "\n1:\t"
6883 if (optimize_size
&& scratch
)
6886 return ("clr %A0" CR_TAB
6895 return ("clr %A0" CR_TAB
6902 out_shift_with_cnt ("lsr %B0" CR_TAB
6903 "ror %A0", insn
, operands
, len
, 2);
6908 /* 24-bit logic shift right */
6911 avr_out_lshrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6913 int dest
= REGNO (op
[0]);
6914 int src
= REGNO (op
[1]);
6916 if (CONST_INT_P (op
[2]))
6921 switch (INTVAL (op
[2]))
6925 return avr_asm_len ("mov %A0,%B1" CR_TAB
6926 "mov %B0,%C1" CR_TAB
6927 "clr %C0", op
, plen
, 3);
6929 return avr_asm_len ("clr %C0" CR_TAB
6930 "mov %B0,%C1" CR_TAB
6931 "mov %A0,%B1", op
, plen
, 3);
6934 if (dest
!= src
+ 2)
6935 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
6937 return avr_asm_len ("clr %B0" CR_TAB
6938 "clr %C0", op
, plen
, 2);
6941 if (INTVAL (op
[2]) < 24)
6947 return avr_asm_len ("clr %A0" CR_TAB
6951 "clr %C0", op
, plen
, 5);
6955 out_shift_with_cnt ("lsr %C0" CR_TAB
6957 "ror %A0", insn
, op
, plen
, 3);
6962 /* 32-bit logic shift right ((unsigned int)x >> i) */
6965 lshrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6967 if (GET_CODE (operands
[2]) == CONST_INT
)
6975 switch (INTVAL (operands
[2]))
6978 if (INTVAL (operands
[2]) < 32)
6982 return *len
= 3, ("clr %D0" CR_TAB
6986 return ("clr %D0" CR_TAB
6993 int reg0
= true_regnum (operands
[0]);
6994 int reg1
= true_regnum (operands
[1]);
6997 return ("mov %A0,%B1" CR_TAB
6998 "mov %B0,%C1" CR_TAB
6999 "mov %C0,%D1" CR_TAB
7002 return ("clr %D0" CR_TAB
7003 "mov %C0,%D1" CR_TAB
7004 "mov %B0,%C1" CR_TAB
7010 int reg0
= true_regnum (operands
[0]);
7011 int reg1
= true_regnum (operands
[1]);
7013 if (reg0
== reg1
+ 2)
7014 return *len
= 2, ("clr %C0" CR_TAB
7017 return *len
= 3, ("movw %A0,%C1" CR_TAB
7021 return *len
= 4, ("mov %B0,%D1" CR_TAB
7022 "mov %A0,%C1" CR_TAB
7028 return *len
= 4, ("mov %A0,%D1" CR_TAB
7035 return ("clr %A0" CR_TAB
7044 out_shift_with_cnt ("lsr %D0" CR_TAB
7047 "ror %A0", insn
, operands
, len
, 4);
7052 /* Output addition of register XOP[0] and compile time constant XOP[2].
7053 CODE == PLUS: perform addition by using ADD instructions or
7054 CODE == MINUS: perform addition by using SUB instructions:
7056 XOP[0] = XOP[0] + XOP[2]
7058 Or perform addition/subtraction with register XOP[2] depending on CODE:
7060 XOP[0] = XOP[0] +/- XOP[2]
7062 If PLEN == NULL, print assembler instructions to perform the operation;
7063 otherwise, set *PLEN to the length of the instruction sequence (in words)
7064 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
7065 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7067 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7068 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7069 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
7070 the subtrahend in the original insn, provided it is a compile time constant.
7071 In all other cases, SIGN is 0.
7073 If OUT_LABEL is true, print the final 0: label which is needed for
7074 saturated addition / subtraction. The only case where OUT_LABEL = false
7075 is useful is for saturated addition / subtraction performed during
7076 fixed-point rounding, cf. `avr_out_round'. */
7079 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
7080 enum rtx_code code_sat
, int sign
, bool out_label
)
7082 /* MODE of the operation. */
7083 machine_mode mode
= GET_MODE (xop
[0]);
7085 /* INT_MODE of the same size. */
7086 machine_mode imode
= int_mode_for_mode (mode
);
7088 /* Number of bytes to operate on. */
7089 int i
, n_bytes
= GET_MODE_SIZE (mode
);
7091 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
7092 int clobber_val
= -1;
7094 /* op[0]: 8-bit destination register
7095 op[1]: 8-bit const int
7096 op[2]: 8-bit scratch register */
7099 /* Started the operation? Before starting the operation we may skip
7100 adding 0. This is no more true after the operation started because
7101 carry must be taken into account. */
7102 bool started
= false;
7104 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
7107 /* Output a BRVC instruction. Only needed with saturation. */
7108 bool out_brvc
= true;
7115 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_CLOBBER
;
7117 for (i
= 0; i
< n_bytes
; i
++)
7119 /* We operate byte-wise on the destination. */
7120 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
7121 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
7124 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
7127 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
7131 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
7133 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
7142 /* Except in the case of ADIW with 16-bit register (see below)
7143 addition does not set cc0 in a usable way. */
7145 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
7147 if (CONST_FIXED_P (xval
))
7148 xval
= avr_to_int_mode (xval
);
7150 /* Adding/Subtracting zero is a no-op. */
7152 if (xval
== const0_rtx
)
7159 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
7163 if (SS_PLUS
== code_sat
&& MINUS
== code
7165 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
7166 & GET_MODE_MASK (QImode
)))
7168 /* We compute x + 0x80 by means of SUB instructions. We negated the
7169 constant subtrahend above and are left with x - (-128) so that we
7170 need something like SUBI r,128 which does not exist because SUBI sets
7171 V according to the sign of the subtrahend. Notice the only case
7172 where this must be done is when NEG overflowed in case [2s] because
7173 the V computation needs the right sign of the subtrahend. */
7175 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
7177 avr_asm_len ("subi %0,128" CR_TAB
7178 "brmi 0f", &msb
, plen
, 2);
7184 for (i
= 0; i
< n_bytes
; i
++)
7186 /* We operate byte-wise on the destination. */
7187 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
7188 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
7190 /* 8-bit value to operate with this byte. */
7191 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
7193 /* Registers R16..R31 can operate with immediate. */
7194 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
7197 op
[1] = gen_int_mode (val8
, QImode
);
7199 /* To get usable cc0 no low-bytes must have been skipped. */
7207 && test_hard_reg_class (ADDW_REGS
, reg8
))
7209 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
7210 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
7212 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
7213 i.e. operate word-wise. */
7220 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
7223 if (n_bytes
== 2 && PLUS
== code
)
7235 avr_asm_len (code
== PLUS
7236 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
7240 else if ((val8
== 1 || val8
== 0xff)
7241 && UNKNOWN
== code_sat
7243 && i
== n_bytes
- 1)
7245 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
7255 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
7257 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
7259 /* This belongs to the x + 0x80 corner case. The code with
7260 ADD instruction is not smaller, thus make this case
7261 expensive so that the caller won't pick it. */
7267 if (clobber_val
!= (int) val8
)
7268 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7269 clobber_val
= (int) val8
;
7271 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
7278 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
7281 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
7283 if (clobber_val
!= (int) val8
)
7284 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7285 clobber_val
= (int) val8
;
7287 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
7299 } /* for all sub-bytes */
7303 if (UNKNOWN
== code_sat
)
7306 *pcc
= (int) CC_CLOBBER
;
7308 /* Vanilla addition/subtraction is done. We are left with saturation.
7310 We have to compute A = A <op> B where A is a register and
7311 B is a register or a non-zero compile time constant CONST.
7312 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
7313 B stands for the original operand $2 in INSN. In the case of B = CONST,
7314 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
7316 CODE is the instruction flavor we use in the asm sequence to perform <op>.
7320 operation | code | sat if | b is | sat value | case
7321 -----------------+-------+----------+--------------+-----------+-------
7322 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
7323 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
7324 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
7325 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
7329 operation | code | sat if | b is | sat value | case
7330 -----------------+-------+----------+--------------+-----------+-------
7331 + as a + b | add | V == 1 | const, reg | s+ | [1s]
7332 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
7333 - as a - b | sub | V == 1 | const, reg | s- | [3s]
7334 - as a + (-b) | add | V == 1 | const | s- | [4s]
7336 s+ = b < 0 ? -0x80 : 0x7f
7337 s- = b < 0 ? 0x7f : -0x80
7339 The cases a - b actually perform a - (-(-b)) if B is CONST.
7342 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
7344 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
7347 bool need_copy
= true;
7348 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
7359 avr_asm_len ("brvc 0f", op
, plen
, 1);
7361 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
7366 avr_asm_len ("ldi %0,0x7f" CR_TAB
7367 "adc %0,__zero_reg__", op
, plen
, 2);
7369 avr_asm_len ("ldi %0,0x7f" CR_TAB
7370 "ldi %1,0xff" CR_TAB
7371 "adc %1,__zero_reg__" CR_TAB
7372 "adc %0,__zero_reg__", op
, plen
, 4);
7374 else if (sign
== 0 && PLUS
== code
)
7378 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
7381 avr_asm_len ("ldi %0,0x80" CR_TAB
7383 "dec %0", op
, plen
, 3);
7385 avr_asm_len ("ldi %0,0x80" CR_TAB
7388 "sbci %0,0", op
, plen
, 4);
7390 else if (sign
== 0 && MINUS
== code
)
7394 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
7397 avr_asm_len ("ldi %0,0x7f" CR_TAB
7399 "inc %0", op
, plen
, 3);
7401 avr_asm_len ("ldi %0,0x7f" CR_TAB
7404 "sbci %0,-1", op
, plen
, 4);
7406 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
7408 /* [1s,const,B < 0] [2s,B < 0] */
7409 /* [3s,const,B > 0] [4s,B > 0] */
7413 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
7417 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
7418 if (n_bytes
> 1 && need_copy
)
7419 avr_asm_len ("clr %1", op
, plen
, 1);
7421 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
7423 /* [1s,const,B > 0] [2s,B > 0] */
7424 /* [3s,const,B < 0] [4s,B < 0] */
7428 avr_asm_len ("sec" CR_TAB
7429 "%~call __sbc_8", op
, plen
, 1 + len_call
);
7433 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
7434 if (n_bytes
> 1 && need_copy
)
7435 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
7445 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
7450 avr_asm_len ("sec", op
, plen
, 1);
7451 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
7457 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
7458 avr_asm_len ("sec" CR_TAB
7459 "sbc %0,%0", op
, plen
, 2);
7461 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
7464 break; /* US_PLUS */
7469 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
7473 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
7477 avr_asm_len ("clr %0", op
, plen
, 1);
7482 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
7483 Now copy the right value to the LSBs. */
7485 if (need_copy
&& n_bytes
> 1)
7487 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
7489 avr_asm_len ("mov %1,%0", op
, plen
, 1);
7495 avr_asm_len ("movw %0,%1", op
, plen
, 1);
7497 avr_asm_len ("mov %A0,%1" CR_TAB
7498 "mov %B0,%1", op
, plen
, 2);
7501 else if (n_bytes
> 2)
7504 avr_asm_len ("mov %A0,%1" CR_TAB
7505 "mov %B0,%1", op
, plen
, 2);
7509 if (need_copy
&& n_bytes
== 8)
7512 avr_asm_len ("movw %r0+2,%0" CR_TAB
7513 "movw %r0+4,%0", xop
, plen
, 2);
7515 avr_asm_len ("mov %r0+2,%0" CR_TAB
7516 "mov %r0+3,%0" CR_TAB
7517 "mov %r0+4,%0" CR_TAB
7518 "mov %r0+5,%0", xop
, plen
, 4);
7522 avr_asm_len ("0:", op
, plen
, 0);
7526 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
7527 is ont a compile-time constant:
7529 XOP[0] = XOP[0] +/- XOP[2]
7531 This is a helper for the function below. The only insns that need this
7532 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
7535 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
7537 machine_mode mode
= GET_MODE (xop
[0]);
7539 /* Only pointer modes want to add symbols. */
7541 gcc_assert (mode
== HImode
|| mode
== PSImode
);
7543 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
7545 avr_asm_len (PLUS
== code
7546 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
7547 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
7550 if (PSImode
== mode
)
7551 avr_asm_len (PLUS
== code
7552 ? "sbci %C0,hlo8(-(%2))"
7553 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
7558 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
7560 INSN is a single_set insn or an insn pattern with a binary operation as
7561 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
7563 XOP are the operands of INSN. In the case of 64-bit operations with
7564 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
7565 The non-saturating insns up to 32 bits may or may not supply a "d" class
7568 If PLEN == NULL output the instructions.
7569 If PLEN != NULL set *PLEN to the length of the sequence in words.
7571 PCC is a pointer to store the instructions' effect on cc0.
7574 PLEN and PCC default to NULL.
7576 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
7581 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
, bool out_label
)
7583 int cc_plus
, cc_minus
, cc_dummy
;
7584 int len_plus
, len_minus
;
7586 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
7587 rtx xdest
= SET_DEST (xpattern
);
7588 machine_mode mode
= GET_MODE (xdest
);
7589 machine_mode imode
= int_mode_for_mode (mode
);
7590 int n_bytes
= GET_MODE_SIZE (mode
);
7591 enum rtx_code code_sat
= GET_CODE (SET_SRC (xpattern
));
7593 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
7599 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
7601 if (PLUS
== code_sat
|| MINUS
== code_sat
)
7604 if (n_bytes
<= 4 && REG_P (xop
[2]))
7606 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
, 0, out_label
);
7612 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
7613 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
7614 op
[2] = avr_to_int_mode (xop
[0]);
7619 && !CONST_INT_P (xop
[2])
7620 && !CONST_FIXED_P (xop
[2]))
7622 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
7625 op
[0] = avr_to_int_mode (xop
[0]);
7626 op
[1] = avr_to_int_mode (xop
[1]);
7627 op
[2] = avr_to_int_mode (xop
[2]);
7630 /* Saturations and 64-bit operations don't have a clobber operand.
7631 For the other cases, the caller will provide a proper XOP[3]. */
7633 xpattern
= INSN_P (insn
) ? PATTERN (insn
) : insn
;
7634 op
[3] = PARALLEL
== GET_CODE (xpattern
) ? xop
[3] : NULL_RTX
;
7636 /* Saturation will need the sign of the original operand. */
7638 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
7639 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
7641 /* If we subtract and the subtrahend is a constant, then negate it
7642 so that avr_out_plus_1 can be used. */
7645 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
7647 /* Work out the shortest sequence. */
7649 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_minus
, code_sat
, sign
, out_label
);
7650 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_plus
, code_sat
, sign
, out_label
);
7654 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
7655 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
7657 else if (len_minus
<= len_plus
)
7658 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
, out_label
);
7660 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
, out_label
);
7666 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
7667 time constant XOP[2]:
7669 XOP[0] = XOP[0] <op> XOP[2]
7671 and return "". If PLEN == NULL, print assembler instructions to perform the
7672 operation; otherwise, set *PLEN to the length of the instruction sequence
7673 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
7674 register or SCRATCH if no clobber register is needed for the operation.
7675 INSN is an INSN_P or a pattern of an insn. */
7678 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
7680 /* CODE and MODE of the operation. */
7681 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
7682 enum rtx_code code
= GET_CODE (SET_SRC (xpattern
));
7683 machine_mode mode
= GET_MODE (xop
[0]);
7685 /* Number of bytes to operate on. */
7686 int i
, n_bytes
= GET_MODE_SIZE (mode
);
7688 /* Value of T-flag (0 or 1) or -1 if unknow. */
7691 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
7692 int clobber_val
= -1;
7694 /* op[0]: 8-bit destination register
7695 op[1]: 8-bit const int
7696 op[2]: 8-bit clobber register or SCRATCH
7697 op[3]: 8-bit register containing 0xff or NULL_RTX */
7706 for (i
= 0; i
< n_bytes
; i
++)
7708 /* We operate byte-wise on the destination. */
7709 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
7710 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
7712 /* 8-bit value to operate with this byte. */
7713 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
7715 /* Number of bits set in the current byte of the constant. */
7716 int pop8
= avr_popcount (val8
);
7718 /* Registers R16..R31 can operate with immediate. */
7719 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
7722 op
[1] = GEN_INT (val8
);
7731 avr_asm_len ("ori %0,%1", op
, plen
, 1);
7735 avr_asm_len ("set", op
, plen
, 1);
7738 op
[1] = GEN_INT (exact_log2 (val8
));
7739 avr_asm_len ("bld %0,%1", op
, plen
, 1);
7743 if (op
[3] != NULL_RTX
)
7744 avr_asm_len ("mov %0,%3", op
, plen
, 1);
7746 avr_asm_len ("clr %0" CR_TAB
7747 "dec %0", op
, plen
, 2);
7753 if (clobber_val
!= (int) val8
)
7754 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7755 clobber_val
= (int) val8
;
7757 avr_asm_len ("or %0,%2", op
, plen
, 1);
7767 avr_asm_len ("clr %0", op
, plen
, 1);
7769 avr_asm_len ("andi %0,%1", op
, plen
, 1);
7773 avr_asm_len ("clt", op
, plen
, 1);
7776 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
7777 avr_asm_len ("bld %0,%1", op
, plen
, 1);
7781 if (clobber_val
!= (int) val8
)
7782 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7783 clobber_val
= (int) val8
;
7785 avr_asm_len ("and %0,%2", op
, plen
, 1);
7795 avr_asm_len ("com %0", op
, plen
, 1);
7796 else if (ld_reg_p
&& val8
== (1 << 7))
7797 avr_asm_len ("subi %0,%1", op
, plen
, 1);
7800 if (clobber_val
!= (int) val8
)
7801 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7802 clobber_val
= (int) val8
;
7804 avr_asm_len ("eor %0,%2", op
, plen
, 1);
7810 /* Unknown rtx_code */
7813 } /* for all sub-bytes */
7819 /* Output sign extension from XOP[1] to XOP[0] and return "".
7820 If PLEN == NULL, print assembler instructions to perform the operation;
7821 otherwise, set *PLEN to the length of the instruction sequence (in words)
7822 as printed with PLEN == NULL. */
7825 avr_out_sign_extend (rtx_insn
*insn
, rtx
*xop
, int *plen
)
7827 // Size in bytes of source resp. destination operand.
7828 unsigned n_src
= GET_MODE_SIZE (GET_MODE (xop
[1]));
7829 unsigned n_dest
= GET_MODE_SIZE (GET_MODE (xop
[0]));
7830 rtx r_msb
= all_regs_rtx
[REGNO (xop
[1]) + n_src
- 1];
7835 // Copy destination to source
7837 if (REGNO (xop
[0]) != REGNO (xop
[1]))
7839 gcc_assert (n_src
<= 2);
7842 avr_asm_len (AVR_HAVE_MOVW
7844 : "mov %B0,%B1", xop
, plen
, 1);
7845 if (n_src
== 1 || !AVR_HAVE_MOVW
)
7846 avr_asm_len ("mov %A0,%A1", xop
, plen
, 1);
7849 // Set Carry to the sign bit MSB.7...
7851 if (REGNO (xop
[0]) == REGNO (xop
[1])
7852 || !reg_unused_after (insn
, r_msb
))
7854 avr_asm_len ("mov __tmp_reg__,%0", &r_msb
, plen
, 1);
7855 r_msb
= tmp_reg_rtx
;
7858 avr_asm_len ("lsl %0", &r_msb
, plen
, 1);
7860 // ...and propagate it to all the new sign bits
7862 for (unsigned n
= n_src
; n
< n_dest
; n
++)
7863 avr_asm_len ("sbc %0,%0", &all_regs_rtx
[REGNO (xop
[0]) + n
], plen
, 1);
7869 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7870 PLEN != NULL: Set *PLEN to the length of that sequence.
7874 avr_out_addto_sp (rtx
*op
, int *plen
)
7876 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
7877 int addend
= INTVAL (op
[0]);
7884 if (flag_verbose_asm
|| flag_print_asm_name
)
7885 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
7887 while (addend
<= -pc_len
)
7890 avr_asm_len ("rcall .", op
, plen
, 1);
7893 while (addend
++ < 0)
7894 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
7896 else if (addend
> 0)
7898 if (flag_verbose_asm
|| flag_print_asm_name
)
7899 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
7901 while (addend
-- > 0)
7902 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
7909 /* Outputs instructions needed for fixed point type conversion.
7910 This includes converting between any fixed point type, as well
7911 as converting to any integer type. Conversion between integer
7912 types is not supported.
7914 Converting signed fractional types requires a bit shift if converting
7915 to or from any unsigned fractional type because the decimal place is
7916 shifted by 1 bit. When the destination is a signed fractional, the sign
7917 is stored in either the carry or T bit. */
7920 avr_out_fract (rtx_insn
*insn
, rtx operands
[], bool intsigned
, int *plen
)
7924 RTX_CODE shift
= UNKNOWN
;
7925 bool sign_in_carry
= false;
7926 bool msb_in_carry
= false;
7927 bool lsb_in_tmp_reg
= false;
7928 bool lsb_in_carry
= false;
7929 bool frac_rounded
= false;
7930 const char *code_ashift
= "lsl %0";
7933 #define MAY_CLOBBER(RR) \
7934 /* Shorthand used below. */ \
7936 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7937 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7938 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7939 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7943 /* bytes : Length of operand in bytes.
7944 ibyte : Length of integral part in bytes.
7945 fbyte, fbit : Length of fractional part in bytes, bits. */
7948 unsigned fbit
, bytes
, ibyte
, fbyte
;
7949 unsigned regno
, regno_msb
;
7950 } dest
, src
, *val
[2] = { &dest
, &src
};
7955 /* Step 0: Determine information on source and destination operand we
7956 ====== will need in the remainder. */
7958 for (i
= 0; i
< sizeof (val
) / sizeof (*val
); i
++)
7962 xop
[i
] = operands
[i
];
7964 mode
= GET_MODE (xop
[i
]);
7966 val
[i
]->bytes
= GET_MODE_SIZE (mode
);
7967 val
[i
]->regno
= REGNO (xop
[i
]);
7968 val
[i
]->regno_msb
= REGNO (xop
[i
]) + val
[i
]->bytes
- 1;
7970 if (SCALAR_INT_MODE_P (mode
))
7972 val
[i
]->sbit
= intsigned
;
7975 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
7977 val
[i
]->sbit
= SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
7978 val
[i
]->fbit
= GET_MODE_FBIT (mode
);
7981 fatal_insn ("unsupported fixed-point conversion", insn
);
7983 val
[i
]->fbyte
= (1 + val
[i
]->fbit
) / BITS_PER_UNIT
;
7984 val
[i
]->ibyte
= val
[i
]->bytes
- val
[i
]->fbyte
;
7987 // Byte offset of the decimal point taking into account different place
7988 // of the decimal point in input and output and different register numbers
7989 // of input and output.
7990 int offset
= dest
.regno
- src
.regno
+ dest
.fbyte
- src
.fbyte
;
7992 // Number of destination bytes that will come from sign / zero extension.
7993 int sign_bytes
= (dest
.ibyte
- src
.ibyte
) * (dest
.ibyte
> src
.ibyte
);
7995 // Number of bytes at the low end to be filled with zeros.
7996 int zero_bytes
= (dest
.fbyte
- src
.fbyte
) * (dest
.fbyte
> src
.fbyte
);
7998 // Do we have a 16-Bit register that is cleared?
7999 rtx clrw
= NULL_RTX
;
8001 bool sign_extend
= src
.sbit
&& sign_bytes
;
8003 if (0 == dest
.fbit
% 8 && 7 == src
.fbit
% 8)
8005 else if (7 == dest
.fbit
% 8 && 0 == src
.fbit
% 8)
8007 else if (dest
.fbit
% 8 == src
.fbit
% 8)
8012 /* If we need to round the fraction part, we might need to save/round it
8013 before clobbering any of it in Step 1. Also, we might want to do
8014 the rounding now to make use of LD_REGS. */
8015 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
8016 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
8017 && !TARGET_FRACT_CONV_TRUNC
)
8021 (offset
? dest
.regno_msb
- sign_bytes
: dest
.regno
+ zero_bytes
- 1)
8022 && dest
.regno
- offset
-1 >= dest
.regno
);
8023 unsigned s0
= dest
.regno
- offset
-1;
8024 bool use_src
= true;
8026 unsigned copied_msb
= src
.regno_msb
;
8027 bool have_carry
= false;
8029 if (src
.ibyte
> dest
.ibyte
)
8030 copied_msb
-= src
.ibyte
- dest
.ibyte
;
8032 for (sn
= s0
; sn
<= copied_msb
; sn
++)
8033 if (!IN_RANGE (sn
, dest
.regno
, dest
.regno_msb
)
8034 && !reg_unused_after (insn
, all_regs_rtx
[sn
]))
8036 if (use_src
&& TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
))
8038 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
8039 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
8043 if (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], sn
))
8044 avr_asm_len ("cpi %0,1", &all_regs_rtx
[sn
], plen
, 1);
8046 avr_asm_len ("sec" CR_TAB
8047 "cpc %0,__zero_reg__",
8048 &all_regs_rtx
[sn
], plen
, 2);
8052 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8054 avr_asm_len (have_carry
? "sbci %0,128" : "subi %0,129",
8055 &all_regs_rtx
[s0
], plen
, 1);
8056 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
8057 avr_asm_len ("sbci %0,255", &all_regs_rtx
[sn
], plen
, 1);
8058 avr_asm_len ("\n0:", NULL
, plen
, 0);
8059 frac_rounded
= true;
8061 else if (use_src
&& overlap
)
8063 avr_asm_len ("clr __tmp_reg__" CR_TAB
8065 "dec __tmp_reg__", xop
, plen
, 1);
8069 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
8074 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
8077 avr_asm_len ("clt" CR_TAB
8078 "bld __tmp_reg__,7" CR_TAB
8079 "adc %0,__tmp_reg__",
8080 &all_regs_rtx
[s0
], plen
, 1);
8082 avr_asm_len ("lsr __tmp_reg" CR_TAB
8083 "add %0,__tmp_reg__",
8084 &all_regs_rtx
[s0
], plen
, 2);
8085 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
8086 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8087 frac_rounded
= true;
8092 = (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
)
8093 && (IN_RANGE (s0
, dest
.regno
, dest
.regno_msb
)
8094 || reg_unused_after (insn
, all_regs_rtx
[s0
])));
8095 xop
[2] = all_regs_rtx
[s0
];
8096 unsigned sn
= src
.regno
;
8097 if (!use_src
|| sn
== s0
)
8098 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
8099 /* We need to consider to-be-discarded bits
8100 if the value is negative. */
8103 avr_asm_len ("tst %0" CR_TAB
8105 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
8106 /* Test to-be-discarded bytes for any nozero bits.
8107 ??? Could use OR or SBIW to test two registers at once. */
8109 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8112 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8113 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
8115 avr_asm_len ("breq 0f" CR_TAB
8117 "\n0:\t" "mov __tmp_reg__,%2",
8120 avr_asm_len ("breq 0f" CR_TAB
8122 "bld __tmp_reg__,0\n0:",
8125 lsb_in_tmp_reg
= true;
8129 /* Step 1: Clear bytes at the low end and copy payload bits from source
8130 ====== to destination. */
8132 int step
= offset
< 0 ? 1 : -1;
8133 unsigned d0
= offset
< 0 ? dest
.regno
: dest
.regno_msb
;
8135 // We cleared at least that number of registers.
8138 for (; d0
>= dest
.regno
&& d0
<= dest
.regno_msb
; d0
+= step
)
8140 // Next regno of destination is needed for MOVW
8141 unsigned d1
= d0
+ step
;
8143 // Current and next regno of source
8144 signed s0
= d0
- offset
;
8145 signed s1
= s0
+ step
;
8147 // Must current resp. next regno be CLRed? This applies to the low
8148 // bytes of the destination that have no associated source bytes.
8149 bool clr0
= s0
< (signed) src
.regno
;
8150 bool clr1
= s1
< (signed) src
.regno
&& d1
>= dest
.regno
;
8152 // First gather what code to emit (if any) and additional step to
8153 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
8154 // is the source rtx for the current loop iteration.
8155 const char *code
= NULL
;
8160 if (AVR_HAVE_MOVW
&& clr1
&& clrw
)
8162 xop
[2] = all_regs_rtx
[d0
& ~1];
8164 code
= "movw %2,%3";
8169 xop
[2] = all_regs_rtx
[d0
];
8174 && d0
% 2 == (step
> 0))
8176 clrw
= all_regs_rtx
[d0
& ~1];
8180 else if (offset
&& s0
<= (signed) src
.regno_msb
)
8182 int movw
= AVR_HAVE_MOVW
&& offset
% 2 == 0
8183 && d0
% 2 == (offset
> 0)
8184 && d1
<= dest
.regno_msb
&& d1
>= dest
.regno
8185 && s1
<= (signed) src
.regno_msb
&& s1
>= (signed) src
.regno
;
8187 xop
[2] = all_regs_rtx
[d0
& ~movw
];
8188 xop
[3] = all_regs_rtx
[s0
& ~movw
];
8189 code
= movw
? "movw %2,%3" : "mov %2,%3";
8190 stepw
= step
* movw
;
8195 if (sign_extend
&& shift
!= ASHIFT
&& !sign_in_carry
8196 && (d0
== src
.regno_msb
|| d0
+ stepw
== src
.regno_msb
))
8198 /* We are going to override the sign bit. If we sign-extend,
8199 store the sign in the Carry flag. This is not needed if
8200 the destination will be ASHIFT in the remainder because
8201 the ASHIFT will set Carry without extra instruction. */
8203 avr_asm_len ("lsl %0", &all_regs_rtx
[src
.regno_msb
], plen
, 1);
8204 sign_in_carry
= true;
8207 unsigned src_msb
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
8209 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
8210 && src
.ibyte
> dest
.ibyte
8211 && (d0
== src_msb
|| d0
+ stepw
== src_msb
))
8213 /* We are going to override the MSB. If we shift right,
8214 store the MSB in the Carry flag. This is only needed if
8215 we don't sign-extend becaue with sign-extension the MSB
8216 (the sign) will be produced by the sign extension. */
8218 avr_asm_len ("lsr %0", &all_regs_rtx
[src_msb
], plen
, 1);
8219 msb_in_carry
= true;
8222 unsigned src_lsb
= dest
.regno
- offset
-1;
8224 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
8226 && (d0
== src_lsb
|| d0
+ stepw
== src_lsb
))
8228 /* We are going to override the new LSB; store it into carry. */
8230 avr_asm_len ("lsl %0", &all_regs_rtx
[src_lsb
], plen
, 1);
8231 code_ashift
= "rol %0";
8232 lsb_in_carry
= true;
8235 avr_asm_len (code
, xop
, plen
, 1);
8240 /* Step 2: Shift destination left by 1 bit position. This might be needed
8241 ====== for signed input and unsigned output. */
8243 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
)
8245 unsigned s0
= dest
.regno
- offset
-1;
8247 /* n1169 4.1.4 says:
8248 "Conversions from a fixed-point to an integer type round toward zero."
8249 Hence, converting a fract type to integer only gives a non-zero result
8251 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
8252 && SCALAR_FRACT_MODE_P (GET_MODE (xop
[1]))
8253 && !TARGET_FRACT_CONV_TRUNC
)
8255 gcc_assert (s0
== src
.regno_msb
);
8256 /* Check if the input is -1. We do that by checking if negating
8257 the input causes an integer overflow. */
8258 unsigned sn
= src
.regno
;
8259 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
8261 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
	      /* Overflow goes with set carry.  Clear carry otherwise.  */
	      avr_asm_len ("brvs 0f" CR_TAB
			   "clc\n0:", NULL, plen, 2);
	    }
	  /* Likewise, when converting from accumulator types to integer, we
	     need to round up negative values.  */
	  else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
		   && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
		   && !TARGET_FRACT_CONV_TRUNC)
	    {
	      bool have_carry = false;

	      xop[2] = all_regs_rtx[s0];
	      if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
		avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
	      avr_asm_len ("tst %0" CR_TAB
			   "brpl 0f",
			   &all_regs_rtx[src.regno_msb], plen, 2);
	      if (!lsb_in_tmp_reg)
		{
		  unsigned sn = src.regno;
		  avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
			       plen, 1);
		  while (++sn < s0)
		    avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn],
				 plen, 1);
		  lsb_in_tmp_reg = !MAY_CLOBBER (s0);
		}
	      /* Add in C and the rounding value 127.  */
	      /* If the destination msb is a sign byte, and in LD_REGS,
		 grab it as a temporary.  */
	      if (sign_bytes
		  && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
					dest.regno_msb))
		{
		  xop[3] = all_regs_rtx[dest.regno_msb];
		  avr_asm_len ("ldi %3,127", xop, plen, 1);
		  avr_asm_len ((have_carry && lsb_in_tmp_reg
				? "adc __tmp_reg__,%3"
				: have_carry ? "adc %2,%3"
				: lsb_in_tmp_reg ? "add __tmp_reg__,%3"
				: "add %2,%3"),
			       xop, plen, 1);
		}
	      else
		{
		  /* Fall back to use __zero_reg__ as a temporary.  */
		  avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
		  if (have_carry)
		    avr_asm_len ("clt" CR_TAB
				 "bld __zero_reg__,7", NULL, plen, 2);
		  else
		    avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
		  avr_asm_len (have_carry && lsb_in_tmp_reg
			       ? "adc __tmp_reg__,__zero_reg__"
			       : have_carry ? "adc %2,__zero_reg__"
			       : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
			       : "add %2,__zero_reg__",
			       xop, plen, 1);
		  avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
		}

	      for (d0 = dest.regno + zero_bytes;
		   d0 <= dest.regno_msb - sign_bytes; d0++)
		avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);

	      avr_asm_len (lsb_in_tmp_reg
			   ? "\n0:\t" "lsl __tmp_reg__"
			   : "\n0:\t" "lsl %2",
			   xop, plen, 1);
	    }
	  else if (MAY_CLOBBER (s0))
	    avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
	  else
	    avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
			 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

	  code_ashift = "rol %0";
	  lsb_in_carry = true;
	}

      if (shift == ASHIFT)
	{
	  for (d0 = dest.regno + zero_bytes;
	       d0 <= dest.regno_msb - sign_bytes; d0++)
	    {
	      avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
	      code_ashift = "rol %0";
	    }

	  lsb_in_carry = false;
	  sign_in_carry = true;
	}
    }

  /* Step 4a:  Store MSB in carry if we don't already have it or will produce
     =======   it in sign-extension below.  */

  if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
      && src.ibyte > dest.ibyte)
    {
      unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;

      if (MAY_CLOBBER (s0))
	avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
      else
	avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
		     "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      msb_in_carry = true;
    }

  /* Step 3:  Sign-extend or zero-extend the destination as needed.
     =======  */

  if (sign_extend && !sign_in_carry)
    {
      unsigned s0 = src.regno_msb;

      if (MAY_CLOBBER (s0))
	avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
	avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
		     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      sign_in_carry = true;
    }

  gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);

  {
    unsigned copies = 0;
    rtx movw = sign_extend ? NULL_RTX : clrw;

    for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
      {
	if (AVR_HAVE_MOVW && movw
	    && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
	  {
	    xop[2] = all_regs_rtx[d0];
	    xop[3] = movw;
	    avr_asm_len ("movw %2,%3", xop, plen, 1);
	    d0++;
	  }
	else
	  {
	    avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
			 &all_regs_rtx[d0], plen, 1);

	    if (++copies >= 2 && !movw && d0 % 2 == 1)
	      movw = all_regs_rtx[d0-1];
	  }
      }
  }

  /* Step 4:  Right shift the destination.  This might be needed for
     =======  conversions from unsigned to signed.  */

  if (shift == ASHIFTRT)
    {
      const char *code_ashiftrt = "lsr %0";

      if (sign_extend || msb_in_carry)
	code_ashiftrt = "ror %0";

      if (src.sbit && src.ibyte == dest.ibyte)
	code_ashiftrt = "asr %0";

      for (d0 = dest.regno_msb - sign_bytes;
	   d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
	{
	  avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
	  code_ashiftrt = "ror %0";
	}
    }

  return "";
}
/* Output fixed-point rounding.  XOP[0] = XOP[1] is the operand to round.
   XOP[2] is the rounding point, a CONST_INT.  The function prints the
   instruction sequence if PLEN = NULL and computes the length in words
   of the sequence if PLEN != NULL.  Most of this function deals with
   preparing operands for calls to `avr_out_plus' and `avr_out_bitop'.  */

const char*
avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
  machine_mode mode = GET_MODE (xop[0]);
  machine_mode imode = int_mode_for_mode (mode);
  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  int fbit = (int) GET_MODE_FBIT (mode);
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
  wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
					 GET_MODE_PRECISION (imode));
  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate  1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the
  // "0:" below.

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
    : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (xop[0], xsrc);

  op[0] = xop[0];
  op[1] = xop[1];
  op[2] = xadd;
  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  avr_asm_len ("rjmp 1f" CR_TAB
	       "0:", NULL, plen_add, 1);

  // Keep  all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower:              2^(-RP-1) ...
  // Rounding point                           ^^^^^^^
  // Added above                                      ^^^^^^^^^
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);

  xpattern = gen_rtx_SET (xreg, gen_rtx_AND (imode, xreg, xmask));

  op[0] = xreg;
  op[1] = xreg;
  op[2] = xmask;
  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  avr_asm_len ("1:", NULL, plen, 0);

  if (plen)
    *plen = len_add + len_and;

  return "";
}
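
/* Worked example (a sketch, not part of the emitted sequence): take a
   Q8.8-style operand (fbit = 8, a simplifying assumption) and RP = 2.
   Then wi_add = 2^(fbit-1-RP) = 32 = 0.125, and the mask is
   -wi_add - wi_add = -64 = 0xFFC0:

       3.1484375 (0x0326)  +0x20 -> 0x0346,  & 0xFFC0 -> 0x0340 = 3.25

   so the result is the input rounded to a multiple of 2^(-RP) = 0.25.  */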
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.  */

bool
avr_rotate_bytes (rtx operands[])
{
  int i, j;
  machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* HImode byte swap is a special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
	{
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	  emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	}
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode)  */
      /* Create linked list of moves to determine move order.  */
      struct {
	rtx src, dst;
	int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
	{
	  int from = i;
	  int to = (from + offset) % size;
	  move[i].src = simplify_gen_subreg (move_mode, operands[1],
					     mode, from * move_size);
	  move[i].dst = simplify_gen_subreg (move_mode, operands[0],
					     mode, to * move_size);
	  move[i].links = -1;
	}
      /* Mark dependence where a dst of one move is the src of another move.
	 The first move is a conflict as it must wait until second is
	 performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
	for (i = 0; i < size; i++)
	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
	    for (j = 0; j < size; j++)
	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
		{
		  /* The dst of move i is the src of move j.  */
		  move[i].links = j;
		  break;
		}

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
	 non-overlapping move is made, it may remove other conflicts
	 so the process is repeated until no conflicts remain.  */
      do
	{
	  blocked = -1;
	  moves = 0;
	  /* Emit move where dst is not also a src or we have used that
	     src already.  */
	  for (i = 0; i < size; i++)
	    if (move[i].src != NULL_RTX)
	      {
		if (move[i].links == -1
		    || move[move[i].links].src == NULL_RTX)
		  {
		    moves++;
		    /* Ignore NOP moves to self.  */
		    if (!rtx_equal_p (move[i].dst, move[i].src))
		      emit_move_insn (move[i].dst, move[i].src);

		    /* Remove conflict from list.  */
		    move[i].src = NULL_RTX;
		  }
		else
		  blocked = i;
	      }

	  /* Check for deadlock.  This is when no moves occurred and we have
	     at least one blocked move.  */
	  if (moves == 0 && blocked != -1)
	    {
	      /* Need to use scratch register to break deadlock.
		 Add move to put dst of blocked move into scratch.
		 When this move occurs, it will break chain deadlock.
		 The scratch register is substituted for real move.  */

	      gcc_assert (SCRATCH != GET_CODE (scratch));

	      move[size].src = move[blocked].dst;
	      move[size].dst = scratch;
	      /* Scratch move is never blocked.  */
	      move[size].links = -1;
	      /* Make sure we have valid link.  */
	      gcc_assert (move[blocked].links != -1);
	      /* Replace src of blocking move with scratch reg.  */
	      move[move[blocked].links].src = scratch;
	      /* Make dependent on scratch move occurring.  */
	      move[blocked].links = size;

	      size = size + 1;
	    }
	}
      while (blocked != -1);
    }
  return true;
}
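
/* The HImode same-register byte swap above uses the classic triple-XOR
   exchange.  A host-side sketch of the same idea (hypothetical helper,
   for illustration only):

       static void swap_bytes (unsigned char *hi, unsigned char *lo)
       {
         *hi ^= *lo;   // hi = h ^ l
         *lo ^= *hi;   // lo = h
         *hi ^= *lo;   // hi = l
       }

   No scratch register is needed, which is why it is special-cased.  */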
/* Worker function for `ADJUST_INSN_LENGTH'.  */
/* Modifies the length assigned to instruction INSN
   LEN is the initially computed length of the insn.  */

int
avr_adjust_insn_length (rtx_insn *insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (!NONDEBUG_INSN_P (insn)
      || -1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
	 This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    default:
      gcc_unreachable();
    }

  return len;
}
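
/* For reference, the dispatch above is driven by the "adjust_len" insn
   attribute in avr.md.  A typical pattern (a sketch of the convention,
   not a verbatim quote from avr.md) ends like:

       [(set_attr "length" "1")
        (set_attr "adjust_len" "mov8")]

   so that ADJUST_INSN_LENGTH routes the insn to output_movqi above.  */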
/* Return nonzero if register REG dead after INSN.  */

int
reg_unused_after (rtx_insn *insn, rtx reg)
{
  return (dead_or_set_p (insn, reg)
	  || (REG_P (reg) && _reg_unused_after (insn, reg)));
}

/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

int
_reg_unused_after (rtx_insn *insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

      /* If this is a label that existed before reload, then the register
	 is dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;

      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
	  int i;
	  int retval = 0;

	  for (i = 0; i < seq->len (); i++)
	    {
	      rtx_insn *this_insn = seq->insn (i);
	      rtx set = single_set (this_insn);

	      if (CALL_P (this_insn))
		code = CALL_INSN;
	      else if (JUMP_P (this_insn))
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  return 1;
}
/* Implement `TARGET_ASM_INTEGER'.  */
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels.  */

static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode))
    {
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }
  else if (GET_MODE (x) == PSImode)
    {
      /* This needs binutils 2.23+, see PR binutils/13503  */

      fputs ("\t.byte\tlo8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thi8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      return true;
    }
  else if (CONST_FIXED_P (x))
    {
      unsigned n;

      /* varasm fails to handle big fixed modes that don't fit in hwi.  */

      for (n = 0; n < size; n++)
	{
	  rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
	  default_assemble_integer (xn, 1, aligned_p);
	}

      return true;
    }

  return default_assemble_integer (x, size, aligned_p);
}
/* Implement `TARGET_CLASS_LIKELY_SPILLED_P'.  */
/* Return value is nonzero if pseudos that have been
   assigned to registers of class CLASS would likely be spilled
   because registers of CLASS are needed for spill registers.  */

static bool
avr_class_likely_spilled_p (reg_class_t c)
{
  return (c != ALL_REGS &&
	  (AVR_TINY ? 1 : c != ADDW_REGS));
}
/* Valid attributes:
   progmem   -  Put data to program memory.
   signal    -  Make a function to be hardware interrupt.
		After function prologue interrupts remain disabled.
   interrupt -  Make a function to be hardware interrupt.  Before function
		prologue interrupts are enabled by means of SEI.
   naked     -  Don't generate function prologue/epilogue and RET
		instruction.  */

/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
avr_handle_progmem_attribute (tree *node, tree name,
			      tree args ATTRIBUTE_UNUSED,
			      int flags ATTRIBUTE_UNUSED,
			      bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	{
	  /* This is really a decl attribute, not a type attribute,
	     but try to handle it for GCC 3.0 backwards compatibility.  */

	  tree type = TREE_TYPE (*node);
	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
	  tree newtype = build_type_attribute_variant (type, attr);

	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
	  TREE_TYPE (*node) = newtype;

	  *no_add_attrs = true;
	}
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
	{
	  *no_add_attrs = false;
	}
      else
	{
	  warning (OPT_Wattributes, "%qE attribute ignored",
		   name);
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
   struct attribute_spec.handler.  */

static tree
avr_handle_fndecl_attribute (tree *node, tree name,
			     tree args ATTRIBUTE_UNUSED,
			     int flags ATTRIBUTE_UNUSED,
			     bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

static tree
avr_handle_fntype_attribute (tree *node, tree name,
			     tree args ATTRIBUTE_UNUSED,
			     int flags ATTRIBUTE_UNUSED,
			     bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
static tree
avr_handle_addr_attribute (tree *node, tree name, tree args,
			   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
  location_t loc = DECL_SOURCE_LOCATION (*node);

  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning_at (loc, 0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
      else if (io_p
	       && (!tree_fits_shwi_p (arg)
		   || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
			? low_io_address_operand : io_address_operand)
			 (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
	{
	  warning_at (loc, 0, "%qE attribute address out of range", name);
	  *no_add = true;
	}
      else
	{
	  tree attribs = DECL_ATTRIBUTES (*node);
	  const char *names[] = { "io", "io_low", "address", NULL } ;
	  for (const char **p = names; *p; p++)
	    {
	      tree other = lookup_attribute (*p, attribs);
	      if (other && TREE_VALUE (other))
		{
		  warning_at (loc, 0,
			      "both %s and %qE attribute provide address",
			      *p, name);
		  *no_add = true;
		  break;
		}
	    }
	}
    }

  if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
    warning_at (loc, 0, "%qE attribute on non-volatile variable", name);

  return NULL_TREE;
}
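
/* Usage sketch for the attributes handled above (user code; the
   addresses are device-specific and chosen here only for illustration):

       volatile char porta  __attribute__((io (0x22)));
       volatile char gpior0 __attribute__((io_low (0x1e)));
       volatile char sym    __attribute__((address (0x80)));

   Non-volatile uses of "io" draw the warning emitted above.  */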
rtx
avr_eval_addr_attrib (rtx x)
{
  if (GET_CODE (x) == SYMBOL_REF
      && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
    {
      tree decl = SYMBOL_REF_DECL (x);
      tree attr = NULL_TREE;

      if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
	{
	  attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
	}
      if (!attr || !TREE_VALUE (attr))
	attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
      gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
      return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
    }
  return x;
}
/* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "io",        0, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { "io_low",    0, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { "address",   1, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
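
/* Usage sketch for the function and data attributes registered above
   (user code; the vector name and data are chosen only for illustration):

       const char msg[] __attribute__((progmem)) = "stored in flash";

       void __vector_5 (void) __attribute__((signal, used));
       void __vector_5 (void)
       {
         // interrupts stay disabled here; "interrupt" instead would
         // re-enable them with SEI in the prologue.
       }
*/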
/* Look if DECL shall be placed in program memory space by
   means of attribute `progmem' or some address-space qualifier.
   Return non-zero if DECL is data that must end up in Flash and
   zero if the data lives in RAM (.bss, .data, .rodata, ...).

   Return 2   if DECL is located in 24-bit flash address-space
   Return 1   if DECL is located in 16-bit flash address-space
   Return -1  if attribute `progmem' occurs in DECL or ATTRIBUTES
   Return 0   otherwise  */

int
avr_progmem_p (tree decl, tree attributes)
{
  tree a;

  if (TREE_CODE (decl) != VAR_DECL)
    return 0;

  if (avr_decl_memx_p (decl))
    return 2;

  if (avr_decl_flash_p (decl))
    return 1;

  if (NULL_TREE
      != lookup_attribute ("progmem", attributes))
    return -1;

  a = TREE_TYPE (decl);

  if (a == error_mark_node)
    return 0;

  do
    a = TREE_TYPE (a);
  while (TREE_CODE (a) == ARRAY_TYPE);

  if (a == error_mark_node)
    return 0;

  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
    return -1;

  return 0;
}
/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.  */

static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
	return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... */

      while (TREE_CODE (target) == ARRAY_TYPE)
	target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.
	 Refuse address spaces outside the device's flash.  */

      as = TYPE_ADDR_SPACE (target);

      if (!ADDR_SPACE_GENERIC_P (as)
	  && (!TYPE_READONLY (target)
	      || avr_addrspace[as].segment >= avr_n_flash
	      /* Also refuse __memx address space if we can't support it.  */
	      || (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)))
	{
	  return as;
	}

      /* Scan pointer's target type.  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
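
/* Example of what the check above accepts and rejects (a sketch;
   __flash requires a device with LPM):

       const __flash int table[] = { 1, 2, 3 };
       const __flash int *p = table;   // OK: pointer target is const
       // __flash int *q = table;      // rejected: target must be const
*/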
/* Sanity check NODE so that all pointers targeting non-generic address spaces
   go along with CONST qualifier.  Writing to these address spaces should
   be detected and complained about as early as possible.  */

static bool
avr_pgm_check_var_decl (tree node)
{
  const char *reason = NULL;

  addr_space_t as = ADDR_SPACE_GENERIC;

  gcc_assert (as == 0);

  if (avr_log.progmem)
    avr_edump ("%?: %t\n", node);

  switch (TREE_CODE (node))
    {
    default:
      break;

    case VAR_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
	reason = "variable";
      break;

    case PARM_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
	reason = "function parameter";
      break;

    case FIELD_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
	reason = "structure field";
      break;

    case FUNCTION_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
	  as)
	reason = "return type of function";
      break;

    case POINTER_TYPE:
      if (as = avr_nonconst_pointer_addrspace (node), as)
	reason = "pointer";
      break;
    }

  if (reason)
    {
      if (avr_addrspace[as].segment >= avr_n_flash)
	{
	  if (TYPE_P (node))
	    error ("%qT uses address space %qs beyond flash of %d KiB",
		   node, avr_addrspace[as].name, avr_n_flash);
	  else
	    error ("%s %q+D uses address space %qs beyond flash of %d KiB",
		   reason, node, avr_addrspace[as].name, avr_n_flash);
	}
      else
	{
	  if (TYPE_P (node))
	    error ("pointer targeting address space %qs must be const in %qT",
		   avr_addrspace[as].name, node);
	  else
	    error ("pointer targeting address space %qs must be const"
		   " in %s %q+D",
		   avr_addrspace[as].name, reason, node);
	}
    }

  return reason == NULL;
}
/* Add the section attribute if the variable is in progmem.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
	 determination of readonlyness.  */

      do
	node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
	return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      if (avr_addrspace[as].segment >= avr_n_flash)
	{
	  error ("variable %q+D located in address space %qs beyond flash "
		 "of %d KiB", node, avr_addrspace[as].name, avr_n_flash);
	}
      else if (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)
	{
	  error ("variable %q+D located in address space %qs"
		 " which is not supported for architecture %qs",
		 node, avr_addrspace[as].name, avr_arch->name);
	}

      if (!TYPE_READONLY (node0)
	  && !TREE_READONLY (node))
	{
	  const char *reason = "__attribute__((progmem))";

	  if (!ADDR_SPACE_GENERIC_P (as))
	    reason = avr_addrspace[as].name;

	  if (avr_log.progmem)
	    avr_edump ("\n%?: %t\n%t\n", node, node0);

	  error ("variable %q+D must be const in order to be put into"
		 " read-only section by means of %qs", node, reason);
	}
    }
}
/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
/* Track need of __do_clear_bss.  */

void
avr_asm_output_aligned_decl_common (FILE * stream,
				    tree decl,
				    const char *name,
				    unsigned HOST_WIDE_INT size,
				    unsigned int align, bool local_p)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX && MEM_P (mem)
      && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      if (!local_p)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
	{
	  assemble_name (stream, name);
	  fprintf (stream, " = %ld\n",
		   (long) INTVAL (avr_eval_addr_attrib (symbol)));
	}
      else if (local_p)
	error_at (DECL_SOURCE_LOCATION (decl),
		  "static IO declaration for %q+D needs an address", decl);
      return;
    }

  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}
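
/* For a declaration like

       volatile char porta __attribute__((io (0x22)));   // sketch

   the code above emits no common/local storage, only an absolute symbol,
   roughly:

       .globl porta
       porta = 34
*/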
void
avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
				unsigned HOST_WIDE_INT size, int align,
				void (*default_func)
				  (FILE *, tree, const char *,
				   unsigned HOST_WIDE_INT, int))
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX && MEM_P (mem)
      && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
	error_at (DECL_SOURCE_LOCATION (decl),
		  "IO definition for %q+D needs an address", decl);
      avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
    }
  else
    default_func (file, decl, name, size, align);
}
/* Unnamed section callback for data_section
   to track need of __do_copy_data.  */

static void
avr_output_data_section_asm_op (const void *data)
{
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}


/* Unnamed section callback for bss_section
   to track need of __do_clear_bss.  */

static void
avr_output_bss_section_asm_op (const void *data)
{
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}


/* Unnamed section callback for progmem*.data sections.  */

static void
avr_output_progmem_section_asm_op (const void *data)
{
  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
	   (const char*) data);
}
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  /* Set up a section for jump tables.  Alignment is handled by
     ASM_OUTPUT_BEFORE_CASE_LABEL.  */

  if (AVR_HAVE_JMP_CALL)
    {
      progmem_swtable_section
	= get_unnamed_section (0, output_section_asm_op,
			       "\t.section\t.progmem.gcc_sw_table"
			       ",\"a\",@progbits");
    }
  else
    {
      progmem_swtable_section
	= get_unnamed_section (SECTION_CODE, output_section_asm_op,
			       "\t.section\t.progmem.gcc_sw_table"
			       ",\"ax\",@progbits");
    }

  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  */

  readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */

static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.  */
  {
    int fdata = flag_data_sections;

    flag_data_sections = flag_function_sections;
    frodata = default_function_rodata_section (decl);
    flag_data_sections = fdata;
    flags = frodata->common.flags;
  }

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      static const char* const prefix[] =
	{
	  ".rodata",          ".progmem.gcc_sw_table",
	  ".gnu.linkonce.r.", ".gnu.linkonce.t."
	};

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
	{
	  const char * old_prefix = prefix[i];
	  const char * new_prefix = prefix[i+1];
	  const char * name = frodata->named.name;

	  if (STR_PREFIX_P (name, old_prefix))
	    {
	      const char *rname = ACONCAT ((new_prefix,
					    name + strlen (old_prefix), NULL));
	      flags &= ~SECTION_CODE;
	      flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

	      return get_section (rname, flags, frodata->named.decl);
	    }
	}
    }

  return progmem_swtable_section;
}
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      const char *old_prefix = ".rodata";
      const char *new_prefix = avr_addrspace[as].section_name;

      if (STR_PREFIX_P (name, old_prefix))
	{
	  const char *sname = ACONCAT ((new_prefix,
					name + strlen (old_prefix), NULL));
	  default_elf_asm_named_section (sname, flags, decl);
	  return;
	}

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
			    || STR_PREFIX_P (name, ".rodata")
			    || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
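
/* Effect of the prefix replacement above (a sketch): with
   -fdata-sections, a __flash1 object "tbl" whose default section would
   be ".rodata.tbl" is emitted as

       .section .progmem1.data.tbl,"a",@progbits

   following the section_name of its entry in avr_addrspace[].  */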
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (STR_PREFIX_P (name, ".noinit"))
    {
      if (decl && TREE_CODE (decl) == VAR_DECL
	  && DECL_INITIAL (decl) == NULL_TREE)
	flags |= SECTION_BSS;  /* @nobits */
      else
	warning (0, "only uninitialized variables can be placed in the "
		 ".noinit section");
    }

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* Attribute progmem puts data in generic address space.
	 Set section flags as if it was in __flash to get the right
	 section prefix in the remainder.  */

      if (ADDR_SPACE_GENERIC_P (as))
	as = ADDR_SPACE_FLASH;

      flags |= as * SECTION_MACH_DEP;
      flags &= ~SECTION_WRITE;
      flags &= ~SECTION_BSS;
    }

  return flags;
}
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && NULL_TREE == DECL_INITIAL (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      warning (OPT_Wuninitialized,
	       "uninitialized variable %q+D put into "
	       "program memory area", decl);
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      tree type = TREE_TYPE (decl);
      tree attr = DECL_ATTRIBUTES (decl);
      if (type == error_mark_node)
	return;

      addr_space_t as = TYPE_ADDR_SPACE (type);

      /* PSTR strings are in generic space but located in flash:
	 patch address space.  */

      if (-1 == avr_progmem_p (decl, attr))
	as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);

      tree io_low_attr = lookup_attribute ("io_low", attr);
      tree io_attr = lookup_attribute ("io", attr);
      tree addr_attr;
      if (io_low_attr
	  && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
	addr_attr = io_attr;
      else if (io_attr
	       && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
	addr_attr = io_attr;
      else
	addr_attr = lookup_attribute ("address", attr);
      if (io_low_attr
	  || (io_attr && addr_attr
	      && low_io_address_operand
		  (GEN_INT (TREE_INT_CST_LOW
			    (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
      if (io_attr || io_low_attr)
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
      /* If we have an (io) address attribute specification, but the variable
	 is external, treat the address as only a tentative definition
	 to be used to determine if an io port is in the lower range, but
	 don't use the exact value for constant propagation.  */
      if (addr_attr && !DECL_EXTERNAL (decl))
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
    }
}
/* Implement `TARGET_ASM_SELECT_SECTION' */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* __progmem__ goes in generic space but shall be allocated to
	 .progmem.data  */

      if (ADDR_SPACE_GENERIC_P (as))
	as = ADDR_SPACE_FLASH;

      if (sect->common.flags & SECTION_NAMED)
	{
	  const char * name = sect->named.name;
	  const char * old_prefix = ".rodata";
	  const char * new_prefix = avr_addrspace[as].section_name;

	  if (STR_PREFIX_P (name, old_prefix))
	    {
	      const char *sname = ACONCAT ((new_prefix,
					    name + strlen (old_prefix), NULL));
	      return get_section (sname, sect->common.flags, sect->named.decl);
	    }
	}

      if (!progmem_section[as])
	{
	  progmem_section[as]
	    = get_unnamed_section (0, avr_output_progmem_section_asm_op,
				   avr_addrspace[as].section_name);
	}

      return progmem_section[as];
    }

  return sect;
}
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  int sfr_offset = avr_arch->sfr_offset;

  if (avr_arch->asm_only)
    error ("architecture %qs supported for assembler only", avr_mmcu);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA || AVR_TINY)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
}
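
/* Typical preamble emitted by avr_file_start (a sketch; the SFR
   addresses vary per device, the values below are only illustrative):

       __SP_H__ = 0x3e
       __SP_L__ = 0x3d
       __SREG__ = 0x3f
       __tmp_reg__ = 0
       __zero_reg__ = 1
*/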
/* Implement `TARGET_ASM_FILE_END'.  */
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */

static void
avr_file_end (void)
{
  /* Output these only if there is anything in the
     .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
     input section(s) - some code size can be saved by not
     linking in the initialization code from libgcc if resp.
     sections are empty, see PR18145.  */

  if (avr_need_copy_data_p)
    fputs (".global __do_copy_data\n", asm_out_file);

  if (avr_need_clear_bss_p)
    fputs (".global __do_clear_bss\n", asm_out_file);
}
/* Worker function for `ADJUST_REG_ALLOC_ORDER'.  */
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.  */

void
avr_adjust_reg_alloc_order (void)
{
  unsigned int i;
  static const int order_0[] =
    {
      24, 25,
      18, 19, 20, 21, 22, 23,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int tiny_order_0[] = {
    20, 21,
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  static const int order_1[] =
    {
      18, 19, 20, 21, 22, 23, 24, 25,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int tiny_order_1[] = {
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    21, 20, 19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  static const int order_2[] =
    {
      25, 24, 23, 22, 21, 20, 19, 18,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      1, 0,
      32, 33, 34, 35
    };

  /* Select specific register allocation order.
     Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
     so different allocation order should be used.  */

  const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
		      : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
		      : (AVR_TINY ? tiny_order_0 : order_0));

  for (i = 0; i < ARRAY_SIZE (order_0); ++i)
    reg_alloc_order[i] = order[i];
}
/* Implement `TARGET_REGISTER_MOVE_COST' */

static int
avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			reg_class_t from, reg_class_t to)
{
  return (from == STACK_REG ? 6
	  : to == STACK_REG ? 12
	  : 2);
}


/* Implement `TARGET_MEMORY_MOVE_COST' */

static int
avr_memory_move_cost (machine_mode mode,
		      reg_class_t rclass ATTRIBUTE_UNUSED,
		      bool in ATTRIBUTE_UNUSED)
{
  return (mode == QImode ? 2
	  : mode == HImode ? 4
	  : mode == SImode ? 8
	  : mode == SFmode ? 8
	  : 16);
}
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
   cost of an RTX operand given its context.  X is the rtx of the
   operand, MODE is its mode, and OUTER is the rtx_code of this
   operand's parent operator.  */

static int
avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
		      int opno, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    case REG:
    case SUBREG:
      return 0;

    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  total = 0;
  avr_rtx_costs (x, code, outer, opno, &total, speed);
  return total;
}
/* Worker function for AVR backend's rtx_cost function.
   X is rtx expression whose cost is to be calculated.
   Return true if the complete cost has been computed.
   Return false if subexpressions should be scanned.
   In either case, *TOTAL contains the cost result.  */

static bool
avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
		 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  enum rtx_code code = (enum rtx_code) codearg;
  machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

	case HImode:
	case PSImode:
	case SImode:
	  *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ABS:
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case NOT:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ZERO_EXTEND:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case SIGN_EXTEND:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case PLUS:
      switch (mode)
	{
	case QImode:
	  if (AVR_HAVE_MUL
	      && MULT == GET_CODE (XEXP (x, 0))
	      && register_operand (XEXP (x, 1), QImode))
	    {
	      /* multiply-add */
	      *total = COSTS_N_INSNS (speed ? 4 : 3);
	      /* multiply-add with constant: will be split and load constant. */
	      if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
		*total = COSTS_N_INSNS (1) + *total;
	      return true;
	    }
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
	  break;

	case HImode:
	  if (AVR_HAVE_MUL
	      && (MULT == GET_CODE (XEXP (x, 0))
		  || ASHIFT == GET_CODE (XEXP (x, 0)))
	      && register_operand (XEXP (x, 1), HImode)
	      && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
		  || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
	    {
	      /* multiply-add */
	      *total = COSTS_N_INSNS (speed ? 5 : 4);
	      /* multiply-add with constant: will be split and load constant. */
	      if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
		*total = COSTS_N_INSNS (1) + *total;
	      return true;
	    }
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (2);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (2);
	  break;

	case PSImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (3);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	    *total = COSTS_N_INSNS (2);
	  else
	    *total = COSTS_N_INSNS (3);
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (4);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (4);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case MINUS:
      if (AVR_HAVE_MUL
	  && QImode == mode
	  && register_operand (XEXP (x, 0), QImode)
	  && MULT == GET_CODE (XEXP (x, 1)))
	{
	  /* multiply-sub */
	  *total = COSTS_N_INSNS (speed ? 4 : 3);
	  /* multiply-sub with constant: will be split and load constant. */
	  if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
	    *total = COSTS_N_INSNS (1) + *total;
	  return true;
	}
      if (AVR_HAVE_MUL
	  && HImode == mode
	  && register_operand (XEXP (x, 0), HImode)
	  && (MULT == GET_CODE (XEXP (x, 1))
	      || ASHIFT == GET_CODE (XEXP (x, 1)))
	  && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
	      || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
	{
	  /* multiply-sub */
	  *total = COSTS_N_INSNS (speed ? 5 : 4);
	  /* multiply-sub with constant: will be split and load constant. */
	  if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
	    *total = COSTS_N_INSNS (1) + *total;
	  return true;
	}
      /* FALLTHRU */
    case AND:
    case IOR:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case XOR:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case MULT:
      switch (mode)
	{
	case QImode:
	  if (AVR_HAVE_MUL)
	    *total = COSTS_N_INSNS (!speed ? 3 : 4);
	  else if (!speed)
	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
	  else
	    return false;
	  break;

	case HImode:
	  if (AVR_HAVE_MUL)
	    {
	      rtx op0 = XEXP (x, 0);
	      rtx op1 = XEXP (x, 1);
	      enum rtx_code code0 = GET_CODE (op0);
	      enum rtx_code code1 = GET_CODE (op1);
	      bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
	      bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;

	      if (ex0
		  && (u8_operand (op1, HImode)
		      || s8_operand (op1, HImode)))
		{
		  *total = COSTS_N_INSNS (!speed ? 4 : 6);
		  return true;
		}
	      if (ex0
		  && register_operand (op1, HImode))
		{
		  *total = COSTS_N_INSNS (!speed ? 5 : 8);
		  return true;
		}
	      else if (ex0 || ex1)
		{
		  *total = COSTS_N_INSNS (!speed ? 3 : 5);
		  return true;
		}
	      else if (register_operand (op0, HImode)
		       && (u8_operand (op1, HImode)
			   || s8_operand (op1, HImode)))
		{
		  *total = COSTS_N_INSNS (!speed ? 6 : 9);
		  return true;
		}
	      else
		*total = COSTS_N_INSNS (!speed ? 7 : 10);
	    }
	  else if (!speed)
	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
	  else
	    return false;
	  break;

	case PSImode:
	  if (!speed)
	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
	  else
	    *total = 10;
	  break;

	case SImode:
	  if (AVR_HAVE_MUL)
	    {
	      if (!speed)
		{
		  /* Add some additional costs besides CALL like moves etc.  */
		  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
		}
	      else
		{
		  /* Just a rough estimate.  Even with -O2 we don't want bulky
		     code expanded inline.  */
		  *total = COSTS_N_INSNS (25);
		}
	    }
	  else
	    {
	      if (speed)
		*total = COSTS_N_INSNS (300);
	      else
		/* Add some additional costs besides CALL like moves etc.  */
		*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
	    }

	  return true;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (!speed)
	*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
	*total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* For div/mod with const-int divisor we have at least the cost of
	 loading the divisor. */
      if (CONST_INT_P (XEXP (x, 1)))
	*total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
      /* Add some overall penalty for clobbering and moving around registers */
      *total += COSTS_N_INSNS (2);
      return true;

    case ROTATE:
      switch (mode)
	{
	case QImode:
	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
	    *total = COSTS_N_INSNS (1);
	  break;

	case HImode:
	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
	    *total = COSTS_N_INSNS (3);
	  break;

	case SImode:
	  if (CONST_INT_P (XEXP (x, 1)))
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 8:
	      case 24:
		*total = COSTS_N_INSNS (5);
		break;
	      case 16:
		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
		break;
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFT:
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (AVR_HAVE_MUL)
	    {
	      if (const_2_to_7_operand (XEXP (x, 1), HImode)
		  && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
		      || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
		{
		  *total = COSTS_N_INSNS (!speed ? 4 : 6);
		  return true;
		}
	    }

	  if (const1_rtx == (XEXP (x, 1))
	      && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
	    {
	      *total = COSTS_N_INSNS (2);
	      return true;
	    }

	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 3:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
	      case 11:
	      case 12:
		*total = COSTS_N_INSNS (5);
		break;
	      case 4:
		*total = COSTS_N_INSNS (!speed ? 5 : 8);
		break;
	      case 6:
		*total = COSTS_N_INSNS (!speed ? 5 : 9);
		break;
	      case 5:
		*total = COSTS_N_INSNS (!speed ? 5 : 10);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

	case PSImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 6 : 73);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
	      case 16:
		*total = COSTS_N_INSNS (3);
		break;
	      case 23:
		*total = COSTS_N_INSNS (5);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
		break;
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 24:
		*total = COSTS_N_INSNS (3);
		break;
	      case 1:
	      case 8:
	      case 16:
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (!speed ? 7 : 8);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFTRT:
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 6)
		*total = COSTS_N_INSNS (4);
	      else if (val == 7)
		*total = COSTS_N_INSNS (2);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (2);
		break;
	      case 15:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 7:
	      case 8:
	      case 9:
		*total = COSTS_N_INSNS (4);
		break;
	      case 10:
	      case 14:
		*total = COSTS_N_INSNS (5);
		break;
	      case 11:
		*total = COSTS_N_INSNS (!speed ? 5 : 6);
		break;
	      case 12:
		*total = COSTS_N_INSNS (!speed ? 5 : 7);
		break;
	      case 6:
	      case 13:
		*total = COSTS_N_INSNS (!speed ? 5 : 8);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

	case PSImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 6 : 73);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (3);
		break;
	      case 16:
	      case 8:
		*total = COSTS_N_INSNS (5);
		break;
	      case 23:
		*total = COSTS_N_INSNS (4);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
		break;
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 8:
	      case 16:
	      case 24:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (!speed ? 7 : 8);
		break;
	      case 31:
		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case LSHIFTRT:
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
	      case 11:
	      case 12:
		*total = COSTS_N_INSNS (5);
		break;
	      case 4:
		*total = COSTS_N_INSNS (!speed ? 5 : 6);
		break;
	      case 6:
		*total = COSTS_N_INSNS (!speed ? 5 : 7);
		break;
	      case 5:
		*total = COSTS_N_INSNS (!speed ? 5 : 9);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

	case PSImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 6 : 73);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
	      case 16:
		*total = COSTS_N_INSNS (3);
		break;
	      case 23:
		*total = COSTS_N_INSNS (5);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
		break;
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 2:
		*total = COSTS_N_INSNS (!speed ? 7 : 8);
		break;
	      case 8:
	      case 16:
	      case 24:
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case COMPARE:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
	  break;

	case HImode:
	  *total = COSTS_N_INSNS (2);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (1);
	  break;

	case PSImode:
	  *total = COSTS_N_INSNS (3);
	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (2);
	  break;

	case SImode:
	  *total = COSTS_N_INSNS (4);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (3);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case TRUNCATE:
      if (AVR_HAVE_MUL
	  && LSHIFTRT == GET_CODE (XEXP (x, 0))
	  && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
	  && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
	{
	  if (QImode == mode || HImode == mode)
	    {
	      *total = COSTS_N_INSNS (2);
	      return true;
	    }
	}
      break;

    default:
      break;
    }
  return false;
}
/* Implement `TARGET_RTX_COSTS'.  */

static bool
avr_rtx_costs (rtx x, int codearg, int outer_code,
	       int opno, int *total, bool speed)
{
  bool done = avr_rtx_costs_1 (x, codearg, outer_code,
			       opno, total, speed);

  if (avr_log.rtx_costs)
    {
      avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
		 done, speed ? "speed" : "size", *total, outer_code, x);
    }

  return done;
}
10792 avr_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
10793 addr_space_t as ATTRIBUTE_UNUSED
,
10794 bool speed ATTRIBUTE_UNUSED
)
10798 if (GET_CODE (x
) == PLUS
10799 && CONST_INT_P (XEXP (x
, 1))
10800 && (REG_P (XEXP (x
, 0))
10801 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
10803 if (INTVAL (XEXP (x
, 1)) >= 61)
10806 else if (CONSTANT_ADDRESS_P (x
))
10809 && io_address_operand (x
, QImode
))
10813 if (avr_log
.address_cost
)
10814 avr_edump ("\n%?: %d = %r\n", cost
, x
);
/* Test for extra memory constraint 'Q'.
   It's a memory address based on Y or Z pointer with valid displacement.  */

int
extra_constraint_Q (rtx x)
{
  int ok = 0;

  if (GET_CODE (XEXP (x,0)) == PLUS
      && REG_P (XEXP (XEXP (x,0), 0))
      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
      && (INTVAL (XEXP (XEXP (x,0), 1))
	  <= MAX_LD_OFFSET (GET_MODE (x))))
    {
      rtx xx = XEXP (XEXP (x,0), 0);
      int regno = REGNO (xx);

      ok = (/* allocate pseudos */
	    regno >= FIRST_PSEUDO_REGISTER
	    /* strictly check */
	    || regno == REG_Z || regno == REG_Y
	    /* XXX frame & arg pointer checks */
	    || xx == frame_pointer_rtx
	    || xx == arg_pointer_rtx);

      if (avr_log.constraints)
	avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
		   ok, reload_completed, reload_in_progress, x);
    }

  return ok;
}
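
/* Addresses matched by constraint 'Q' above (a sketch): a Y or Z base
   plus a displacement within the LDD/STD offset range, i.e. up to
   MAX_LD_OFFSET (QImode) = 63 for a QImode access:

       (mem:QI (plus:HI (reg:HI 28) (const_int 62)))   ; ok (Y + 62)
       (mem:QI (plus:HI (reg:HI 28) (const_int 64)))   ; rejected
*/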
10852 /* Convert condition code CONDITION to the valid AVR condition code. */
10855 avr_normalize_condition (RTX_CODE condition
)
10868 gcc_unreachable ();
10872 /* Helper function for `avr_reorg'. */
10875 avr_compare_pattern (rtx_insn
*insn
)
10877 rtx pattern
= single_set (insn
);
10880 && NONJUMP_INSN_P (insn
)
10881 && SET_DEST (pattern
) == cc0_rtx
10882 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
10884 machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
10885 machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
10887 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
10888 They must not be swapped, thus skip them. */
10890 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
10891 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
/* Helper function for `avr_reorg'.  */

/* Expansion of switch/case decision trees leads to code like

       cc0 = compare (Reg, Num)
       if (cc0 == 0)
         goto L1

       cc0 = compare (Reg, Num)
       if (cc0 > 0)
         goto L2

   The second comparison is superfluous and can be deleted.
   The second jump condition can be transformed from a
   "difficult" one to a "simple" one because "cc0 > 0" and
   "cc0 >= 0" will have the same effect here.

   This function relies on the way switch/case is being expanded
   as binary decision tree.  For example code see PR 49903.

   Return TRUE if optimization performed.
   Return FALSE if nothing changed.

   INSN1 is a comparison, i.e. avr_compare_pattern != 0.

   We don't want to do this in text peephole because it is
   tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.

   RTL peephole won't do because peephole2 does not scan across
   basic blocks.  */

static bool
avr_reorg_remove_redundant_compare (rtx_insn *insn1)
{
  rtx comp1, ifelse1, xcond1;
  rtx_insn *branch1;
  rtx comp2, ifelse2, xcond2;
  rtx_insn *branch2, *insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for:  compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with

        if (x == VAL)   goto L1;
        if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}

/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
/* Optimize conditional jumps.  */

static void
avr_reorg (void)
{
  rtx_insn *insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (pattern
	  && NONJUMP_INSN_P (insn)
	  && optimize
	  && avr_reorg_remove_redundant_compare (insn))
	{
	  continue;
	}

      if (compare_diff_p (insn))
	{
	  /* Now we work under compare insn with difficult branch.  */

	  rtx_insn *next = next_real_insn (insn);
	  rtx pat = PATTERN (next);

	  pattern = SET_SRC (pattern);

	  if (true_regnum (XEXP (pattern, 0)) >= 0
	      && true_regnum (XEXP (pattern, 1)) >= 0)
	    {
	      rtx x = XEXP (pattern, 0);
	      rtx src = SET_SRC (pat);
	      rtx t = XEXP (src, 0);
	      PUT_CODE (t, swap_condition (GET_CODE (t)));
	      XEXP (pattern, 0) = XEXP (pattern, 1);
	      XEXP (pattern, 1) = x;
	      INSN_CODE (next) = -1;
	    }
	  else if (true_regnum (XEXP (pattern, 0)) >= 0
		   && XEXP (pattern, 1) == const0_rtx)
	    {
	      /* This is a tst insn, we can reverse it.  */
	      rtx src = SET_SRC (pat);
	      rtx t = XEXP (src, 0);

	      PUT_CODE (t, swap_condition (GET_CODE (t)));
	      XEXP (pattern, 1) = XEXP (pattern, 0);
	      XEXP (pattern, 0) = const0_rtx;
	      INSN_CODE (next) = -1;
	      INSN_CODE (insn) = -1;
	    }
	  else if (true_regnum (XEXP (pattern, 0)) >= 0
		   && CONST_INT_P (XEXP (pattern, 1)))
	    {
	      rtx x = XEXP (pattern, 1);
	      rtx src = SET_SRC (pat);
	      rtx t = XEXP (src, 0);
	      machine_mode mode = GET_MODE (XEXP (pattern, 0));

	      if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
		{
		  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
		  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
		  INSN_CODE (next) = -1;
		  INSN_CODE (insn) = -1;
		}
	    }
	}
    }
}

/* Returns register number for function return value.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}


/* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  return (regno == avr_ret_register ());
}

/* Implement `TARGET_LIBCALL_VALUE'.  */
/* Create an RTX representing the place where a
   library function returns a value of mode MODE.  */

static rtx
avr_libcall_value (machine_mode mode,
		   const_rtx func ATTRIBUTE_UNUSED)
{
  int offs = GET_MODE_SIZE (mode);

  if (offs <= 4)
    offs = (offs + 1) & ~1;

  return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
}

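/* Worked example, for illustration only: avr_ret_register() is R24 and
   OFFS is rounded up to an even number of bytes, so return values end
   at R25:

       QImode (1 byte, offs -> 2):  R24
       HImode (2 bytes):            R25:R24
       SImode (4 bytes):            R25...R22
       DImode (8 bytes):            R25...R18                          */
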
/* Implement `TARGET_FUNCTION_VALUE'.  */
/* Create an RTX representing the place where a
   function returns a value of data type VALTYPE.  */

static rtx
avr_function_value (const_tree type,
		    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		    bool outgoing ATTRIBUTE_UNUSED)
{
  unsigned int offs;

  if (TYPE_MODE (type) != BLKmode)
    return avr_libcall_value (TYPE_MODE (type), NULL_RTX);

  offs = int_size_in_bytes (type);
  if (offs < 2)
    offs = 2;
  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
    offs = GET_MODE_SIZE (SImode);
  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
    offs = GET_MODE_SIZE (DImode);

  return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
}

int
test_hard_reg_class (enum reg_class rclass, rtx x)
{
  int regno = true_regnum (x);

  if (regno < 0)
    return 0;

  if (TEST_HARD_REG_CLASS (rclass, regno))
    return 1;

  return 0;
}

/* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc.  */

static bool
avr_2word_insn_p (rtx_insn *insn)
{
  if (TARGET_SKIP_BUG
      || !insn
      || 2 != get_attr_length (insn))
    return false;

  switch (INSN_CODE (insn))
    {
    default:
      return false;

    case CODE_FOR_movqi_insn:
    case CODE_FOR_movuqq_insn:
    case CODE_FOR_movqq_insn:
      {
	rtx set  = single_set (insn);
	rtx src  = SET_SRC (set);
	rtx dest = SET_DEST (set);

	/* Factor out LDS and STS from movqi_insn.  */

	if (MEM_P (dest)
	    && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
	  {
	    return CONSTANT_ADDRESS_P (XEXP (dest, 0));
	  }
	else if (REG_P (dest)
		 && MEM_P (src))
	  {
	    return CONSTANT_ADDRESS_P (XEXP (src, 0));
	  }

	return false;
      }

    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
      return true;
    }
}

int
jump_over_one_insn_p (rtx_insn *insn, rtx dest)
{
  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
		      ? XEXP (dest, 0)
		      : dest);
  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
  int dest_addr = INSN_ADDRESSES (uid);
  int jump_offset = dest_addr - jump_addr - get_attr_length (insn);

  return (jump_offset == 1
	  || (jump_offset == 2
	      && avr_2word_insn_p (next_active_insn (insn))));
}

/* Worker function for `HARD_REGNO_MODE_OK'.  */
/* Returns 1 if a value of mode MODE can be stored starting with hard
   register number REGNO.  On the enhanced core, anything larger than
   1 byte must start in even numbered register for "movw" to work
   (this way we don't have to check for odd registers everywhere).  */

int
avr_hard_regno_mode_ok (int regno, machine_mode mode)
{
  /* NOTE: 8-bit values must not be disallowed for R28 or R29.
     Disallowing QI et al. in these regs might lead to code like
         (set (subreg:QI (reg:HI 28) n) ...)
     which will result in wrong code because reload does not
     handle SUBREGs of hard registers like this.
     This could be fixed in reload.  However, it appears
     that fixing reload is not wanted by reload people.  */

  /* Any GENERAL_REGS register can hold 8-bit values.  */

  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  /* FIXME: Ideally, the following test is not needed.
     However, it turned out that it can reduce the number
     of spill fails.  AVR and its poor endowment with
     address registers is an extreme stress test for reload.  */

  if (GET_MODE_SIZE (mode) >= 4
      && regno >= REG_X)
    return 0;

  /* All modes larger than 8 bits should start in an even register.  */

  return !(regno & 1);
}

/* Implement `HARD_REGNO_CALL_PART_CLOBBERED'.  */

int
avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
{
  /* FIXME: This hook gets called with MODE:REGNO combinations that don't
       represent valid hard registers like, e.g. HI:29.  Returning TRUE
       for such registers can lead to performance degradation as mentioned
       in PR53595.  Thus, report invalid hard registers as FALSE.  */

  if (!avr_hard_regno_mode_ok (regno, mode))
    return 0;

  /* Return true if any of the following boundaries is crossed:
     17/18, 27/28 and 29/30.  */

  return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
	  || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
	  || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
}

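/* Illustration only, not from the original sources: an SImode value
   starting in R16 occupies R16...R19 and crosses the 17/18 boundary,
   while one starting in R20 stays within R20...R23:

       avr_hard_regno_call_part_clobbered (16, SImode)  ->  1
       avr_hard_regno_call_part_clobbered (20, SImode)  ->  0         */
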
/* Implement `MODE_CODE_BASE_REG_CLASS'.  */

enum reg_class
avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
			      addr_space_t as, RTX_CODE outer_code,
			      RTX_CODE index_code ATTRIBUTE_UNUSED)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    {
      return POINTER_Z_REGS;
    }

  if (!avr_strict_X)
    return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;

  return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
}

/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */

bool
avr_regno_mode_code_ok_for_base_p (int regno,
				   machine_mode mode ATTRIBUTE_UNUSED,
				   addr_space_t as ATTRIBUTE_UNUSED,
				   RTX_CODE outer_code,
				   RTX_CODE index_code ATTRIBUTE_UNUSED)
{
  bool ok = false;

  if (!ADDR_SPACE_GENERIC_P (as))
    {
      if (regno < FIRST_PSEUDO_REGISTER
	  && regno == REG_Z)
	{
	  return true;
	}

      if (reg_renumber)
	{
	  regno = reg_renumber[regno];

	  if (regno == REG_Z)
	    {
	      return true;
	    }
	}

      return false;
    }

  if (regno < FIRST_PSEUDO_REGISTER
      && (regno == REG_X
	  || regno == REG_Y
	  || regno == REG_Z
	  || regno == ARG_POINTER_REGNUM))
    {
      ok = true;
    }
  else if (reg_renumber)
    {
      regno = reg_renumber[regno];

      if (regno == REG_X
	  || regno == REG_Y
	  || regno == REG_Z
	  || regno == ARG_POINTER_REGNUM)
	{
	  ok = true;
	}
    }

  if (avr_strict_X
      && PLUS == outer_code
      && regno == REG_X)
    {
      ok = false;
    }

  return ok;
}

/* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
/* Set 32-bit register OP[0] to compile-time constant OP[1].
   CLOBBER_REG is a QI clobber register or NULL_RTX.
   LEN == NULL: output instructions.
   LEN != NULL: set *LEN to the length of the instruction sequence
                (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to Zero already.
   If CLEAR_P is false, nothing is known about OP[0].

   The effect on cc0 is as follows:

   Load 0 to any register except ZERO_REG : NONE
   Load ld register with any value        : NONE
   Anything else:                         : CLOBBER  */

static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  bool set_p = false;
  machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
	      && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
	  || !avr_popcount_each_byte (src, n_bytes,
				      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
	 That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      if (!CONST_INT_P (src)
	  && !CONST_FIXED_P (src)
	  && !CONST_DOUBLE_P (src))
	{
	  static const char* const asm_code[][2] =
	    {
	      { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2", "ldi %0,lo8(%1)"  },
	      { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2", "ldi %0,hi8(%1)"  },
	      { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
	      { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
	    };

	  xop[0] = xdest[n];
	  xop[1] = src;
	  xop[2] = clobber_reg;

	  avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
	  continue;
	}

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
	  && n_bytes >= 4
	  && AVR_HAVE_MOVW)
	{
	  rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
	  rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

	  if (INTVAL (lo16) == INTVAL (hi16))
	    {
	      if (0 != INTVAL (lo16)
		  || !clear_p)
		{
		  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
		}

	      break;
	    }
	}

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
	{
	  if (!clear_p)
	    avr_asm_len (ldreg_p ? "ldi %0,0"
			 : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
			 : "mov %0,__zero_reg__",
			 &xdest[n], len, 1);
	  continue;
	}

      if (clobber_val == ival[n]
	  && REGNO (clobber_reg) == REGNO (xdest[n]))
	{
	  continue;
	}

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
	{
	  xop[0] = xdest[n];
	  xop[1] = xval;
	  avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
	  continue;
	}

      /* Try to reuse value already loaded in some lower byte. */

      for (j = 0; j < n; j++)
	if (ival[j] == ival[n])
	  {
	    xop[0] = xdest[n];
	    xop[1] = xdest[j];

	    avr_asm_len ("mov %0,%1", xop, len, 1);
	    done_byte = true;
	    break;
	  }

      if (done_byte)
	continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (ival[n] == -1)
	{
	  if (!clear_p)
	    avr_asm_len ("clr %0", &xdest[n], len, 1);

	  avr_asm_len ("dec %0", &xdest[n], len, 1);
	  continue;
	}
      else if (1 == ival[n])
	{
	  if (!clear_p)
	    avr_asm_len ("clr %0", &xdest[n], len, 1);

	  avr_asm_len ("inc %0", &xdest[n], len, 1);
	  continue;
	}

      /* Use T flag or INC to manage powers of 2 if we have
	 no clobber reg.  */

      if (NULL_RTX == clobber_reg
	  && single_one_operand (xval, QImode))
	{
	  xop[0] = xdest[n];
	  xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

	  gcc_assert (constm1_rtx != xop[1]);

	  if (!set_p)
	    {
	      set_p = true;
	      avr_asm_len ("set", xop, len, 1);
	    }

	  if (!clear_p)
	    avr_asm_len ("clr %0", xop, len, 1);

	  avr_asm_len ("bld %0,%1", xop, len, 1);
	  continue;
	}

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
		   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}

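/* Illustration only, not from the original sources: loading 0x00010001
   into the NO_LD_REGS quad R5:R2 with MOVW available and CLEAR_P = false
   may come out as

       clr r2
       inc r2               ;  byte 0 = 1: no clobber reg needed
       mov r3,__zero_reg__  ;  byte 1 = 0
       movw r4,r2           ;  low word == high word: reuse via MOVW     */
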
/* Reload the constant OP[1] into the HI register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
                 by the insns printed.

   Return "".  */

const char*
output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
{
  output_reload_in_const (op, clobber_reg, plen, false);
  return "";
}

/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
	  || CONST_FIXED_P (op[1])
	  || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

	     CLR R2   CLR R3   MOVW R4,R2   INC R2

	 is shorter than

	     CLR R2   INC R2   CLR R3   CLR R4   CLR R5

	 We find it too tedious to work that out in the print function.
	 Instead, we call the print function twice to get the lengths of
	 both methods and use the shortest one.  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
	{
	  /* Default needs 4 CLR instructions: clear register beforehand.  */

	  avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
		       "mov %B0,__zero_reg__" CR_TAB
		       "movw %C0,%A0", &op[0], len, 3);

	  output_reload_in_const (op, clobber_reg, len, true);

	  if (len)
	    *len += 3;

	  return "";
	}
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}

const char*
avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
{
  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}

/* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'.  */

void
avr_output_addr_vec_elt (FILE *stream, int value)
{
  if (AVR_HAVE_JMP_CALL)
    fprintf (stream, "\t.word gs(.L%d)\n", value);
  else
    fprintf (stream, "\trjmp .L%d\n", value);
}

static void
avr_conditional_register_usage (void)
{
  if (AVR_TINY)
    {
      int i;

      const int tiny_reg_alloc_order[] = {
	24, 25,
	22, 23,
	30, 31,
	26, 27,
	28, 29,
	21, 20, 19, 18,
	16, 17,
	32, 33, 34, 35,
	15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
      };

      /* Set R0-R17 as fixed registers.  Reset R0-R17 in call used register
	 list
	 - R0-R15 are not available in Tiny Core devices
	 - R16 and R17 are fixed registers.  */

      for (i = 0; i <= 17; i++)
	{
	  fixed_regs[i] = 1;
	  call_used_regs[i] = 1;
	}

      /* Set R18 to R21 as callee saved registers
	 - R18, R19, R20 and R21 are the callee saved registers in
	   Tiny Core devices  */

      for (i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
	{
	  call_used_regs[i] = 0;
	}

      /* Update register allocation order for Tiny Core devices */

      for (i = 0; i < (int) ARRAY_SIZE (tiny_reg_alloc_order); i++)
	{
	  reg_alloc_order[i] = tiny_reg_alloc_order[i];
	}

      CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
      CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
    }
}

/* Implement `TARGET_HARD_REGNO_SCRATCH_OK'.  */
/* Return true if register REGNO is safe to be allocated as a scratch
   register (for a define_peephole2) in the current function.  */

static bool
avr_hard_regno_scratch_ok (unsigned int regno)
{
  /* Interrupt functions can only use registers that have already been saved
     by the prologue, even if they would normally be call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (regno))
    return false;

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (regno == REG_Y || regno == REG_Y + 1))
    {
      return false;
    }

  return true;
}

/* Worker function for `HARD_REGNO_RENAME_OK'.  */
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
avr_hard_regno_rename_ok (unsigned int old_reg,
			  unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (new_reg))
    return 0;

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (old_reg == REG_Y || old_reg == REG_Y + 1
	  || new_reg == REG_Y || new_reg == REG_Y + 1))
    {
      return 0;
    }

  return 1;
}

/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:

      if (low_io_address_operand (operands[1], QImode))
	{
	  if (comp == EQ)
	    output_asm_insn ("sbis %i1,%2", operands);
	  else
	    output_asm_insn ("sbic %i1,%2", operands);
	}
      else
	{
	  gcc_assert (io_address_operand (operands[1], QImode));
	  output_asm_insn ("in __tmp_reg__,%i1", operands);
	  if (comp == EQ)
	    output_asm_insn ("sbrs __tmp_reg__,%2", operands);
	  else
	    output_asm_insn ("sbrc __tmp_reg__,%2", operands);
	}

      break; /* CONST_INT */

    case REG:

      if (comp == EQ)
	output_asm_insn ("sbrs %T1%T2", operands);
      else
	output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }

  if (long_jump)
    return ("rjmp .+4" CR_TAB
	    "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}

/* Worker function for `TARGET_ASM_CONSTRUCTOR'.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}


/* Worker function for `TARGET_ASM_DESTRUCTOR'.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}

/* Worker function for `TARGET_RETURN_IN_MEMORY'.  */

static bool
avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;

  /* In avr, there are 8 return registers.  But, for Tiny Core
     (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
     Return true if size is unknown or greater than the limit.  */

  return (size == -1 || size > ret_size_limit);
}

/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}

/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */

static machine_mode
avr_addr_space_address_mode (addr_space_t as)
{
  return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
}


/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */

static machine_mode
avr_addr_space_pointer_mode (addr_space_t as)
{
  return avr_addr_space_address_mode (as);
}

/* Helper for following function.  */

static bool
avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
{
  gcc_assert (REG_P (reg));

  if (strict)
    {
      return REGNO (reg) == REG_Z;
    }

  /* Avoid combine to propagate hard regs.  */

  if (can_create_pseudo_p()
      && REGNO (reg) < REG_Z)
    {
      return false;
    }

  return true;
}

/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */

static bool
avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
				     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      switch (GET_CODE (x))
	{
	case REG:
	  ok = avr_reg_ok_for_pgm_addr (x, strict);
	  break;

	case POST_INC:
	  ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
	  break;

	default:
	  break;
	}

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      if (REG_P (x))
	ok = (!strict
	      && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
	{
	  rtx hi = XEXP (x, 0);
	  rtx lo = XEXP (x, 1);

	  ok = (REG_P (hi)
		&& (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
		&& REG_P (lo)
		&& REGNO (lo) == REG_Z);
	}

      break; /* MEMX */
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
		 "reload_completed=%d reload_in_progress=%d %s:",
		 ok, mode, strict, reload_completed, reload_in_progress,
		 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
	  && REG_P (XEXP (x, 0))
	  && CONST_INT_P (XEXP (x, 1))
	  && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
	  && reg_renumber)
	{
	  avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
		     true_regnum (XEXP (x, 0)));
	}

      avr_edump ("\n%r\n", x);
    }

  return ok;
}

/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */

static rtx
avr_addr_space_legitimize_address (rtx x, rtx old_x,
				   machine_mode mode, addr_space_t as)
{
  if (ADDR_SPACE_GENERIC_P (as))
    return avr_legitimize_address (x, old_x, mode);

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
    }

  return old_x;
}

/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
	       src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
	sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
	 also if attribute progmem was seen so that we get the right
	 promotion for, e.g. PSTR-like strings that reside in generic space
	 but are located in flash.  In that case we patch the incoming
	 address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
	  && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
	{
	  as_from = ADDR_SPACE_FLASH;
	}

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
	? 0x80
	: avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      emit_insn (msb == 0
		 ? gen_zero_extendhipsi2 (reg, src)
		 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
		      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  return src;
}

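/* Illustration only, not from the original sources: the up-cast prepends
   a segment byte, e.g.

       const __flash char *pf = ...;
       const __memx  char *pm = pf;   // 16-bit address zero-extended:
                                      // flash segment 0 lives below RAM

   whereas up-casting a generic (RAM) pointer sets bit 23 (MSB 0x80) so
   that the 24-bit address linearizes RAM above all flash segments.  */
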
/* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */

static bool
avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
			 addr_space_t superset ATTRIBUTE_UNUSED)
{
  /* Allow any kind of pointer mess.  */

  return true;
}

/* Implement `TARGET_CONVERT_TO_TYPE'.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnostic for a pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
     be located in the right memory, like in

        (const __flash*) PSTR ("text")

     Also try to distinguish between explicit casts requested by
     the user and implicit casts like

        void f (const __flash char*);

        void g (const char *p)
        {
          f ((const __flash*) p);
        }

     under the assumption that an explicit cast means that the user
     knows what he is doing, e.g. interface with PSTR or old style
     code with progmem and pgm_read_xxx.  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
	avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      if (as_new != ADDR_SPACE_MEMX
	  && as_new != as_old)
	{
	  location_t loc = EXPR_LOCATION (expr);
	  const char *name_old = avr_addrspace[as_old].name;
	  const char *name_new = avr_addrspace[as_new].name;

	  warning (OPT_Waddr_space_convert,
		   "conversion from address space %qs to address space %qs",
		   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
		   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

	  return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
	}
    }

  return NULL_TREE;
}

/* PR63633: The middle-end might come up with hard regs as input operands.

   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   For each element of OPMASK which is a hard register overlapping RMASK,
   replace OP[n] with a newly created pseudo register

   HREG == 0:  Also emit a move insn that copies the contents of that
               hard register into the new pseudo.

   HREG != 0:  Also set HREG[n] to the hard register.  */

static void
avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
{
  for (; opmask; opmask >>= 1, op++)
    {
      rtx reg = *op;

      if (hreg)
	*hreg = NULL_RTX;

      if ((opmask & 1)
	  && REG_P (reg)
	  && REGNO (reg) < FIRST_PSEUDO_REGISTER
	  // This hard-reg overlaps other prohibited hard regs?
	  && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
	{
	  *op = gen_reg_rtx (GET_MODE (reg));
	  if (hreg == NULL)
	    emit_move_insn (*op, reg);
	  else
	    *hreg = reg;
	}

      if (hreg)
	hreg++;
    }
}


void
avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
{
  avr_fix_operands (op, NULL, opmask, rmask);
}


/* Helper for the function below:  If bit n of MASK is set and
   HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
   Otherwise do nothing for that n.  Return TRUE.  */

static bool
avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
{
  for (; mask; mask >>= 1, op++, hreg++)
    if ((mask & 1)
	&& *hreg)
      emit_move_insn (*hreg, *op);

  return true;
}


/* PR63633: The middle-end might come up with hard regs as output operands.

   GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   Emit the insn sequence as generated by GEN() with all elements of OPMASK
   which are hard registers overlapping RMASK replaced by newly created
   pseudo registers.  After the sequence has been emitted, emit insns that
   move the contents of respective pseudos to their hard regs.  */

bool
avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
		       unsigned opmask, unsigned rmask)
{
  const int n = 3;
  rtx hreg[3];

  /* It is legitimate for GEN to call this function, and in order not to
     get self-recursive we use the following static kludge.  This is the
     only way not to duplicate all expanders and to avoid ugly and
     hard-to-maintain C-code instead of the much more appreciated RTL
     representation as supplied by define_expand.  */

  static bool lock = false;

  gcc_assert (opmask < (1u << n));

  if (lock)
    return false;

  avr_fix_operands (op, hreg, opmask, rmask);

  lock = true;
  emit_insn (gen (op[0], op[1], op[2]));
  lock = false;

  return avr_move_fixed_operands (op, hreg, opmask);
}

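/* Hypothetical usage sketch, for illustration only (operand array and
   masks are made up, not taken from the actual expanders):

       rtx op[3] = { dest, src1, src2 };
       if (avr_emit3_fix_outputs (gen_mulsi3, op, 1 << 0,
                                  regmask (SImode, 22)))
         DONE;

   Bit 0 of OPMASK selects op[0]: if it is a hard register overlapping
   R22...R25, the insn is emitted on a fresh pseudo which is copied back
   to the hard register afterwards.  */
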
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  if (avr_mem_flash_p (xop[0]))
    return false;

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
	  && avr_n_flash > 1)
	{
	  a_hi8 = GEN_INT (segment);
	  emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
	}
      else if (!ADDR_SPACE_GENERIC_P (as))
	{
	  as = ADDR_SPACE_FLASH;
	}

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
	on its own.  Thus, we allocate the pointer registers by hand:
	Z = source address
	X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
	register(s) inside the loop leading to additional move instruction
	to/from stack which could clobber tmp_reg.  Thus, do *not* emit
	load and store as separate insns.  Instead, we perform the copy
	by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
	 Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
	= QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      rtx (*fun) (rtx, rtx)
	= QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}

/* Print assembler for movmem_qi, movmem_hi insns.

       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address
*/

const char*
avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  machine_mode loop_mode = GET_MODE (op[1]);
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
	avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
	avr_asm_len ("lpm" CR_TAB
		     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
	avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
	avr_asm_len ("elpm" CR_TAB
		     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
		   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}

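/* Illustration only, not from the original sources: with a QImode loop
   counter in, say, r24 and the generic address space, the emitted copy
   loop reads

       0:  ld   __tmp_reg__,Z+
           st   X+,__tmp_reg__
           dec  r24
           brne 0b                                                      */
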
/* Helper for __builtin_avr_delay_cycles */

static rtx
avr_mem_clobber (void)
{
  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (mem) = 1;
  return mem;
}

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
				     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
	loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
				     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
	loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
				     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
	loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
				     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}

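/* Worked example, for illustration only: __builtin_avr_delay_cycles (1000)
   falls into the 768...262144 range above, so

       loop_count  = ((1000 - 5) / 4) + 1 = 249
       cycles_used = ((249 - 1) * 4) + 5  = 997

   leaving 3 cycles, covered by one 2-cycle and one 1-cycle nop.  */
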
/* Compute the image of x under f, i.e. perform   x --> f(x)  */

static int
avr_map (unsigned int f, int x)
{
  return x < 8 ? (f >> (4 * x)) & 0xf : 0;
}

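/* Examples, for illustration only: with the identity map every position
   is a fixed point; a rotated map shifts them:

       avr_map (0x76543210, 2) == 2     //  id
       avr_map (0x07654321, 0) == 1     //  "<<<" by 1
       avr_map (0x3210ffff, 0) == 0xf   //  low nibbles come from elsewhere

   For x >= 8 the result is 0 by definition.  */
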
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      if (mode == MAP_FIXED_0_7)
	metric += ai == i;
      else if (mode == MAP_NONFIXED_0_7)
	metric += ai < 8 && ai != i;
      else if (mode == MAP_MASK_FIXED_0_7)
	metric |= ((unsigned) (ai == i)) << i;
      else if (mode == MAP_PREIMAGE_0_7)
	metric += ai < 8;
      else if (mode == MAP_MASK_PREIMAGE_F)
	metric |= ((unsigned) (ai == 0xf)) << i;
      else
	gcc_unreachable();
    }

  return metric;
}

/* Return true if IVAL has a 0xf in its hexadecimal representation
   and false, otherwise.  Only nibbles 0..7 are taken into account.
   Used as constraint helper for C0f and Cxf.  */

bool
avr_has_nibble_0xf (rtx ival)
{
  unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
  return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
}

/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G) < cost (F)

   Example:  Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };

/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.  */

static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1  */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
	{
	  x = avr_map (ginv, x);

	  /* The bit is no element of the image of G: no avail (cost = -1)  */

	  if (x > 7)
	    return f_ginv;
	}

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1  */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
	 to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
	 fake values.  Mimic effect of reloading xop[3]: Unused operands
	 are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G  */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}

/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
                      is different to its source position.  */

static void
avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
{
  int bit_dest, b;

  /* T-flag contains this bit of the source, i.e. of XOP[1]  */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
      {
	int bit_src = avr_map (map, bit_dest);

	if (b != bit_src
	    || bit_src >= 8
	    /* Same position: No need to copy as requested by FIXP_P.  */
	    || (bit_dest == bit_src && !fixp_p))
	  continue;

	if (t_bit_src != bit_src)
	  {
	    /* Source bit is not yet in T: Store it to T.  */

	    t_bit_src = bit_src;

	    xop[3] = GEN_INT (bit_src);
	    avr_asm_len ("bst %T1%T3", xop, plen, 1);
	  }

	/* Load destination bit with T.  */

	xop[3] = GEN_INT (bit_dest);
	avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}

/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]: Result
   OP[1]: The mapping composed of nibbles.  If nibble no. N is
          0:   Bit N of result is copied from bit OP[2].0
          ...  ...
          7:   Bit N of result is copied from bit OP[2].7
          0xf: Bit N of result is copied from bit OP[3].N
   OP[2]: Bits to be inserted
   OP[3]: Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
	 moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      if (fixp_p && n_fix - n_nofix > 3)
	{
	  xop[3] = gen_int_mode (~mask_fixed, QImode);

	  avr_asm_len ("eor %0,%1"  CR_TAB
		       "andi %0,%3" CR_TAB
		       "eor %0,%1", xop, plen, 3);
	  fixp_p = false;
	}
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
	{
	  avr_asm_len ("mov %0,%1", xop, plen, 1);
	  fixp_p = false;
	}
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}

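/* Illustration only, not from the original sources: a map of 0xffffff01
   builds the result from OP[3] except for the two low bits, which are
   swapped in from OP[2]:

       result.0 = OP[2].1   ;  nibble 0 of the map is 1
       result.1 = OP[2].0   ;  nibble 1 of the map is 0
       result.N = OP[3].N   ;  nibbles 2...7 are 0xf                    */
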
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };

struct GTY(()) avr_builtin_description
{
  enum insn_code icode;
  int n_args;
  tree fndecl;
};


/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID]  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)           \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };

/* Implement `TARGET_BUILTIN_DECL'.  */

static tree
avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (id < AVR_BUILTIN_COUNT)
    return avr_bdesc[id].fndecl;

  return error_mark_node;
}

static void
avr_init_builtin_int24 (void)
{
  tree int24_type  = make_signed_type (GET_MODE_BITSIZE (PSImode));
  tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));

  lang_hooks.types.register_builtin_type (int24_type, "__int24");
  lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
}

/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  */

static void
avr_init_builtins (void)
{
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
				unsigned_char_type_node,
				NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
				unsigned_char_type_node,
				unsigned_char_type_node,
				NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
				char_type_node,
				char_type_node,
				NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
				char_type_node,
				unsigned_char_type_node,
				NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
				long_unsigned_type_node,
				NULL_TREE);
  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
				long_unsigned_type_node,
				unsigned_char_type_node,
				unsigned_char_type_node,
				NULL_TREE);

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
			    TYPE_QUAL_CONST
			    | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
				const_memx_ptr_type_node,
				NULL);

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
				integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
				integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
									\
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
			      BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}

/* Subroutine of avr_expand_builtin to expand vanilla builtins
   with non-void result and 1 ... 3 arguments.  */

static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n, n_args = call_expr_nargs (exp);
  machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  for (n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      machine_mode opmode = GET_MODE (op);
      machine_mode mode = insn_data[icode].operand[n+1].mode;

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
	{
	  opmode = HImode;
	  op = gen_lowpart (HImode, op);
	}

      /* In case the insn wants input operands in modes different from
	 the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      if (!insn_data[icode].operand[n+1].predicate (op, mode))
	op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}

13287 /* Expand an expression EXP that calls a built-in function,
13288 with result going to TARGET if that's convenient
13289 (and in mode MODE if that's convenient).
13290 SUBTARGET may be used as the target for computing one of EXP's operands.
13291 IGNORE is nonzero if the value is to be ignored. */
13294 avr_expand_builtin (tree exp
, rtx target
,
13295 rtx subtarget ATTRIBUTE_UNUSED
,
13296 machine_mode mode ATTRIBUTE_UNUSED
,
13299 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
13300 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
13301 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
13302 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
13306 gcc_assert (id
< AVR_BUILTIN_COUNT
);
13310 case AVR_BUILTIN_NOP
:
13311 emit_insn (gen_nopv (GEN_INT(1)));
13314 case AVR_BUILTIN_DELAY_CYCLES
:
13316 arg0
= CALL_EXPR_ARG (exp
, 0);
13317 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
13319 if (!CONST_INT_P (op0
))
13320 error ("%s expects a compile time integer constant", bname
);
13322 avr_expand_delay_cycles (op0
);
13327 case AVR_BUILTIN_INSERT_BITS
:
13329 arg0
= CALL_EXPR_ARG (exp
, 0);
13330 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
13332 if (!CONST_INT_P (op0
))
13334 error ("%s expects a compile time long integer constant"
13335 " as first argument", bname
);
13342 case AVR_BUILTIN_ROUNDHR
: case AVR_BUILTIN_ROUNDUHR
:
13343 case AVR_BUILTIN_ROUNDR
: case AVR_BUILTIN_ROUNDUR
:
13344 case AVR_BUILTIN_ROUNDLR
: case AVR_BUILTIN_ROUNDULR
:
13345 case AVR_BUILTIN_ROUNDLLR
: case AVR_BUILTIN_ROUNDULLR
:
13347 case AVR_BUILTIN_ROUNDHK
: case AVR_BUILTIN_ROUNDUHK
:
13348 case AVR_BUILTIN_ROUNDK
: case AVR_BUILTIN_ROUNDUK
:
13349 case AVR_BUILTIN_ROUNDLK
: case AVR_BUILTIN_ROUNDULK
:
13350 case AVR_BUILTIN_ROUNDLLK
: case AVR_BUILTIN_ROUNDULLK
:
13352 /* Warn about odd rounding. Rounding points >= FBIT will have
13355 if (TREE_CODE (CALL_EXPR_ARG (exp
, 1)) != INTEGER_CST
)
13358 int rbit
= (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1));
13360 if (rbit
>= (int) GET_MODE_FBIT (mode
))
13362 warning (OPT_Wextra
, "rounding to %d bits has no effect for "
13363 "fixed-point value with %d fractional bits",
13364 rbit
, GET_MODE_FBIT (mode
));
13366 return expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
, mode
,
13369 else if (rbit
<= - (int) GET_MODE_IBIT (mode
))
13371 warning (0, "rounding result will always be 0");
13372 return CONST0_RTX (mode
);
13375 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
13377 TR 18037 only specifies results for RP > 0. However, the
13378 remaining cases of -IBIT < RP <= 0 can easily be supported
13379 without any additional overhead. */
13384 /* No fold found and no insn: Call support function from libgcc. */
13386 if (d
->icode
== CODE_FOR_nothing
13387 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp
)) != NULL_TREE
)
13389 return expand_call (exp
, target
, ignore
);
13392 /* No special treatment needed: vanilla expand. */
13394 gcc_assert (d
->icode
!= CODE_FOR_nothing
);
13395 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
13397 if (d
->n_args
== 0)
13399 emit_insn ((GEN_FCN (d
->icode
)) (target
));
13403 return avr_default_expand_builtin (d
->icode
, exp
, target
);
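
/* Usage sketch for the constant-argument requirement enforced above
   (not part of the original source; assumes avr-gcc and the documented
   builtin prototype):

       __builtin_avr_delay_cycles (1000);   // OK: folds to a CONST_INT

       void wait (unsigned long n)
       {
         __builtin_avr_delay_cycles (n);    // rejected: not a compile
       }                                    // time integer constant
*/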
/* Helper for `avr_fold_builtin' that folds  absfx (FIXED_CST).  */

static tree
avr_fold_absfx (tree tval)
{
  if (FIXED_CST != TREE_CODE (tval))
    return NULL_TREE;

  /* Our fixed-points have no padding:  Use double_int payload directly.  */

  FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
  unsigned int bits = GET_MODE_BITSIZE (fval.mode);
  double_int ival = fval.data.sext (bits);

  if (!ival.is_negative())
    return tval;

  /* ISO/IEC TR 18037, 7.18a.6.2:  The absfx functions are saturating.  */

  fval.data = (ival == double_int::min_value (bits, false).sext (bits))
    ? double_int::max_value (bits, false)
    : -ival;

  return build_fixed (TREE_TYPE (tval), fval);
}
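
/* Worked example (not part of the original source; assumes the 8-bit
   s.7 layout of `short _Fract' on AVR):  the payload range of  hr  is
   [-0x80, 0x7f], so negating the most negative value would overflow.
   Per TR 18037 the result saturates instead:

       abshr (-1.0HR)   ->  payload 0x7f  (~ 0.992HR)
       abshr (-0.5HR)   ->  payload 0x40  (exactly 0.5HR)
*/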
/* Implement `TARGET_FOLD_BUILTIN'.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good at folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);
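
    /* Illustrative note (not part of the original source; assumes the
       16-bit s.15 layout of `_Fract' on AVR):  the bitsfx/fxbits builtins
       only reinterpret the payload, e.g.

           _Fract f = __builtin_avr_rbits (0x4000);   // 0.5R
           int    i = __builtin_avr_bitsr (0.25R);    // 0x2000

       which is why a plain VIEW_CONVERT_EXPR is a correct fold.  */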
    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        unsigned int i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        tmap = wide_int_to_tree (map_type, arg[0]);
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted.  If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try to decompose the map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = wide_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
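
/* Usage sketches for __builtin_avr_insert_bits, as given in the GCC
   documentation of the AVR builtins (not part of the original source).
   Nibble n of MAP selects the source of result bit n: values 0..7 pick
   that bit of BITS, 0xf keeps bit n of VAL:

       // same as bits, val is unused
       __builtin_avr_insert_bits (0x76543210, bits, val);

       // reverse the bit order of bits
       __builtin_avr_insert_bits (0x01234567, bits, 0);

   The folds above rewrite such calls into plain AND/OR/XOR sequences
   whenever the map or the operands are known at compile time.  */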
/* Initialize the GCC target structure.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef  TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef  TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef  TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
struct gcc_target targetm = TARGET_INITIALIZER;


#include "gt-avr.h"