Reverting merge from trunk
[official-gcc.git] / gcc / config / avr / avr.c
blobe7e1c2f138bd1b57a23521027b62de18cf87a960
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2013 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "tree.h"
35 #include "output.h"
36 #include "expr.h"
37 #include "c-family/c-common.h"
38 #include "diagnostic-core.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "optabs.h"
43 #include "ggc.h"
44 #include "langhooks.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "params.h"
49 #include "df.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
57 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
58 address space where data is to be located.
59 As the only non-generic address spaces are all located in flash,
60 this can be used to test if data shall go into some .progmem* section.
61 This must be the rightmost field of machine dependent section flags. */
62 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
64 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
65 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
67 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
68 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
69 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
70 do { \
71 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
72 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
73 } while (0)
75 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
76 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
77 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
78 / SYMBOL_FLAG_MACH_DEP)
80 /* Known address spaces. The order must be the same as in the respective
81 enum from avr.h (or designated initialized must be used). */
82 const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
84 { ADDR_SPACE_RAM, 0, 2, "", 0, NULL },
85 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0, ".progmem.data" },
86 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
87 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
88 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
89 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
90 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
91 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0, ".progmemx.data" },
95 /* Holding RAM addresses of some SFRs used by the compiler and that
96 are unique over all devices in an architecture like 'avr4'. */
98 typedef struct
100 /* SREG: The processor status */
101 int sreg;
103 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
104 int ccp;
105 int rampd;
106 int rampx;
107 int rampy;
109 /* RAMPZ: The high byte of 24-bit address used with ELPM */
110 int rampz;
112 /* SP: The stack pointer and its low and high byte */
113 int sp_l;
114 int sp_h;
115 } avr_addr_t;
117 static avr_addr_t avr_addr;
120 /* Prototypes for local helper functions. */
122 static const char* out_movqi_r_mr (rtx, rtx[], int*);
123 static const char* out_movhi_r_mr (rtx, rtx[], int*);
124 static const char* out_movsi_r_mr (rtx, rtx[], int*);
125 static const char* out_movqi_mr_r (rtx, rtx[], int*);
126 static const char* out_movhi_mr_r (rtx, rtx[], int*);
127 static const char* out_movsi_mr_r (rtx, rtx[], int*);
129 static int get_sequence_length (rtx insns);
130 static int sequent_regs_live (void);
131 static const char *ptrreg_to_str (int);
132 static const char *cond_string (enum rtx_code);
133 static int avr_num_arg_regs (enum machine_mode, const_tree);
134 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
135 int, bool);
136 static void output_reload_in_const (rtx*, rtx, int*, bool);
137 static struct machine_function * avr_init_machine_status (void);
140 /* Prototypes for hook implementors if needed before their implementation. */
142 static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
145 /* Allocate registers from r25 to r8 for parameters for function calls. */
146 #define FIRST_CUM_REG 26
148 /* Implicit target register of LPM instruction (R0) */
149 extern GTY(()) rtx lpm_reg_rtx;
150 rtx lpm_reg_rtx;
152 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
153 extern GTY(()) rtx lpm_addr_reg_rtx;
154 rtx lpm_addr_reg_rtx;
156 /* Temporary register RTX (reg:QI TMP_REGNO) */
157 extern GTY(()) rtx tmp_reg_rtx;
158 rtx tmp_reg_rtx;
160 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
161 extern GTY(()) rtx zero_reg_rtx;
162 rtx zero_reg_rtx;
164 /* RTXs for all general purpose registers as QImode */
165 extern GTY(()) rtx all_regs_rtx[32];
166 rtx all_regs_rtx[32];
168 /* SREG, the processor status */
169 extern GTY(()) rtx sreg_rtx;
170 rtx sreg_rtx;
172 /* RAMP* special function registers */
173 extern GTY(()) rtx rampd_rtx;
174 extern GTY(()) rtx rampx_rtx;
175 extern GTY(()) rtx rampy_rtx;
176 extern GTY(()) rtx rampz_rtx;
177 rtx rampd_rtx;
178 rtx rampx_rtx;
179 rtx rampy_rtx;
180 rtx rampz_rtx;
182 /* RTX containing the strings "" and "e", respectively */
183 static GTY(()) rtx xstring_empty;
184 static GTY(()) rtx xstring_e;
186 /* Current architecture. */
187 const avr_arch_t *avr_current_arch;
189 /* Current device. */
190 const avr_mcu_t *avr_current_device;
192 /* Section to put switch tables in. */
193 static GTY(()) section *progmem_swtable_section;
195 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
196 or to address space __flash* or __memx. Only used as singletons inside
197 avr_asm_select_section, but it must not be local there because of GTY. */
198 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
200 /* Condition for insns/expanders from avr-dimode.md. */
201 bool avr_have_dimode = true;
203 /* To track if code will use .bss and/or .data. */
204 bool avr_need_clear_bss_p = false;
205 bool avr_need_copy_data_p = false;
208 /* Transform UP into lowercase and write the result to LO.
209 You must provide enough space for LO. Return LO. */
211 static char*
212 avr_tolower (char *lo, const char *up)
214 char *lo0 = lo;
216 for (; *up; up++, lo++)
217 *lo = TOLOWER (*up);
219 *lo = '\0';
221 return lo0;
225 /* Custom function to count number of set bits. */
227 static inline int
228 avr_popcount (unsigned int val)
230 int pop = 0;
232 while (val)
234 val &= val-1;
235 pop++;
238 return pop;
242 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
243 Return true if the least significant N_BYTES bytes of XVAL all have a
244 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
245 of integers which contains an integer N iff bit N of POP_MASK is set. */
247 bool
248 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
250 int i;
252 enum machine_mode mode = GET_MODE (xval);
254 if (VOIDmode == mode)
255 mode = SImode;
257 for (i = 0; i < n_bytes; i++)
259 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
260 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
262 if (0 == (pop_mask & (1 << avr_popcount (val8))))
263 return false;
266 return true;
270 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
271 the bit representation of X by "casting" it to CONST_INT. */
274 avr_to_int_mode (rtx x)
276 enum machine_mode mode = GET_MODE (x);
278 return VOIDmode == mode
280 : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
284 /* Implement `TARGET_OPTION_OVERRIDE'. */
286 static void
287 avr_option_override (void)
289 flag_delete_null_pointer_checks = 0;
291 /* caller-save.c looks for call-clobbered hard registers that are assigned
292 to pseudos that cross calls and tries so save-restore them around calls
293 in order to reduce the number of stack slots needed.
295 This might lead to situations where reload is no more able to cope
296 with the challenge of AVR's very few address registers and fails to
297 perform the requested spills. */
299 if (avr_strict_X)
300 flag_caller_saves = 0;
302 /* Unwind tables currently require a frame pointer for correctness,
303 see toplev.c:process_options(). */
305 if ((flag_unwind_tables
306 || flag_non_call_exceptions
307 || flag_asynchronous_unwind_tables)
308 && !ACCUMULATE_OUTGOING_ARGS)
310 flag_omit_frame_pointer = 0;
313 avr_current_device = &avr_mcu_types[avr_mcu_index];
314 avr_current_arch = &avr_arch_types[avr_current_device->arch];
316 /* RAM addresses of some SFRs common to all devices in respective arch. */
318 /* SREG: Status Register containing flags like I (global IRQ) */
319 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
321 /* RAMPZ: Address' high part when loading via ELPM */
322 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
324 avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
325 avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
326 avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
327 avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;
329 /* SP: Stack Pointer (SP_H:SP_L) */
330 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
331 avr_addr.sp_h = avr_addr.sp_l + 1;
333 init_machine_status = avr_init_machine_status;
335 avr_log_set_avr_log();
338 /* Function to set up the backend function structure. */
340 static struct machine_function *
341 avr_init_machine_status (void)
343 return ggc_alloc_cleared_machine_function ();
347 /* Implement `INIT_EXPANDERS'. */
348 /* The function works like a singleton. */
350 void
351 avr_init_expanders (void)
353 int regno;
355 for (regno = 0; regno < 32; regno ++)
356 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
358 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
359 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
360 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
362 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
364 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
365 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
366 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
367 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
368 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
370 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
371 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
375 /* Implement `REGNO_REG_CLASS'. */
376 /* Return register class for register R. */
378 enum reg_class
379 avr_regno_reg_class (int r)
381 static const enum reg_class reg_class_tab[] =
383 R0_REG,
384 /* r1 - r15 */
385 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
386 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
387 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
388 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
389 /* r16 - r23 */
390 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
391 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
392 /* r24, r25 */
393 ADDW_REGS, ADDW_REGS,
394 /* X: r26, 27 */
395 POINTER_X_REGS, POINTER_X_REGS,
396 /* Y: r28, r29 */
397 POINTER_Y_REGS, POINTER_Y_REGS,
398 /* Z: r30, r31 */
399 POINTER_Z_REGS, POINTER_Z_REGS,
400 /* SP: SPL, SPH */
401 STACK_REG, STACK_REG
404 if (r <= 33)
405 return reg_class_tab[r];
407 return ALL_REGS;
411 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
413 static bool
414 avr_scalar_mode_supported_p (enum machine_mode mode)
416 if (ALL_FIXED_POINT_MODE_P (mode))
417 return true;
419 if (PSImode == mode)
420 return true;
422 return default_scalar_mode_supported_p (mode);
426 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
428 static bool
429 avr_decl_flash_p (tree decl)
431 if (TREE_CODE (decl) != VAR_DECL
432 || TREE_TYPE (decl) == error_mark_node)
434 return false;
437 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
441 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
442 address space and FALSE, otherwise. */
444 static bool
445 avr_decl_memx_p (tree decl)
447 if (TREE_CODE (decl) != VAR_DECL
448 || TREE_TYPE (decl) == error_mark_node)
450 return false;
453 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
457 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
459 bool
460 avr_mem_flash_p (rtx x)
462 return (MEM_P (x)
463 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
467 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
468 address space and FALSE, otherwise. */
470 bool
471 avr_mem_memx_p (rtx x)
473 return (MEM_P (x)
474 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
478 /* A helper for the subsequent function attribute used to dig for
479 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
481 static inline int
482 avr_lookup_function_attribute1 (const_tree func, const char *name)
484 if (FUNCTION_DECL == TREE_CODE (func))
486 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
488 return true;
491 func = TREE_TYPE (func);
494 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
495 || TREE_CODE (func) == METHOD_TYPE);
497 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
500 /* Return nonzero if FUNC is a naked function. */
502 static int
503 avr_naked_function_p (tree func)
505 return avr_lookup_function_attribute1 (func, "naked");
508 /* Return nonzero if FUNC is an interrupt function as specified
509 by the "interrupt" attribute. */
511 static int
512 avr_interrupt_function_p (tree func)
514 return avr_lookup_function_attribute1 (func, "interrupt");
517 /* Return nonzero if FUNC is a signal function as specified
518 by the "signal" attribute. */
520 static int
521 avr_signal_function_p (tree func)
523 return avr_lookup_function_attribute1 (func, "signal");
526 /* Return nonzero if FUNC is an OS_task function. */
528 static int
529 avr_OS_task_function_p (tree func)
531 return avr_lookup_function_attribute1 (func, "OS_task");
534 /* Return nonzero if FUNC is an OS_main function. */
536 static int
537 avr_OS_main_function_p (tree func)
539 return avr_lookup_function_attribute1 (func, "OS_main");
543 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
544 /* Sanity cheching for above function attributes. */
546 static void
547 avr_set_current_function (tree decl)
549 location_t loc;
550 const char *isr;
552 if (decl == NULL_TREE
553 || current_function_decl == NULL_TREE
554 || current_function_decl == error_mark_node
555 || ! cfun->machine
556 || cfun->machine->attributes_checked_p)
557 return;
559 loc = DECL_SOURCE_LOCATION (decl);
561 cfun->machine->is_naked = avr_naked_function_p (decl);
562 cfun->machine->is_signal = avr_signal_function_p (decl);
563 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
564 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
565 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
567 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
569 /* Too much attributes make no sense as they request conflicting features. */
571 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
572 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
573 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
574 " exclusive", "OS_task", "OS_main", isr);
576 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
578 if (cfun->machine->is_naked
579 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
580 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
581 " no effect on %qs function", "OS_task", "OS_main", "naked");
583 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
585 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
586 tree ret = TREE_TYPE (TREE_TYPE (decl));
587 const char *name;
589 name = DECL_ASSEMBLER_NAME_SET_P (decl)
590 /* Remove the leading '*' added in set_user_assembler_name. */
591 ? 1 + IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
592 : IDENTIFIER_POINTER (DECL_NAME (decl));
594 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
595 using this when it switched from SIGNAL and INTERRUPT to ISR. */
597 if (cfun->machine->is_interrupt)
598 cfun->machine->is_signal = 0;
600 /* Interrupt handlers must be void __vector (void) functions. */
602 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
603 error_at (loc, "%qs function cannot have arguments", isr);
605 if (TREE_CODE (ret) != VOID_TYPE)
606 error_at (loc, "%qs function cannot return a value", isr);
608 /* If the function has the 'signal' or 'interrupt' attribute, ensure
609 that the name of the function is "__vector_NN" so as to catch
610 when the user misspells the vector name. */
612 if (!STR_PREFIX_P (name, "__vector"))
613 warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
614 name, isr);
617 /* Don't print the above diagnostics more than once. */
619 cfun->machine->attributes_checked_p = 1;
623 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
626 avr_accumulate_outgoing_args (void)
628 if (!cfun)
629 return TARGET_ACCUMULATE_OUTGOING_ARGS;
631 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
632 what offset is correct. In some cases it is relative to
633 virtual_outgoing_args_rtx and in others it is relative to
634 virtual_stack_vars_rtx. For example code see
635 gcc.c-torture/execute/built-in-setjmp.c
636 gcc.c-torture/execute/builtins/sprintf-chk.c */
638 return (TARGET_ACCUMULATE_OUTGOING_ARGS
639 && !(cfun->calls_setjmp
640 || cfun->has_nonlocal_label));
644 /* Report contribution of accumulated outgoing arguments to stack size. */
646 static inline int
647 avr_outgoing_args_size (void)
649 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
653 /* Implement `STARTING_FRAME_OFFSET'. */
654 /* This is the offset from the frame pointer register to the first stack slot
655 that contains a variable living in the frame. */
658 avr_starting_frame_offset (void)
660 return 1 + avr_outgoing_args_size ();
664 /* Return the number of hard registers to push/pop in the prologue/epilogue
665 of the current function, and optionally store these registers in SET. */
667 static int
668 avr_regs_to_save (HARD_REG_SET *set)
670 int reg, count;
671 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
673 if (set)
674 CLEAR_HARD_REG_SET (*set);
675 count = 0;
677 /* No need to save any registers if the function never returns or
678 has the "OS_task" or "OS_main" attribute. */
680 if (TREE_THIS_VOLATILE (current_function_decl)
681 || cfun->machine->is_OS_task
682 || cfun->machine->is_OS_main)
683 return 0;
685 for (reg = 0; reg < 32; reg++)
687 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
688 any global register variables. */
690 if (fixed_regs[reg])
691 continue;
693 if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
694 || (df_regs_ever_live_p (reg)
695 && (int_or_sig_p || !call_used_regs[reg])
696 /* Don't record frame pointer registers here. They are treated
697 indivitually in prologue. */
698 && !(frame_pointer_needed
699 && (reg == REG_Y || reg == (REG_Y+1)))))
701 if (set)
702 SET_HARD_REG_BIT (*set, reg);
703 count++;
706 return count;
710 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
712 static bool
713 avr_allocate_stack_slots_for_args (void)
715 return !cfun->machine->is_naked;
719 /* Return true if register FROM can be eliminated via register TO. */
721 static bool
722 avr_can_eliminate (const int from, const int to)
724 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
725 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
726 || ((from == FRAME_POINTER_REGNUM
727 || from == FRAME_POINTER_REGNUM + 1)
728 && !frame_pointer_needed));
732 /* Implement `TARGET_WARN_FUNC_RETURN'. */
734 static bool
735 avr_warn_func_return (tree decl)
737 /* Naked functions are implemented entirely in assembly, including the
738 return sequence, so suppress warnings about this. */
740 return !avr_naked_function_p (decl);
743 /* Compute offset between arg_pointer and frame_pointer. */
746 avr_initial_elimination_offset (int from, int to)
748 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
749 return 0;
750 else
752 int offset = frame_pointer_needed ? 2 : 0;
753 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
755 offset += avr_regs_to_save (NULL);
756 return (get_frame_size () + avr_outgoing_args_size()
757 + avr_pc_size + 1 + offset);
762 /* Helper for the function below. */
764 static void
765 avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
767 *node = make_node (FIXED_POINT_TYPE);
768 TYPE_SATURATING (*node) = sat_p;
769 TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
770 TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
771 TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
772 TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
773 TYPE_ALIGN (*node) = 8;
774 SET_TYPE_MODE (*node, mode);
776 layout_type (*node);
780 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
782 static tree
783 avr_build_builtin_va_list (void)
785 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
786 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
787 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
788 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
789 to the long long accum modes instead of the desired [U]TAmode.
791 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
792 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
793 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
794 libgcc to detect IBIT and FBIT. */
796 avr_adjust_type_node (&ta_type_node, TAmode, 0);
797 avr_adjust_type_node (&uta_type_node, UTAmode, 0);
798 avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
799 avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);
801 unsigned_long_long_accum_type_node = uta_type_node;
802 long_long_accum_type_node = ta_type_node;
803 sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
804 sat_long_long_accum_type_node = sat_ta_type_node;
806 /* Dispatch to the default handler. */
808 return std_build_builtin_va_list ();
812 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
813 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
814 frame pointer by +STARTING_FRAME_OFFSET.
815 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
816 avoids creating add/sub of offset in nonlocal goto and setjmp. */
818 static rtx
819 avr_builtin_setjmp_frame_value (void)
821 rtx xval = gen_reg_rtx (Pmode);
822 emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
823 gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
824 return xval;
828 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
829 This is return address of function. */
832 avr_return_addr_rtx (int count, rtx tem)
834 rtx r;
836 /* Can only return this function's return address. Others not supported. */
837 if (count)
838 return NULL;
840 if (AVR_3_BYTE_PC)
842 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
843 warning (0, "%<builtin_return_address%> contains only 2 bytes"
844 " of address");
846 else
847 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
849 r = gen_rtx_PLUS (Pmode, tem, r);
850 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
851 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
852 return r;
855 /* Return 1 if the function epilogue is just a single "ret". */
858 avr_simple_epilogue (void)
860 return (! frame_pointer_needed
861 && get_frame_size () == 0
862 && avr_outgoing_args_size() == 0
863 && avr_regs_to_save (NULL) == 0
864 && ! cfun->machine->is_interrupt
865 && ! cfun->machine->is_signal
866 && ! cfun->machine->is_naked
867 && ! TREE_THIS_VOLATILE (current_function_decl));
870 /* This function checks sequence of live registers. */
872 static int
873 sequent_regs_live (void)
875 int reg;
876 int live_seq = 0;
877 int cur_seq = 0;
879 for (reg = 0; reg < 18; ++reg)
881 if (fixed_regs[reg])
883 /* Don't recognize sequences that contain global register
884 variables. */
886 if (live_seq != 0)
887 return 0;
888 else
889 continue;
892 if (!call_used_regs[reg])
894 if (df_regs_ever_live_p (reg))
896 ++live_seq;
897 ++cur_seq;
899 else
900 cur_seq = 0;
904 if (!frame_pointer_needed)
906 if (df_regs_ever_live_p (REG_Y))
908 ++live_seq;
909 ++cur_seq;
911 else
912 cur_seq = 0;
914 if (df_regs_ever_live_p (REG_Y+1))
916 ++live_seq;
917 ++cur_seq;
919 else
920 cur_seq = 0;
922 else
924 cur_seq += 2;
925 live_seq += 2;
927 return (cur_seq == live_seq) ? live_seq : 0;
930 /* Obtain the length sequence of insns. */
933 get_sequence_length (rtx insns)
935 rtx insn;
936 int length;
938 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
939 length += get_attr_length (insn);
941 return length;
945 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
948 avr_incoming_return_addr_rtx (void)
950 /* The return address is at the top of the stack. Note that the push
951 was via post-decrement, which means the actual address is off by one. */
952 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
955 /* Helper for expand_prologue. Emit a push of a byte register. */
957 static void
958 emit_push_byte (unsigned regno, bool frame_related_p)
960 rtx mem, reg, insn;
962 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
963 mem = gen_frame_mem (QImode, mem);
964 reg = gen_rtx_REG (QImode, regno);
966 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
967 if (frame_related_p)
968 RTX_FRAME_RELATED_P (insn) = 1;
970 cfun->machine->stack_usage++;
974 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
975 SFR is a MEM representing the memory location of the SFR.
976 If CLR_P then clear the SFR after the push using zero_reg. */
978 static void
979 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
981 rtx insn;
983 gcc_assert (MEM_P (sfr));
985 /* IN __tmp_reg__, IO(SFR) */
986 insn = emit_move_insn (tmp_reg_rtx, sfr);
987 if (frame_related_p)
988 RTX_FRAME_RELATED_P (insn) = 1;
990 /* PUSH __tmp_reg__ */
991 emit_push_byte (TMP_REGNO, frame_related_p);
993 if (clr_p)
995 /* OUT IO(SFR), __zero_reg__ */
996 insn = emit_move_insn (sfr, const0_rtx);
997 if (frame_related_p)
998 RTX_FRAME_RELATED_P (insn) = 1;
1002 static void
1003 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
1005 rtx insn;
1006 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1007 int live_seq = sequent_regs_live ();
1009 HOST_WIDE_INT size_max
1010 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
1012 bool minimize = (TARGET_CALL_PROLOGUES
1013 && size < size_max
1014 && live_seq
1015 && !isr_p
1016 && !cfun->machine->is_OS_task
1017 && !cfun->machine->is_OS_main);
1019 if (minimize
1020 && (frame_pointer_needed
1021 || avr_outgoing_args_size() > 8
1022 || (AVR_2_BYTE_PC && live_seq > 6)
1023 || live_seq > 7))
1025 rtx pattern;
1026 int first_reg, reg, offset;
1028 emit_move_insn (gen_rtx_REG (HImode, REG_X),
1029 gen_int_mode (size, HImode));
1031 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
1032 gen_int_mode (live_seq+size, HImode));
1033 insn = emit_insn (pattern);
1034 RTX_FRAME_RELATED_P (insn) = 1;
1036 /* Describe the effect of the unspec_volatile call to prologue_saves.
1037 Note that this formulation assumes that add_reg_note pushes the
1038 notes to the front. Thus we build them in the reverse order of
1039 how we want dwarf2out to process them. */
1041 /* The function does always set frame_pointer_rtx, but whether that
1042 is going to be permanent in the function is frame_pointer_needed. */
1044 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1045 gen_rtx_SET (VOIDmode, (frame_pointer_needed
1046 ? frame_pointer_rtx
1047 : stack_pointer_rtx),
1048 plus_constant (Pmode, stack_pointer_rtx,
1049 -(size + live_seq))));
1051 /* Note that live_seq always contains r28+r29, but the other
1052 registers to be saved are all below 18. */
1054 first_reg = 18 - (live_seq - 2);
1056 for (reg = 29, offset = -live_seq + 1;
1057 reg >= first_reg;
1058 reg = (reg == 28 ? 17 : reg - 1), ++offset)
1060 rtx m, r;
1062 m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
1063 offset));
1064 r = gen_rtx_REG (QImode, reg);
1065 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
1068 cfun->machine->stack_usage += size + live_seq;
1070 else /* !minimize */
1072 int reg;
1074 for (reg = 0; reg < 32; ++reg)
1075 if (TEST_HARD_REG_BIT (set, reg))
1076 emit_push_byte (reg, true);
1078 if (frame_pointer_needed
1079 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
1081 /* Push frame pointer. Always be consistent about the
1082 ordering of pushes -- epilogue_restores expects the
1083 register pair to be pushed low byte first. */
1085 emit_push_byte (REG_Y, true);
1086 emit_push_byte (REG_Y + 1, true);
1089 if (frame_pointer_needed
1090 && size == 0)
1092 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1093 RTX_FRAME_RELATED_P (insn) = 1;
1096 if (size != 0)
1098 /* Creating a frame can be done by direct manipulation of the
1099 stack or via the frame pointer. These two methods are:
1100 fp = sp
1101 fp -= size
1102 sp = fp
1104 sp -= size
1105 fp = sp (*)
1106 the optimum method depends on function type, stack and
1107 frame size. To avoid a complex logic, both methods are
1108 tested and shortest is selected.
1110 There is also the case where SIZE != 0 and no frame pointer is
1111 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1112 In that case, insn (*) is not needed in that case.
1113 We use the X register as scratch. This is save because in X
1114 is call-clobbered.
1115 In an interrupt routine, the case of SIZE != 0 together with
1116 !frame_pointer_needed can only occur if the function is not a
1117 leaf function and thus X has already been saved. */
1119 int irq_state = -1;
1120 HOST_WIDE_INT size_cfa = size, neg_size;
1121 rtx fp_plus_insns, fp, my_fp;
1123 gcc_assert (frame_pointer_needed
1124 || !isr_p
1125 || !crtl->is_leaf);
1127 fp = my_fp = (frame_pointer_needed
1128 ? frame_pointer_rtx
1129 : gen_rtx_REG (Pmode, REG_X));
1131 if (AVR_HAVE_8BIT_SP)
1133 /* The high byte (r29) does not change:
1134 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1136 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1139 /* Cut down size and avoid size = 0 so that we don't run
1140 into ICE like PR52488 in the remainder. */
1142 if (size > size_max)
1144 /* Don't error so that insane code from newlib still compiles
1145 and does not break building newlib. As PR51345 is implemented
1146 now, there are multilib variants with -msp8.
1148 If user wants sanity checks he can use -Wstack-usage=
1149 or similar options.
1151 For CFA we emit the original, non-saturated size so that
1152 the generic machinery is aware of the real stack usage and
1153 will print the above diagnostic as expected. */
1155 size = size_max;
1158 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1159 neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));
1161 /************ Method 1: Adjust frame pointer ************/
1163 start_sequence ();
1165 /* Normally, the dwarf2out frame-related-expr interpreter does
1166 not expect to have the CFA change once the frame pointer is
1167 set up. Thus, we avoid marking the move insn below and
1168 instead indicate that the entire operation is complete after
1169 the frame pointer subtraction is done. */
1171 insn = emit_move_insn (fp, stack_pointer_rtx);
1172 if (frame_pointer_needed)
1174 RTX_FRAME_RELATED_P (insn) = 1;
1175 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1176 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
1179 insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
1180 my_fp, neg_size));
1182 if (frame_pointer_needed)
1184 RTX_FRAME_RELATED_P (insn) = 1;
1185 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1186 gen_rtx_SET (VOIDmode, fp,
1187 plus_constant (Pmode, fp,
1188 -size_cfa)));
1191 /* Copy to stack pointer. Note that since we've already
1192 changed the CFA to the frame pointer this operation
1193 need not be annotated if frame pointer is needed.
1194 Always move through unspec, see PR50063.
1195 For meaning of irq_state see movhi_sp_r insn. */
1197 if (cfun->machine->is_interrupt)
1198 irq_state = 1;
1200 if (TARGET_NO_INTERRUPTS
1201 || cfun->machine->is_signal
1202 || cfun->machine->is_OS_main)
1203 irq_state = 0;
1205 if (AVR_HAVE_8BIT_SP)
1206 irq_state = 2;
1208 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1209 fp, GEN_INT (irq_state)));
1210 if (!frame_pointer_needed)
1212 RTX_FRAME_RELATED_P (insn) = 1;
1213 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1214 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1215 plus_constant (Pmode,
1216 stack_pointer_rtx,
1217 -size_cfa)));
1220 fp_plus_insns = get_insns ();
1221 end_sequence ();
1223 /************ Method 2: Adjust Stack pointer ************/
1225 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1226 can only handle specific offsets. */
1228 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1230 rtx sp_plus_insns;
1232 start_sequence ();
1234 insn = emit_move_insn (stack_pointer_rtx,
1235 plus_constant (Pmode, stack_pointer_rtx,
1236 -size));
1237 RTX_FRAME_RELATED_P (insn) = 1;
1238 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1239 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1240 plus_constant (Pmode,
1241 stack_pointer_rtx,
1242 -size_cfa)));
1243 if (frame_pointer_needed)
1245 insn = emit_move_insn (fp, stack_pointer_rtx);
1246 RTX_FRAME_RELATED_P (insn) = 1;
1249 sp_plus_insns = get_insns ();
1250 end_sequence ();
1252 /************ Use shortest method ************/
1254 emit_insn (get_sequence_length (sp_plus_insns)
1255 < get_sequence_length (fp_plus_insns)
1256 ? sp_plus_insns
1257 : fp_plus_insns);
1259 else
1261 emit_insn (fp_plus_insns);
1264 cfun->machine->stack_usage += size_cfa;
1265 } /* !minimize && size != 0 */
1266 } /* !minimize */
1270 /* Output function prologue. */
1272 void
1273 avr_expand_prologue (void)
1275 HARD_REG_SET set;
1276 HOST_WIDE_INT size;
1278 size = get_frame_size() + avr_outgoing_args_size();
1280 cfun->machine->stack_usage = 0;
1282 /* Prologue: naked. */
1283 if (cfun->machine->is_naked)
1285 return;
1288 avr_regs_to_save (&set);
1290 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1292 /* Enable interrupts. */
1293 if (cfun->machine->is_interrupt)
1294 emit_insn (gen_enable_interrupt ());
1296 /* Push zero reg. */
1297 emit_push_byte (ZERO_REGNO, true);
1299 /* Push tmp reg. */
1300 emit_push_byte (TMP_REGNO, true);
1302 /* Push SREG. */
1303 /* ??? There's no dwarf2 column reserved for SREG. */
1304 emit_push_sfr (sreg_rtx, false, false /* clr */);
1306 /* Clear zero reg. */
1307 emit_move_insn (zero_reg_rtx, const0_rtx);
1309 /* Prevent any attempt to delete the setting of ZERO_REG! */
1310 emit_use (zero_reg_rtx);
1312 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1313 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1315 if (AVR_HAVE_RAMPD)
1316 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1318 if (AVR_HAVE_RAMPX
1319 && TEST_HARD_REG_BIT (set, REG_X)
1320 && TEST_HARD_REG_BIT (set, REG_X + 1))
1322 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1325 if (AVR_HAVE_RAMPY
1326 && (frame_pointer_needed
1327 || (TEST_HARD_REG_BIT (set, REG_Y)
1328 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1330 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1333 if (AVR_HAVE_RAMPZ
1334 && TEST_HARD_REG_BIT (set, REG_Z)
1335 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1337 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1339 } /* is_interrupt is_signal */
1341 avr_prologue_setup_frame (size, set);
1343 if (flag_stack_usage_info)
1344 current_function_static_stack_size = cfun->machine->stack_usage;
1348 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1349 /* Output summary at end of function prologue. */
1351 static void
1352 avr_asm_function_end_prologue (FILE *file)
1354 if (cfun->machine->is_naked)
1356 fputs ("/* prologue: naked */\n", file);
1358 else
1360 if (cfun->machine->is_interrupt)
1362 fputs ("/* prologue: Interrupt */\n", file);
1364 else if (cfun->machine->is_signal)
1366 fputs ("/* prologue: Signal */\n", file);
1368 else
1369 fputs ("/* prologue: function */\n", file);
1372 if (ACCUMULATE_OUTGOING_ARGS)
1373 fprintf (file, "/* outgoing args size = %d */\n",
1374 avr_outgoing_args_size());
1376 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1377 get_frame_size());
1378 fprintf (file, "/* stack size = %d */\n",
1379 cfun->machine->stack_usage);
1380 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1381 usage for offset so that SP + .L__stack_offset = return address. */
1382 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1386 /* Implement `EPILOGUE_USES'. */
1389 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1391 if (reload_completed
1392 && cfun->machine
1393 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1394 return 1;
1395 return 0;
1398 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1400 static void
1401 emit_pop_byte (unsigned regno)
1403 rtx mem, reg;
1405 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1406 mem = gen_frame_mem (QImode, mem);
1407 reg = gen_rtx_REG (QImode, regno);
1409 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1412 /* Output RTL epilogue. */
1414 void
1415 avr_expand_epilogue (bool sibcall_p)
1417 int reg;
1418 int live_seq;
1419 HARD_REG_SET set;
1420 int minimize;
1421 HOST_WIDE_INT size;
1422 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1424 size = get_frame_size() + avr_outgoing_args_size();
1426 /* epilogue: naked */
1427 if (cfun->machine->is_naked)
1429 gcc_assert (!sibcall_p);
1431 emit_jump_insn (gen_return ());
1432 return;
1435 avr_regs_to_save (&set);
1436 live_seq = sequent_regs_live ();
1438 minimize = (TARGET_CALL_PROLOGUES
1439 && live_seq
1440 && !isr_p
1441 && !cfun->machine->is_OS_task
1442 && !cfun->machine->is_OS_main);
1444 if (minimize
1445 && (live_seq > 4
1446 || frame_pointer_needed
1447 || size))
1449 /* Get rid of frame. */
1451 if (!frame_pointer_needed)
1453 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1456 if (size)
1458 emit_move_insn (frame_pointer_rtx,
1459 plus_constant (Pmode, frame_pointer_rtx, size));
1462 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1463 return;
1466 if (size)
1468 /* Try two methods to adjust stack and select shortest. */
1470 int irq_state = -1;
1471 rtx fp, my_fp;
1472 rtx fp_plus_insns;
1473 HOST_WIDE_INT size_max;
1475 gcc_assert (frame_pointer_needed
1476 || !isr_p
1477 || !crtl->is_leaf);
1479 fp = my_fp = (frame_pointer_needed
1480 ? frame_pointer_rtx
1481 : gen_rtx_REG (Pmode, REG_X));
1483 if (AVR_HAVE_8BIT_SP)
1485 /* The high byte (r29) does not change:
1486 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1488 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1491 /* For rationale see comment in prologue generation. */
1493 size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1494 if (size > size_max)
1495 size = size_max;
1496 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1498 /********** Method 1: Adjust fp register **********/
1500 start_sequence ();
1502 if (!frame_pointer_needed)
1503 emit_move_insn (fp, stack_pointer_rtx);
1505 emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));
1507 /* Copy to stack pointer. */
1509 if (TARGET_NO_INTERRUPTS)
1510 irq_state = 0;
1512 if (AVR_HAVE_8BIT_SP)
1513 irq_state = 2;
1515 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1516 GEN_INT (irq_state)));
1518 fp_plus_insns = get_insns ();
1519 end_sequence ();
1521 /********** Method 2: Adjust Stack pointer **********/
1523 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1525 rtx sp_plus_insns;
1527 start_sequence ();
1529 emit_move_insn (stack_pointer_rtx,
1530 plus_constant (Pmode, stack_pointer_rtx, size));
1532 sp_plus_insns = get_insns ();
1533 end_sequence ();
1535 /************ Use shortest method ************/
1537 emit_insn (get_sequence_length (sp_plus_insns)
1538 < get_sequence_length (fp_plus_insns)
1539 ? sp_plus_insns
1540 : fp_plus_insns);
1542 else
1543 emit_insn (fp_plus_insns);
1544 } /* size != 0 */
1546 if (frame_pointer_needed
1547 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1549 /* Restore previous frame_pointer. See avr_expand_prologue for
1550 rationale for not using pophi. */
1552 emit_pop_byte (REG_Y + 1);
1553 emit_pop_byte (REG_Y);
1556 /* Restore used registers. */
1558 for (reg = 31; reg >= 0; --reg)
1559 if (TEST_HARD_REG_BIT (set, reg))
1560 emit_pop_byte (reg);
1562 if (isr_p)
1564 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1565 The conditions to restore them must be tha same as in prologue. */
1567 if (AVR_HAVE_RAMPZ
1568 && TEST_HARD_REG_BIT (set, REG_Z)
1569 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1571 emit_pop_byte (TMP_REGNO);
1572 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1575 if (AVR_HAVE_RAMPY
1576 && (frame_pointer_needed
1577 || (TEST_HARD_REG_BIT (set, REG_Y)
1578 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1580 emit_pop_byte (TMP_REGNO);
1581 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1584 if (AVR_HAVE_RAMPX
1585 && TEST_HARD_REG_BIT (set, REG_X)
1586 && TEST_HARD_REG_BIT (set, REG_X + 1))
1588 emit_pop_byte (TMP_REGNO);
1589 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1592 if (AVR_HAVE_RAMPD)
1594 emit_pop_byte (TMP_REGNO);
1595 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1598 /* Restore SREG using tmp_reg as scratch. */
1600 emit_pop_byte (TMP_REGNO);
1601 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1603 /* Restore tmp REG. */
1604 emit_pop_byte (TMP_REGNO);
1606 /* Restore zero REG. */
1607 emit_pop_byte (ZERO_REGNO);
1610 if (!sibcall_p)
1611 emit_jump_insn (gen_return ());
1615 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
1617 static void
1618 avr_asm_function_begin_epilogue (FILE *file)
1620 fprintf (file, "/* epilogue start */\n");
1624 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1626 static bool
1627 avr_cannot_modify_jumps_p (void)
1630 /* Naked Functions must not have any instructions after
1631 their epilogue, see PR42240 */
1633 if (reload_completed
1634 && cfun->machine
1635 && cfun->machine->is_naked)
1637 return true;
1640 return false;
1644 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1646 static bool
1647 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
1649 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1650 This hook just serves to hack around PR rtl-optimization/52543 by
1651 claiming that non-generic addresses were mode-dependent so that
1652 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1653 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1654 generic address space which is not true. */
1656 return !ADDR_SPACE_GENERIC_P (as);
1660 /* Helper function for `avr_legitimate_address_p'. */
1662 static inline bool
1663 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1664 RTX_CODE outer_code, bool strict)
1666 return (REG_P (reg)
1667 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1668 as, outer_code, UNKNOWN)
1669 || (!strict
1670 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1674 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1675 machine for a memory operand of mode MODE. */
1677 static bool
1678 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1680 bool ok = CONSTANT_ADDRESS_P (x);
1682 switch (GET_CODE (x))
1684 case REG:
1685 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1686 MEM, strict);
1688 if (strict
1689 && GET_MODE_SIZE (mode) > 4
1690 && REG_X == REGNO (x))
1692 ok = false;
1694 break;
1696 case POST_INC:
1697 case PRE_DEC:
1698 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1699 GET_CODE (x), strict);
1700 break;
1702 case PLUS:
1704 rtx reg = XEXP (x, 0);
1705 rtx op1 = XEXP (x, 1);
1707 if (REG_P (reg)
1708 && CONST_INT_P (op1)
1709 && INTVAL (op1) >= 0)
1711 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1713 if (fit)
1715 ok = (! strict
1716 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1717 PLUS, strict));
1719 if (reg == frame_pointer_rtx
1720 || reg == arg_pointer_rtx)
1722 ok = true;
1725 else if (frame_pointer_needed
1726 && reg == frame_pointer_rtx)
1728 ok = true;
1732 break;
1734 default:
1735 break;
1738 if (avr_log.legitimate_address_p)
1740 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1741 "reload_completed=%d reload_in_progress=%d %s:",
1742 ok, mode, strict, reload_completed, reload_in_progress,
1743 reg_renumber ? "(reg_renumber)" : "");
1745 if (GET_CODE (x) == PLUS
1746 && REG_P (XEXP (x, 0))
1747 && CONST_INT_P (XEXP (x, 1))
1748 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1749 && reg_renumber)
1751 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1752 true_regnum (XEXP (x, 0)));
1755 avr_edump ("\n%r\n", x);
1758 return ok;
1762 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1763 now only a helper for avr_addr_space_legitimize_address. */
1764 /* Attempts to replace X with a valid
1765 memory address for an operand of mode MODE */
1767 static rtx
1768 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1770 bool big_offset_p = false;
1772 x = oldx;
1774 if (GET_CODE (oldx) == PLUS
1775 && REG_P (XEXP (oldx, 0)))
1777 if (REG_P (XEXP (oldx, 1)))
1778 x = force_reg (GET_MODE (oldx), oldx);
1779 else if (CONST_INT_P (XEXP (oldx, 1)))
1781 int offs = INTVAL (XEXP (oldx, 1));
1782 if (frame_pointer_rtx != XEXP (oldx, 0)
1783 && offs > MAX_LD_OFFSET (mode))
1785 big_offset_p = true;
1786 x = force_reg (GET_MODE (oldx), oldx);
1791 if (avr_log.legitimize_address)
1793 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1795 if (x != oldx)
1796 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1799 return x;
1803 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1804 /* This will allow register R26/27 to be used where it is no worse than normal
1805 base pointers R28/29 or R30/31. For example, if base offset is greater
1806 than 63 bytes or for R++ or --R addressing. */
1809 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1810 int opnum, int type, int addr_type,
1811 int ind_levels ATTRIBUTE_UNUSED,
1812 rtx (*mk_memloc)(rtx,int))
1814 rtx x = *px;
1816 if (avr_log.legitimize_reload_address)
1817 avr_edump ("\n%?:%m %r\n", mode, x);
1819 if (1 && (GET_CODE (x) == POST_INC
1820 || GET_CODE (x) == PRE_DEC))
1822 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1823 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1824 opnum, RELOAD_OTHER);
1826 if (avr_log.legitimize_reload_address)
1827 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1828 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1830 return x;
1833 if (GET_CODE (x) == PLUS
1834 && REG_P (XEXP (x, 0))
1835 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1836 && CONST_INT_P (XEXP (x, 1))
1837 && INTVAL (XEXP (x, 1)) >= 1)
1839 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1841 if (fit)
1843 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1845 int regno = REGNO (XEXP (x, 0));
1846 rtx mem = mk_memloc (x, regno);
1848 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1849 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1850 1, (enum reload_type) addr_type);
1852 if (avr_log.legitimize_reload_address)
1853 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1854 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1856 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1857 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1858 opnum, (enum reload_type) type);
1860 if (avr_log.legitimize_reload_address)
1861 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1862 BASE_POINTER_REGS, mem, NULL_RTX);
1864 return x;
1867 else if (! (frame_pointer_needed
1868 && XEXP (x, 0) == frame_pointer_rtx))
1870 push_reload (x, NULL_RTX, px, NULL,
1871 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1872 opnum, (enum reload_type) type);
1874 if (avr_log.legitimize_reload_address)
1875 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1876 POINTER_REGS, x, NULL_RTX);
1878 return x;
1882 return NULL_RTX;
1886 /* Implement `TARGET_SECONDARY_RELOAD' */
1888 static reg_class_t
1889 avr_secondary_reload (bool in_p, rtx x,
1890 reg_class_t reload_class ATTRIBUTE_UNUSED,
1891 enum machine_mode mode, secondary_reload_info *sri)
1893 if (in_p
1894 && MEM_P (x)
1895 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1896 && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
1898 /* For the non-generic 16-bit spaces we need a d-class scratch. */
1900 switch (mode)
1902 default:
1903 gcc_unreachable();
1905 case QImode: sri->icode = CODE_FOR_reload_inqi; break;
1906 case QQmode: sri->icode = CODE_FOR_reload_inqq; break;
1907 case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;
1909 case HImode: sri->icode = CODE_FOR_reload_inhi; break;
1910 case HQmode: sri->icode = CODE_FOR_reload_inhq; break;
1911 case HAmode: sri->icode = CODE_FOR_reload_inha; break;
1912 case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
1913 case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;
1915 case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;
1917 case SImode: sri->icode = CODE_FOR_reload_insi; break;
1918 case SFmode: sri->icode = CODE_FOR_reload_insf; break;
1919 case SQmode: sri->icode = CODE_FOR_reload_insq; break;
1920 case SAmode: sri->icode = CODE_FOR_reload_insa; break;
1921 case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
1922 case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
1926 return NO_REGS;
1930 /* Helper function to print assembler resp. track instruction
1931 sequence lengths. Always return "".
1933 If PLEN == NULL:
1934 Output assembler code from template TPL with operands supplied
1935 by OPERANDS. This is just forwarding to output_asm_insn.
1937 If PLEN != NULL:
1938 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1939 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1940 Don't output anything.
1943 static const char*
1944 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1946 if (NULL == plen)
1948 output_asm_insn (tpl, operands);
1950 else
1952 if (n_words < 0)
1953 *plen = -n_words;
1954 else
1955 *plen += n_words;
1958 return "";
1962 /* Return a pointer register name as a string. */
1964 static const char*
1965 ptrreg_to_str (int regno)
1967 switch (regno)
1969 case REG_X: return "X";
1970 case REG_Y: return "Y";
1971 case REG_Z: return "Z";
1972 default:
1973 output_operand_lossage ("address operand requires constraint for"
1974 " X, Y, or Z register");
1976 return NULL;
1979 /* Return the condition name as a string.
1980 Used in conditional jump constructing */
1982 static const char*
1983 cond_string (enum rtx_code code)
1985 switch (code)
1987 case NE:
1988 return "ne";
1989 case EQ:
1990 return "eq";
1991 case GE:
1992 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1993 return "pl";
1994 else
1995 return "ge";
1996 case LT:
1997 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1998 return "mi";
1999 else
2000 return "lt";
2001 case GEU:
2002 return "sh";
2003 case LTU:
2004 return "lo";
2005 default:
2006 gcc_unreachable ();
2009 return "";
2013 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2014 /* Output ADDR to FILE as address. */
2016 static void
2017 avr_print_operand_address (FILE *file, rtx addr)
2019 switch (GET_CODE (addr))
2021 case REG:
2022 fprintf (file, ptrreg_to_str (REGNO (addr)));
2023 break;
2025 case PRE_DEC:
2026 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2027 break;
2029 case POST_INC:
2030 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2031 break;
2033 default:
2034 if (CONSTANT_ADDRESS_P (addr)
2035 && text_segment_operand (addr, VOIDmode))
2037 rtx x = addr;
2038 if (GET_CODE (x) == CONST)
2039 x = XEXP (x, 0);
2040 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2042 /* Assembler gs() will implant word address. Make offset
2043 a byte offset inside gs() for assembler. This is
2044 needed because the more logical (constant+gs(sym)) is not
2045 accepted by gas. For 128K and smaller devices this is ok.
2046 For large devices it will create a trampoline to offset
2047 from symbol which may not be what the user really wanted. */
2049 fprintf (file, "gs(");
2050 output_addr_const (file, XEXP (x,0));
2051 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2052 2 * INTVAL (XEXP (x, 1)));
2053 if (AVR_3_BYTE_PC)
2054 if (warning (0, "pointer offset from symbol maybe incorrect"))
2056 output_addr_const (stderr, addr);
2057 fprintf(stderr,"\n");
2060 else
2062 fprintf (file, "gs(");
2063 output_addr_const (file, addr);
2064 fprintf (file, ")");
2067 else
2068 output_addr_const (file, addr);
2073 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2075 static bool
2076 avr_print_operand_punct_valid_p (unsigned char code)
2078 return code == '~' || code == '!';
2082 /* Implement `TARGET_PRINT_OPERAND'. */
2083 /* Output X as assembler operand to file FILE.
2084 For a description of supported %-codes, see top of avr.md. */
2086 static void
2087 avr_print_operand (FILE *file, rtx x, int code)
2089 int abcd = 0;
2091 if (code >= 'A' && code <= 'D')
2092 abcd = code - 'A';
2094 if (code == '~')
2096 if (!AVR_HAVE_JMP_CALL)
2097 fputc ('r', file);
2099 else if (code == '!')
2101 if (AVR_HAVE_EIJMP_EICALL)
2102 fputc ('e', file);
2104 else if (code == 't'
2105 || code == 'T')
2107 static int t_regno = -1;
2108 static int t_nbits = -1;
2110 if (REG_P (x) && t_regno < 0 && code == 'T')
2112 t_regno = REGNO (x);
2113 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2115 else if (CONST_INT_P (x) && t_regno >= 0
2116 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2118 int bpos = INTVAL (x);
2120 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2121 if (code == 'T')
2122 fprintf (file, ",%d", bpos % 8);
2124 t_regno = -1;
2126 else
2127 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2129 else if (REG_P (x))
2131 if (x == zero_reg_rtx)
2132 fprintf (file, "__zero_reg__");
2133 else if (code == 'r' && REGNO (x) < 32)
2134 fprintf (file, "%d", (int) REGNO (x));
2135 else
2136 fprintf (file, reg_names[REGNO (x) + abcd]);
2138 else if (CONST_INT_P (x))
2140 HOST_WIDE_INT ival = INTVAL (x);
2142 if ('i' != code)
2143 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2144 else if (low_io_address_operand (x, VOIDmode)
2145 || high_io_address_operand (x, VOIDmode))
2147 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2148 fprintf (file, "__RAMPZ__");
2149 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2150 fprintf (file, "__RAMPY__");
2151 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2152 fprintf (file, "__RAMPX__");
2153 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2154 fprintf (file, "__RAMPD__");
2155 else if (AVR_XMEGA && ival == avr_addr.ccp)
2156 fprintf (file, "__CCP__");
2157 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2158 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2159 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2160 else
2162 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2163 ival - avr_current_arch->sfr_offset);
2166 else
2167 fatal_insn ("bad address, not an I/O address:", x);
2169 else if (MEM_P (x))
2171 rtx addr = XEXP (x, 0);
2173 if (code == 'm')
2175 if (!CONSTANT_P (addr))
2176 fatal_insn ("bad address, not a constant:", addr);
2177 /* Assembler template with m-code is data - not progmem section */
2178 if (text_segment_operand (addr, VOIDmode))
2179 if (warning (0, "accessing data memory with"
2180 " program memory address"))
2182 output_addr_const (stderr, addr);
2183 fprintf(stderr,"\n");
2185 output_addr_const (file, addr);
2187 else if (code == 'i')
2189 avr_print_operand (file, addr, 'i');
2191 else if (code == 'o')
2193 if (GET_CODE (addr) != PLUS)
2194 fatal_insn ("bad address, not (reg+disp):", addr);
2196 avr_print_operand (file, XEXP (addr, 1), 0);
2198 else if (code == 'p' || code == 'r')
2200 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2201 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2203 if (code == 'p')
2204 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2205 else
2206 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2208 else if (GET_CODE (addr) == PLUS)
2210 avr_print_operand_address (file, XEXP (addr,0));
2211 if (REGNO (XEXP (addr, 0)) == REG_X)
2212 fatal_insn ("internal compiler error. Bad address:"
2213 ,addr);
2214 fputc ('+', file);
2215 avr_print_operand (file, XEXP (addr,1), code);
2217 else
2218 avr_print_operand_address (file, addr);
2220 else if (code == 'i')
2222 fatal_insn ("bad address, not an I/O address:", x);
2224 else if (code == 'x')
2226 /* Constant progmem address - like used in jmp or call */
2227 if (0 == text_segment_operand (x, VOIDmode))
2228 if (warning (0, "accessing program memory"
2229 " with data memory address"))
2231 output_addr_const (stderr, x);
2232 fprintf(stderr,"\n");
2234 /* Use normal symbol for direct address no linker trampoline needed */
2235 output_addr_const (file, x);
2237 else if (CONST_FIXED_P (x))
2239 HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2240 if (code != 0)
2241 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2242 code);
2243 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2245 else if (GET_CODE (x) == CONST_DOUBLE)
2247 long val;
2248 REAL_VALUE_TYPE rv;
2249 if (GET_MODE (x) != SFmode)
2250 fatal_insn ("internal compiler error. Unknown mode:", x);
2251 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2252 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2253 fprintf (file, "0x%lx", val);
2255 else if (GET_CODE (x) == CONST_STRING)
2256 fputs (XSTR (x, 0), file);
2257 else if (code == 'j')
2258 fputs (cond_string (GET_CODE (x)), file);
2259 else if (code == 'k')
2260 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2261 else
2262 avr_print_operand_address (file, x);
2266 /* Worker function for `NOTICE_UPDATE_CC'. */
2267 /* Update the condition code in the INSN. */
2269 void
2270 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2272 rtx set;
2273 enum attr_cc cc = get_attr_cc (insn);
2275 switch (cc)
2277 default:
2278 break;
2280 case CC_PLUS:
2281 case CC_LDI:
2283 rtx *op = recog_data.operand;
2284 int len_dummy, icc;
2286 /* Extract insn's operands. */
2287 extract_constrain_insn_cached (insn);
2289 switch (cc)
2291 default:
2292 gcc_unreachable();
2294 case CC_PLUS:
2295 avr_out_plus (insn, op, &len_dummy, &icc);
2296 cc = (enum attr_cc) icc;
2297 break;
2299 case CC_LDI:
2301 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2302 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2303 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2304 ? CC_CLOBBER
2305 /* Any other "r,rL" combination does not alter cc0. */
2306 : CC_NONE;
2308 break;
2309 } /* inner switch */
2311 break;
2313 } /* outer swicth */
2315 switch (cc)
2317 default:
2318 /* Special values like CC_OUT_PLUS from above have been
2319 mapped to "standard" CC_* values so we never come here. */
2321 gcc_unreachable();
2322 break;
2324 case CC_NONE:
2325 /* Insn does not affect CC at all. */
2326 break;
2328 case CC_SET_N:
2329 CC_STATUS_INIT;
2330 break;
2332 case CC_SET_ZN:
2333 set = single_set (insn);
2334 CC_STATUS_INIT;
2335 if (set)
2337 cc_status.flags |= CC_NO_OVERFLOW;
2338 cc_status.value1 = SET_DEST (set);
2340 break;
2342 case CC_SET_CZN:
2343 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2344 The V flag may or may not be known but that's ok because
2345 alter_cond will change tests to use EQ/NE. */
2346 set = single_set (insn);
2347 CC_STATUS_INIT;
2348 if (set)
2350 cc_status.value1 = SET_DEST (set);
2351 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2353 break;
2355 case CC_COMPARE:
2356 set = single_set (insn);
2357 CC_STATUS_INIT;
2358 if (set)
2359 cc_status.value1 = SET_SRC (set);
2360 break;
2362 case CC_CLOBBER:
2363 /* Insn doesn't leave CC in a usable state. */
2364 CC_STATUS_INIT;
2365 break;
2369 /* Choose mode for jump insn:
2370 1 - relative jump in range -63 <= x <= 62 ;
2371 2 - relative jump in range -2046 <= x <= 2045 ;
2372 3 - absolute jump (only for ATmega[16]03). */
2375 avr_jump_mode (rtx x, rtx insn)
2377 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2378 ? XEXP (x, 0) : x));
2379 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2380 int jump_distance = cur_addr - dest_addr;
2382 if (-63 <= jump_distance && jump_distance <= 62)
2383 return 1;
2384 else if (-2046 <= jump_distance && jump_distance <= 2045)
2385 return 2;
2386 else if (AVR_HAVE_JMP_CALL)
2387 return 3;
2389 return 2;
2392 /* Return an AVR condition jump commands.
2393 X is a comparison RTX.
2394 LEN is a number returned by avr_jump_mode function.
2395 If REVERSE nonzero then condition code in X must be reversed. */
2397 const char*
2398 ret_cond_branch (rtx x, int len, int reverse)
2400 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2402 switch (cond)
2404 case GT:
2405 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2406 return (len == 1 ? ("breq .+2" CR_TAB
2407 "brpl %0") :
2408 len == 2 ? ("breq .+4" CR_TAB
2409 "brmi .+2" CR_TAB
2410 "rjmp %0") :
2411 ("breq .+6" CR_TAB
2412 "brmi .+4" CR_TAB
2413 "jmp %0"));
2415 else
2416 return (len == 1 ? ("breq .+2" CR_TAB
2417 "brge %0") :
2418 len == 2 ? ("breq .+4" CR_TAB
2419 "brlt .+2" CR_TAB
2420 "rjmp %0") :
2421 ("breq .+6" CR_TAB
2422 "brlt .+4" CR_TAB
2423 "jmp %0"));
2424 case GTU:
2425 return (len == 1 ? ("breq .+2" CR_TAB
2426 "brsh %0") :
2427 len == 2 ? ("breq .+4" CR_TAB
2428 "brlo .+2" CR_TAB
2429 "rjmp %0") :
2430 ("breq .+6" CR_TAB
2431 "brlo .+4" CR_TAB
2432 "jmp %0"));
2433 case LE:
2434 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2435 return (len == 1 ? ("breq %0" CR_TAB
2436 "brmi %0") :
2437 len == 2 ? ("breq .+2" CR_TAB
2438 "brpl .+2" CR_TAB
2439 "rjmp %0") :
2440 ("breq .+2" CR_TAB
2441 "brpl .+4" CR_TAB
2442 "jmp %0"));
2443 else
2444 return (len == 1 ? ("breq %0" CR_TAB
2445 "brlt %0") :
2446 len == 2 ? ("breq .+2" CR_TAB
2447 "brge .+2" CR_TAB
2448 "rjmp %0") :
2449 ("breq .+2" CR_TAB
2450 "brge .+4" CR_TAB
2451 "jmp %0"));
2452 case LEU:
2453 return (len == 1 ? ("breq %0" CR_TAB
2454 "brlo %0") :
2455 len == 2 ? ("breq .+2" CR_TAB
2456 "brsh .+2" CR_TAB
2457 "rjmp %0") :
2458 ("breq .+2" CR_TAB
2459 "brsh .+4" CR_TAB
2460 "jmp %0"));
2461 default:
2462 if (reverse)
2464 switch (len)
2466 case 1:
2467 return "br%k1 %0";
2468 case 2:
2469 return ("br%j1 .+2" CR_TAB
2470 "rjmp %0");
2471 default:
2472 return ("br%j1 .+4" CR_TAB
2473 "jmp %0");
2476 else
2478 switch (len)
2480 case 1:
2481 return "br%j1 %0";
2482 case 2:
2483 return ("br%k1 .+2" CR_TAB
2484 "rjmp %0");
2485 default:
2486 return ("br%k1 .+4" CR_TAB
2487 "jmp %0");
2491 return "";
2495 /* Worker function for `FINAL_PRESCAN_INSN'. */
2496 /* Output insn cost for next insn. */
2498 void
2499 avr_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2500 int num_operands ATTRIBUTE_UNUSED)
2502 if (avr_log.rtx_costs)
2504 rtx set = single_set (insn);
2506 if (set)
2507 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2508 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2509 else
2510 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2511 rtx_cost (PATTERN (insn), INSN, 0,
2512 optimize_insn_for_speed_p()));
2516 /* Return 0 if undefined, 1 if always true or always false. */
2519 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2521 unsigned int max = (mode == QImode ? 0xff :
2522 mode == HImode ? 0xffff :
2523 mode == PSImode ? 0xffffff :
2524 mode == SImode ? 0xffffffff : 0);
2525 if (max && op && CONST_INT_P (x))
2527 if (unsigned_condition (op) != op)
2528 max >>= 1;
2530 if (max != (INTVAL (x) & max)
2531 && INTVAL (x) != 0xff)
2532 return 1;
2534 return 0;
2538 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2539 /* Returns nonzero if REGNO is the number of a hard
2540 register in which function arguments are sometimes passed. */
2543 avr_function_arg_regno_p(int r)
2545 return (r >= 8 && r <= 25);
2549 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2550 /* Initializing the variable cum for the state at the beginning
2551 of the argument list. */
2553 void
2554 avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2555 tree fndecl ATTRIBUTE_UNUSED)
2557 cum->nregs = 18;
2558 cum->regno = FIRST_CUM_REG;
2559 if (!libname && stdarg_p (fntype))
2560 cum->nregs = 0;
2562 /* Assume the calle may be tail called */
2564 cfun->machine->sibcall_fails = 0;
2567 /* Returns the number of registers to allocate for a function argument. */
2569 static int
2570 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2572 int size;
2574 if (mode == BLKmode)
2575 size = int_size_in_bytes (type);
2576 else
2577 size = GET_MODE_SIZE (mode);
2579 /* Align all function arguments to start in even-numbered registers.
2580 Odd-sized arguments leave holes above them. */
2582 return (size + 1) & ~1;
2586 /* Implement `TARGET_FUNCTION_ARG'. */
2587 /* Controls whether a function argument is passed
2588 in a register, and which register. */
2590 static rtx
2591 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2592 const_tree type, bool named ATTRIBUTE_UNUSED)
2594 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2595 int bytes = avr_num_arg_regs (mode, type);
2597 if (cum->nregs && bytes <= cum->nregs)
2598 return gen_rtx_REG (mode, cum->regno - bytes);
2600 return NULL_RTX;
2604 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2605 /* Update the summarizer variable CUM to advance past an argument
2606 in the argument list. */
2608 static void
2609 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2610 const_tree type, bool named ATTRIBUTE_UNUSED)
2612 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2613 int bytes = avr_num_arg_regs (mode, type);
2615 cum->nregs -= bytes;
2616 cum->regno -= bytes;
2618 /* A parameter is being passed in a call-saved register. As the original
2619 contents of these regs has to be restored before leaving the function,
2620 a function must not pass arguments in call-saved regs in order to get
2621 tail-called. */
2623 if (cum->regno >= 8
2624 && cum->nregs >= 0
2625 && !call_used_regs[cum->regno])
2627 /* FIXME: We ship info on failing tail-call in struct machine_function.
2628 This uses internals of calls.c:expand_call() and the way args_so_far
2629 is used. targetm.function_ok_for_sibcall() needs to be extended to
2630 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2631 dependent so that such an extension is not wanted. */
2633 cfun->machine->sibcall_fails = 1;
2636 /* Test if all registers needed by the ABI are actually available. If the
2637 user has fixed a GPR needed to pass an argument, an (implicit) function
2638 call will clobber that fixed register. See PR45099 for an example. */
2640 if (cum->regno >= 8
2641 && cum->nregs >= 0)
2643 int regno;
2645 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2646 if (fixed_regs[regno])
2647 warning (0, "fixed register %s used to pass parameter to function",
2648 reg_names[regno]);
2651 if (cum->nregs <= 0)
2653 cum->nregs = 0;
2654 cum->regno = FIRST_CUM_REG;
2658 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2659 /* Decide whether we can make a sibling call to a function. DECL is the
2660 declaration of the function being targeted by the call and EXP is the
2661 CALL_EXPR representing the call. */
2663 static bool
2664 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2666 tree fntype_callee;
2668 /* Tail-calling must fail if callee-saved regs are used to pass
2669 function args. We must not tail-call when `epilogue_restores'
2670 is used. Unfortunately, we cannot tell at this point if that
2671 actually will happen or not, and we cannot step back from
2672 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2674 if (cfun->machine->sibcall_fails
2675 || TARGET_CALL_PROLOGUES)
2677 return false;
2680 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2682 if (decl_callee)
2684 decl_callee = TREE_TYPE (decl_callee);
2686 else
2688 decl_callee = fntype_callee;
2690 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2691 && METHOD_TYPE != TREE_CODE (decl_callee))
2693 decl_callee = TREE_TYPE (decl_callee);
2697 /* Ensure that caller and callee have compatible epilogues */
2699 if (cfun->machine->is_interrupt
2700 || cfun->machine->is_signal
2701 || cfun->machine->is_naked
2702 || avr_naked_function_p (decl_callee)
2703 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2704 || (avr_OS_task_function_p (decl_callee)
2705 != cfun->machine->is_OS_task)
2706 || (avr_OS_main_function_p (decl_callee)
2707 != cfun->machine->is_OS_main))
2709 return false;
2712 return true;
2715 /***********************************************************************
2716 Functions for outputting various mov's for a various modes
2717 ************************************************************************/
2719 /* Return true if a value of mode MODE is read from flash by
2720 __load_* function from libgcc. */
2722 bool
2723 avr_load_libgcc_p (rtx op)
2725 enum machine_mode mode = GET_MODE (op);
2726 int n_bytes = GET_MODE_SIZE (mode);
2728 return (n_bytes > 2
2729 && !AVR_HAVE_LPMX
2730 && avr_mem_flash_p (op));
2733 /* Return true if a value of mode MODE is read by __xload_* function. */
2735 bool
2736 avr_xload_libgcc_p (enum machine_mode mode)
2738 int n_bytes = GET_MODE_SIZE (mode);
2740 return (n_bytes > 1
2741 || avr_current_device->n_flash > 1);
2745 /* Fixme: This is a hack because secondary reloads don't works as expected.
2747 Find an unused d-register to be used as scratch in INSN.
2748 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2749 is a register, skip all possible return values that overlap EXCLUDE.
2750 The policy for the returned register is similar to that of
2751 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2752 of INSN.
2754 Return a QImode d-register or NULL_RTX if nothing found. */
2756 static rtx
2757 avr_find_unused_d_reg (rtx insn, rtx exclude)
2759 int regno;
2760 bool isr_p = (avr_interrupt_function_p (current_function_decl)
2761 || avr_signal_function_p (current_function_decl));
2763 for (regno = 16; regno < 32; regno++)
2765 rtx reg = all_regs_rtx[regno];
2767 if ((exclude
2768 && reg_overlap_mentioned_p (exclude, reg))
2769 || fixed_regs[regno])
2771 continue;
2774 /* Try non-live register */
2776 if (!df_regs_ever_live_p (regno)
2777 && (TREE_THIS_VOLATILE (current_function_decl)
2778 || cfun->machine->is_OS_task
2779 || cfun->machine->is_OS_main
2780 || (!isr_p && call_used_regs[regno])))
2782 return reg;
2785 /* Any live register can be used if it is unused after.
2786 Prologue/epilogue will care for it as needed. */
2788 if (df_regs_ever_live_p (regno)
2789 && reg_unused_after (insn, reg))
2791 return reg;
2795 return NULL_RTX;
2799 /* Helper function for the next function in the case where only restricted
2800 version of LPM instruction is available. */
2802 static const char*
2803 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2805 rtx dest = xop[0];
2806 rtx addr = xop[1];
2807 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2808 int regno_dest;
2810 regno_dest = REGNO (dest);
2812 /* The implicit target register of LPM. */
2813 xop[3] = lpm_reg_rtx;
2815 switch (GET_CODE (addr))
2817 default:
2818 gcc_unreachable();
2820 case REG:
2822 gcc_assert (REG_Z == REGNO (addr));
2824 switch (n_bytes)
2826 default:
2827 gcc_unreachable();
2829 case 1:
2830 avr_asm_len ("%4lpm", xop, plen, 1);
2832 if (regno_dest != LPM_REGNO)
2833 avr_asm_len ("mov %0,%3", xop, plen, 1);
2835 return "";
2837 case 2:
2838 if (REGNO (dest) == REG_Z)
2839 return avr_asm_len ("%4lpm" CR_TAB
2840 "push %3" CR_TAB
2841 "adiw %2,1" CR_TAB
2842 "%4lpm" CR_TAB
2843 "mov %B0,%3" CR_TAB
2844 "pop %A0", xop, plen, 6);
2846 avr_asm_len ("%4lpm" CR_TAB
2847 "mov %A0,%3" CR_TAB
2848 "adiw %2,1" CR_TAB
2849 "%4lpm" CR_TAB
2850 "mov %B0,%3", xop, plen, 5);
2852 if (!reg_unused_after (insn, addr))
2853 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2855 break; /* 2 */
2858 break; /* REG */
2860 case POST_INC:
2862 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2863 && n_bytes <= 4);
2865 if (regno_dest == LPM_REGNO)
2866 avr_asm_len ("%4lpm" CR_TAB
2867 "adiw %2,1", xop, plen, 2);
2868 else
2869 avr_asm_len ("%4lpm" CR_TAB
2870 "mov %A0,%3" CR_TAB
2871 "adiw %2,1", xop, plen, 3);
2873 if (n_bytes >= 2)
2874 avr_asm_len ("%4lpm" CR_TAB
2875 "mov %B0,%3" CR_TAB
2876 "adiw %2,1", xop, plen, 3);
2878 if (n_bytes >= 3)
2879 avr_asm_len ("%4lpm" CR_TAB
2880 "mov %C0,%3" CR_TAB
2881 "adiw %2,1", xop, plen, 3);
2883 if (n_bytes >= 4)
2884 avr_asm_len ("%4lpm" CR_TAB
2885 "mov %D0,%3" CR_TAB
2886 "adiw %2,1", xop, plen, 3);
2888 break; /* POST_INC */
2890 } /* switch CODE (addr) */
2892 return "";
2896 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2897 OP[1] in AS1 to register OP[0].
2898 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2899 Return "". */
2901 const char*
2902 avr_out_lpm (rtx insn, rtx *op, int *plen)
2904 rtx xop[7];
2905 rtx dest = op[0];
2906 rtx src = SET_SRC (single_set (insn));
2907 rtx addr;
2908 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2909 int segment;
2910 RTX_CODE code;
2911 addr_space_t as = MEM_ADDR_SPACE (src);
2913 if (plen)
2914 *plen = 0;
2916 if (MEM_P (dest))
2918 warning (0, "writing to address space %qs not supported",
2919 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2921 return "";
2924 addr = XEXP (src, 0);
2925 code = GET_CODE (addr);
2927 gcc_assert (REG_P (dest));
2928 gcc_assert (REG == code || POST_INC == code);
2930 xop[0] = dest;
2931 xop[1] = addr;
2932 xop[2] = lpm_addr_reg_rtx;
2933 xop[4] = xstring_empty;
2934 xop[5] = tmp_reg_rtx;
2935 xop[6] = XEXP (rampz_rtx, 0);
2937 segment = avr_addrspace[as].segment;
2939 /* Set RAMPZ as needed. */
2941 if (segment)
2943 xop[4] = GEN_INT (segment);
2944 xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);
2946 if (xop[3] != NULL_RTX)
2948 avr_asm_len ("ldi %3,%4" CR_TAB
2949 "out %i6,%3", xop, plen, 2);
2951 else if (segment == 1)
2953 avr_asm_len ("clr %5" CR_TAB
2954 "inc %5" CR_TAB
2955 "out %i6,%5", xop, plen, 3);
2957 else
2959 avr_asm_len ("mov %5,%2" CR_TAB
2960 "ldi %2,%4" CR_TAB
2961 "out %i6,%2" CR_TAB
2962 "mov %2,%5", xop, plen, 4);
2965 xop[4] = xstring_e;
2967 if (!AVR_HAVE_ELPMX)
2968 return avr_out_lpm_no_lpmx (insn, xop, plen);
2970 else if (!AVR_HAVE_LPMX)
2972 return avr_out_lpm_no_lpmx (insn, xop, plen);
2975 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2977 switch (GET_CODE (addr))
2979 default:
2980 gcc_unreachable();
2982 case REG:
2984 gcc_assert (REG_Z == REGNO (addr));
2986 switch (n_bytes)
2988 default:
2989 gcc_unreachable();
2991 case 1:
2992 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2994 case 2:
2995 if (REGNO (dest) == REG_Z)
2996 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2997 "%4lpm %B0,%a2" CR_TAB
2998 "mov %A0,%5", xop, plen, 3);
2999 else
3001 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3002 "%4lpm %B0,%a2", xop, plen, 2);
3004 if (!reg_unused_after (insn, addr))
3005 avr_asm_len ("sbiw %2,1", xop, plen, 1);
3008 break; /* 2 */
3010 case 3:
3012 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3013 "%4lpm %B0,%a2+" CR_TAB
3014 "%4lpm %C0,%a2", xop, plen, 3);
3016 if (!reg_unused_after (insn, addr))
3017 avr_asm_len ("sbiw %2,2", xop, plen, 1);
3019 break; /* 3 */
3021 case 4:
3023 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3024 "%4lpm %B0,%a2+", xop, plen, 2);
3026 if (REGNO (dest) == REG_Z - 2)
3027 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3028 "%4lpm %C0,%a2" CR_TAB
3029 "mov %D0,%5", xop, plen, 3);
3030 else
3032 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3033 "%4lpm %D0,%a2", xop, plen, 2);
3035 if (!reg_unused_after (insn, addr))
3036 avr_asm_len ("sbiw %2,3", xop, plen, 1);
3039 break; /* 4 */
3040 } /* n_bytes */
3042 break; /* REG */
3044 case POST_INC:
3046 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3047 && n_bytes <= 4);
3049 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
3050 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
3051 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
3052 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
3054 break; /* POST_INC */
3056 } /* switch CODE (addr) */
3058 if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
3060 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3062 xop[0] = zero_reg_rtx;
3063 avr_asm_len ("out %i6,%0", xop, plen, 1);
3066 return "";
3070 /* Worker function for xload_8 insn. */
3072 const char*
3073 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
3075 rtx xop[4];
3077 xop[0] = op[0];
3078 xop[1] = op[1];
3079 xop[2] = lpm_addr_reg_rtx;
3080 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
3082 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);
3084 avr_asm_len ("sbrc %1,7" CR_TAB
3085 "ld %3,%a2", xop, plen, 2);
3087 if (REGNO (xop[0]) != REGNO (xop[3]))
3088 avr_asm_len ("mov %0,%3", xop, plen, 1);
3090 return "";
3094 const char*
3095 output_movqi (rtx insn, rtx operands[], int *plen)
3097 rtx dest = operands[0];
3098 rtx src = operands[1];
3100 if (avr_mem_flash_p (src)
3101 || avr_mem_flash_p (dest))
3103 return avr_out_lpm (insn, operands, plen);
3106 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3108 if (REG_P (dest))
3110 if (REG_P (src)) /* mov r,r */
3112 if (test_hard_reg_class (STACK_REG, dest))
3113 return avr_asm_len ("out %0,%1", operands, plen, -1);
3114 else if (test_hard_reg_class (STACK_REG, src))
3115 return avr_asm_len ("in %0,%1", operands, plen, -1);
3117 return avr_asm_len ("mov %0,%1", operands, plen, -1);
3119 else if (CONSTANT_P (src))
3121 output_reload_in_const (operands, NULL_RTX, plen, false);
3122 return "";
3124 else if (MEM_P (src))
3125 return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
3127 else if (MEM_P (dest))
3129 rtx xop[2];
3131 xop[0] = dest;
3132 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3134 return out_movqi_mr_r (insn, xop, plen);
3137 return "";
3141 const char *
3142 output_movhi (rtx insn, rtx xop[], int *plen)
3144 rtx dest = xop[0];
3145 rtx src = xop[1];
3147 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3149 if (avr_mem_flash_p (src)
3150 || avr_mem_flash_p (dest))
3152 return avr_out_lpm (insn, xop, plen);
3155 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3157 if (REG_P (dest))
3159 if (REG_P (src)) /* mov r,r */
3161 if (test_hard_reg_class (STACK_REG, dest))
3163 if (AVR_HAVE_8BIT_SP)
3164 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3166 if (AVR_XMEGA)
3167 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3168 "out __SP_H__,%B1", xop, plen, -2);
3170 /* Use simple load of SP if no interrupts are used. */
3172 return TARGET_NO_INTERRUPTS
3173 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3174 "out __SP_L__,%A1", xop, plen, -2)
3175 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3176 "cli" CR_TAB
3177 "out __SP_H__,%B1" CR_TAB
3178 "out __SREG__,__tmp_reg__" CR_TAB
3179 "out __SP_L__,%A1", xop, plen, -5);
3181 else if (test_hard_reg_class (STACK_REG, src))
3183 return !AVR_HAVE_SPH
3184 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3185 "clr %B0", xop, plen, -2)
3187 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3188 "in %B0,__SP_H__", xop, plen, -2);
3191 return AVR_HAVE_MOVW
3192 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3194 : avr_asm_len ("mov %A0,%A1" CR_TAB
3195 "mov %B0,%B1", xop, plen, -2);
3196 } /* REG_P (src) */
3197 else if (CONSTANT_P (src))
3199 return output_reload_inhi (xop, NULL, plen);
3201 else if (MEM_P (src))
3203 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3206 else if (MEM_P (dest))
3208 rtx xop[2];
3210 xop[0] = dest;
3211 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3213 return out_movhi_mr_r (insn, xop, plen);
3216 fatal_insn ("invalid insn:", insn);
3218 return "";
3221 static const char*
3222 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
3224 rtx dest = op[0];
3225 rtx src = op[1];
3226 rtx x = XEXP (src, 0);
3228 if (CONSTANT_ADDRESS_P (x))
3230 return optimize > 0 && io_address_operand (x, QImode)
3231 ? avr_asm_len ("in %0,%i1", op, plen, -1)
3232 : avr_asm_len ("lds %0,%m1", op, plen, -2);
3234 else if (GET_CODE (x) == PLUS
3235 && REG_P (XEXP (x, 0))
3236 && CONST_INT_P (XEXP (x, 1)))
3238 /* memory access by reg+disp */
3240 int disp = INTVAL (XEXP (x, 1));
3242 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
3244 if (REGNO (XEXP (x, 0)) != REG_Y)
3245 fatal_insn ("incorrect insn:",insn);
3247 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3248 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3249 "ldd %0,Y+63" CR_TAB
3250 "sbiw r28,%o1-63", op, plen, -3);
3252 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3253 "sbci r29,hi8(-%o1)" CR_TAB
3254 "ld %0,Y" CR_TAB
3255 "subi r28,lo8(%o1)" CR_TAB
3256 "sbci r29,hi8(%o1)", op, plen, -5);
3258 else if (REGNO (XEXP (x, 0)) == REG_X)
3260 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3261 it but I have this situation with extremal optimizing options. */
3263 avr_asm_len ("adiw r26,%o1" CR_TAB
3264 "ld %0,X", op, plen, -2);
3266 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3267 && !reg_unused_after (insn, XEXP (x,0)))
3269 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3272 return "";
3275 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3278 return avr_asm_len ("ld %0,%1", op, plen, -1);
3281 static const char*
3282 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
3284 rtx dest = op[0];
3285 rtx src = op[1];
3286 rtx base = XEXP (src, 0);
3287 int reg_dest = true_regnum (dest);
3288 int reg_base = true_regnum (base);
3289 /* "volatile" forces reading low byte first, even if less efficient,
3290 for correct operation with 16-bit I/O registers. */
3291 int mem_volatile_p = MEM_VOLATILE_P (src);
3293 if (reg_base > 0)
3295 if (reg_dest == reg_base) /* R = (R) */
3296 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3297 "ld %B0,%1" CR_TAB
3298 "mov %A0,__tmp_reg__", op, plen, -3);
3300 if (reg_base != REG_X)
3301 return avr_asm_len ("ld %A0,%1" CR_TAB
3302 "ldd %B0,%1+1", op, plen, -2);
3304 avr_asm_len ("ld %A0,X+" CR_TAB
3305 "ld %B0,X", op, plen, -2);
3307 if (!reg_unused_after (insn, base))
3308 avr_asm_len ("sbiw r26,1", op, plen, 1);
3310 return "";
3312 else if (GET_CODE (base) == PLUS) /* (R + i) */
3314 int disp = INTVAL (XEXP (base, 1));
3315 int reg_base = true_regnum (XEXP (base, 0));
3317 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3319 if (REGNO (XEXP (base, 0)) != REG_Y)
3320 fatal_insn ("incorrect insn:",insn);
3322 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3323 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3324 "ldd %A0,Y+62" CR_TAB
3325 "ldd %B0,Y+63" CR_TAB
3326 "sbiw r28,%o1-62", op, plen, -4)
3328 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3329 "sbci r29,hi8(-%o1)" CR_TAB
3330 "ld %A0,Y" CR_TAB
3331 "ldd %B0,Y+1" CR_TAB
3332 "subi r28,lo8(%o1)" CR_TAB
3333 "sbci r29,hi8(%o1)", op, plen, -6);
3336 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3337 it but I have this situation with extremal
3338 optimization options. */
3340 if (reg_base == REG_X)
3341 return reg_base == reg_dest
3342 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3343 "ld __tmp_reg__,X+" CR_TAB
3344 "ld %B0,X" CR_TAB
3345 "mov %A0,__tmp_reg__", op, plen, -4)
3347 : avr_asm_len ("adiw r26,%o1" CR_TAB
3348 "ld %A0,X+" CR_TAB
3349 "ld %B0,X" CR_TAB
3350 "sbiw r26,%o1+1", op, plen, -4);
3352 return reg_base == reg_dest
3353 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3354 "ldd %B0,%B1" CR_TAB
3355 "mov %A0,__tmp_reg__", op, plen, -3)
3357 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3358 "ldd %B0,%B1", op, plen, -2);
3360 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3362 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3363 fatal_insn ("incorrect insn:", insn);
3365 if (!mem_volatile_p)
3366 return avr_asm_len ("ld %B0,%1" CR_TAB
3367 "ld %A0,%1", op, plen, -2);
3369 return REGNO (XEXP (base, 0)) == REG_X
3370 ? avr_asm_len ("sbiw r26,2" CR_TAB
3371 "ld %A0,X+" CR_TAB
3372 "ld %B0,X" CR_TAB
3373 "sbiw r26,1", op, plen, -4)
3375 : avr_asm_len ("sbiw %r1,2" CR_TAB
3376 "ld %A0,%p1" CR_TAB
3377 "ldd %B0,%p1+1", op, plen, -3);
3379 else if (GET_CODE (base) == POST_INC) /* (R++) */
3381 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3382 fatal_insn ("incorrect insn:", insn);
3384 return avr_asm_len ("ld %A0,%1" CR_TAB
3385 "ld %B0,%1", op, plen, -2);
3387 else if (CONSTANT_ADDRESS_P (base))
3389 return optimize > 0 && io_address_operand (base, HImode)
3390 ? avr_asm_len ("in %A0,%i1" CR_TAB
3391 "in %B0,%i1+1", op, plen, -2)
3393 : avr_asm_len ("lds %A0,%m1" CR_TAB
3394 "lds %B0,%m1+1", op, plen, -4);
3397 fatal_insn ("unknown move insn:",insn);
3398 return "";
3401 static const char*
3402 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3404 rtx dest = op[0];
3405 rtx src = op[1];
3406 rtx base = XEXP (src, 0);
3407 int reg_dest = true_regnum (dest);
3408 int reg_base = true_regnum (base);
3409 int tmp;
3411 if (!l)
3412 l = &tmp;
3414 if (reg_base > 0)
3416 if (reg_base == REG_X) /* (R26) */
3418 if (reg_dest == REG_X)
3419 /* "ld r26,-X" is undefined */
3420 return *l=7, ("adiw r26,3" CR_TAB
3421 "ld r29,X" CR_TAB
3422 "ld r28,-X" CR_TAB
3423 "ld __tmp_reg__,-X" CR_TAB
3424 "sbiw r26,1" CR_TAB
3425 "ld r26,X" CR_TAB
3426 "mov r27,__tmp_reg__");
3427 else if (reg_dest == REG_X - 2)
3428 return *l=5, ("ld %A0,X+" CR_TAB
3429 "ld %B0,X+" CR_TAB
3430 "ld __tmp_reg__,X+" CR_TAB
3431 "ld %D0,X" CR_TAB
3432 "mov %C0,__tmp_reg__");
3433 else if (reg_unused_after (insn, base))
3434 return *l=4, ("ld %A0,X+" CR_TAB
3435 "ld %B0,X+" CR_TAB
3436 "ld %C0,X+" CR_TAB
3437 "ld %D0,X");
3438 else
3439 return *l=5, ("ld %A0,X+" CR_TAB
3440 "ld %B0,X+" CR_TAB
3441 "ld %C0,X+" CR_TAB
3442 "ld %D0,X" CR_TAB
3443 "sbiw r26,3");
3445 else
3447 if (reg_dest == reg_base)
3448 return *l=5, ("ldd %D0,%1+3" CR_TAB
3449 "ldd %C0,%1+2" CR_TAB
3450 "ldd __tmp_reg__,%1+1" CR_TAB
3451 "ld %A0,%1" CR_TAB
3452 "mov %B0,__tmp_reg__");
3453 else if (reg_base == reg_dest + 2)
3454 return *l=5, ("ld %A0,%1" CR_TAB
3455 "ldd %B0,%1+1" CR_TAB
3456 "ldd __tmp_reg__,%1+2" CR_TAB
3457 "ldd %D0,%1+3" CR_TAB
3458 "mov %C0,__tmp_reg__");
3459 else
3460 return *l=4, ("ld %A0,%1" CR_TAB
3461 "ldd %B0,%1+1" CR_TAB
3462 "ldd %C0,%1+2" CR_TAB
3463 "ldd %D0,%1+3");
3466 else if (GET_CODE (base) == PLUS) /* (R + i) */
3468 int disp = INTVAL (XEXP (base, 1));
3470 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3472 if (REGNO (XEXP (base, 0)) != REG_Y)
3473 fatal_insn ("incorrect insn:",insn);
3475 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3476 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3477 "ldd %A0,Y+60" CR_TAB
3478 "ldd %B0,Y+61" CR_TAB
3479 "ldd %C0,Y+62" CR_TAB
3480 "ldd %D0,Y+63" CR_TAB
3481 "sbiw r28,%o1-60");
3483 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3484 "sbci r29,hi8(-%o1)" CR_TAB
3485 "ld %A0,Y" CR_TAB
3486 "ldd %B0,Y+1" CR_TAB
3487 "ldd %C0,Y+2" CR_TAB
3488 "ldd %D0,Y+3" CR_TAB
3489 "subi r28,lo8(%o1)" CR_TAB
3490 "sbci r29,hi8(%o1)");
3493 reg_base = true_regnum (XEXP (base, 0));
3494 if (reg_base == REG_X)
3496 /* R = (X + d) */
3497 if (reg_dest == REG_X)
3499 *l = 7;
3500 /* "ld r26,-X" is undefined */
3501 return ("adiw r26,%o1+3" CR_TAB
3502 "ld r29,X" CR_TAB
3503 "ld r28,-X" CR_TAB
3504 "ld __tmp_reg__,-X" CR_TAB
3505 "sbiw r26,1" CR_TAB
3506 "ld r26,X" CR_TAB
3507 "mov r27,__tmp_reg__");
3509 *l = 6;
3510 if (reg_dest == REG_X - 2)
3511 return ("adiw r26,%o1" CR_TAB
3512 "ld r24,X+" CR_TAB
3513 "ld r25,X+" CR_TAB
3514 "ld __tmp_reg__,X+" CR_TAB
3515 "ld r27,X" CR_TAB
3516 "mov r26,__tmp_reg__");
3518 return ("adiw r26,%o1" CR_TAB
3519 "ld %A0,X+" CR_TAB
3520 "ld %B0,X+" CR_TAB
3521 "ld %C0,X+" CR_TAB
3522 "ld %D0,X" CR_TAB
3523 "sbiw r26,%o1+3");
3525 if (reg_dest == reg_base)
3526 return *l=5, ("ldd %D0,%D1" CR_TAB
3527 "ldd %C0,%C1" CR_TAB
3528 "ldd __tmp_reg__,%B1" CR_TAB
3529 "ldd %A0,%A1" CR_TAB
3530 "mov %B0,__tmp_reg__");
3531 else if (reg_dest == reg_base - 2)
3532 return *l=5, ("ldd %A0,%A1" CR_TAB
3533 "ldd %B0,%B1" CR_TAB
3534 "ldd __tmp_reg__,%C1" CR_TAB
3535 "ldd %D0,%D1" CR_TAB
3536 "mov %C0,__tmp_reg__");
3537 return *l=4, ("ldd %A0,%A1" CR_TAB
3538 "ldd %B0,%B1" CR_TAB
3539 "ldd %C0,%C1" CR_TAB
3540 "ldd %D0,%D1");
3542 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3543 return *l=4, ("ld %D0,%1" CR_TAB
3544 "ld %C0,%1" CR_TAB
3545 "ld %B0,%1" CR_TAB
3546 "ld %A0,%1");
3547 else if (GET_CODE (base) == POST_INC) /* (R++) */
3548 return *l=4, ("ld %A0,%1" CR_TAB
3549 "ld %B0,%1" CR_TAB
3550 "ld %C0,%1" CR_TAB
3551 "ld %D0,%1");
3552 else if (CONSTANT_ADDRESS_P (base))
3553 return *l=8, ("lds %A0,%m1" CR_TAB
3554 "lds %B0,%m1+1" CR_TAB
3555 "lds %C0,%m1+2" CR_TAB
3556 "lds %D0,%m1+3");
3558 fatal_insn ("unknown move insn:",insn);
3559 return "";
3562 static const char*
3563 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3565 rtx dest = op[0];
3566 rtx src = op[1];
3567 rtx base = XEXP (dest, 0);
3568 int reg_base = true_regnum (base);
3569 int reg_src = true_regnum (src);
3570 int tmp;
3572 if (!l)
3573 l = &tmp;
3575 if (CONSTANT_ADDRESS_P (base))
3576 return *l=8,("sts %m0,%A1" CR_TAB
3577 "sts %m0+1,%B1" CR_TAB
3578 "sts %m0+2,%C1" CR_TAB
3579 "sts %m0+3,%D1");
3580 if (reg_base > 0) /* (r) */
3582 if (reg_base == REG_X) /* (R26) */
3584 if (reg_src == REG_X)
3586 /* "st X+,r26" is undefined */
3587 if (reg_unused_after (insn, base))
3588 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3589 "st X,r26" CR_TAB
3590 "adiw r26,1" CR_TAB
3591 "st X+,__tmp_reg__" CR_TAB
3592 "st X+,r28" CR_TAB
3593 "st X,r29");
3594 else
3595 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3596 "st X,r26" CR_TAB
3597 "adiw r26,1" CR_TAB
3598 "st X+,__tmp_reg__" CR_TAB
3599 "st X+,r28" CR_TAB
3600 "st X,r29" CR_TAB
3601 "sbiw r26,3");
3603 else if (reg_base == reg_src + 2)
3605 if (reg_unused_after (insn, base))
3606 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3607 "mov __tmp_reg__,%D1" CR_TAB
3608 "st %0+,%A1" CR_TAB
3609 "st %0+,%B1" CR_TAB
3610 "st %0+,__zero_reg__" CR_TAB
3611 "st %0,__tmp_reg__" CR_TAB
3612 "clr __zero_reg__");
3613 else
3614 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3615 "mov __tmp_reg__,%D1" CR_TAB
3616 "st %0+,%A1" CR_TAB
3617 "st %0+,%B1" CR_TAB
3618 "st %0+,__zero_reg__" CR_TAB
3619 "st %0,__tmp_reg__" CR_TAB
3620 "clr __zero_reg__" CR_TAB
3621 "sbiw r26,3");
3623 return *l=5, ("st %0+,%A1" CR_TAB
3624 "st %0+,%B1" CR_TAB
3625 "st %0+,%C1" CR_TAB
3626 "st %0,%D1" CR_TAB
3627 "sbiw r26,3");
3629 else
3630 return *l=4, ("st %0,%A1" CR_TAB
3631 "std %0+1,%B1" CR_TAB
3632 "std %0+2,%C1" CR_TAB
3633 "std %0+3,%D1");
3635 else if (GET_CODE (base) == PLUS) /* (R + i) */
3637 int disp = INTVAL (XEXP (base, 1));
3638 reg_base = REGNO (XEXP (base, 0));
3639 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3641 if (reg_base != REG_Y)
3642 fatal_insn ("incorrect insn:",insn);
3644 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3645 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3646 "std Y+60,%A1" CR_TAB
3647 "std Y+61,%B1" CR_TAB
3648 "std Y+62,%C1" CR_TAB
3649 "std Y+63,%D1" CR_TAB
3650 "sbiw r28,%o0-60");
3652 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3653 "sbci r29,hi8(-%o0)" CR_TAB
3654 "st Y,%A1" CR_TAB
3655 "std Y+1,%B1" CR_TAB
3656 "std Y+2,%C1" CR_TAB
3657 "std Y+3,%D1" CR_TAB
3658 "subi r28,lo8(%o0)" CR_TAB
3659 "sbci r29,hi8(%o0)");
3661 if (reg_base == REG_X)
3663 /* (X + d) = R */
3664 if (reg_src == REG_X)
3666 *l = 9;
3667 return ("mov __tmp_reg__,r26" CR_TAB
3668 "mov __zero_reg__,r27" CR_TAB
3669 "adiw r26,%o0" CR_TAB
3670 "st X+,__tmp_reg__" CR_TAB
3671 "st X+,__zero_reg__" CR_TAB
3672 "st X+,r28" CR_TAB
3673 "st X,r29" CR_TAB
3674 "clr __zero_reg__" CR_TAB
3675 "sbiw r26,%o0+3");
3677 else if (reg_src == REG_X - 2)
3679 *l = 9;
3680 return ("mov __tmp_reg__,r26" CR_TAB
3681 "mov __zero_reg__,r27" CR_TAB
3682 "adiw r26,%o0" CR_TAB
3683 "st X+,r24" CR_TAB
3684 "st X+,r25" CR_TAB
3685 "st X+,__tmp_reg__" CR_TAB
3686 "st X,__zero_reg__" CR_TAB
3687 "clr __zero_reg__" CR_TAB
3688 "sbiw r26,%o0+3");
3690 *l = 6;
3691 return ("adiw r26,%o0" CR_TAB
3692 "st X+,%A1" CR_TAB
3693 "st X+,%B1" CR_TAB
3694 "st X+,%C1" CR_TAB
3695 "st X,%D1" CR_TAB
3696 "sbiw r26,%o0+3");
3698 return *l=4, ("std %A0,%A1" CR_TAB
3699 "std %B0,%B1" CR_TAB
3700 "std %C0,%C1" CR_TAB
3701 "std %D0,%D1");
3703 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3704 return *l=4, ("st %0,%D1" CR_TAB
3705 "st %0,%C1" CR_TAB
3706 "st %0,%B1" CR_TAB
3707 "st %0,%A1");
3708 else if (GET_CODE (base) == POST_INC) /* (R++) */
3709 return *l=4, ("st %0,%A1" CR_TAB
3710 "st %0,%B1" CR_TAB
3711 "st %0,%C1" CR_TAB
3712 "st %0,%D1");
3713 fatal_insn ("unknown move insn:",insn);
3714 return "";
3717 const char *
3718 output_movsisf (rtx insn, rtx operands[], int *l)
3720 int dummy;
3721 rtx dest = operands[0];
3722 rtx src = operands[1];
3723 int *real_l = l;
3725 if (avr_mem_flash_p (src)
3726 || avr_mem_flash_p (dest))
3728 return avr_out_lpm (insn, operands, real_l);
3731 if (!l)
3732 l = &dummy;
3734 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
3735 if (REG_P (dest))
3737 if (REG_P (src)) /* mov r,r */
3739 if (true_regnum (dest) > true_regnum (src))
3741 if (AVR_HAVE_MOVW)
3743 *l = 2;
3744 return ("movw %C0,%C1" CR_TAB
3745 "movw %A0,%A1");
3747 *l = 4;
3748 return ("mov %D0,%D1" CR_TAB
3749 "mov %C0,%C1" CR_TAB
3750 "mov %B0,%B1" CR_TAB
3751 "mov %A0,%A1");
3753 else
3755 if (AVR_HAVE_MOVW)
3757 *l = 2;
3758 return ("movw %A0,%A1" CR_TAB
3759 "movw %C0,%C1");
3761 *l = 4;
3762 return ("mov %A0,%A1" CR_TAB
3763 "mov %B0,%B1" CR_TAB
3764 "mov %C0,%C1" CR_TAB
3765 "mov %D0,%D1");
3768 else if (CONSTANT_P (src))
3770 return output_reload_insisf (operands, NULL_RTX, real_l);
3772 else if (MEM_P (src))
3773 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3775 else if (MEM_P (dest))
3777 const char *templ;
3779 if (src == CONST0_RTX (GET_MODE (dest)))
3780 operands[1] = zero_reg_rtx;
3782 templ = out_movsi_mr_r (insn, operands, real_l);
3784 if (!real_l)
3785 output_asm_insn (templ, operands);
3787 operands[1] = src;
3788 return "";
3790 fatal_insn ("invalid insn:", insn);
3791 return "";
3795 /* Handle loads of 24-bit types from memory to register. */
3797 static const char*
3798 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3800 rtx dest = op[0];
3801 rtx src = op[1];
3802 rtx base = XEXP (src, 0);
3803 int reg_dest = true_regnum (dest);
3804 int reg_base = true_regnum (base);
3806 if (reg_base > 0)
3808 if (reg_base == REG_X) /* (R26) */
3810 if (reg_dest == REG_X)
3811 /* "ld r26,-X" is undefined */
3812 return avr_asm_len ("adiw r26,2" CR_TAB
3813 "ld r28,X" CR_TAB
3814 "ld __tmp_reg__,-X" CR_TAB
3815 "sbiw r26,1" CR_TAB
3816 "ld r26,X" CR_TAB
3817 "mov r27,__tmp_reg__", op, plen, -6);
3818 else
3820 avr_asm_len ("ld %A0,X+" CR_TAB
3821 "ld %B0,X+" CR_TAB
3822 "ld %C0,X", op, plen, -3);
3824 if (reg_dest != REG_X - 2
3825 && !reg_unused_after (insn, base))
3827 avr_asm_len ("sbiw r26,2", op, plen, 1);
3830 return "";
3833 else /* reg_base != REG_X */
3835 if (reg_dest == reg_base)
3836 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3837 "ldd __tmp_reg__,%1+1" CR_TAB
3838 "ld %A0,%1" CR_TAB
3839 "mov %B0,__tmp_reg__", op, plen, -4);
3840 else
3841 return avr_asm_len ("ld %A0,%1" CR_TAB
3842 "ldd %B0,%1+1" CR_TAB
3843 "ldd %C0,%1+2", op, plen, -3);
3846 else if (GET_CODE (base) == PLUS) /* (R + i) */
3848 int disp = INTVAL (XEXP (base, 1));
3850 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3852 if (REGNO (XEXP (base, 0)) != REG_Y)
3853 fatal_insn ("incorrect insn:",insn);
3855 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3856 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3857 "ldd %A0,Y+61" CR_TAB
3858 "ldd %B0,Y+62" CR_TAB
3859 "ldd %C0,Y+63" CR_TAB
3860 "sbiw r28,%o1-61", op, plen, -5);
3862 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3863 "sbci r29,hi8(-%o1)" CR_TAB
3864 "ld %A0,Y" CR_TAB
3865 "ldd %B0,Y+1" CR_TAB
3866 "ldd %C0,Y+2" CR_TAB
3867 "subi r28,lo8(%o1)" CR_TAB
3868 "sbci r29,hi8(%o1)", op, plen, -7);
3871 reg_base = true_regnum (XEXP (base, 0));
3872 if (reg_base == REG_X)
3874 /* R = (X + d) */
3875 if (reg_dest == REG_X)
3877 /* "ld r26,-X" is undefined */
3878 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3879 "ld r28,X" CR_TAB
3880 "ld __tmp_reg__,-X" CR_TAB
3881 "sbiw r26,1" CR_TAB
3882 "ld r26,X" CR_TAB
3883 "mov r27,__tmp_reg__", op, plen, -6);
3886 avr_asm_len ("adiw r26,%o1" CR_TAB
3887 "ld %A0,X+" CR_TAB
3888 "ld %B0,X+" CR_TAB
3889 "ld %C0,X", op, plen, -4);
3891 if (reg_dest != REG_W
3892 && !reg_unused_after (insn, XEXP (base, 0)))
3893 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3895 return "";
3898 if (reg_dest == reg_base)
3899 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3900 "ldd __tmp_reg__,%B1" CR_TAB
3901 "ldd %A0,%A1" CR_TAB
3902 "mov %B0,__tmp_reg__", op, plen, -4);
3904 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3905 "ldd %B0,%B1" CR_TAB
3906 "ldd %C0,%C1", op, plen, -3);
3908 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3909 return avr_asm_len ("ld %C0,%1" CR_TAB
3910 "ld %B0,%1" CR_TAB
3911 "ld %A0,%1", op, plen, -3);
3912 else if (GET_CODE (base) == POST_INC) /* (R++) */
3913 return avr_asm_len ("ld %A0,%1" CR_TAB
3914 "ld %B0,%1" CR_TAB
3915 "ld %C0,%1", op, plen, -3);
3917 else if (CONSTANT_ADDRESS_P (base))
3918 return avr_asm_len ("lds %A0,%m1" CR_TAB
3919 "lds %B0,%m1+1" CR_TAB
3920 "lds %C0,%m1+2", op, plen , -6);
3922 fatal_insn ("unknown move insn:",insn);
3923 return "";
3926 /* Handle store of 24-bit type from register or zero to memory. */
3928 static const char*
3929 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3931 rtx dest = op[0];
3932 rtx src = op[1];
3933 rtx base = XEXP (dest, 0);
3934 int reg_base = true_regnum (base);
3936 if (CONSTANT_ADDRESS_P (base))
3937 return avr_asm_len ("sts %m0,%A1" CR_TAB
3938 "sts %m0+1,%B1" CR_TAB
3939 "sts %m0+2,%C1", op, plen, -6);
3941 if (reg_base > 0) /* (r) */
3943 if (reg_base == REG_X) /* (R26) */
3945 gcc_assert (!reg_overlap_mentioned_p (base, src));
3947 avr_asm_len ("st %0+,%A1" CR_TAB
3948 "st %0+,%B1" CR_TAB
3949 "st %0,%C1", op, plen, -3);
3951 if (!reg_unused_after (insn, base))
3952 avr_asm_len ("sbiw r26,2", op, plen, 1);
3954 return "";
3956 else
3957 return avr_asm_len ("st %0,%A1" CR_TAB
3958 "std %0+1,%B1" CR_TAB
3959 "std %0+2,%C1", op, plen, -3);
3961 else if (GET_CODE (base) == PLUS) /* (R + i) */
3963 int disp = INTVAL (XEXP (base, 1));
3964 reg_base = REGNO (XEXP (base, 0));
3966 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3968 if (reg_base != REG_Y)
3969 fatal_insn ("incorrect insn:",insn);
3971 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3972 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3973 "std Y+61,%A1" CR_TAB
3974 "std Y+62,%B1" CR_TAB
3975 "std Y+63,%C1" CR_TAB
3976 "sbiw r28,%o0-60", op, plen, -5);
3978 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3979 "sbci r29,hi8(-%o0)" CR_TAB
3980 "st Y,%A1" CR_TAB
3981 "std Y+1,%B1" CR_TAB
3982 "std Y+2,%C1" CR_TAB
3983 "subi r28,lo8(%o0)" CR_TAB
3984 "sbci r29,hi8(%o0)", op, plen, -7);
3986 if (reg_base == REG_X)
3988 /* (X + d) = R */
3989 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3991 avr_asm_len ("adiw r26,%o0" CR_TAB
3992 "st X+,%A1" CR_TAB
3993 "st X+,%B1" CR_TAB
3994 "st X,%C1", op, plen, -4);
3996 if (!reg_unused_after (insn, XEXP (base, 0)))
3997 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3999 return "";
4002 return avr_asm_len ("std %A0,%A1" CR_TAB
4003 "std %B0,%B1" CR_TAB
4004 "std %C0,%C1", op, plen, -3);
4006 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4007 return avr_asm_len ("st %0,%C1" CR_TAB
4008 "st %0,%B1" CR_TAB
4009 "st %0,%A1", op, plen, -3);
4010 else if (GET_CODE (base) == POST_INC) /* (R++) */
4011 return avr_asm_len ("st %0,%A1" CR_TAB
4012 "st %0,%B1" CR_TAB
4013 "st %0,%C1", op, plen, -3);
4015 fatal_insn ("unknown move insn:",insn);
4016 return "";
4020 /* Move around 24-bit stuff. */
4022 const char *
4023 avr_out_movpsi (rtx insn, rtx *op, int *plen)
4025 rtx dest = op[0];
4026 rtx src = op[1];
4028 if (avr_mem_flash_p (src)
4029 || avr_mem_flash_p (dest))
4031 return avr_out_lpm (insn, op, plen);
4034 if (register_operand (dest, VOIDmode))
4036 if (register_operand (src, VOIDmode)) /* mov r,r */
4038 if (true_regnum (dest) > true_regnum (src))
4040 avr_asm_len ("mov %C0,%C1", op, plen, -1);
4042 if (AVR_HAVE_MOVW)
4043 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4044 else
4045 return avr_asm_len ("mov %B0,%B1" CR_TAB
4046 "mov %A0,%A1", op, plen, 2);
4048 else
4050 if (AVR_HAVE_MOVW)
4051 avr_asm_len ("movw %A0,%A1", op, plen, -1);
4052 else
4053 avr_asm_len ("mov %A0,%A1" CR_TAB
4054 "mov %B0,%B1", op, plen, -2);
4056 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4059 else if (CONSTANT_P (src))
4061 return avr_out_reload_inpsi (op, NULL_RTX, plen);
4063 else if (MEM_P (src))
4064 return avr_out_load_psi (insn, op, plen); /* mov r,m */
4066 else if (MEM_P (dest))
4068 rtx xop[2];
4070 xop[0] = dest;
4071 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4073 return avr_out_store_psi (insn, xop, plen);
4076 fatal_insn ("invalid insn:", insn);
4077 return "";
4081 static const char*
4082 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
4084 rtx dest = op[0];
4085 rtx src = op[1];
4086 rtx x = XEXP (dest, 0);
4088 if (CONSTANT_ADDRESS_P (x))
4090 return optimize > 0 && io_address_operand (x, QImode)
4091 ? avr_asm_len ("out %i0,%1", op, plen, -1)
4092 : avr_asm_len ("sts %m0,%1", op, plen, -2);
4094 else if (GET_CODE (x) == PLUS
4095 && REG_P (XEXP (x, 0))
4096 && CONST_INT_P (XEXP (x, 1)))
4098 /* memory access by reg+disp */
4100 int disp = INTVAL (XEXP (x, 1));
4102 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
4104 if (REGNO (XEXP (x, 0)) != REG_Y)
4105 fatal_insn ("incorrect insn:",insn);
4107 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4108 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4109 "std Y+63,%1" CR_TAB
4110 "sbiw r28,%o0-63", op, plen, -3);
4112 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4113 "sbci r29,hi8(-%o0)" CR_TAB
4114 "st Y,%1" CR_TAB
4115 "subi r28,lo8(%o0)" CR_TAB
4116 "sbci r29,hi8(%o0)", op, plen, -5);
4118 else if (REGNO (XEXP (x,0)) == REG_X)
4120 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4122 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4123 "adiw r26,%o0" CR_TAB
4124 "st X,__tmp_reg__", op, plen, -3);
4126 else
4128 avr_asm_len ("adiw r26,%o0" CR_TAB
4129 "st X,%1", op, plen, -2);
4132 if (!reg_unused_after (insn, XEXP (x,0)))
4133 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
4135 return "";
4138 return avr_asm_len ("std %0,%1", op, plen, -1);
4141 return avr_asm_len ("st %0,%1", op, plen, -1);
4145 /* Helper for the next function for XMEGA. It does the same
4146 but with low byte first. */
4148 static const char*
4149 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
4151 rtx dest = op[0];
4152 rtx src = op[1];
4153 rtx base = XEXP (dest, 0);
4154 int reg_base = true_regnum (base);
4155 int reg_src = true_regnum (src);
4157 /* "volatile" forces writing low byte first, even if less efficient,
4158 for correct operation with 16-bit I/O registers like SP. */
4159 int mem_volatile_p = MEM_VOLATILE_P (dest);
4161 if (CONSTANT_ADDRESS_P (base))
4162 return optimize > 0 && io_address_operand (base, HImode)
4163 ? avr_asm_len ("out %i0,%A1" CR_TAB
4164 "out %i0+1,%B1", op, plen, -2)
4166 : avr_asm_len ("sts %m0,%A1" CR_TAB
4167 "sts %m0+1,%B1", op, plen, -4);
4169 if (reg_base > 0)
4171 if (reg_base != REG_X)
4172 return avr_asm_len ("st %0,%A1" CR_TAB
4173 "std %0+1,%B1", op, plen, -2);
4175 if (reg_src == REG_X)
4176 /* "st X+,r26" and "st -X,r26" are undefined. */
4177 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4178 "st X,r26" CR_TAB
4179 "adiw r26,1" CR_TAB
4180 "st X,__tmp_reg__", op, plen, -4);
4181 else
4182 avr_asm_len ("st X+,%A1" CR_TAB
4183 "st X,%B1", op, plen, -2);
4185 return reg_unused_after (insn, base)
4186 ? ""
4187 : avr_asm_len ("sbiw r26,1", op, plen, 1);
4189 else if (GET_CODE (base) == PLUS)
4191 int disp = INTVAL (XEXP (base, 1));
4192 reg_base = REGNO (XEXP (base, 0));
4193 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4195 if (reg_base != REG_Y)
4196 fatal_insn ("incorrect insn:",insn);
4198 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4199 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4200 "std Y+62,%A1" CR_TAB
4201 "std Y+63,%B1" CR_TAB
4202 "sbiw r28,%o0-62", op, plen, -4)
4204 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4205 "sbci r29,hi8(-%o0)" CR_TAB
4206 "st Y,%A1" CR_TAB
4207 "std Y+1,%B1" CR_TAB
4208 "subi r28,lo8(%o0)" CR_TAB
4209 "sbci r29,hi8(%o0)", op, plen, -6);
4212 if (reg_base != REG_X)
4213 return avr_asm_len ("std %A0,%A1" CR_TAB
4214 "std %B0,%B1", op, plen, -2);
4215 /* (X + d) = R */
4216 return reg_src == REG_X
4217 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4218 "mov __zero_reg__,r27" CR_TAB
4219 "adiw r26,%o0" CR_TAB
4220 "st X+,__tmp_reg__" CR_TAB
4221 "st X,__zero_reg__" CR_TAB
4222 "clr __zero_reg__" CR_TAB
4223 "sbiw r26,%o0+1", op, plen, -7)
4225 : avr_asm_len ("adiw r26,%o0" CR_TAB
4226 "st X+,%A1" CR_TAB
4227 "st X,%B1" CR_TAB
4228 "sbiw r26,%o0+1", op, plen, -4);
4230 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4232 if (!mem_volatile_p)
4233 return avr_asm_len ("st %0,%B1" CR_TAB
4234 "st %0,%A1", op, plen, -2);
4236 return REGNO (XEXP (base, 0)) == REG_X
4237 ? avr_asm_len ("sbiw r26,2" CR_TAB
4238 "st X+,%A1" CR_TAB
4239 "st X,%B1" CR_TAB
4240 "sbiw r26,1", op, plen, -4)
4242 : avr_asm_len ("sbiw %r0,2" CR_TAB
4243 "st %p0,%A1" CR_TAB
4244 "std %p0+1,%B1", op, plen, -3);
4246 else if (GET_CODE (base) == POST_INC) /* (R++) */
4248 return avr_asm_len ("st %0,%A1" CR_TAB
4249 "st %0,%B1", op, plen, -2);
4252 fatal_insn ("unknown move insn:",insn);
4253 return "";
4257 static const char*
4258 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
4260 rtx dest = op[0];
4261 rtx src = op[1];
4262 rtx base = XEXP (dest, 0);
4263 int reg_base = true_regnum (base);
4264 int reg_src = true_regnum (src);
4265 int mem_volatile_p;
4267 /* "volatile" forces writing high-byte first (no-xmega) resp.
4268 low-byte first (xmega) even if less efficient, for correct
4269 operation with 16-bit I/O registers like. */
4271 if (AVR_XMEGA)
4272 return avr_out_movhi_mr_r_xmega (insn, op, plen);
4274 mem_volatile_p = MEM_VOLATILE_P (dest);
4276 if (CONSTANT_ADDRESS_P (base))
4277 return optimize > 0 && io_address_operand (base, HImode)
4278 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4279 "out %i0,%A1", op, plen, -2)
4281 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4282 "sts %m0,%A1", op, plen, -4);
4284 if (reg_base > 0)
4286 if (reg_base != REG_X)
4287 return avr_asm_len ("std %0+1,%B1" CR_TAB
4288 "st %0,%A1", op, plen, -2);
4290 if (reg_src == REG_X)
4291 /* "st X+,r26" and "st -X,r26" are undefined. */
4292 return !mem_volatile_p && reg_unused_after (insn, src)
4293 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4294 "st X,r26" CR_TAB
4295 "adiw r26,1" CR_TAB
4296 "st X,__tmp_reg__", op, plen, -4)
4298 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4299 "adiw r26,1" CR_TAB
4300 "st X,__tmp_reg__" CR_TAB
4301 "sbiw r26,1" CR_TAB
4302 "st X,r26", op, plen, -5);
4304 return !mem_volatile_p && reg_unused_after (insn, base)
4305 ? avr_asm_len ("st X+,%A1" CR_TAB
4306 "st X,%B1", op, plen, -2)
4307 : avr_asm_len ("adiw r26,1" CR_TAB
4308 "st X,%B1" CR_TAB
4309 "st -X,%A1", op, plen, -3);
4311 else if (GET_CODE (base) == PLUS)
4313 int disp = INTVAL (XEXP (base, 1));
4314 reg_base = REGNO (XEXP (base, 0));
4315 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4317 if (reg_base != REG_Y)
4318 fatal_insn ("incorrect insn:",insn);
4320 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4321 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4322 "std Y+63,%B1" CR_TAB
4323 "std Y+62,%A1" CR_TAB
4324 "sbiw r28,%o0-62", op, plen, -4)
4326 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4327 "sbci r29,hi8(-%o0)" CR_TAB
4328 "std Y+1,%B1" CR_TAB
4329 "st Y,%A1" CR_TAB
4330 "subi r28,lo8(%o0)" CR_TAB
4331 "sbci r29,hi8(%o0)", op, plen, -6);
4334 if (reg_base != REG_X)
4335 return avr_asm_len ("std %B0,%B1" CR_TAB
4336 "std %A0,%A1", op, plen, -2);
4337 /* (X + d) = R */
4338 return reg_src == REG_X
4339 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4340 "mov __zero_reg__,r27" CR_TAB
4341 "adiw r26,%o0+1" CR_TAB
4342 "st X,__zero_reg__" CR_TAB
4343 "st -X,__tmp_reg__" CR_TAB
4344 "clr __zero_reg__" CR_TAB
4345 "sbiw r26,%o0", op, plen, -7)
4347 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4348 "st X,%B1" CR_TAB
4349 "st -X,%A1" CR_TAB
4350 "sbiw r26,%o0", op, plen, -4);
4352 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4354 return avr_asm_len ("st %0,%B1" CR_TAB
4355 "st %0,%A1", op, plen, -2);
4357 else if (GET_CODE (base) == POST_INC) /* (R++) */
4359 if (!mem_volatile_p)
4360 return avr_asm_len ("st %0,%A1" CR_TAB
4361 "st %0,%B1", op, plen, -2);
4363 return REGNO (XEXP (base, 0)) == REG_X
4364 ? avr_asm_len ("adiw r26,1" CR_TAB
4365 "st X,%B1" CR_TAB
4366 "st -X,%A1" CR_TAB
4367 "adiw r26,2", op, plen, -4)
4369 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4370 "st %p0,%A1" CR_TAB
4371 "adiw %r0,2", op, plen, -3);
4373 fatal_insn ("unknown move insn:",insn);
4374 return "";
4377 /* Return 1 if frame pointer for current function required. */
4379 static bool
4380 avr_frame_pointer_required_p (void)
4382 return (cfun->calls_alloca
4383 || cfun->calls_setjmp
4384 || cfun->has_nonlocal_label
4385 || crtl->args.info.nregs == 0
4386 || get_frame_size () > 0);
4389 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4391 static RTX_CODE
4392 compare_condition (rtx insn)
4394 rtx next = next_real_insn (insn);
4396 if (next && JUMP_P (next))
4398 rtx pat = PATTERN (next);
4399 rtx src = SET_SRC (pat);
4401 if (IF_THEN_ELSE == GET_CODE (src))
4402 return GET_CODE (XEXP (src, 0));
4405 return UNKNOWN;
4409 /* Returns true iff INSN is a tst insn that only tests the sign. */
4411 static bool
4412 compare_sign_p (rtx insn)
4414 RTX_CODE cond = compare_condition (insn);
4415 return (cond == GE || cond == LT);
4419 /* Returns true iff the next insn is a JUMP_INSN with a condition
4420 that needs to be swapped (GT, GTU, LE, LEU). */
4422 static bool
4423 compare_diff_p (rtx insn)
4425 RTX_CODE cond = compare_condition (insn);
4426 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4429 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4431 static bool
4432 compare_eq_p (rtx insn)
4434 RTX_CODE cond = compare_condition (insn);
4435 return (cond == EQ || cond == NE);
4439 /* Output compare instruction
4441 compare (XOP[0], XOP[1])
4443 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
4444 XOP[2] is an 8-bit scratch register as needed.
4446 PLEN == NULL: Output instructions.
4447 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4448 Don't output anything. */
4450 const char*
4451 avr_out_compare (rtx insn, rtx *xop, int *plen)
4453 /* Register to compare and value to compare against. */
4454 rtx xreg = xop[0];
4455 rtx xval = xop[1];
4457 /* MODE of the comparison. */
4458 enum machine_mode mode;
4460 /* Number of bytes to operate on. */
4461 int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
4463 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4464 int clobber_val = -1;
4466 /* Map fixed mode operands to integer operands with the same binary
4467 representation. They are easier to handle in the remainder. */
4469 if (CONST_FIXED_P (xval))
4471 xreg = avr_to_int_mode (xop[0]);
4472 xval = avr_to_int_mode (xop[1]);
4475 mode = GET_MODE (xreg);
4477 gcc_assert (REG_P (xreg));
4478 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4479 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4481 if (plen)
4482 *plen = 0;
4484 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4485 against 0 by ORing the bytes. This is one instruction shorter.
4486 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4487 and therefore don't use this. */
4489 if (!test_hard_reg_class (LD_REGS, xreg)
4490 && compare_eq_p (insn)
4491 && reg_unused_after (insn, xreg))
4493 if (xval == const1_rtx)
4495 avr_asm_len ("dec %A0" CR_TAB
4496 "or %A0,%B0", xop, plen, 2);
4498 if (n_bytes >= 3)
4499 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4501 if (n_bytes >= 4)
4502 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4504 return "";
4506 else if (xval == constm1_rtx)
4508 if (n_bytes >= 4)
4509 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4511 if (n_bytes >= 3)
4512 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4514 return avr_asm_len ("and %A0,%B0" CR_TAB
4515 "com %A0", xop, plen, 2);
4519 for (i = 0; i < n_bytes; i++)
4521 /* We compare byte-wise. */
4522 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4523 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4525 /* 8-bit value to compare with this byte. */
4526 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4528 /* Registers R16..R31 can operate with immediate. */
4529 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4531 xop[0] = reg8;
4532 xop[1] = gen_int_mode (val8, QImode);
4534 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4536 if (i == 0
4537 && test_hard_reg_class (ADDW_REGS, reg8))
4539 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4541 if (IN_RANGE (val16, 0, 63)
4542 && (val8 == 0
4543 || reg_unused_after (insn, xreg)))
4545 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4546 i++;
4547 continue;
4550 if (n_bytes == 2
4551 && IN_RANGE (val16, -63, -1)
4552 && compare_eq_p (insn)
4553 && reg_unused_after (insn, xreg))
4555 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4559 /* Comparing against 0 is easy. */
4561 if (val8 == 0)
4563 avr_asm_len (i == 0
4564 ? "cp %0,__zero_reg__"
4565 : "cpc %0,__zero_reg__", xop, plen, 1);
4566 continue;
4569 /* Upper registers can compare and subtract-with-carry immediates.
4570 Notice that compare instructions do the same as respective subtract
4571 instruction; the only difference is that comparisons don't write
4572 the result back to the target register. */
4574 if (ld_reg_p)
4576 if (i == 0)
4578 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4579 continue;
4581 else if (reg_unused_after (insn, xreg))
4583 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4584 continue;
4588 /* Must load the value into the scratch register. */
4590 gcc_assert (REG_P (xop[2]));
4592 if (clobber_val != (int) val8)
4593 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4594 clobber_val = (int) val8;
4596 avr_asm_len (i == 0
4597 ? "cp %0,%2"
4598 : "cpc %0,%2", xop, plen, 1);
4601 return "";
4605 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4607 const char*
4608 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4610 rtx xop[3];
4612 xop[0] = gen_rtx_REG (DImode, 18);
4613 xop[1] = op[0];
4614 xop[2] = op[1];
4616 return avr_out_compare (insn, xop, plen);
4619 /* Output test instruction for HImode. */
4621 const char*
4622 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4624 if (compare_sign_p (insn))
4626 avr_asm_len ("tst %B0", op, plen, -1);
4628 else if (reg_unused_after (insn, op[0])
4629 && compare_eq_p (insn))
4631 /* Faster than sbiw if we can clobber the operand. */
4632 avr_asm_len ("or %A0,%B0", op, plen, -1);
4634 else
4636 avr_out_compare (insn, op, plen);
4639 return "";
4643 /* Output test instruction for PSImode. */
4645 const char*
4646 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4648 if (compare_sign_p (insn))
4650 avr_asm_len ("tst %C0", op, plen, -1);
4652 else if (reg_unused_after (insn, op[0])
4653 && compare_eq_p (insn))
4655 /* Faster than sbiw if we can clobber the operand. */
4656 avr_asm_len ("or %A0,%B0" CR_TAB
4657 "or %A0,%C0", op, plen, -2);
4659 else
4661 avr_out_compare (insn, op, plen);
4664 return "";
4668 /* Output test instruction for SImode. */
4670 const char*
4671 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4673 if (compare_sign_p (insn))
4675 avr_asm_len ("tst %D0", op, plen, -1);
4677 else if (reg_unused_after (insn, op[0])
4678 && compare_eq_p (insn))
4680 /* Faster than sbiw if we can clobber the operand. */
4681 avr_asm_len ("or %A0,%B0" CR_TAB
4682 "or %A0,%C0" CR_TAB
4683 "or %A0,%D0", op, plen, -3);
4685 else
4687 avr_out_compare (insn, op, plen);
4690 return "";
4694 /* Generate asm equivalent for various shifts. This only handles cases
4695 that are not already carefully hand-optimized in ?sh??i3_out.
4697 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4698 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4699 OPERANDS[3] is a QImode scratch register from LD regs if
4700 available and SCRATCH, otherwise (no scratch available)
4702 TEMPL is an assembler template that shifts by one position.
4703 T_LEN is the length of this template. */
4705 void
4706 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4707 int *plen, int t_len)
4709 bool second_label = true;
4710 bool saved_in_tmp = false;
4711 bool use_zero_reg = false;
4712 rtx op[5];
4714 op[0] = operands[0];
4715 op[1] = operands[1];
4716 op[2] = operands[2];
4717 op[3] = operands[3];
4719 if (plen)
4720 *plen = 0;
4722 if (CONST_INT_P (operands[2]))
4724 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4725 && REG_P (operands[3]));
4726 int count = INTVAL (operands[2]);
4727 int max_len = 10; /* If larger than this, always use a loop. */
4729 if (count <= 0)
4730 return;
4732 if (count < 8 && !scratch)
4733 use_zero_reg = true;
4735 if (optimize_size)
4736 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4738 if (t_len * count <= max_len)
4740 /* Output shifts inline with no loop - faster. */
4742 while (count-- > 0)
4743 avr_asm_len (templ, op, plen, t_len);
4745 return;
4748 if (scratch)
4750 avr_asm_len ("ldi %3,%2", op, plen, 1);
4752 else if (use_zero_reg)
4754 /* Hack to save one word: use __zero_reg__ as loop counter.
4755 Set one bit, then shift in a loop until it is 0 again. */
4757 op[3] = zero_reg_rtx;
4759 avr_asm_len ("set" CR_TAB
4760 "bld %3,%2-1", op, plen, 2);
4762 else
4764 /* No scratch register available, use one from LD_REGS (saved in
4765 __tmp_reg__) that doesn't overlap with registers to shift. */
4767 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4768 op[4] = tmp_reg_rtx;
4769 saved_in_tmp = true;
4771 avr_asm_len ("mov %4,%3" CR_TAB
4772 "ldi %3,%2", op, plen, 2);
4775 second_label = false;
4777 else if (MEM_P (op[2]))
4779 rtx op_mov[2];
4781 op_mov[0] = op[3] = tmp_reg_rtx;
4782 op_mov[1] = op[2];
4784 out_movqi_r_mr (insn, op_mov, plen);
4786 else if (register_operand (op[2], QImode))
4788 op[3] = op[2];
4790 if (!reg_unused_after (insn, op[2])
4791 || reg_overlap_mentioned_p (op[0], op[2]))
4793 op[3] = tmp_reg_rtx;
4794 avr_asm_len ("mov %3,%2", op, plen, 1);
4797 else
4798 fatal_insn ("bad shift insn:", insn);
4800 if (second_label)
4801 avr_asm_len ("rjmp 2f", op, plen, 1);
4803 avr_asm_len ("1:", op, plen, 0);
4804 avr_asm_len (templ, op, plen, t_len);
4806 if (second_label)
4807 avr_asm_len ("2:", op, plen, 0);
4809 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4810 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4812 if (saved_in_tmp)
4813 avr_asm_len ("mov %3,%4", op, plen, 1);
4817 /* 8bit shift left ((char)x << i) */
4819 const char *
4820 ashlqi3_out (rtx insn, rtx operands[], int *len)
4822 if (GET_CODE (operands[2]) == CONST_INT)
4824 int k;
4826 if (!len)
4827 len = &k;
4829 switch (INTVAL (operands[2]))
4831 default:
4832 if (INTVAL (operands[2]) < 8)
4833 break;
4835 *len = 1;
4836 return "clr %0";
4838 case 1:
4839 *len = 1;
4840 return "lsl %0";
4842 case 2:
4843 *len = 2;
4844 return ("lsl %0" CR_TAB
4845 "lsl %0");
4847 case 3:
4848 *len = 3;
4849 return ("lsl %0" CR_TAB
4850 "lsl %0" CR_TAB
4851 "lsl %0");
4853 case 4:
4854 if (test_hard_reg_class (LD_REGS, operands[0]))
4856 *len = 2;
4857 return ("swap %0" CR_TAB
4858 "andi %0,0xf0");
4860 *len = 4;
4861 return ("lsl %0" CR_TAB
4862 "lsl %0" CR_TAB
4863 "lsl %0" CR_TAB
4864 "lsl %0");
4866 case 5:
4867 if (test_hard_reg_class (LD_REGS, operands[0]))
4869 *len = 3;
4870 return ("swap %0" CR_TAB
4871 "lsl %0" CR_TAB
4872 "andi %0,0xe0");
4874 *len = 5;
4875 return ("lsl %0" CR_TAB
4876 "lsl %0" CR_TAB
4877 "lsl %0" CR_TAB
4878 "lsl %0" CR_TAB
4879 "lsl %0");
4881 case 6:
4882 if (test_hard_reg_class (LD_REGS, operands[0]))
4884 *len = 4;
4885 return ("swap %0" CR_TAB
4886 "lsl %0" CR_TAB
4887 "lsl %0" CR_TAB
4888 "andi %0,0xc0");
4890 *len = 6;
4891 return ("lsl %0" CR_TAB
4892 "lsl %0" CR_TAB
4893 "lsl %0" CR_TAB
4894 "lsl %0" CR_TAB
4895 "lsl %0" CR_TAB
4896 "lsl %0");
4898 case 7:
4899 *len = 3;
4900 return ("ror %0" CR_TAB
4901 "clr %0" CR_TAB
4902 "ror %0");
4905 else if (CONSTANT_P (operands[2]))
4906 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4908 out_shift_with_cnt ("lsl %0",
4909 insn, operands, len, 1);
4910 return "";
4914 /* 16bit shift left ((short)x << i) */
4916 const char *
4917 ashlhi3_out (rtx insn, rtx operands[], int *len)
4919 if (GET_CODE (operands[2]) == CONST_INT)
4921 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4922 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4923 int k;
4924 int *t = len;
4926 if (!len)
4927 len = &k;
4929 switch (INTVAL (operands[2]))
4931 default:
4932 if (INTVAL (operands[2]) < 16)
4933 break;
4935 *len = 2;
4936 return ("clr %B0" CR_TAB
4937 "clr %A0");
4939 case 4:
4940 if (optimize_size && scratch)
4941 break; /* 5 */
4942 if (ldi_ok)
4944 *len = 6;
4945 return ("swap %A0" CR_TAB
4946 "swap %B0" CR_TAB
4947 "andi %B0,0xf0" CR_TAB
4948 "eor %B0,%A0" CR_TAB
4949 "andi %A0,0xf0" CR_TAB
4950 "eor %B0,%A0");
4952 if (scratch)
4954 *len = 7;
4955 return ("swap %A0" CR_TAB
4956 "swap %B0" CR_TAB
4957 "ldi %3,0xf0" CR_TAB
4958 "and %B0,%3" CR_TAB
4959 "eor %B0,%A0" CR_TAB
4960 "and %A0,%3" CR_TAB
4961 "eor %B0,%A0");
4963 break; /* optimize_size ? 6 : 8 */
4965 case 5:
4966 if (optimize_size)
4967 break; /* scratch ? 5 : 6 */
4968 if (ldi_ok)
4970 *len = 8;
4971 return ("lsl %A0" CR_TAB
4972 "rol %B0" CR_TAB
4973 "swap %A0" CR_TAB
4974 "swap %B0" CR_TAB
4975 "andi %B0,0xf0" CR_TAB
4976 "eor %B0,%A0" CR_TAB
4977 "andi %A0,0xf0" CR_TAB
4978 "eor %B0,%A0");
4980 if (scratch)
4982 *len = 9;
4983 return ("lsl %A0" CR_TAB
4984 "rol %B0" CR_TAB
4985 "swap %A0" CR_TAB
4986 "swap %B0" CR_TAB
4987 "ldi %3,0xf0" CR_TAB
4988 "and %B0,%3" CR_TAB
4989 "eor %B0,%A0" CR_TAB
4990 "and %A0,%3" CR_TAB
4991 "eor %B0,%A0");
4993 break; /* 10 */
4995 case 6:
4996 if (optimize_size)
4997 break; /* scratch ? 5 : 6 */
4998 *len = 9;
4999 return ("clr __tmp_reg__" CR_TAB
5000 "lsr %B0" CR_TAB
5001 "ror %A0" CR_TAB
5002 "ror __tmp_reg__" CR_TAB
5003 "lsr %B0" CR_TAB
5004 "ror %A0" CR_TAB
5005 "ror __tmp_reg__" CR_TAB
5006 "mov %B0,%A0" CR_TAB
5007 "mov %A0,__tmp_reg__");
5009 case 7:
5010 *len = 5;
5011 return ("lsr %B0" CR_TAB
5012 "mov %B0,%A0" CR_TAB
5013 "clr %A0" CR_TAB
5014 "ror %B0" CR_TAB
5015 "ror %A0");
5017 case 8:
5018 return *len = 2, ("mov %B0,%A1" CR_TAB
5019 "clr %A0");
5021 case 9:
5022 *len = 3;
5023 return ("mov %B0,%A0" CR_TAB
5024 "clr %A0" CR_TAB
5025 "lsl %B0");
5027 case 10:
5028 *len = 4;
5029 return ("mov %B0,%A0" CR_TAB
5030 "clr %A0" CR_TAB
5031 "lsl %B0" CR_TAB
5032 "lsl %B0");
5034 case 11:
5035 *len = 5;
5036 return ("mov %B0,%A0" CR_TAB
5037 "clr %A0" CR_TAB
5038 "lsl %B0" CR_TAB
5039 "lsl %B0" CR_TAB
5040 "lsl %B0");
5042 case 12:
5043 if (ldi_ok)
5045 *len = 4;
5046 return ("mov %B0,%A0" CR_TAB
5047 "clr %A0" CR_TAB
5048 "swap %B0" CR_TAB
5049 "andi %B0,0xf0");
5051 if (scratch)
5053 *len = 5;
5054 return ("mov %B0,%A0" CR_TAB
5055 "clr %A0" CR_TAB
5056 "swap %B0" CR_TAB
5057 "ldi %3,0xf0" CR_TAB
5058 "and %B0,%3");
5060 *len = 6;
5061 return ("mov %B0,%A0" CR_TAB
5062 "clr %A0" CR_TAB
5063 "lsl %B0" CR_TAB
5064 "lsl %B0" CR_TAB
5065 "lsl %B0" CR_TAB
5066 "lsl %B0");
5068 case 13:
5069 if (ldi_ok)
5071 *len = 5;
5072 return ("mov %B0,%A0" CR_TAB
5073 "clr %A0" CR_TAB
5074 "swap %B0" CR_TAB
5075 "lsl %B0" CR_TAB
5076 "andi %B0,0xe0");
5078 if (AVR_HAVE_MUL && scratch)
5080 *len = 5;
5081 return ("ldi %3,0x20" CR_TAB
5082 "mul %A0,%3" CR_TAB
5083 "mov %B0,r0" CR_TAB
5084 "clr %A0" CR_TAB
5085 "clr __zero_reg__");
5087 if (optimize_size && scratch)
5088 break; /* 5 */
5089 if (scratch)
5091 *len = 6;
5092 return ("mov %B0,%A0" CR_TAB
5093 "clr %A0" CR_TAB
5094 "swap %B0" CR_TAB
5095 "lsl %B0" CR_TAB
5096 "ldi %3,0xe0" CR_TAB
5097 "and %B0,%3");
5099 if (AVR_HAVE_MUL)
5101 *len = 6;
5102 return ("set" CR_TAB
5103 "bld r1,5" CR_TAB
5104 "mul %A0,r1" CR_TAB
5105 "mov %B0,r0" CR_TAB
5106 "clr %A0" CR_TAB
5107 "clr __zero_reg__");
5109 *len = 7;
5110 return ("mov %B0,%A0" CR_TAB
5111 "clr %A0" CR_TAB
5112 "lsl %B0" CR_TAB
5113 "lsl %B0" CR_TAB
5114 "lsl %B0" CR_TAB
5115 "lsl %B0" CR_TAB
5116 "lsl %B0");
5118 case 14:
5119 if (AVR_HAVE_MUL && ldi_ok)
5121 *len = 5;
5122 return ("ldi %B0,0x40" CR_TAB
5123 "mul %A0,%B0" CR_TAB
5124 "mov %B0,r0" CR_TAB
5125 "clr %A0" CR_TAB
5126 "clr __zero_reg__");
5128 if (AVR_HAVE_MUL && scratch)
5130 *len = 5;
5131 return ("ldi %3,0x40" CR_TAB
5132 "mul %A0,%3" CR_TAB
5133 "mov %B0,r0" CR_TAB
5134 "clr %A0" CR_TAB
5135 "clr __zero_reg__");
5137 if (optimize_size && ldi_ok)
5139 *len = 5;
5140 return ("mov %B0,%A0" CR_TAB
5141 "ldi %A0,6" "\n1:\t"
5142 "lsl %B0" CR_TAB
5143 "dec %A0" CR_TAB
5144 "brne 1b");
5146 if (optimize_size && scratch)
5147 break; /* 5 */
5148 *len = 6;
5149 return ("clr %B0" CR_TAB
5150 "lsr %A0" CR_TAB
5151 "ror %B0" CR_TAB
5152 "lsr %A0" CR_TAB
5153 "ror %B0" CR_TAB
5154 "clr %A0");
5156 case 15:
5157 *len = 4;
5158 return ("clr %B0" CR_TAB
5159 "lsr %A0" CR_TAB
5160 "ror %B0" CR_TAB
5161 "clr %A0");
5163 len = t;
5165 out_shift_with_cnt ("lsl %A0" CR_TAB
5166 "rol %B0", insn, operands, len, 2);
5167 return "";
5171 /* 24-bit shift left */
5173 const char*
5174 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
5176 if (plen)
5177 *plen = 0;
5179 if (CONST_INT_P (op[2]))
5181 switch (INTVAL (op[2]))
5183 default:
5184 if (INTVAL (op[2]) < 24)
5185 break;
5187 return avr_asm_len ("clr %A0" CR_TAB
5188 "clr %B0" CR_TAB
5189 "clr %C0", op, plen, 3);
5191 case 8:
5193 int reg0 = REGNO (op[0]);
5194 int reg1 = REGNO (op[1]);
5196 if (reg0 >= reg1)
5197 return avr_asm_len ("mov %C0,%B1" CR_TAB
5198 "mov %B0,%A1" CR_TAB
5199 "clr %A0", op, plen, 3);
5200 else
5201 return avr_asm_len ("clr %A0" CR_TAB
5202 "mov %B0,%A1" CR_TAB
5203 "mov %C0,%B1", op, plen, 3);
5206 case 16:
5208 int reg0 = REGNO (op[0]);
5209 int reg1 = REGNO (op[1]);
5211 if (reg0 + 2 != reg1)
5212 avr_asm_len ("mov %C0,%A0", op, plen, 1);
5214 return avr_asm_len ("clr %B0" CR_TAB
5215 "clr %A0", op, plen, 2);
5218 case 23:
5219 return avr_asm_len ("clr %C0" CR_TAB
5220 "lsr %A0" CR_TAB
5221 "ror %C0" CR_TAB
5222 "clr %B0" CR_TAB
5223 "clr %A0", op, plen, 5);
5227 out_shift_with_cnt ("lsl %A0" CR_TAB
5228 "rol %B0" CR_TAB
5229 "rol %C0", insn, op, plen, 3);
5230 return "";
5234 /* 32bit shift left ((long)x << i) */
5236 const char *
5237 ashlsi3_out (rtx insn, rtx operands[], int *len)
5239 if (GET_CODE (operands[2]) == CONST_INT)
5241 int k;
5242 int *t = len;
5244 if (!len)
5245 len = &k;
5247 switch (INTVAL (operands[2]))
5249 default:
5250 if (INTVAL (operands[2]) < 32)
5251 break;
5253 if (AVR_HAVE_MOVW)
5254 return *len = 3, ("clr %D0" CR_TAB
5255 "clr %C0" CR_TAB
5256 "movw %A0,%C0");
5257 *len = 4;
5258 return ("clr %D0" CR_TAB
5259 "clr %C0" CR_TAB
5260 "clr %B0" CR_TAB
5261 "clr %A0");
5263 case 8:
5265 int reg0 = true_regnum (operands[0]);
5266 int reg1 = true_regnum (operands[1]);
5267 *len = 4;
5268 if (reg0 >= reg1)
5269 return ("mov %D0,%C1" CR_TAB
5270 "mov %C0,%B1" CR_TAB
5271 "mov %B0,%A1" CR_TAB
5272 "clr %A0");
5273 else
5274 return ("clr %A0" CR_TAB
5275 "mov %B0,%A1" CR_TAB
5276 "mov %C0,%B1" CR_TAB
5277 "mov %D0,%C1");
5280 case 16:
5282 int reg0 = true_regnum (operands[0]);
5283 int reg1 = true_regnum (operands[1]);
5284 if (reg0 + 2 == reg1)
5285 return *len = 2, ("clr %B0" CR_TAB
5286 "clr %A0");
5287 if (AVR_HAVE_MOVW)
5288 return *len = 3, ("movw %C0,%A1" CR_TAB
5289 "clr %B0" CR_TAB
5290 "clr %A0");
5291 else
5292 return *len = 4, ("mov %C0,%A1" CR_TAB
5293 "mov %D0,%B1" CR_TAB
5294 "clr %B0" CR_TAB
5295 "clr %A0");
5298 case 24:
5299 *len = 4;
5300 return ("mov %D0,%A1" CR_TAB
5301 "clr %C0" CR_TAB
5302 "clr %B0" CR_TAB
5303 "clr %A0");
5305 case 31:
5306 *len = 6;
5307 return ("clr %D0" CR_TAB
5308 "lsr %A0" CR_TAB
5309 "ror %D0" CR_TAB
5310 "clr %C0" CR_TAB
5311 "clr %B0" CR_TAB
5312 "clr %A0");
5314 len = t;
5316 out_shift_with_cnt ("lsl %A0" CR_TAB
5317 "rol %B0" CR_TAB
5318 "rol %C0" CR_TAB
5319 "rol %D0", insn, operands, len, 4);
5320 return "";
5323 /* 8bit arithmetic shift right ((signed char)x >> i) */
5325 const char *
5326 ashrqi3_out (rtx insn, rtx operands[], int *len)
5328 if (GET_CODE (operands[2]) == CONST_INT)
5330 int k;
5332 if (!len)
5333 len = &k;
5335 switch (INTVAL (operands[2]))
5337 case 1:
5338 *len = 1;
5339 return "asr %0";
5341 case 2:
5342 *len = 2;
5343 return ("asr %0" CR_TAB
5344 "asr %0");
5346 case 3:
5347 *len = 3;
5348 return ("asr %0" CR_TAB
5349 "asr %0" CR_TAB
5350 "asr %0");
5352 case 4:
5353 *len = 4;
5354 return ("asr %0" CR_TAB
5355 "asr %0" CR_TAB
5356 "asr %0" CR_TAB
5357 "asr %0");
5359 case 5:
5360 *len = 5;
5361 return ("asr %0" CR_TAB
5362 "asr %0" CR_TAB
5363 "asr %0" CR_TAB
5364 "asr %0" CR_TAB
5365 "asr %0");
5367 case 6:
5368 *len = 4;
5369 return ("bst %0,6" CR_TAB
5370 "lsl %0" CR_TAB
5371 "sbc %0,%0" CR_TAB
5372 "bld %0,0");
5374 default:
5375 if (INTVAL (operands[2]) < 8)
5376 break;
5378 /* fall through */
5380 case 7:
5381 *len = 2;
5382 return ("lsl %0" CR_TAB
5383 "sbc %0,%0");
5386 else if (CONSTANT_P (operands[2]))
5387 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5389 out_shift_with_cnt ("asr %0",
5390 insn, operands, len, 1);
5391 return "";
5395 /* 16bit arithmetic shift right ((signed short)x >> i) */
5397 const char *
5398 ashrhi3_out (rtx insn, rtx operands[], int *len)
5400 if (GET_CODE (operands[2]) == CONST_INT)
5402 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5403 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5404 int k;
5405 int *t = len;
5407 if (!len)
5408 len = &k;
5410 switch (INTVAL (operands[2]))
5412 case 4:
5413 case 5:
5414 /* XXX try to optimize this too? */
5415 break;
5417 case 6:
5418 if (optimize_size)
5419 break; /* scratch ? 5 : 6 */
5420 *len = 8;
5421 return ("mov __tmp_reg__,%A0" CR_TAB
5422 "mov %A0,%B0" CR_TAB
5423 "lsl __tmp_reg__" CR_TAB
5424 "rol %A0" CR_TAB
5425 "sbc %B0,%B0" CR_TAB
5426 "lsl __tmp_reg__" CR_TAB
5427 "rol %A0" CR_TAB
5428 "rol %B0");
5430 case 7:
5431 *len = 4;
5432 return ("lsl %A0" CR_TAB
5433 "mov %A0,%B0" CR_TAB
5434 "rol %A0" CR_TAB
5435 "sbc %B0,%B0");
5437 case 8:
5439 int reg0 = true_regnum (operands[0]);
5440 int reg1 = true_regnum (operands[1]);
5442 if (reg0 == reg1)
5443 return *len = 3, ("mov %A0,%B0" CR_TAB
5444 "lsl %B0" CR_TAB
5445 "sbc %B0,%B0");
5446 else
5447 return *len = 4, ("mov %A0,%B1" CR_TAB
5448 "clr %B0" CR_TAB
5449 "sbrc %A0,7" CR_TAB
5450 "dec %B0");
5453 case 9:
5454 *len = 4;
5455 return ("mov %A0,%B0" CR_TAB
5456 "lsl %B0" CR_TAB
5457 "sbc %B0,%B0" CR_TAB
5458 "asr %A0");
5460 case 10:
5461 *len = 5;
5462 return ("mov %A0,%B0" CR_TAB
5463 "lsl %B0" CR_TAB
5464 "sbc %B0,%B0" CR_TAB
5465 "asr %A0" CR_TAB
5466 "asr %A0");
5468 case 11:
5469 if (AVR_HAVE_MUL && ldi_ok)
5471 *len = 5;
5472 return ("ldi %A0,0x20" CR_TAB
5473 "muls %B0,%A0" CR_TAB
5474 "mov %A0,r1" CR_TAB
5475 "sbc %B0,%B0" CR_TAB
5476 "clr __zero_reg__");
5478 if (optimize_size && scratch)
5479 break; /* 5 */
5480 *len = 6;
5481 return ("mov %A0,%B0" CR_TAB
5482 "lsl %B0" CR_TAB
5483 "sbc %B0,%B0" CR_TAB
5484 "asr %A0" CR_TAB
5485 "asr %A0" CR_TAB
5486 "asr %A0");
5488 case 12:
5489 if (AVR_HAVE_MUL && ldi_ok)
5491 *len = 5;
5492 return ("ldi %A0,0x10" CR_TAB
5493 "muls %B0,%A0" CR_TAB
5494 "mov %A0,r1" CR_TAB
5495 "sbc %B0,%B0" CR_TAB
5496 "clr __zero_reg__");
5498 if (optimize_size && scratch)
5499 break; /* 5 */
5500 *len = 7;
5501 return ("mov %A0,%B0" CR_TAB
5502 "lsl %B0" CR_TAB
5503 "sbc %B0,%B0" CR_TAB
5504 "asr %A0" CR_TAB
5505 "asr %A0" CR_TAB
5506 "asr %A0" CR_TAB
5507 "asr %A0");
5509 case 13:
5510 if (AVR_HAVE_MUL && ldi_ok)
5512 *len = 5;
5513 return ("ldi %A0,0x08" CR_TAB
5514 "muls %B0,%A0" CR_TAB
5515 "mov %A0,r1" CR_TAB
5516 "sbc %B0,%B0" CR_TAB
5517 "clr __zero_reg__");
5519 if (optimize_size)
5520 break; /* scratch ? 5 : 7 */
5521 *len = 8;
5522 return ("mov %A0,%B0" CR_TAB
5523 "lsl %B0" CR_TAB
5524 "sbc %B0,%B0" CR_TAB
5525 "asr %A0" CR_TAB
5526 "asr %A0" CR_TAB
5527 "asr %A0" CR_TAB
5528 "asr %A0" CR_TAB
5529 "asr %A0");
5531 case 14:
5532 *len = 5;
5533 return ("lsl %B0" CR_TAB
5534 "sbc %A0,%A0" CR_TAB
5535 "lsl %B0" CR_TAB
5536 "mov %B0,%A0" CR_TAB
5537 "rol %A0");
5539 default:
5540 if (INTVAL (operands[2]) < 16)
5541 break;
5543 /* fall through */
5545 case 15:
5546 return *len = 3, ("lsl %B0" CR_TAB
5547 "sbc %A0,%A0" CR_TAB
5548 "mov %B0,%A0");
5550 len = t;
5552 out_shift_with_cnt ("asr %B0" CR_TAB
5553 "ror %A0", insn, operands, len, 2);
5554 return "";
5558 /* 24-bit arithmetic shift right */
5560 const char*
5561 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5563 int dest = REGNO (op[0]);
5564 int src = REGNO (op[1]);
5566 if (CONST_INT_P (op[2]))
5568 if (plen)
5569 *plen = 0;
5571 switch (INTVAL (op[2]))
5573 case 8:
5574 if (dest <= src)
5575 return avr_asm_len ("mov %A0,%B1" CR_TAB
5576 "mov %B0,%C1" CR_TAB
5577 "clr %C0" CR_TAB
5578 "sbrc %B0,7" CR_TAB
5579 "dec %C0", op, plen, 5);
5580 else
5581 return avr_asm_len ("clr %C0" CR_TAB
5582 "sbrc %C1,7" CR_TAB
5583 "dec %C0" CR_TAB
5584 "mov %B0,%C1" CR_TAB
5585 "mov %A0,%B1", op, plen, 5);
5587 case 16:
5588 if (dest != src + 2)
5589 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5591 return avr_asm_len ("clr %B0" CR_TAB
5592 "sbrc %A0,7" CR_TAB
5593 "com %B0" CR_TAB
5594 "mov %C0,%B0", op, plen, 4);
5596 default:
5597 if (INTVAL (op[2]) < 24)
5598 break;
5600 /* fall through */
5602 case 23:
5603 return avr_asm_len ("lsl %C0" CR_TAB
5604 "sbc %A0,%A0" CR_TAB
5605 "mov %B0,%A0" CR_TAB
5606 "mov %C0,%A0", op, plen, 4);
5607 } /* switch */
5610 out_shift_with_cnt ("asr %C0" CR_TAB
5611 "ror %B0" CR_TAB
5612 "ror %A0", insn, op, plen, 3);
5613 return "";
5617 /* 32-bit arithmetic shift right ((signed long)x >> i) */
5619 const char *
5620 ashrsi3_out (rtx insn, rtx operands[], int *len)
5622 if (GET_CODE (operands[2]) == CONST_INT)
5624 int k;
5625 int *t = len;
5627 if (!len)
5628 len = &k;
5630 switch (INTVAL (operands[2]))
5632 case 8:
5634 int reg0 = true_regnum (operands[0]);
5635 int reg1 = true_regnum (operands[1]);
5636 *len=6;
5637 if (reg0 <= reg1)
5638 return ("mov %A0,%B1" CR_TAB
5639 "mov %B0,%C1" CR_TAB
5640 "mov %C0,%D1" CR_TAB
5641 "clr %D0" CR_TAB
5642 "sbrc %C0,7" CR_TAB
5643 "dec %D0");
5644 else
5645 return ("clr %D0" CR_TAB
5646 "sbrc %D1,7" CR_TAB
5647 "dec %D0" CR_TAB
5648 "mov %C0,%D1" CR_TAB
5649 "mov %B0,%C1" CR_TAB
5650 "mov %A0,%B1");
5653 case 16:
5655 int reg0 = true_regnum (operands[0]);
5656 int reg1 = true_regnum (operands[1]);
5658 if (reg0 == reg1 + 2)
5659 return *len = 4, ("clr %D0" CR_TAB
5660 "sbrc %B0,7" CR_TAB
5661 "com %D0" CR_TAB
5662 "mov %C0,%D0");
5663 if (AVR_HAVE_MOVW)
5664 return *len = 5, ("movw %A0,%C1" CR_TAB
5665 "clr %D0" CR_TAB
5666 "sbrc %B0,7" CR_TAB
5667 "com %D0" CR_TAB
5668 "mov %C0,%D0");
5669 else
5670 return *len = 6, ("mov %B0,%D1" CR_TAB
5671 "mov %A0,%C1" CR_TAB
5672 "clr %D0" CR_TAB
5673 "sbrc %B0,7" CR_TAB
5674 "com %D0" CR_TAB
5675 "mov %C0,%D0");
5678 case 24:
5679 return *len = 6, ("mov %A0,%D1" CR_TAB
5680 "clr %D0" CR_TAB
5681 "sbrc %A0,7" CR_TAB
5682 "com %D0" CR_TAB
5683 "mov %B0,%D0" CR_TAB
5684 "mov %C0,%D0");
5686 default:
5687 if (INTVAL (operands[2]) < 32)
5688 break;
5690 /* fall through */
5692 case 31:
5693 if (AVR_HAVE_MOVW)
5694 return *len = 4, ("lsl %D0" CR_TAB
5695 "sbc %A0,%A0" CR_TAB
5696 "mov %B0,%A0" CR_TAB
5697 "movw %C0,%A0");
5698 else
5699 return *len = 5, ("lsl %D0" CR_TAB
5700 "sbc %A0,%A0" CR_TAB
5701 "mov %B0,%A0" CR_TAB
5702 "mov %C0,%A0" CR_TAB
5703 "mov %D0,%A0");
5705 len = t;
5707 out_shift_with_cnt ("asr %D0" CR_TAB
5708 "ror %C0" CR_TAB
5709 "ror %B0" CR_TAB
5710 "ror %A0", insn, operands, len, 4);
5711 return "";
5714 /* 8-bit logic shift right ((unsigned char)x >> i) */
5716 const char *
5717 lshrqi3_out (rtx insn, rtx operands[], int *len)
5719 if (GET_CODE (operands[2]) == CONST_INT)
5721 int k;
5723 if (!len)
5724 len = &k;
5726 switch (INTVAL (operands[2]))
5728 default:
5729 if (INTVAL (operands[2]) < 8)
5730 break;
5732 *len = 1;
5733 return "clr %0";
5735 case 1:
5736 *len = 1;
5737 return "lsr %0";
5739 case 2:
5740 *len = 2;
5741 return ("lsr %0" CR_TAB
5742 "lsr %0");
5743 case 3:
5744 *len = 3;
5745 return ("lsr %0" CR_TAB
5746 "lsr %0" CR_TAB
5747 "lsr %0");
5749 case 4:
5750 if (test_hard_reg_class (LD_REGS, operands[0]))
5752 *len=2;
5753 return ("swap %0" CR_TAB
5754 "andi %0,0x0f");
5756 *len = 4;
5757 return ("lsr %0" CR_TAB
5758 "lsr %0" CR_TAB
5759 "lsr %0" CR_TAB
5760 "lsr %0");
5762 case 5:
5763 if (test_hard_reg_class (LD_REGS, operands[0]))
5765 *len = 3;
5766 return ("swap %0" CR_TAB
5767 "lsr %0" CR_TAB
5768 "andi %0,0x7");
5770 *len = 5;
5771 return ("lsr %0" CR_TAB
5772 "lsr %0" CR_TAB
5773 "lsr %0" CR_TAB
5774 "lsr %0" CR_TAB
5775 "lsr %0");
5777 case 6:
5778 if (test_hard_reg_class (LD_REGS, operands[0]))
5780 *len = 4;
5781 return ("swap %0" CR_TAB
5782 "lsr %0" CR_TAB
5783 "lsr %0" CR_TAB
5784 "andi %0,0x3");
5786 *len = 6;
5787 return ("lsr %0" CR_TAB
5788 "lsr %0" CR_TAB
5789 "lsr %0" CR_TAB
5790 "lsr %0" CR_TAB
5791 "lsr %0" CR_TAB
5792 "lsr %0");
5794 case 7:
5795 *len = 3;
5796 return ("rol %0" CR_TAB
5797 "clr %0" CR_TAB
5798 "rol %0");
5801 else if (CONSTANT_P (operands[2]))
5802 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5804 out_shift_with_cnt ("lsr %0",
5805 insn, operands, len, 1);
5806 return "";
5809 /* 16-bit logic shift right ((unsigned short)x >> i) */
5811 const char *
5812 lshrhi3_out (rtx insn, rtx operands[], int *len)
5814 if (GET_CODE (operands[2]) == CONST_INT)
5816 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5817 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5818 int k;
5819 int *t = len;
5821 if (!len)
5822 len = &k;
5824 switch (INTVAL (operands[2]))
5826 default:
5827 if (INTVAL (operands[2]) < 16)
5828 break;
5830 *len = 2;
5831 return ("clr %B0" CR_TAB
5832 "clr %A0");
5834 case 4:
5835 if (optimize_size && scratch)
5836 break; /* 5 */
5837 if (ldi_ok)
5839 *len = 6;
5840 return ("swap %B0" CR_TAB
5841 "swap %A0" CR_TAB
5842 "andi %A0,0x0f" CR_TAB
5843 "eor %A0,%B0" CR_TAB
5844 "andi %B0,0x0f" CR_TAB
5845 "eor %A0,%B0");
5847 if (scratch)
5849 *len = 7;
5850 return ("swap %B0" CR_TAB
5851 "swap %A0" CR_TAB
5852 "ldi %3,0x0f" CR_TAB
5853 "and %A0,%3" CR_TAB
5854 "eor %A0,%B0" CR_TAB
5855 "and %B0,%3" CR_TAB
5856 "eor %A0,%B0");
5858 break; /* optimize_size ? 6 : 8 */
5860 case 5:
5861 if (optimize_size)
5862 break; /* scratch ? 5 : 6 */
5863 if (ldi_ok)
5865 *len = 8;
5866 return ("lsr %B0" CR_TAB
5867 "ror %A0" CR_TAB
5868 "swap %B0" CR_TAB
5869 "swap %A0" CR_TAB
5870 "andi %A0,0x0f" CR_TAB
5871 "eor %A0,%B0" CR_TAB
5872 "andi %B0,0x0f" CR_TAB
5873 "eor %A0,%B0");
5875 if (scratch)
5877 *len = 9;
5878 return ("lsr %B0" CR_TAB
5879 "ror %A0" CR_TAB
5880 "swap %B0" CR_TAB
5881 "swap %A0" CR_TAB
5882 "ldi %3,0x0f" CR_TAB
5883 "and %A0,%3" CR_TAB
5884 "eor %A0,%B0" CR_TAB
5885 "and %B0,%3" CR_TAB
5886 "eor %A0,%B0");
5888 break; /* 10 */
5890 case 6:
5891 if (optimize_size)
5892 break; /* scratch ? 5 : 6 */
5893 *len = 9;
5894 return ("clr __tmp_reg__" CR_TAB
5895 "lsl %A0" CR_TAB
5896 "rol %B0" CR_TAB
5897 "rol __tmp_reg__" CR_TAB
5898 "lsl %A0" CR_TAB
5899 "rol %B0" CR_TAB
5900 "rol __tmp_reg__" CR_TAB
5901 "mov %A0,%B0" CR_TAB
5902 "mov %B0,__tmp_reg__");
5904 case 7:
5905 *len = 5;
5906 return ("lsl %A0" CR_TAB
5907 "mov %A0,%B0" CR_TAB
5908 "rol %A0" CR_TAB
5909 "sbc %B0,%B0" CR_TAB
5910 "neg %B0");
5912 case 8:
5913 return *len = 2, ("mov %A0,%B1" CR_TAB
5914 "clr %B0");
5916 case 9:
5917 *len = 3;
5918 return ("mov %A0,%B0" CR_TAB
5919 "clr %B0" CR_TAB
5920 "lsr %A0");
5922 case 10:
5923 *len = 4;
5924 return ("mov %A0,%B0" CR_TAB
5925 "clr %B0" CR_TAB
5926 "lsr %A0" CR_TAB
5927 "lsr %A0");
5929 case 11:
5930 *len = 5;
5931 return ("mov %A0,%B0" CR_TAB
5932 "clr %B0" CR_TAB
5933 "lsr %A0" CR_TAB
5934 "lsr %A0" CR_TAB
5935 "lsr %A0");
5937 case 12:
5938 if (ldi_ok)
5940 *len = 4;
5941 return ("mov %A0,%B0" CR_TAB
5942 "clr %B0" CR_TAB
5943 "swap %A0" CR_TAB
5944 "andi %A0,0x0f");
5946 if (scratch)
5948 *len = 5;
5949 return ("mov %A0,%B0" CR_TAB
5950 "clr %B0" CR_TAB
5951 "swap %A0" CR_TAB
5952 "ldi %3,0x0f" CR_TAB
5953 "and %A0,%3");
5955 *len = 6;
5956 return ("mov %A0,%B0" CR_TAB
5957 "clr %B0" CR_TAB
5958 "lsr %A0" CR_TAB
5959 "lsr %A0" CR_TAB
5960 "lsr %A0" CR_TAB
5961 "lsr %A0");
5963 case 13:
5964 if (ldi_ok)
5966 *len = 5;
5967 return ("mov %A0,%B0" CR_TAB
5968 "clr %B0" CR_TAB
5969 "swap %A0" CR_TAB
5970 "lsr %A0" CR_TAB
5971 "andi %A0,0x07");
5973 if (AVR_HAVE_MUL && scratch)
5975 *len = 5;
5976 return ("ldi %3,0x08" CR_TAB
5977 "mul %B0,%3" CR_TAB
5978 "mov %A0,r1" CR_TAB
5979 "clr %B0" CR_TAB
5980 "clr __zero_reg__");
5982 if (optimize_size && scratch)
5983 break; /* 5 */
5984 if (scratch)
5986 *len = 6;
5987 return ("mov %A0,%B0" CR_TAB
5988 "clr %B0" CR_TAB
5989 "swap %A0" CR_TAB
5990 "lsr %A0" CR_TAB
5991 "ldi %3,0x07" CR_TAB
5992 "and %A0,%3");
5994 if (AVR_HAVE_MUL)
5996 *len = 6;
5997 return ("set" CR_TAB
5998 "bld r1,3" CR_TAB
5999 "mul %B0,r1" CR_TAB
6000 "mov %A0,r1" CR_TAB
6001 "clr %B0" CR_TAB
6002 "clr __zero_reg__");
6004 *len = 7;
6005 return ("mov %A0,%B0" CR_TAB
6006 "clr %B0" CR_TAB
6007 "lsr %A0" CR_TAB
6008 "lsr %A0" CR_TAB
6009 "lsr %A0" CR_TAB
6010 "lsr %A0" CR_TAB
6011 "lsr %A0");
6013 case 14:
6014 if (AVR_HAVE_MUL && ldi_ok)
6016 *len = 5;
6017 return ("ldi %A0,0x04" CR_TAB
6018 "mul %B0,%A0" CR_TAB
6019 "mov %A0,r1" CR_TAB
6020 "clr %B0" CR_TAB
6021 "clr __zero_reg__");
6023 if (AVR_HAVE_MUL && scratch)
6025 *len = 5;
6026 return ("ldi %3,0x04" CR_TAB
6027 "mul %B0,%3" CR_TAB
6028 "mov %A0,r1" CR_TAB
6029 "clr %B0" CR_TAB
6030 "clr __zero_reg__");
6032 if (optimize_size && ldi_ok)
6034 *len = 5;
6035 return ("mov %A0,%B0" CR_TAB
6036 "ldi %B0,6" "\n1:\t"
6037 "lsr %A0" CR_TAB
6038 "dec %B0" CR_TAB
6039 "brne 1b");
6041 if (optimize_size && scratch)
6042 break; /* 5 */
6043 *len = 6;
6044 return ("clr %A0" CR_TAB
6045 "lsl %B0" CR_TAB
6046 "rol %A0" CR_TAB
6047 "lsl %B0" CR_TAB
6048 "rol %A0" CR_TAB
6049 "clr %B0");
6051 case 15:
6052 *len = 4;
6053 return ("clr %A0" CR_TAB
6054 "lsl %B0" CR_TAB
6055 "rol %A0" CR_TAB
6056 "clr %B0");
6058 len = t;
6060 out_shift_with_cnt ("lsr %B0" CR_TAB
6061 "ror %A0", insn, operands, len, 2);
6062 return "";
6066 /* 24-bit logic shift right */
6068 const char*
6069 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
6071 int dest = REGNO (op[0]);
6072 int src = REGNO (op[1]);
6074 if (CONST_INT_P (op[2]))
6076 if (plen)
6077 *plen = 0;
6079 switch (INTVAL (op[2]))
6081 case 8:
6082 if (dest <= src)
6083 return avr_asm_len ("mov %A0,%B1" CR_TAB
6084 "mov %B0,%C1" CR_TAB
6085 "clr %C0", op, plen, 3);
6086 else
6087 return avr_asm_len ("clr %C0" CR_TAB
6088 "mov %B0,%C1" CR_TAB
6089 "mov %A0,%B1", op, plen, 3);
6091 case 16:
6092 if (dest != src + 2)
6093 avr_asm_len ("mov %A0,%C1", op, plen, 1);
6095 return avr_asm_len ("clr %B0" CR_TAB
6096 "clr %C0", op, plen, 2);
6098 default:
6099 if (INTVAL (op[2]) < 24)
6100 break;
6102 /* fall through */
6104 case 23:
6105 return avr_asm_len ("clr %A0" CR_TAB
6106 "sbrc %C0,7" CR_TAB
6107 "inc %A0" CR_TAB
6108 "clr %B0" CR_TAB
6109 "clr %C0", op, plen, 5);
6110 } /* switch */
6113 out_shift_with_cnt ("lsr %C0" CR_TAB
6114 "ror %B0" CR_TAB
6115 "ror %A0", insn, op, plen, 3);
6116 return "";
6120 /* 32-bit logic shift right ((unsigned int)x >> i) */
6122 const char *
6123 lshrsi3_out (rtx insn, rtx operands[], int *len)
6125 if (GET_CODE (operands[2]) == CONST_INT)
6127 int k;
6128 int *t = len;
6130 if (!len)
6131 len = &k;
6133 switch (INTVAL (operands[2]))
6135 default:
6136 if (INTVAL (operands[2]) < 32)
6137 break;
6139 if (AVR_HAVE_MOVW)
6140 return *len = 3, ("clr %D0" CR_TAB
6141 "clr %C0" CR_TAB
6142 "movw %A0,%C0");
6143 *len = 4;
6144 return ("clr %D0" CR_TAB
6145 "clr %C0" CR_TAB
6146 "clr %B0" CR_TAB
6147 "clr %A0");
6149 case 8:
6151 int reg0 = true_regnum (operands[0]);
6152 int reg1 = true_regnum (operands[1]);
6153 *len = 4;
6154 if (reg0 <= reg1)
6155 return ("mov %A0,%B1" CR_TAB
6156 "mov %B0,%C1" CR_TAB
6157 "mov %C0,%D1" CR_TAB
6158 "clr %D0");
6159 else
6160 return ("clr %D0" CR_TAB
6161 "mov %C0,%D1" CR_TAB
6162 "mov %B0,%C1" CR_TAB
6163 "mov %A0,%B1");
6166 case 16:
6168 int reg0 = true_regnum (operands[0]);
6169 int reg1 = true_regnum (operands[1]);
6171 if (reg0 == reg1 + 2)
6172 return *len = 2, ("clr %C0" CR_TAB
6173 "clr %D0");
6174 if (AVR_HAVE_MOVW)
6175 return *len = 3, ("movw %A0,%C1" CR_TAB
6176 "clr %C0" CR_TAB
6177 "clr %D0");
6178 else
6179 return *len = 4, ("mov %B0,%D1" CR_TAB
6180 "mov %A0,%C1" CR_TAB
6181 "clr %C0" CR_TAB
6182 "clr %D0");
6185 case 24:
6186 return *len = 4, ("mov %A0,%D1" CR_TAB
6187 "clr %B0" CR_TAB
6188 "clr %C0" CR_TAB
6189 "clr %D0");
6191 case 31:
6192 *len = 6;
6193 return ("clr %A0" CR_TAB
6194 "sbrc %D0,7" CR_TAB
6195 "inc %A0" CR_TAB
6196 "clr %B0" CR_TAB
6197 "clr %C0" CR_TAB
6198 "clr %D0");
6200 len = t;
6202 out_shift_with_cnt ("lsr %D0" CR_TAB
6203 "ror %C0" CR_TAB
6204 "ror %B0" CR_TAB
6205 "ror %A0", insn, operands, len, 4);
6206 return "";
6210 /* Output addition of register XOP[0] and compile time constant XOP[2].
6211 CODE == PLUS: perform addition by using ADD instructions or
6212 CODE == MINUS: perform addition by using SUB instructions:
6214 XOP[0] = XOP[0] + XOP[2]
6216 Or perform addition/subtraction with register XOP[2] depending on CODE:
6218 XOP[0] = XOP[0] +/- XOP[2]
6220 If PLEN == NULL, print assembler instructions to perform the operation;
6221 otherwise, set *PLEN to the length of the instruction sequence (in words)
6222 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
6223 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
6225 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6226 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6227 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
6228 the subtrahend in the original insn, provided it is a compile time constant.
6229 In all other cases, SIGN is 0.
6231 If OUT_LABEL is true, print the final 0: label which is needed for
6232 saturated addition / subtraction. The only case where OUT_LABEL = false
6233 is useful is for saturated addition / subtraction performed during
6234 fixed-point rounding, cf. `avr_out_round'. */
6236 static void
6237 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
6238 enum rtx_code code_sat, int sign, bool out_label)
6240 /* MODE of the operation. */
6241 enum machine_mode mode = GET_MODE (xop[0]);
6243 /* INT_MODE of the same size. */
6244 enum machine_mode imode = int_mode_for_mode (mode);
6246 /* Number of bytes to operate on. */
6247 int i, n_bytes = GET_MODE_SIZE (mode);
6249 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6250 int clobber_val = -1;
6252 /* op[0]: 8-bit destination register
6253 op[1]: 8-bit const int
6254 op[2]: 8-bit scratch register */
6255 rtx op[3];
6257 /* Started the operation? Before starting the operation we may skip
6258 adding 0. This is no more true after the operation started because
6259 carry must be taken into account. */
6260 bool started = false;
6262 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6263 rtx xval = xop[2];
6265 /* Output a BRVC instruction. Only needed with saturation. */
6266 bool out_brvc = true;
6268 if (plen)
6269 *plen = 0;
6271 if (REG_P (xop[2]))
6273 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
6275 for (i = 0; i < n_bytes; i++)
6277 /* We operate byte-wise on the destination. */
6278 op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
6279 op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);
6281 if (i == 0)
6282 avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
6283 op, plen, 1);
6284 else
6285 avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
6286 op, plen, 1);
6289 if (reg_overlap_mentioned_p (xop[0], xop[2]))
6291 gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
6293 if (MINUS == code)
6294 return;
6297 goto saturate;
6300 /* Except in the case of ADIW with 16-bit register (see below)
6301 addition does not set cc0 in a usable way. */
6303 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
6305 if (CONST_FIXED_P (xval))
6306 xval = avr_to_int_mode (xval);
6308 /* Adding/Subtracting zero is a no-op. */
6310 if (xval == const0_rtx)
6312 *pcc = CC_NONE;
6313 return;
6316 if (MINUS == code)
6317 xval = simplify_unary_operation (NEG, imode, xval, imode);
6319 op[2] = xop[3];
6321 if (SS_PLUS == code_sat && MINUS == code
6322 && sign < 0
6323 && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
6324 & GET_MODE_MASK (QImode)))
6326 /* We compute x + 0x80 by means of SUB instructions. We negated the
6327 constant subtrahend above and are left with x - (-128) so that we
6328 need something like SUBI r,128 which does not exist because SUBI sets
6329 V according to the sign of the subtrahend. Notice the only case
6330 where this must be done is when NEG overflowed in case [2s] because
6331 the V computation needs the right sign of the subtrahend. */
6333 rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
6335 avr_asm_len ("subi %0,128" CR_TAB
6336 "brmi 0f", &msb, plen, 2);
6337 out_brvc = false;
6339 goto saturate;
6342 for (i = 0; i < n_bytes; i++)
6344 /* We operate byte-wise on the destination. */
6345 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6346 rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);
6348 /* 8-bit value to operate with this byte. */
6349 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6351 /* Registers R16..R31 can operate with immediate. */
6352 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6354 op[0] = reg8;
6355 op[1] = gen_int_mode (val8, QImode);
6357 /* To get usable cc0 no low-bytes must have been skipped. */
6359 if (i && !started)
6360 *pcc = CC_CLOBBER;
6362 if (!started
6363 && i % 2 == 0
6364 && i + 2 <= n_bytes
6365 && test_hard_reg_class (ADDW_REGS, reg8))
6367 rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
6368 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
6370 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6371 i.e. operate word-wise. */
6373 if (val16 < 64)
6375 if (val16 != 0)
6377 started = true;
6378 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
6379 op, plen, 1);
6381 if (n_bytes == 2 && PLUS == code)
6382 *pcc = CC_SET_ZN;
6385 i++;
6386 continue;
6390 if (val8 == 0)
6392 if (started)
6393 avr_asm_len (code == PLUS
6394 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6395 op, plen, 1);
6396 continue;
6398 else if ((val8 == 1 || val8 == 0xff)
6399 && UNKNOWN == code_sat
6400 && !started
6401 && i == n_bytes - 1)
6403 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
6404 op, plen, 1);
6405 break;
6408 switch (code)
6410 case PLUS:
6412 gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));
6414 if (plen != NULL && UNKNOWN != code_sat)
6416 /* This belongs to the x + 0x80 corner case. The code with
6417 ADD instruction is not smaller, thus make this case
6418 expensive so that the caller won't pick it. */
6420 *plen += 10;
6421 break;
6424 if (clobber_val != (int) val8)
6425 avr_asm_len ("ldi %2,%1", op, plen, 1);
6426 clobber_val = (int) val8;
6428 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
6430 break; /* PLUS */
6432 case MINUS:
6434 if (ld_reg_p)
6435 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
6436 else
6438 gcc_assert (plen != NULL || REG_P (op[2]));
6440 if (clobber_val != (int) val8)
6441 avr_asm_len ("ldi %2,%1", op, plen, 1);
6442 clobber_val = (int) val8;
6444 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
6447 break; /* MINUS */
6449 default:
6450 /* Unknown code */
6451 gcc_unreachable();
6454 started = true;
6456 } /* for all sub-bytes */
6458 saturate:
6460 if (UNKNOWN == code_sat)
6461 return;
6463 *pcc = (int) CC_CLOBBER;
6465 /* Vanilla addition/subtraction is done. We are left with saturation.
6467 We have to compute A = A <op> B where A is a register and
6468 B is a register or a non-zero compile time constant CONST.
6469 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6470 B stands for the original operand $2 in INSN. In the case of B = CONST,
6471 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6473 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6476 unsigned
6477 operation | code | sat if | b is | sat value | case
6478 -----------------+-------+----------+--------------+-----------+-------
6479 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6480 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6481 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6482 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6485 signed
6486 operation | code | sat if | b is | sat value | case
6487 -----------------+-------+----------+--------------+-----------+-------
6488 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6489 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6490 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6491 - as a + (-b) | add | V == 1 | const | s- | [4s]
6493 s+ = b < 0 ? -0x80 : 0x7f
6494 s- = b < 0 ? 0x7f : -0x80
6496 The cases a - b actually perform a - (-(-b)) if B is CONST.
6499 op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
6500 op[1] = n_bytes > 1
6501 ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
6502 : NULL_RTX;
6504 bool need_copy = true;
6505 int len_call = 1 + AVR_HAVE_JMP_CALL;
6507 switch (code_sat)
6509 default:
6510 gcc_unreachable();
6512 case SS_PLUS:
6513 case SS_MINUS:
6515 if (out_brvc)
6516 avr_asm_len ("brvc 0f", op, plen, 1);
6518 if (reg_overlap_mentioned_p (xop[0], xop[2]))
6520 /* [1s,reg] */
6522 if (n_bytes == 1)
6523 avr_asm_len ("ldi %0,0x7f" CR_TAB
6524 "adc %0,__zero_reg__", op, plen, 2);
6525 else
6526 avr_asm_len ("ldi %0,0x7f" CR_TAB
6527 "ldi %1,0xff" CR_TAB
6528 "adc %1,__zero_reg__" CR_TAB
6529 "adc %0,__zero_reg__", op, plen, 4);
6531 else if (sign == 0 && PLUS == code)
6533 /* [1s,reg] */
6535 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
6537 if (n_bytes == 1)
6538 avr_asm_len ("ldi %0,0x80" CR_TAB
6539 "sbrs %2,7" CR_TAB
6540 "dec %0", op, plen, 3);
6541 else
6542 avr_asm_len ("ldi %0,0x80" CR_TAB
6543 "cp %2,%0" CR_TAB
6544 "sbc %1,%1" CR_TAB
6545 "sbci %0,0", op, plen, 4);
6547 else if (sign == 0 && MINUS == code)
6549 /* [3s,reg] */
6551 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
6553 if (n_bytes == 1)
6554 avr_asm_len ("ldi %0,0x7f" CR_TAB
6555 "sbrs %2,7" CR_TAB
6556 "inc %0", op, plen, 3);
6557 else
6558 avr_asm_len ("ldi %0,0x7f" CR_TAB
6559 "cp %0,%2" CR_TAB
6560 "sbc %1,%1" CR_TAB
6561 "sbci %0,-1", op, plen, 4);
6563 else if ((sign < 0) ^ (SS_MINUS == code_sat))
6565 /* [1s,const,B < 0] [2s,B < 0] */
6566 /* [3s,const,B > 0] [4s,B > 0] */
6568 if (n_bytes == 8)
6570 avr_asm_len ("%~call __clr_8", op, plen, len_call);
6571 need_copy = false;
6574 avr_asm_len ("ldi %0,0x80", op, plen, 1);
6575 if (n_bytes > 1 && need_copy)
6576 avr_asm_len ("clr %1", op, plen, 1);
6578 else if ((sign > 0) ^ (SS_MINUS == code_sat))
6580 /* [1s,const,B > 0] [2s,B > 0] */
6581 /* [3s,const,B < 0] [4s,B < 0] */
6583 if (n_bytes == 8)
6585 avr_asm_len ("sec" CR_TAB
6586 "%~call __sbc_8", op, plen, 1 + len_call);
6587 need_copy = false;
6590 avr_asm_len ("ldi %0,0x7f", op, plen, 1);
6591 if (n_bytes > 1 && need_copy)
6592 avr_asm_len ("ldi %1,0xff", op, plen, 1);
6594 else
6595 gcc_unreachable();
6597 break;
6599 case US_PLUS:
6600 /* [1u] : [2u] */
6602 avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);
6604 if (n_bytes == 8)
6606 if (MINUS == code)
6607 avr_asm_len ("sec", op, plen, 1);
6608 avr_asm_len ("%~call __sbc_8", op, plen, len_call);
6610 need_copy = false;
6612 else
6614 if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
6615 avr_asm_len ("sec" CR_TAB "sbc %0,%0", op, plen, 2);
6616 else
6617 avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
6618 op, plen, 1);
6620 break; /* US_PLUS */
6622 case US_MINUS:
6623 /* [4u] : [3u] */
6625 avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);
6627 if (n_bytes == 8)
6629 avr_asm_len ("%~call __clr_8", op, plen, len_call);
6630 need_copy = false;
6632 else
6633 avr_asm_len ("clr %0", op, plen, 1);
6635 break;
6638 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6639 Now copy the right value to the LSBs. */
6641 if (need_copy && n_bytes > 1)
6643 if (US_MINUS == code_sat || US_PLUS == code_sat)
6645 avr_asm_len ("mov %1,%0", op, plen, 1);
6647 if (n_bytes > 2)
6649 op[0] = xop[0];
6650 if (AVR_HAVE_MOVW)
6651 avr_asm_len ("movw %0,%1", op, plen, 1);
6652 else
6653 avr_asm_len ("mov %A0,%1" CR_TAB
6654 "mov %B0,%1", op, plen, 2);
6657 else if (n_bytes > 2)
6659 op[0] = xop[0];
6660 avr_asm_len ("mov %A0,%1" CR_TAB
6661 "mov %B0,%1", op, plen, 2);
6665 if (need_copy && n_bytes == 8)
6667 if (AVR_HAVE_MOVW)
6668 avr_asm_len ("movw %r0+2,%0" CR_TAB
6669 "movw %r0+4,%0", xop, plen, 2);
6670 else
6671 avr_asm_len ("mov %r0+2,%0" CR_TAB
6672 "mov %r0+3,%0" CR_TAB
6673 "mov %r0+4,%0" CR_TAB
6674 "mov %r0+5,%0", xop, plen, 4);
6677 if (out_label)
6678 avr_asm_len ("0:", op, plen, 0);
6682 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6683 is ont a compile-time constant:
6685 XOP[0] = XOP[0] +/- XOP[2]
6687 This is a helper for the function below. The only insns that need this
6688 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
6690 static const char*
6691 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
6693 enum machine_mode mode = GET_MODE (xop[0]);
6695 /* Only pointer modes want to add symbols. */
6697 gcc_assert (mode == HImode || mode == PSImode);
6699 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
6701 avr_asm_len (PLUS == code
6702 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
6703 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
6704 xop, plen, -2);
6706 if (PSImode == mode)
6707 avr_asm_len (PLUS == code
6708 ? "sbci %C0,hlo8(-(%2))"
6709 : "sbci %C0,hlo8(%2)", xop, plen, 1);
6710 return "";
6714 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6716 INSN is a single_set insn or an insn pattern with a binary operation as
6717 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6719 XOP are the operands of INSN. In the case of 64-bit operations with
6720 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6721 The non-saturating insns up to 32 bits may or may not supply a "d" class
6722 scratch as XOP[3].
6724 If PLEN == NULL output the instructions.
6725 If PLEN != NULL set *PLEN to the length of the sequence in words.
6727 PCC is a pointer to store the instructions' effect on cc0.
6728 PCC may be NULL.
6730 PLEN and PCC default to NULL.
6732 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
6734 Return "" */
6736 const char*
6737 avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
6739 int cc_plus, cc_minus, cc_dummy;
6740 int len_plus, len_minus;
6741 rtx op[4];
6742 rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
6743 rtx xdest = SET_DEST (xpattern);
6744 enum machine_mode mode = GET_MODE (xdest);
6745 enum machine_mode imode = int_mode_for_mode (mode);
6746 int n_bytes = GET_MODE_SIZE (mode);
6747 enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
6748 enum rtx_code code
6749 = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
6750 ? PLUS : MINUS);
6752 if (!pcc)
6753 pcc = &cc_dummy;
6755 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6757 if (PLUS == code_sat || MINUS == code_sat)
6758 code_sat = UNKNOWN;
6760 if (n_bytes <= 4 && REG_P (xop[2]))
6762 avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
6763 return "";
6766 if (8 == n_bytes)
6768 op[0] = gen_rtx_REG (DImode, ACC_A);
6769 op[1] = gen_rtx_REG (DImode, ACC_A);
6770 op[2] = avr_to_int_mode (xop[0]);
6772 else
6774 if (!REG_P (xop[2])
6775 && !CONST_INT_P (xop[2])
6776 && !CONST_FIXED_P (xop[2]))
6778 return avr_out_plus_symbol (xop, code, plen, pcc);
6781 op[0] = avr_to_int_mode (xop[0]);
6782 op[1] = avr_to_int_mode (xop[1]);
6783 op[2] = avr_to_int_mode (xop[2]);
6786 /* Saturations and 64-bit operations don't have a clobber operand.
6787 For the other cases, the caller will provide a proper XOP[3]. */
6789 xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
6790 op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;
6792 /* Saturation will need the sign of the original operand. */
6794 rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
6795 int sign = INTVAL (xmsb) < 0 ? -1 : 1;
6797 /* If we subtract and the subtrahend is a constant, then negate it
6798 so that avr_out_plus_1 can be used. */
6800 if (MINUS == code)
6801 op[2] = simplify_unary_operation (NEG, imode, op[2], imode);
6803 /* Work out the shortest sequence. */
6805 avr_out_plus_1 (op, &len_minus, MINUS, &cc_plus, code_sat, sign, out_label);
6806 avr_out_plus_1 (op, &len_plus, PLUS, &cc_minus, code_sat, sign, out_label);
6808 if (plen)
6810 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6811 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6813 else if (len_minus <= len_plus)
6814 avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
6815 else
6816 avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);
6818 return "";
6822 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6823 time constant XOP[2]:
6825 XOP[0] = XOP[0] <op> XOP[2]
6827 and return "". If PLEN == NULL, print assembler instructions to perform the
6828 operation; otherwise, set *PLEN to the length of the instruction sequence
6829 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6830 register or SCRATCH if no clobber register is needed for the operation.
6831 INSN is an INSN_P or a pattern of an insn. */
6833 const char*
6834 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6836 /* CODE and MODE of the operation. */
6837 rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
6838 enum rtx_code code = GET_CODE (SET_SRC (xpattern));
6839 enum machine_mode mode = GET_MODE (xop[0]);
6841 /* Number of bytes to operate on. */
6842 int i, n_bytes = GET_MODE_SIZE (mode);
6844 /* Value of T-flag (0 or 1) or -1 if unknow. */
6845 int set_t = -1;
6847 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6848 int clobber_val = -1;
6850 /* op[0]: 8-bit destination register
6851 op[1]: 8-bit const int
6852 op[2]: 8-bit clobber register or SCRATCH
6853 op[3]: 8-bit register containing 0xff or NULL_RTX */
6854 rtx op[4];
6856 op[2] = xop[3];
6857 op[3] = NULL_RTX;
6859 if (plen)
6860 *plen = 0;
6862 for (i = 0; i < n_bytes; i++)
6864 /* We operate byte-wise on the destination. */
6865 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6866 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6868 /* 8-bit value to operate with this byte. */
6869 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6871 /* Number of bits set in the current byte of the constant. */
6872 int pop8 = avr_popcount (val8);
6874 /* Registers R16..R31 can operate with immediate. */
6875 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6877 op[0] = reg8;
6878 op[1] = GEN_INT (val8);
6880 switch (code)
6882 case IOR:
6884 if (0 == pop8)
6885 continue;
6886 else if (ld_reg_p)
6887 avr_asm_len ("ori %0,%1", op, plen, 1);
6888 else if (1 == pop8)
6890 if (set_t != 1)
6891 avr_asm_len ("set", op, plen, 1);
6892 set_t = 1;
6894 op[1] = GEN_INT (exact_log2 (val8));
6895 avr_asm_len ("bld %0,%1", op, plen, 1);
6897 else if (8 == pop8)
6899 if (op[3] != NULL_RTX)
6900 avr_asm_len ("mov %0,%3", op, plen, 1);
6901 else
6902 avr_asm_len ("clr %0" CR_TAB
6903 "dec %0", op, plen, 2);
6905 op[3] = op[0];
6907 else
6909 if (clobber_val != (int) val8)
6910 avr_asm_len ("ldi %2,%1", op, plen, 1);
6911 clobber_val = (int) val8;
6913 avr_asm_len ("or %0,%2", op, plen, 1);
6916 continue; /* IOR */
6918 case AND:
6920 if (8 == pop8)
6921 continue;
6922 else if (0 == pop8)
6923 avr_asm_len ("clr %0", op, plen, 1);
6924 else if (ld_reg_p)
6925 avr_asm_len ("andi %0,%1", op, plen, 1);
6926 else if (7 == pop8)
6928 if (set_t != 0)
6929 avr_asm_len ("clt", op, plen, 1);
6930 set_t = 0;
6932 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6933 avr_asm_len ("bld %0,%1", op, plen, 1);
6935 else
6937 if (clobber_val != (int) val8)
6938 avr_asm_len ("ldi %2,%1", op, plen, 1);
6939 clobber_val = (int) val8;
6941 avr_asm_len ("and %0,%2", op, plen, 1);
6944 continue; /* AND */
6946 case XOR:
6948 if (0 == pop8)
6949 continue;
6950 else if (8 == pop8)
6951 avr_asm_len ("com %0", op, plen, 1);
6952 else if (ld_reg_p && val8 == (1 << 7))
6953 avr_asm_len ("subi %0,%1", op, plen, 1);
6954 else
6956 if (clobber_val != (int) val8)
6957 avr_asm_len ("ldi %2,%1", op, plen, 1);
6958 clobber_val = (int) val8;
6960 avr_asm_len ("eor %0,%2", op, plen, 1);
6963 continue; /* XOR */
6965 default:
6966 /* Unknown rtx_code */
6967 gcc_unreachable();
6969 } /* for all sub-bytes */
6971 return "";
6975 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6976 PLEN != NULL: Set *PLEN to the length of that sequence.
6977 Return "". */
6979 const char*
6980 avr_out_addto_sp (rtx *op, int *plen)
6982 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6983 int addend = INTVAL (op[0]);
6985 if (plen)
6986 *plen = 0;
6988 if (addend < 0)
6990 if (flag_verbose_asm || flag_print_asm_name)
6991 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6993 while (addend <= -pc_len)
6995 addend += pc_len;
6996 avr_asm_len ("rcall .", op, plen, 1);
6999 while (addend++ < 0)
7000 avr_asm_len ("push __zero_reg__", op, plen, 1);
7002 else if (addend > 0)
7004 if (flag_verbose_asm || flag_print_asm_name)
7005 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7007 while (addend-- > 0)
7008 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7011 return "";
7015 /* Outputs instructions needed for fixed point type conversion.
7016 This includes converting between any fixed point type, as well
7017 as converting to any integer type. Conversion between integer
7018 types is not supported.
7020 Converting signed fractional types requires a bit shift if converting
7021 to or from any unsigned fractional type because the decimal place is
7022 shifted by 1 bit. When the destination is a signed fractional, the sign
7023 is stored in either the carry or T bit. */
7025 const char*
7026 avr_out_fract (rtx insn, rtx operands[], bool intsigned, int *plen)
7028 size_t i;
7029 rtx xop[6];
7030 RTX_CODE shift = UNKNOWN;
7031 bool sign_in_carry = false;
7032 bool msb_in_carry = false;
7033 bool lsb_in_tmp_reg = false;
7034 bool lsb_in_carry = false;
7035 bool frac_rounded = false;
7036 const char *code_ashift = "lsl %0";
7039 #define MAY_CLOBBER(RR) \
7040 /* Shorthand used below. */ \
7041 ((sign_bytes \
7042 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7043 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7044 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7045 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7047 struct
7049 /* bytes : Length of operand in bytes.
7050 ibyte : Length of integral part in bytes.
7051 fbyte, fbit : Length of fractional part in bytes, bits. */
7053 bool sbit;
7054 unsigned fbit, bytes, ibyte, fbyte;
7055 unsigned regno, regno_msb;
7056 } dest, src, *val[2] = { &dest, &src };
7058 if (plen)
7059 *plen = 0;
7061 /* Step 0: Determine information on source and destination operand we
7062 ====== will need in the remainder. */
7064 for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7066 enum machine_mode mode;
7068 xop[i] = operands[i];
7070 mode = GET_MODE (xop[i]);
7072 val[i]->bytes = GET_MODE_SIZE (mode);
7073 val[i]->regno = REGNO (xop[i]);
7074 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7076 if (SCALAR_INT_MODE_P (mode))
7078 val[i]->sbit = intsigned;
7079 val[i]->fbit = 0;
7081 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7083 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7084 val[i]->fbit = GET_MODE_FBIT (mode);
7086 else
7087 fatal_insn ("unsupported fixed-point conversion", insn);
7089 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7090 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7093 // Byte offset of the decimal point taking into account different place
7094 // of the decimal point in input and output and different register numbers
7095 // of input and output.
7096 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7098 // Number of destination bytes that will come from sign / zero extension.
7099 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7101 // Number of bytes at the low end to be filled with zeros.
7102 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7104 // Do we have a 16-Bit register that is cleared?
7105 rtx clrw = NULL_RTX;
7107 bool sign_extend = src.sbit && sign_bytes;
7109 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
7110 shift = ASHIFT;
7111 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
7112 shift = ASHIFTRT;
7113 else if (dest.fbit % 8 == src.fbit % 8)
7114 shift = UNKNOWN;
7115 else
7116 gcc_unreachable();
7118 /* If we need to round the fraction part, we might need to save/round it
7119 before clobbering any of it in Step 1. Also, we might to want to do
7120 the rounding now to make use of LD_REGS. */
7121 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7122 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7123 && !TARGET_FRACT_CONV_TRUNC)
7125 bool overlap
7126 = (src.regno <=
7127 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
7128 && dest.regno - offset -1 >= dest.regno);
7129 unsigned s0 = dest.regno - offset -1;
7130 bool use_src = true;
7131 unsigned sn;
7132 unsigned copied_msb = src.regno_msb;
7133 bool have_carry = false;
7135 if (src.ibyte > dest.ibyte)
7136 copied_msb -= src.ibyte - dest.ibyte;
7138 for (sn = s0; sn <= copied_msb; sn++)
7139 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
7140 && !reg_unused_after (insn, all_regs_rtx[sn]))
7141 use_src = false;
7142 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
7144 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7145 &all_regs_rtx[src.regno_msb], plen, 2);
7146 sn = src.regno;
7147 if (sn < s0)
7149 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
7150 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
7151 else
7152 avr_asm_len ("sec" CR_TAB "cpc %0,__zero_reg__",
7153 &all_regs_rtx[sn], plen, 2);
7154 have_carry = true;
7156 while (++sn < s0)
7157 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7158 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
7159 &all_regs_rtx[s0], plen, 1);
7160 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7161 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
7162 avr_asm_len ("\n0:", NULL, plen, 0);
7163 frac_rounded = true;
7165 else if (use_src && overlap)
7167 avr_asm_len ("clr __tmp_reg__" CR_TAB
7168 "sbrc %1,0" CR_TAB "dec __tmp_reg__", xop, plen, 1);
7169 sn = src.regno;
7170 if (sn < s0)
7172 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7173 have_carry = true;
7175 while (++sn < s0)
7176 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7177 if (have_carry)
7178 avr_asm_len ("clt" CR_TAB "bld __tmp_reg__,7" CR_TAB
7179 "adc %0,__tmp_reg__",
7180 &all_regs_rtx[s0], plen, 1);
7181 else
7182 avr_asm_len ("lsr __tmp_reg" CR_TAB "add %0,__tmp_reg__",
7183 &all_regs_rtx[s0], plen, 2);
7184 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7185 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7186 frac_rounded = true;
7188 else if (overlap)
7190 bool use_src
7191 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
7192 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
7193 || reg_unused_after (insn, all_regs_rtx[s0])));
7194 xop[2] = all_regs_rtx[s0];
7195 unsigned sn = src.regno;
7196 if (!use_src || sn == s0)
7197 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7198 /* We need to consider to-be-discarded bits
7199 if the value is negative. */
7200 if (sn < s0)
7202 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7203 &all_regs_rtx[src.regno_msb], plen, 2);
7204 /* Test to-be-discarded bytes for any nozero bits.
7205 ??? Could use OR or SBIW to test two registers at once. */
7206 if (sn < s0)
7207 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7208 while (++sn < s0)
7209 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7210 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
7211 if (use_src)
7212 avr_asm_len ("breq 0f" CR_TAB
7213 "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",
7214 xop, plen, 3);
7215 else
7216 avr_asm_len ("breq 0f" CR_TAB
7217 "set" CR_TAB "bld __tmp_reg__,0\n0:",
7218 xop, plen, 3);
7220 lsb_in_tmp_reg = true;
7224 /* Step 1: Clear bytes at the low end and copy payload bits from source
7225 ====== to destination. */
7227 int step = offset < 0 ? 1 : -1;
7228 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
7230 // We cleared at least that number of registers.
7231 int clr_n = 0;
7233 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
7235 // Next regno of destination is needed for MOVW
7236 unsigned d1 = d0 + step;
7238 // Current and next regno of source
7239 signed s0 = d0 - offset;
7240 signed s1 = s0 + step;
7242 // Must current resp. next regno be CLRed? This applies to the low
7243 // bytes of the destination that have no associated source bytes.
7244 bool clr0 = s0 < (signed) src.regno;
7245 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
7247 // First gather what code to emit (if any) and additional step to
7248 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7249 // is the source rtx for the current loop iteration.
7250 const char *code = NULL;
7251 int stepw = 0;
7253 if (clr0)
7255 if (AVR_HAVE_MOVW && clr1 && clrw)
7257 xop[2] = all_regs_rtx[d0 & ~1];
7258 xop[3] = clrw;
7259 code = "movw %2,%3";
7260 stepw = step;
7262 else
7264 xop[2] = all_regs_rtx[d0];
7265 code = "clr %2";
7267 if (++clr_n >= 2
7268 && !clrw
7269 && d0 % 2 == (step > 0))
7271 clrw = all_regs_rtx[d0 & ~1];
7275 else if (offset && s0 <= (signed) src.regno_msb)
7277 int movw = AVR_HAVE_MOVW && offset % 2 == 0
7278 && d0 % 2 == (offset > 0)
7279 && d1 <= dest.regno_msb && d1 >= dest.regno
7280 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
7282 xop[2] = all_regs_rtx[d0 & ~movw];
7283 xop[3] = all_regs_rtx[s0 & ~movw];
7284 code = movw ? "movw %2,%3" : "mov %2,%3";
7285 stepw = step * movw;
7288 if (code)
7290 if (sign_extend && shift != ASHIFT && !sign_in_carry
7291 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
7293 /* We are going to override the sign bit. If we sign-extend,
7294 store the sign in the Carry flag. This is not needed if
7295 the destination will be ASHIFT is the remainder because
7296 the ASHIFT will set Carry without extra instruction. */
7298 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
7299 sign_in_carry = true;
7302 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
7304 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7305 && src.ibyte > dest.ibyte
7306 && (d0 == src_msb || d0 + stepw == src_msb))
7308 /* We are going to override the MSB. If we shift right,
7309 store the MSB in the Carry flag. This is only needed if
7310 we don't sign-extend becaue with sign-extension the MSB
7311 (the sign) will be produced by the sign extension. */
7313 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
7314 msb_in_carry = true;
7317 unsigned src_lsb = dest.regno - offset -1;
7319 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
7320 && !lsb_in_tmp_reg
7321 && (d0 == src_lsb || d0 + stepw == src_lsb))
7323 /* We are going to override the new LSB; store it into carry. */
7325 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
7326 code_ashift = "rol %0";
7327 lsb_in_carry = true;
7330 avr_asm_len (code, xop, plen, 1);
7331 d0 += stepw;
7335 /* Step 2: Shift destination left by 1 bit position. This might be needed
7336 ====== for signed input and unsigned output. */
7338 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
7340 unsigned s0 = dest.regno - offset -1;
7342 /* n1169 4.1.4 says:
7343 "Conversions from a fixed-point to an integer type round toward zero."
7344 Hence, converting a fract type to integer only gives a non-zero result
7345 for -1. */
7346 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7347 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
7348 && !TARGET_FRACT_CONV_TRUNC)
7350 gcc_assert (s0 == src.regno_msb);
7351 /* Check if the input is -1. We do that by checking if negating
7352 the input causes an integer overflow. */
7353 unsigned sn = src.regno;
7354 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7355 while (sn <= s0)
7356 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7358 /* Overflow goes with set carry. Clear carry otherwise. */
7359 avr_asm_len ("brvs 0f" CR_TAB "clc\n0:", NULL, plen, 2);
7361 /* Likewise, when converting from accumulator types to integer, we
7362 need to round up negative values. */
7363 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7364 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7365 && !TARGET_FRACT_CONV_TRUNC
7366 && !frac_rounded)
7368 bool have_carry = false;
7370 xop[2] = all_regs_rtx[s0];
7371 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
7372 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7373 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7374 &all_regs_rtx[src.regno_msb], plen, 2);
7375 if (!lsb_in_tmp_reg)
7377 unsigned sn = src.regno;
7378 if (sn < s0)
7380 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
7381 plen, 1);
7382 have_carry = true;
7384 while (++sn < s0)
7385 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
7386 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
7388 /* Add in C and the rounding value 127. */
7389 /* If the destination msb is a sign byte, and in LD_REGS,
7390 grab it as a temporary. */
7391 if (sign_bytes
7392 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
7393 dest.regno_msb))
7395 xop[3] = all_regs_rtx[dest.regno_msb];
7396 avr_asm_len ("ldi %3,127", xop, plen, 1);
7397 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
7398 : have_carry ? "adc %2,%3"
7399 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
7400 : "add %2,%3"),
7401 xop, plen, 1);
7403 else
7405 /* Fall back to use __zero_reg__ as a temporary. */
7406 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
7407 if (have_carry)
7408 avr_asm_len ("clt" CR_TAB "bld __zero_reg__,7", NULL, plen, 2);
7409 else
7410 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
7411 avr_asm_len ((have_carry && lsb_in_tmp_reg
7412 ? "adc __tmp_reg__,__zero_reg__"
7413 : have_carry ? "adc %2,__zero_reg__"
7414 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
7415 : "add %2,__zero_reg__"),
7416 xop, plen, 1);
7417 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
7419 for (d0 = dest.regno + zero_bytes;
7420 d0 <= dest.regno_msb - sign_bytes; d0++)
7421 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
7422 avr_asm_len (lsb_in_tmp_reg
7423 ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",
7424 xop, plen, 1);
7426 else if (MAY_CLOBBER (s0))
7427 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7428 else
7429 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7430 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7432 code_ashift = "rol %0";
7433 lsb_in_carry = true;
7436 if (shift == ASHIFT)
7438 for (d0 = dest.regno + zero_bytes;
7439 d0 <= dest.regno_msb - sign_bytes; d0++)
7441 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
7442 code_ashift = "rol %0";
7445 lsb_in_carry = false;
7446 sign_in_carry = true;
7449 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7450 ======= it in sign-extension below. */
7452 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7453 && src.ibyte > dest.ibyte)
7455 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
7457 if (MAY_CLOBBER (s0))
7458 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
7459 else
7460 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7461 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7463 msb_in_carry = true;
7466 /* Step 3: Sign-extend or zero-extend the destination as needed.
7467 ====== */
7469 if (sign_extend && !sign_in_carry)
7471 unsigned s0 = src.regno_msb;
7473 if (MAY_CLOBBER (s0))
7474 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7475 else
7476 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7477 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7479 sign_in_carry = true;
7482 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
7484 unsigned copies = 0;
7485 rtx movw = sign_extend ? NULL_RTX : clrw;
7487 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
7489 if (AVR_HAVE_MOVW && movw
7490 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
7492 xop[2] = all_regs_rtx[d0];
7493 xop[3] = movw;
7494 avr_asm_len ("movw %2,%3", xop, plen, 1);
7495 d0++;
7497 else
7499 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
7500 &all_regs_rtx[d0], plen, 1);
7502 if (++copies >= 2 && !movw && d0 % 2 == 1)
7503 movw = all_regs_rtx[d0-1];
7505 } /* for */
7508 /* Step 4: Right shift the destination. This might be needed for
7509 ====== conversions from unsigned to signed. */
7511 if (shift == ASHIFTRT)
7513 const char *code_ashiftrt = "lsr %0";
7515 if (sign_extend || msb_in_carry)
7516 code_ashiftrt = "ror %0";
7518 if (src.sbit && src.ibyte == dest.ibyte)
7519 code_ashiftrt = "asr %0";
7521 for (d0 = dest.regno_msb - sign_bytes;
7522 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
7524 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
7525 code_ashiftrt = "ror %0";
7529 #undef MAY_CLOBBER
7531 return "";
7535 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
7536 XOP[2] is the rounding point, a CONST_INT. The function prints the
7537 instruction sequence if PLEN = NULL and computes the length in words
7538 of the sequence if PLEN != NULL. Most of this function deals with
7539 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
7541 const char*
7542 avr_out_round (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
7544 enum machine_mode mode = GET_MODE (xop[0]);
7545 enum machine_mode imode = int_mode_for_mode (mode);
7546 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
7547 int fbit = (int) GET_MODE_FBIT (mode);
7548 double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
7549 // Lengths of PLUS and AND parts.
7550 int len_add = 0, *plen_add = plen ? &len_add : NULL;
7551 int len_and = 0, *plen_and = plen ? &len_and : NULL;
7553 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
7554 // the saturated addition so that we can emit the "rjmp 1f" before the
7555 // "0:" below.
7557 rtx xadd = const_fixed_from_double_int (i_add, mode);
7558 rtx xpattern, xsrc, op[4];
7560 xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
7561 ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
7562 : gen_rtx_US_PLUS (mode, xop[1], xadd);
7563 xpattern = gen_rtx_SET (VOIDmode, xop[0], xsrc);
7565 op[0] = xop[0];
7566 op[1] = xop[1];
7567 op[2] = xadd;
7568 avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);
7570 avr_asm_len ("rjmp 1f" CR_TAB
7571 "0:", NULL, plen_add, 1);
7573 // Keep all bits from RP and higher: ... 2^(-RP)
7574 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
7575 // Rounding point ^^^^^^^
7576 // Added above ^^^^^^^^^
7577 rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
7578 rtx xmask = immed_double_int_const (-i_add - i_add, imode);
7580 xpattern = gen_rtx_SET (VOIDmode, xreg, gen_rtx_AND (imode, xreg, xmask));
7582 op[0] = xreg;
7583 op[1] = xreg;
7584 op[2] = xmask;
7585 op[3] = gen_rtx_SCRATCH (QImode);
7586 avr_out_bitop (xpattern, op, plen_and);
7587 avr_asm_len ("1:", NULL, plen, 0);
7589 if (plen)
7590 *plen = len_add + len_and;
7592 return "";
7596 /* Create RTL split patterns for byte sized rotate expressions. This
7597 produces a series of move instructions and considers overlap situations.
7598 Overlapping non-HImode operands need a scratch register. */
7600 bool
7601 avr_rotate_bytes (rtx operands[])
7603 int i, j;
7604 enum machine_mode mode = GET_MODE (operands[0]);
7605 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
7606 bool same_reg = rtx_equal_p (operands[0], operands[1]);
7607 int num = INTVAL (operands[2]);
7608 rtx scratch = operands[3];
7609 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
7610 Word move if no scratch is needed, otherwise use size of scratch. */
7611 enum machine_mode move_mode = QImode;
7612 int move_size, offset, size;
7614 if (num & 0xf)
7615 move_mode = QImode;
7616 else if ((mode == SImode && !same_reg) || !overlapped)
7617 move_mode = HImode;
7618 else
7619 move_mode = GET_MODE (scratch);
7621 /* Force DI rotate to use QI moves since other DI moves are currently split
7622 into QI moves so forward propagation works better. */
7623 if (mode == DImode)
7624 move_mode = QImode;
7625 /* Make scratch smaller if needed. */
7626 if (SCRATCH != GET_CODE (scratch)
7627 && HImode == GET_MODE (scratch)
7628 && QImode == move_mode)
7629 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
7631 move_size = GET_MODE_SIZE (move_mode);
7632 /* Number of bytes/words to rotate. */
7633 offset = (num >> 3) / move_size;
7634 /* Number of moves needed. */
7635 size = GET_MODE_SIZE (mode) / move_size;
7636 /* Himode byte swap is special case to avoid a scratch register. */
7637 if (mode == HImode && same_reg)
7639 /* HImode byte swap, using xor. This is as quick as using scratch. */
7640 rtx src, dst;
7641 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
7642 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
7643 if (!rtx_equal_p (dst, src))
7645 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7646 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
7647 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7650 else
7652 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
7653 /* Create linked list of moves to determine move order. */
7654 struct {
7655 rtx src, dst;
7656 int links;
7657 } move[MAX_SIZE + 8];
7658 int blocked, moves;
7660 gcc_assert (size <= MAX_SIZE);
7661 /* Generate list of subreg moves. */
7662 for (i = 0; i < size; i++)
7664 int from = i;
7665 int to = (from + offset) % size;
7666 move[i].src = simplify_gen_subreg (move_mode, operands[1],
7667 mode, from * move_size);
7668 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
7669 mode, to * move_size);
7670 move[i].links = -1;
7672 /* Mark dependence where a dst of one move is the src of another move.
7673 The first move is a conflict as it must wait until second is
7674 performed. We ignore moves to self - we catch this later. */
7675 if (overlapped)
7676 for (i = 0; i < size; i++)
7677 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
7678 for (j = 0; j < size; j++)
7679 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
7681 /* The dst of move i is the src of move j. */
7682 move[i].links = j;
7683 break;
7686 blocked = -1;
7687 moves = 0;
7688 /* Go through move list and perform non-conflicting moves. As each
7689 non-overlapping move is made, it may remove other conflicts
7690 so the process is repeated until no conflicts remain. */
7693 blocked = -1;
7694 moves = 0;
7695 /* Emit move where dst is not also a src or we have used that
7696 src already. */
7697 for (i = 0; i < size; i++)
7698 if (move[i].src != NULL_RTX)
7700 if (move[i].links == -1
7701 || move[move[i].links].src == NULL_RTX)
7703 moves++;
7704 /* Ignore NOP moves to self. */
7705 if (!rtx_equal_p (move[i].dst, move[i].src))
7706 emit_move_insn (move[i].dst, move[i].src);
7708 /* Remove conflict from list. */
7709 move[i].src = NULL_RTX;
7711 else
7712 blocked = i;
7715 /* Check for deadlock. This is when no moves occurred and we have
7716 at least one blocked move. */
7717 if (moves == 0 && blocked != -1)
7719 /* Need to use scratch register to break deadlock.
7720 Add move to put dst of blocked move into scratch.
7721 When this move occurs, it will break chain deadlock.
7722 The scratch register is substituted for real move. */
7724 gcc_assert (SCRATCH != GET_CODE (scratch));
7726 move[size].src = move[blocked].dst;
7727 move[size].dst = scratch;
7728 /* Scratch move is never blocked. */
7729 move[size].links = -1;
7730 /* Make sure we have valid link. */
7731 gcc_assert (move[blocked].links != -1);
7732 /* Replace src of blocking move with scratch reg. */
7733 move[move[blocked].links].src = scratch;
7734 /* Make dependent on scratch move occurring. */
7735 move[blocked].links = size;
7736 size=size+1;
7739 while (blocked != -1);
7741 return true;
7745 /* Worker function for `ADJUST_INSN_LENGTH'. */
7746 /* Modifies the length assigned to instruction INSN
7747 LEN is the initially computed length of the insn. */
7750 avr_adjust_insn_length (rtx insn, int len)
7752 rtx *op = recog_data.operand;
7753 enum attr_adjust_len adjust_len;
7755 /* Some complex insns don't need length adjustment and therefore
7756 the length need not/must not be adjusted for these insns.
7757 It is easier to state this in an insn attribute "adjust_len" than
7758 to clutter up code here... */
7760 if (-1 == recog_memoized (insn))
7762 return len;
7765 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7767 adjust_len = get_attr_adjust_len (insn);
7769 if (adjust_len == ADJUST_LEN_NO)
7771 /* Nothing to adjust: The length from attribute "length" is fine.
7772 This is the default. */
7774 return len;
7777 /* Extract insn's operands. */
7779 extract_constrain_insn_cached (insn);
7781 /* Dispatch to right function. */
7783 switch (adjust_len)
7785 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
7786 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
7787 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
7789 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
7791 case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
7792 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
7794 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
7795 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
7796 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
7797 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
7798 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
7799 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
7800 case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
7802 case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
7803 case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
7804 case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;
7806 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
7807 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
7808 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
7809 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
7810 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
7812 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
7813 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
7814 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
7816 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
7817 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
7818 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
7820 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
7821 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
7822 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
7824 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
7825 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
7826 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
7828 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
7830 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
7832 default:
7833 gcc_unreachable();
7836 return len;
7839 /* Return nonzero if register REG dead after INSN. */
7842 reg_unused_after (rtx insn, rtx reg)
7844 return (dead_or_set_p (insn, reg)
7845 || (REG_P(reg) && _reg_unused_after (insn, reg)));
7848 /* Return nonzero if REG is not used after INSN.
7849 We assume REG is a reload reg, and therefore does
7850 not live past labels. It may live past calls or jumps though. */
7853 _reg_unused_after (rtx insn, rtx reg)
7855 enum rtx_code code;
7856 rtx set;
7858 /* If the reg is set by this instruction, then it is safe for our
7859 case. Disregard the case where this is a store to memory, since
7860 we are checking a register used in the store address. */
7861 set = single_set (insn);
7862 if (set && GET_CODE (SET_DEST (set)) != MEM
7863 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7864 return 1;
7866 while ((insn = NEXT_INSN (insn)))
7868 rtx set;
7869 code = GET_CODE (insn);
7871 #if 0
7872 /* If this is a label that existed before reload, then the register
7873 if dead here. However, if this is a label added by reorg, then
7874 the register may still be live here. We can't tell the difference,
7875 so we just ignore labels completely. */
7876 if (code == CODE_LABEL)
7877 return 1;
7878 /* else */
7879 #endif
7881 if (!INSN_P (insn))
7882 continue;
7884 if (code == JUMP_INSN)
7885 return 0;
7887 /* If this is a sequence, we must handle them all at once.
7888 We could have for instance a call that sets the target register,
7889 and an insn in a delay slot that uses the register. In this case,
7890 we must return 0. */
7891 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
7893 int i;
7894 int retval = 0;
7896 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7898 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
7899 rtx set = single_set (this_insn);
7901 if (CALL_P (this_insn))
7902 code = CALL_INSN;
7903 else if (JUMP_P (this_insn))
7905 if (INSN_ANNULLED_BRANCH_P (this_insn))
7906 return 0;
7907 code = JUMP_INSN;
7910 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7911 return 0;
7912 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7914 if (GET_CODE (SET_DEST (set)) != MEM)
7915 retval = 1;
7916 else
7917 return 0;
7919 if (set == 0
7920 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
7921 return 0;
7923 if (retval == 1)
7924 return 1;
7925 else if (code == JUMP_INSN)
7926 return 0;
7929 if (code == CALL_INSN)
7931 rtx tem;
7932 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
7933 if (GET_CODE (XEXP (tem, 0)) == USE
7934 && REG_P (XEXP (XEXP (tem, 0), 0))
7935 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
7936 return 0;
7937 if (call_used_regs[REGNO (reg)])
7938 return 1;
7941 set = single_set (insn);
7943 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7944 return 0;
7945 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7946 return GET_CODE (SET_DEST (set)) != MEM;
7947 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
7948 return 0;
7950 return 1;
7954 /* Implement `TARGET_ASM_INTEGER'. */
7955 /* Target hook for assembling integer objects. The AVR version needs
7956 special handling for references to certain labels. */
7958 static bool
7959 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
7961 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
7962 && text_segment_operand (x, VOIDmode))
7964 fputs ("\t.word\tgs(", asm_out_file);
7965 output_addr_const (asm_out_file, x);
7966 fputs (")\n", asm_out_file);
7968 return true;
7970 else if (GET_MODE (x) == PSImode)
7972 /* This needs binutils 2.23+, see PR binutils/13503 */
7974 fputs ("\t.byte\tlo8(", asm_out_file);
7975 output_addr_const (asm_out_file, x);
7976 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7978 fputs ("\t.byte\thi8(", asm_out_file);
7979 output_addr_const (asm_out_file, x);
7980 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7982 fputs ("\t.byte\thh8(", asm_out_file);
7983 output_addr_const (asm_out_file, x);
7984 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7986 return true;
7988 else if (CONST_FIXED_P (x))
7990 unsigned n;
7992 /* varasm fails to handle big fixed modes that don't fit in hwi. */
7994 for (n = 0; n < size; n++)
7996 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
7997 default_assemble_integer (xn, 1, aligned_p);
8000 return true;
8003 return default_assemble_integer (x, size, aligned_p);
8007 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8008 /* Return value is nonzero if pseudos that have been
8009 assigned to registers of class CLASS would likely be spilled
8010 because registers of CLASS are needed for spill registers. */
8012 static bool
8013 avr_class_likely_spilled_p (reg_class_t c)
8015 return (c != ALL_REGS && c != ADDW_REGS);
8019 /* Valid attributes:
8020 progmem - Put data to program memory.
8021 signal - Make a function to be hardware interrupt.
8022 After function prologue interrupts remain disabled.
8023 interrupt - Make a function to be hardware interrupt. Before function
8024 prologue interrupts are enabled by means of SEI.
8025 naked - Don't generate function prologue/epilogue and RET
8026 instruction. */
8028 /* Handle a "progmem" attribute; arguments as in
8029 struct attribute_spec.handler. */
8031 static tree
8032 avr_handle_progmem_attribute (tree *node, tree name,
8033 tree args ATTRIBUTE_UNUSED,
8034 int flags ATTRIBUTE_UNUSED,
8035 bool *no_add_attrs)
8037 if (DECL_P (*node))
8039 if (TREE_CODE (*node) == TYPE_DECL)
8041 /* This is really a decl attribute, not a type attribute,
8042 but try to handle it for GCC 3.0 backwards compatibility. */
8044 tree type = TREE_TYPE (*node);
8045 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
8046 tree newtype = build_type_attribute_variant (type, attr);
8048 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
8049 TREE_TYPE (*node) = newtype;
8050 *no_add_attrs = true;
8052 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
8054 *no_add_attrs = false;
8056 else
8058 warning (OPT_Wattributes, "%qE attribute ignored",
8059 name);
8060 *no_add_attrs = true;
8064 return NULL_TREE;
8067 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8068 struct attribute_spec.handler. */
8070 static tree
8071 avr_handle_fndecl_attribute (tree *node, tree name,
8072 tree args ATTRIBUTE_UNUSED,
8073 int flags ATTRIBUTE_UNUSED,
8074 bool *no_add_attrs)
8076 if (TREE_CODE (*node) != FUNCTION_DECL)
8078 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8079 name);
8080 *no_add_attrs = true;
8083 return NULL_TREE;
8086 static tree
8087 avr_handle_fntype_attribute (tree *node, tree name,
8088 tree args ATTRIBUTE_UNUSED,
8089 int flags ATTRIBUTE_UNUSED,
8090 bool *no_add_attrs)
8092 if (TREE_CODE (*node) != FUNCTION_TYPE)
8094 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8095 name);
8096 *no_add_attrs = true;
8099 return NULL_TREE;
8103 /* AVR attributes. */
8104 static const struct attribute_spec
8105 avr_attribute_table[] =
8107 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
8108 affects_type_identity } */
8109 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
8110 false },
8111 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8112 false },
8113 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8114 false },
8115 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
8116 false },
8117 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
8118 false },
8119 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
8120 false },
8121 { NULL, 0, 0, false, false, false, NULL, false }
8125 /* Look if DECL shall be placed in program memory space by
8126 means of attribute `progmem' or some address-space qualifier.
8127 Return non-zero if DECL is data that must end up in Flash and
8128 zero if the data lives in RAM (.bss, .data, .rodata, ...).
8130 Return 2 if DECL is located in 24-bit flash address-space
8131 Return 1 if DECL is located in 16-bit flash address-space
8132 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
8133 Return 0 otherwise */
8136 avr_progmem_p (tree decl, tree attributes)
8138 tree a;
8140 if (TREE_CODE (decl) != VAR_DECL)
8141 return 0;
8143 if (avr_decl_memx_p (decl))
8144 return 2;
8146 if (avr_decl_flash_p (decl))
8147 return 1;
8149 if (NULL_TREE
8150 != lookup_attribute ("progmem", attributes))
8151 return -1;
8153 a = decl;
8156 a = TREE_TYPE(a);
8157 while (TREE_CODE (a) == ARRAY_TYPE);
8159 if (a == error_mark_node)
8160 return 0;
8162 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
8163 return -1;
8165 return 0;
8169 /* Scan type TYP for pointer references to address space ASn.
8170 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
8171 the AS are also declared to be CONST.
8172 Otherwise, return the respective address space, i.e. a value != 0. */
8174 static addr_space_t
8175 avr_nonconst_pointer_addrspace (tree typ)
8177 while (ARRAY_TYPE == TREE_CODE (typ))
8178 typ = TREE_TYPE (typ);
8180 if (POINTER_TYPE_P (typ))
8182 addr_space_t as;
8183 tree target = TREE_TYPE (typ);
8185 /* Pointer to function: Test the function's return type. */
8187 if (FUNCTION_TYPE == TREE_CODE (target))
8188 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
8190 /* "Ordinary" pointers... */
8192 while (TREE_CODE (target) == ARRAY_TYPE)
8193 target = TREE_TYPE (target);
8195 /* Pointers to non-generic address space must be const.
8196 Refuse address spaces outside the device's flash. */
8198 as = TYPE_ADDR_SPACE (target);
8200 if (!ADDR_SPACE_GENERIC_P (as)
8201 && (!TYPE_READONLY (target)
8202 || avr_addrspace[as].segment >= avr_current_device->n_flash))
8204 return as;
8207 /* Scan pointer's target type. */
8209 return avr_nonconst_pointer_addrspace (target);
8212 return ADDR_SPACE_GENERIC;
8216 /* Sanity check NODE so that all pointers targeting non-generic address spaces
8217 go along with CONST qualifier. Writing to these address spaces should
8218 be detected and complained about as early as possible. */
8220 static bool
8221 avr_pgm_check_var_decl (tree node)
8223 const char *reason = NULL;
8225 addr_space_t as = ADDR_SPACE_GENERIC;
8227 gcc_assert (as == 0);
8229 if (avr_log.progmem)
8230 avr_edump ("%?: %t\n", node);
8232 switch (TREE_CODE (node))
8234 default:
8235 break;
8237 case VAR_DECL:
8238 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8239 reason = "variable";
8240 break;
8242 case PARM_DECL:
8243 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8244 reason = "function parameter";
8245 break;
8247 case FIELD_DECL:
8248 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8249 reason = "structure field";
8250 break;
8252 case FUNCTION_DECL:
8253 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
8255 reason = "return type of function";
8256 break;
8258 case POINTER_TYPE:
8259 if (as = avr_nonconst_pointer_addrspace (node), as)
8260 reason = "pointer";
8261 break;
8264 if (reason)
8266 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
8268 if (TYPE_P (node))
8269 error ("%qT uses address space %qs beyond flash of %qs",
8270 node, avr_addrspace[as].name, avr_current_device->name);
8271 else
8272 error ("%s %q+D uses address space %qs beyond flash of %qs",
8273 reason, node, avr_addrspace[as].name,
8274 avr_current_device->name);
8276 else
8278 if (TYPE_P (node))
8279 error ("pointer targeting address space %qs must be const in %qT",
8280 avr_addrspace[as].name, node);
8281 else
8282 error ("pointer targeting address space %qs must be const"
8283 " in %s %q+D",
8284 avr_addrspace[as].name, reason, node);
8288 return reason == NULL;
8292 /* Add the section attribute if the variable is in progmem. */
8294 static void
8295 avr_insert_attributes (tree node, tree *attributes)
8297 avr_pgm_check_var_decl (node);
8299 if (TREE_CODE (node) == VAR_DECL
8300 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
8301 && avr_progmem_p (node, *attributes))
8303 addr_space_t as;
8304 tree node0 = node;
8306 /* For C++, we have to peel arrays in order to get correct
8307 determination of readonlyness. */
8310 node0 = TREE_TYPE (node0);
8311 while (TREE_CODE (node0) == ARRAY_TYPE);
8313 if (error_mark_node == node0)
8314 return;
8316 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
8318 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
8320 error ("variable %q+D located in address space %qs"
8321 " beyond flash of %qs",
8322 node, avr_addrspace[as].name, avr_current_device->name);
8325 if (!TYPE_READONLY (node0)
8326 && !TREE_READONLY (node))
8328 const char *reason = "__attribute__((progmem))";
8330 if (!ADDR_SPACE_GENERIC_P (as))
8331 reason = avr_addrspace[as].name;
8333 if (avr_log.progmem)
8334 avr_edump ("\n%?: %t\n%t\n", node, node0);
8336 error ("variable %q+D must be const in order to be put into"
8337 " read-only section by means of %qs", node, reason);
8343 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8344 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8345 /* Track need of __do_clear_bss. */
8347 void
8348 avr_asm_output_aligned_decl_common (FILE * stream,
8349 const_tree decl ATTRIBUTE_UNUSED,
8350 const char *name,
8351 unsigned HOST_WIDE_INT size,
8352 unsigned int align, bool local_p)
8354 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8355 There is no need to trigger __do_clear_bss code for them. */
8357 if (!STR_PREFIX_P (name, "__gnu_lto"))
8358 avr_need_clear_bss_p = true;
8360 if (local_p)
8361 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
8362 else
8363 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
8367 /* Unnamed section callback for data_section
8368 to track need of __do_copy_data. */
8370 static void
8371 avr_output_data_section_asm_op (const void *data)
8373 avr_need_copy_data_p = true;
8375 /* Dispatch to default. */
8376 output_section_asm_op (data);
8380 /* Unnamed section callback for bss_section
8381 to track need of __do_clear_bss. */
8383 static void
8384 avr_output_bss_section_asm_op (const void *data)
8386 avr_need_clear_bss_p = true;
8388 /* Dispatch to default. */
8389 output_section_asm_op (data);
8393 /* Unnamed section callback for progmem*.data sections. */
8395 static void
8396 avr_output_progmem_section_asm_op (const void *data)
8398 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
8399 (const char*) data);
8403 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8405 static void
8406 avr_asm_init_sections (void)
8408 /* Set up a section for jump tables. Alignment is handled by
8409 ASM_OUTPUT_BEFORE_CASE_LABEL. */
8411 if (AVR_HAVE_JMP_CALL)
8413 progmem_swtable_section
8414 = get_unnamed_section (0, output_section_asm_op,
8415 "\t.section\t.progmem.gcc_sw_table"
8416 ",\"a\",@progbits");
8418 else
8420 progmem_swtable_section
8421 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
8422 "\t.section\t.progmem.gcc_sw_table"
8423 ",\"ax\",@progbits");
8426 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8427 resp. `avr_need_copy_data_p'. */
8429 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
8430 data_section->unnamed.callback = avr_output_data_section_asm_op;
8431 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
8435 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8437 static section*
8438 avr_asm_function_rodata_section (tree decl)
8440 /* If a function is unused and optimized out by -ffunction-sections
8441 and --gc-sections, ensure that the same will happen for its jump
8442 tables by putting them into individual sections. */
8444 unsigned int flags;
8445 section * frodata;
8447 /* Get the frodata section from the default function in varasm.c
8448 but treat function-associated data-like jump tables as code
8449 rather than as user defined data. AVR has no constant pools. */
8451 int fdata = flag_data_sections;
8453 flag_data_sections = flag_function_sections;
8454 frodata = default_function_rodata_section (decl);
8455 flag_data_sections = fdata;
8456 flags = frodata->common.flags;
8459 if (frodata != readonly_data_section
8460 && flags & SECTION_NAMED)
8462 /* Adjust section flags and replace section name prefix. */
8464 unsigned int i;
8466 static const char* const prefix[] =
8468 ".rodata", ".progmem.gcc_sw_table",
8469 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8472 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
8474 const char * old_prefix = prefix[i];
8475 const char * new_prefix = prefix[i+1];
8476 const char * name = frodata->named.name;
8478 if (STR_PREFIX_P (name, old_prefix))
8480 const char *rname = ACONCAT ((new_prefix,
8481 name + strlen (old_prefix), NULL));
8482 flags &= ~SECTION_CODE;
8483 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
8485 return get_section (rname, flags, frodata->named.decl);
8490 return progmem_swtable_section;
8494 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8495 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8497 static void
8498 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
8500 if (flags & AVR_SECTION_PROGMEM)
8502 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
8503 const char *old_prefix = ".rodata";
8504 const char *new_prefix = avr_addrspace[as].section_name;
8506 if (STR_PREFIX_P (name, old_prefix))
8508 const char *sname = ACONCAT ((new_prefix,
8509 name + strlen (old_prefix), NULL));
8510 default_elf_asm_named_section (sname, flags, decl);
8511 return;
8514 default_elf_asm_named_section (new_prefix, flags, decl);
8515 return;
8518 if (!avr_need_copy_data_p)
8519 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
8520 || STR_PREFIX_P (name, ".rodata")
8521 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
8523 if (!avr_need_clear_bss_p)
8524 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
8526 default_elf_asm_named_section (name, flags, decl);
8530 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8532 static unsigned int
8533 avr_section_type_flags (tree decl, const char *name, int reloc)
8535 unsigned int flags = default_section_type_flags (decl, name, reloc);
8537 if (STR_PREFIX_P (name, ".noinit"))
8539 if (decl && TREE_CODE (decl) == VAR_DECL
8540 && DECL_INITIAL (decl) == NULL_TREE)
8541 flags |= SECTION_BSS; /* @nobits */
8542 else
8543 warning (0, "only uninitialized variables can be placed in the "
8544 ".noinit section");
8547 if (decl && DECL_P (decl)
8548 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8550 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8552 /* Attribute progmem puts data in generic address space.
8553 Set section flags as if it was in __flash to get the right
8554 section prefix in the remainder. */
8556 if (ADDR_SPACE_GENERIC_P (as))
8557 as = ADDR_SPACE_FLASH;
8559 flags |= as * SECTION_MACH_DEP;
8560 flags &= ~SECTION_WRITE;
8561 flags &= ~SECTION_BSS;
8564 return flags;
8568 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8570 static void
8571 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
8573 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8574 readily available, see PR34734. So we postpone the warning
8575 about uninitialized data in program memory section until here. */
8577 if (new_decl_p
8578 && decl && DECL_P (decl)
8579 && NULL_TREE == DECL_INITIAL (decl)
8580 && !DECL_EXTERNAL (decl)
8581 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8583 warning (OPT_Wuninitialized,
8584 "uninitialized variable %q+D put into "
8585 "program memory area", decl);
8588 default_encode_section_info (decl, rtl, new_decl_p);
8590 if (decl && DECL_P (decl)
8591 && TREE_CODE (decl) != FUNCTION_DECL
8592 && MEM_P (rtl)
8593 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
8595 rtx sym = XEXP (rtl, 0);
8596 tree type = TREE_TYPE (decl);
8597 if (type == error_mark_node)
8598 return;
8599 addr_space_t as = TYPE_ADDR_SPACE (type);
8601 /* PSTR strings are in generic space but located in flash:
8602 patch address space. */
8604 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8605 as = ADDR_SPACE_FLASH;
8607 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
8612 /* Implement `TARGET_ASM_SELECT_SECTION' */
8614 static section *
8615 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
8617 section * sect = default_elf_select_section (decl, reloc, align);
8619 if (decl && DECL_P (decl)
8620 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8622 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8624 /* __progmem__ goes in generic space but shall be allocated to
8625 .progmem.data */
8627 if (ADDR_SPACE_GENERIC_P (as))
8628 as = ADDR_SPACE_FLASH;
8630 if (sect->common.flags & SECTION_NAMED)
8632 const char * name = sect->named.name;
8633 const char * old_prefix = ".rodata";
8634 const char * new_prefix = avr_addrspace[as].section_name;
8636 if (STR_PREFIX_P (name, old_prefix))
8638 const char *sname = ACONCAT ((new_prefix,
8639 name + strlen (old_prefix), NULL));
8640 return get_section (sname, sect->common.flags, sect->named.decl);
8644 if (!progmem_section[as])
8646 progmem_section[as]
8647 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
8648 avr_addrspace[as].section_name);
8651 return progmem_section[as];
8654 return sect;
8657 /* Implement `TARGET_ASM_FILE_START'. */
8658 /* Outputs some text at the start of each assembler file. */
8660 static void
8661 avr_file_start (void)
8663 int sfr_offset = avr_current_arch->sfr_offset;
8665 if (avr_current_arch->asm_only)
8666 error ("MCU %qs supported for assembler only", avr_current_device->name);
8668 default_file_start ();
8670 /* Print I/O addresses of some SFRs used with IN and OUT. */
8672 if (AVR_HAVE_SPH)
8673 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
8675 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
8676 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
8677 if (AVR_HAVE_RAMPZ)
8678 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
8679 if (AVR_HAVE_RAMPY)
8680 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
8681 if (AVR_HAVE_RAMPX)
8682 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
8683 if (AVR_HAVE_RAMPD)
8684 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
8685 if (AVR_XMEGA)
8686 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
8687 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
8688 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
8692 /* Implement `TARGET_ASM_FILE_END'. */
8693 /* Outputs to the stdio stream FILE some
8694 appropriate text to go at the end of an assembler file. */
8696 static void
8697 avr_file_end (void)
8699 /* Output these only if there is anything in the
8700 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8701 input section(s) - some code size can be saved by not
8702 linking in the initialization code from libgcc if resp.
8703 sections are empty, see PR18145. */
8705 if (avr_need_copy_data_p)
8706 fputs (".global __do_copy_data\n", asm_out_file);
8708 if (avr_need_clear_bss_p)
8709 fputs (".global __do_clear_bss\n", asm_out_file);
8713 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8714 /* Choose the order in which to allocate hard registers for
8715 pseudo-registers local to a basic block.
8717 Store the desired register order in the array `reg_alloc_order'.
8718 Element 0 should be the register to allocate first; element 1, the
8719 next register; and so on. */
8721 void
8722 avr_adjust_reg_alloc_order (void)
8724 unsigned int i;
8725 static const int order_0[] =
8727 24, 25,
8728 18, 19, 20, 21, 22, 23,
8729 30, 31,
8730 26, 27, 28, 29,
8731 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8732 0, 1,
8733 32, 33, 34, 35
8735 static const int order_1[] =
8737 18, 19, 20, 21, 22, 23, 24, 25,
8738 30, 31,
8739 26, 27, 28, 29,
8740 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8741 0, 1,
8742 32, 33, 34, 35
8744 static const int order_2[] =
8746 25, 24, 23, 22, 21, 20, 19, 18,
8747 30, 31,
8748 26, 27, 28, 29,
8749 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8750 1, 0,
8751 32, 33, 34, 35
8754 const int *order = (TARGET_ORDER_1 ? order_1 :
8755 TARGET_ORDER_2 ? order_2 :
8756 order_0);
8757 for (i = 0; i < ARRAY_SIZE (order_0); ++i)
8758 reg_alloc_order[i] = order[i];
8762 /* Implement `TARGET_REGISTER_MOVE_COST' */
8764 static int
8765 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
8766 reg_class_t from, reg_class_t to)
8768 return (from == STACK_REG ? 6
8769 : to == STACK_REG ? 12
8770 : 2);
8774 /* Implement `TARGET_MEMORY_MOVE_COST' */
8776 static int
8777 avr_memory_move_cost (enum machine_mode mode,
8778 reg_class_t rclass ATTRIBUTE_UNUSED,
8779 bool in ATTRIBUTE_UNUSED)
8781 return (mode == QImode ? 2
8782 : mode == HImode ? 4
8783 : mode == SImode ? 8
8784 : mode == SFmode ? 8
8785 : 16);
8789 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8790 cost of an RTX operand given its context. X is the rtx of the
8791 operand, MODE is its mode, and OUTER is the rtx_code of this
8792 operand's parent operator. */
8794 static int
8795 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
8796 int opno, bool speed)
8798 enum rtx_code code = GET_CODE (x);
8799 int total;
8801 switch (code)
8803 case REG:
8804 case SUBREG:
8805 return 0;
8807 case CONST_INT:
8808 case CONST_FIXED:
8809 case CONST_DOUBLE:
8810 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
8812 default:
8813 break;
8816 total = 0;
8817 avr_rtx_costs (x, code, outer, opno, &total, speed);
8818 return total;
8821 /* Worker function for AVR backend's rtx_cost function.
8822 X is rtx expression whose cost is to be calculated.
8823 Return true if the complete cost has been computed.
8824 Return false if subexpressions should be scanned.
8825 In either case, *TOTAL contains the cost result. */
8827 static bool
8828 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
8829 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
8831 enum rtx_code code = (enum rtx_code) codearg;
8832 enum machine_mode mode = GET_MODE (x);
8833 HOST_WIDE_INT val;
8835 switch (code)
8837 case CONST_INT:
8838 case CONST_FIXED:
8839 case CONST_DOUBLE:
8840 case SYMBOL_REF:
8841 case CONST:
8842 case LABEL_REF:
8843 /* Immediate constants are as cheap as registers. */
8844 *total = 0;
8845 return true;
8847 case MEM:
8848 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8849 return true;
8851 case NEG:
8852 switch (mode)
8854 case QImode:
8855 case SFmode:
8856 *total = COSTS_N_INSNS (1);
8857 break;
8859 case HImode:
8860 case PSImode:
8861 case SImode:
8862 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
8863 break;
8865 default:
8866 return false;
8868 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8869 return true;
8871 case ABS:
8872 switch (mode)
8874 case QImode:
8875 case SFmode:
8876 *total = COSTS_N_INSNS (1);
8877 break;
8879 default:
8880 return false;
8882 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8883 return true;
8885 case NOT:
8886 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8887 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8888 return true;
8890 case ZERO_EXTEND:
8891 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
8892 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8893 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8894 return true;
8896 case SIGN_EXTEND:
8897 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
8898 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8899 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8900 return true;
8902 case PLUS:
8903 switch (mode)
8905 case QImode:
8906 if (AVR_HAVE_MUL
8907 && MULT == GET_CODE (XEXP (x, 0))
8908 && register_operand (XEXP (x, 1), QImode))
8910 /* multiply-add */
8911 *total = COSTS_N_INSNS (speed ? 4 : 3);
8912 /* multiply-add with constant: will be split and load constant. */
8913 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8914 *total = COSTS_N_INSNS (1) + *total;
8915 return true;
8917 *total = COSTS_N_INSNS (1);
8918 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8919 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8920 break;
8922 case HImode:
8923 if (AVR_HAVE_MUL
8924 && (MULT == GET_CODE (XEXP (x, 0))
8925 || ASHIFT == GET_CODE (XEXP (x, 0)))
8926 && register_operand (XEXP (x, 1), HImode)
8927 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
8928 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
8930 /* multiply-add */
8931 *total = COSTS_N_INSNS (speed ? 5 : 4);
8932 /* multiply-add with constant: will be split and load constant. */
8933 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8934 *total = COSTS_N_INSNS (1) + *total;
8935 return true;
8937 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8939 *total = COSTS_N_INSNS (2);
8940 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8941 speed);
8943 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8944 *total = COSTS_N_INSNS (1);
8945 else
8946 *total = COSTS_N_INSNS (2);
8947 break;
8949 case PSImode:
8950 if (!CONST_INT_P (XEXP (x, 1)))
8952 *total = COSTS_N_INSNS (3);
8953 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8954 speed);
8956 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8957 *total = COSTS_N_INSNS (2);
8958 else
8959 *total = COSTS_N_INSNS (3);
8960 break;
8962 case SImode:
8963 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8965 *total = COSTS_N_INSNS (4);
8966 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8967 speed);
8969 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8970 *total = COSTS_N_INSNS (1);
8971 else
8972 *total = COSTS_N_INSNS (4);
8973 break;
8975 default:
8976 return false;
8978 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8979 return true;
8981 case MINUS:
8982 if (AVR_HAVE_MUL
8983 && QImode == mode
8984 && register_operand (XEXP (x, 0), QImode)
8985 && MULT == GET_CODE (XEXP (x, 1)))
8987 /* multiply-sub */
8988 *total = COSTS_N_INSNS (speed ? 4 : 3);
8989 /* multiply-sub with constant: will be split and load constant. */
8990 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
8991 *total = COSTS_N_INSNS (1) + *total;
8992 return true;
8994 if (AVR_HAVE_MUL
8995 && HImode == mode
8996 && register_operand (XEXP (x, 0), HImode)
8997 && (MULT == GET_CODE (XEXP (x, 1))
8998 || ASHIFT == GET_CODE (XEXP (x, 1)))
8999 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
9000 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
9002 /* multiply-sub */
9003 *total = COSTS_N_INSNS (speed ? 5 : 4);
9004 /* multiply-sub with constant: will be split and load constant. */
9005 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
9006 *total = COSTS_N_INSNS (1) + *total;
9007 return true;
9009 /* FALLTHRU */
9010 case AND:
9011 case IOR:
9012 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9013 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9014 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9015 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9016 return true;
9018 case XOR:
9019 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9020 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9021 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9022 return true;
9024 case MULT:
9025 switch (mode)
9027 case QImode:
9028 if (AVR_HAVE_MUL)
9029 *total = COSTS_N_INSNS (!speed ? 3 : 4);
9030 else if (!speed)
9031 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9032 else
9033 return false;
9034 break;
9036 case HImode:
9037 if (AVR_HAVE_MUL)
9039 rtx op0 = XEXP (x, 0);
9040 rtx op1 = XEXP (x, 1);
9041 enum rtx_code code0 = GET_CODE (op0);
9042 enum rtx_code code1 = GET_CODE (op1);
9043 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
9044 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
9046 if (ex0
9047 && (u8_operand (op1, HImode)
9048 || s8_operand (op1, HImode)))
9050 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9051 return true;
9053 if (ex0
9054 && register_operand (op1, HImode))
9056 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9057 return true;
9059 else if (ex0 || ex1)
9061 *total = COSTS_N_INSNS (!speed ? 3 : 5);
9062 return true;
9064 else if (register_operand (op0, HImode)
9065 && (u8_operand (op1, HImode)
9066 || s8_operand (op1, HImode)))
9068 *total = COSTS_N_INSNS (!speed ? 6 : 9);
9069 return true;
9071 else
9072 *total = COSTS_N_INSNS (!speed ? 7 : 10);
9074 else if (!speed)
9075 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9076 else
9077 return false;
9078 break;
9080 case PSImode:
9081 if (!speed)
9082 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9083 else
9084 *total = 10;
9085 break;
9087 case SImode:
9088 if (AVR_HAVE_MUL)
9090 if (!speed)
9092 /* Add some additional costs besides CALL like moves etc. */
9094 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9096 else
9098 /* Just a rough estimate. Even with -O2 we don't want bulky
9099 code expanded inline. */
9101 *total = COSTS_N_INSNS (25);
9104 else
9106 if (speed)
9107 *total = COSTS_N_INSNS (300);
9108 else
9109 /* Add some additional costs besides CALL like moves etc. */
9110 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9113 return true;
9115 default:
9116 return false;
9118 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9119 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9120 return true;
9122 case DIV:
9123 case MOD:
9124 case UDIV:
9125 case UMOD:
9126 if (!speed)
9127 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9128 else
9129 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
9130 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9131 /* For div/mod with const-int divisor we have at least the cost of
9132 loading the divisor. */
9133 if (CONST_INT_P (XEXP (x, 1)))
9134 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
9135 /* Add some overall penaly for clobbering and moving around registers */
9136 *total += COSTS_N_INSNS (2);
9137 return true;
9139 case ROTATE:
9140 switch (mode)
9142 case QImode:
9143 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
9144 *total = COSTS_N_INSNS (1);
9146 break;
9148 case HImode:
9149 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
9150 *total = COSTS_N_INSNS (3);
9152 break;
9154 case SImode:
9155 if (CONST_INT_P (XEXP (x, 1)))
9156 switch (INTVAL (XEXP (x, 1)))
9158 case 8:
9159 case 24:
9160 *total = COSTS_N_INSNS (5);
9161 break;
9162 case 16:
9163 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
9164 break;
9166 break;
9168 default:
9169 return false;
9171 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9172 return true;
9174 case ASHIFT:
9175 switch (mode)
9177 case QImode:
9178 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9180 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9181 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9182 speed);
9184 else
9186 val = INTVAL (XEXP (x, 1));
9187 if (val == 7)
9188 *total = COSTS_N_INSNS (3);
9189 else if (val >= 0 && val <= 7)
9190 *total = COSTS_N_INSNS (val);
9191 else
9192 *total = COSTS_N_INSNS (1);
9194 break;
9196 case HImode:
9197 if (AVR_HAVE_MUL)
9199 if (const_2_to_7_operand (XEXP (x, 1), HImode)
9200 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
9201 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
9203 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9204 return true;
9208 if (const1_rtx == (XEXP (x, 1))
9209 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
9211 *total = COSTS_N_INSNS (2);
9212 return true;
9215 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9217 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9218 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9219 speed);
9221 else
9222 switch (INTVAL (XEXP (x, 1)))
9224 case 0:
9225 *total = 0;
9226 break;
9227 case 1:
9228 case 8:
9229 *total = COSTS_N_INSNS (2);
9230 break;
9231 case 9:
9232 *total = COSTS_N_INSNS (3);
9233 break;
9234 case 2:
9235 case 3:
9236 case 10:
9237 case 15:
9238 *total = COSTS_N_INSNS (4);
9239 break;
9240 case 7:
9241 case 11:
9242 case 12:
9243 *total = COSTS_N_INSNS (5);
9244 break;
9245 case 4:
9246 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9247 break;
9248 case 6:
9249 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9250 break;
9251 case 5:
9252 *total = COSTS_N_INSNS (!speed ? 5 : 10);
9253 break;
9254 default:
9255 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9256 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9257 speed);
9259 break;
9261 case PSImode:
9262 if (!CONST_INT_P (XEXP (x, 1)))
9264 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9266 else
9267 switch (INTVAL (XEXP (x, 1)))
9269 case 0:
9270 *total = 0;
9271 break;
9272 case 1:
9273 case 8:
9274 case 16:
9275 *total = COSTS_N_INSNS (3);
9276 break;
9277 case 23:
9278 *total = COSTS_N_INSNS (5);
9279 break;
9280 default:
9281 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9282 break;
9284 break;
9286 case SImode:
9287 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9289 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9290 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9291 speed);
9293 else
9294 switch (INTVAL (XEXP (x, 1)))
9296 case 0:
9297 *total = 0;
9298 break;
9299 case 24:
9300 *total = COSTS_N_INSNS (3);
9301 break;
9302 case 1:
9303 case 8:
9304 case 16:
9305 *total = COSTS_N_INSNS (4);
9306 break;
9307 case 31:
9308 *total = COSTS_N_INSNS (6);
9309 break;
9310 case 2:
9311 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9312 break;
9313 default:
9314 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9315 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9316 speed);
9318 break;
9320 default:
9321 return false;
9323 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9324 return true;
9326 case ASHIFTRT:
9327 switch (mode)
9329 case QImode:
9330 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9332 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9333 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9334 speed);
9336 else
9338 val = INTVAL (XEXP (x, 1));
9339 if (val == 6)
9340 *total = COSTS_N_INSNS (4);
9341 else if (val == 7)
9342 *total = COSTS_N_INSNS (2);
9343 else if (val >= 0 && val <= 7)
9344 *total = COSTS_N_INSNS (val);
9345 else
9346 *total = COSTS_N_INSNS (1);
9348 break;
9350 case HImode:
9351 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9353 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9354 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9355 speed);
9357 else
9358 switch (INTVAL (XEXP (x, 1)))
9360 case 0:
9361 *total = 0;
9362 break;
9363 case 1:
9364 *total = COSTS_N_INSNS (2);
9365 break;
9366 case 15:
9367 *total = COSTS_N_INSNS (3);
9368 break;
9369 case 2:
9370 case 7:
9371 case 8:
9372 case 9:
9373 *total = COSTS_N_INSNS (4);
9374 break;
9375 case 10:
9376 case 14:
9377 *total = COSTS_N_INSNS (5);
9378 break;
9379 case 11:
9380 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9381 break;
9382 case 12:
9383 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9384 break;
9385 case 6:
9386 case 13:
9387 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9388 break;
9389 default:
9390 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9391 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9392 speed);
9394 break;
9396 case PSImode:
9397 if (!CONST_INT_P (XEXP (x, 1)))
9399 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9401 else
9402 switch (INTVAL (XEXP (x, 1)))
9404 case 0:
9405 *total = 0;
9406 break;
9407 case 1:
9408 *total = COSTS_N_INSNS (3);
9409 break;
9410 case 16:
9411 case 8:
9412 *total = COSTS_N_INSNS (5);
9413 break;
9414 case 23:
9415 *total = COSTS_N_INSNS (4);
9416 break;
9417 default:
9418 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9419 break;
9421 break;
9423 case SImode:
9424 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9426 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9427 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9428 speed);
9430 else
9431 switch (INTVAL (XEXP (x, 1)))
9433 case 0:
9434 *total = 0;
9435 break;
9436 case 1:
9437 *total = COSTS_N_INSNS (4);
9438 break;
9439 case 8:
9440 case 16:
9441 case 24:
9442 *total = COSTS_N_INSNS (6);
9443 break;
9444 case 2:
9445 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9446 break;
9447 case 31:
9448 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
9449 break;
9450 default:
9451 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9452 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9453 speed);
9455 break;
9457 default:
9458 return false;
9460 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9461 return true;
9463 case LSHIFTRT:
9464 switch (mode)
9466 case QImode:
9467 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9469 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9470 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9471 speed);
9473 else
9475 val = INTVAL (XEXP (x, 1));
9476 if (val == 7)
9477 *total = COSTS_N_INSNS (3);
9478 else if (val >= 0 && val <= 7)
9479 *total = COSTS_N_INSNS (val);
9480 else
9481 *total = COSTS_N_INSNS (1);
9483 break;
9485 case HImode:
9486 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9488 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9489 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9490 speed);
9492 else
9493 switch (INTVAL (XEXP (x, 1)))
9495 case 0:
9496 *total = 0;
9497 break;
9498 case 1:
9499 case 8:
9500 *total = COSTS_N_INSNS (2);
9501 break;
9502 case 9:
9503 *total = COSTS_N_INSNS (3);
9504 break;
9505 case 2:
9506 case 10:
9507 case 15:
9508 *total = COSTS_N_INSNS (4);
9509 break;
9510 case 7:
9511 case 11:
9512 *total = COSTS_N_INSNS (5);
9513 break;
9514 case 3:
9515 case 12:
9516 case 13:
9517 case 14:
9518 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9519 break;
9520 case 4:
9521 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9522 break;
9523 case 5:
9524 case 6:
9525 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9526 break;
9527 default:
9528 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9529 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9530 speed);
9532 break;
9534 case PSImode:
9535 if (!CONST_INT_P (XEXP (x, 1)))
9537 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9539 else
9540 switch (INTVAL (XEXP (x, 1)))
9542 case 0:
9543 *total = 0;
9544 break;
9545 case 1:
9546 case 8:
9547 case 16:
9548 *total = COSTS_N_INSNS (3);
9549 break;
9550 case 23:
9551 *total = COSTS_N_INSNS (5);
9552 break;
9553 default:
9554 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9555 break;
9557 break;
9559 case SImode:
9560 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9562 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9563 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9564 speed);
9566 else
9567 switch (INTVAL (XEXP (x, 1)))
9569 case 0:
9570 *total = 0;
9571 break;
9572 case 1:
9573 *total = COSTS_N_INSNS (4);
9574 break;
9575 case 2:
9576 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9577 break;
9578 case 8:
9579 case 16:
9580 case 24:
9581 *total = COSTS_N_INSNS (4);
9582 break;
9583 case 31:
9584 *total = COSTS_N_INSNS (6);
9585 break;
9586 default:
9587 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9588 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9589 speed);
9591 break;
9593 default:
9594 return false;
9596 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9597 return true;
9599 case COMPARE:
9600 switch (GET_MODE (XEXP (x, 0)))
9602 case QImode:
9603 *total = COSTS_N_INSNS (1);
9604 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9605 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9606 break;
9608 case HImode:
9609 *total = COSTS_N_INSNS (2);
9610 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9611 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9612 else if (INTVAL (XEXP (x, 1)) != 0)
9613 *total += COSTS_N_INSNS (1);
9614 break;
9616 case PSImode:
9617 *total = COSTS_N_INSNS (3);
9618 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
9619 *total += COSTS_N_INSNS (2);
9620 break;
9622 case SImode:
9623 *total = COSTS_N_INSNS (4);
9624 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9625 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9626 else if (INTVAL (XEXP (x, 1)) != 0)
9627 *total += COSTS_N_INSNS (3);
9628 break;
9630 default:
9631 return false;
9633 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9634 return true;
9636 case TRUNCATE:
9637 if (AVR_HAVE_MUL
9638 && LSHIFTRT == GET_CODE (XEXP (x, 0))
9639 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
9640 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9642 if (QImode == mode || HImode == mode)
9644 *total = COSTS_N_INSNS (2);
9645 return true;
9648 break;
9650 default:
9651 break;
9653 return false;
9657 /* Implement `TARGET_RTX_COSTS'. */
9659 static bool
9660 avr_rtx_costs (rtx x, int codearg, int outer_code,
9661 int opno, int *total, bool speed)
9663 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
9664 opno, total, speed);
9666 if (avr_log.rtx_costs)
9668 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9669 done, speed ? "speed" : "size", *total, outer_code, x);
9672 return done;
9676 /* Implement `TARGET_ADDRESS_COST'. */
9678 static int
9679 avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
9680 addr_space_t as ATTRIBUTE_UNUSED,
9681 bool speed ATTRIBUTE_UNUSED)
9683 int cost = 4;
9685 if (GET_CODE (x) == PLUS
9686 && CONST_INT_P (XEXP (x, 1))
9687 && (REG_P (XEXP (x, 0))
9688 || GET_CODE (XEXP (x, 0)) == SUBREG))
9690 if (INTVAL (XEXP (x, 1)) >= 61)
9691 cost = 18;
9693 else if (CONSTANT_ADDRESS_P (x))
9695 if (optimize > 0
9696 && io_address_operand (x, QImode))
9697 cost = 2;
9700 if (avr_log.address_cost)
9701 avr_edump ("\n%?: %d = %r\n", cost, x);
9703 return cost;
9706 /* Test for extra memory constraint 'Q'.
9707 It's a memory address based on Y or Z pointer with valid displacement. */
9710 extra_constraint_Q (rtx x)
9712 int ok = 0;
9714 if (GET_CODE (XEXP (x,0)) == PLUS
9715 && REG_P (XEXP (XEXP (x,0), 0))
9716 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
9717 && (INTVAL (XEXP (XEXP (x,0), 1))
9718 <= MAX_LD_OFFSET (GET_MODE (x))))
9720 rtx xx = XEXP (XEXP (x,0), 0);
9721 int regno = REGNO (xx);
9723 ok = (/* allocate pseudos */
9724 regno >= FIRST_PSEUDO_REGISTER
9725 /* strictly check */
9726 || regno == REG_Z || regno == REG_Y
9727 /* XXX frame & arg pointer checks */
9728 || xx == frame_pointer_rtx
9729 || xx == arg_pointer_rtx);
9731 if (avr_log.constraints)
9732 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9733 ok, reload_completed, reload_in_progress, x);
9736 return ok;
9739 /* Convert condition code CONDITION to the valid AVR condition code. */
9741 RTX_CODE
9742 avr_normalize_condition (RTX_CODE condition)
9744 switch (condition)
9746 case GT:
9747 return GE;
9748 case GTU:
9749 return GEU;
9750 case LE:
9751 return LT;
9752 case LEU:
9753 return LTU;
9754 default:
9755 gcc_unreachable ();
9759 /* Helper function for `avr_reorg'. */
9761 static rtx
9762 avr_compare_pattern (rtx insn)
9764 rtx pattern = single_set (insn);
9766 if (pattern
9767 && NONJUMP_INSN_P (insn)
9768 && SET_DEST (pattern) == cc0_rtx
9769 && GET_CODE (SET_SRC (pattern)) == COMPARE)
9771 enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
9772 enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
9774 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9775 They must not be swapped, thus skip them. */
9777 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
9778 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
9779 return pattern;
9782 return NULL_RTX;
9785 /* Helper function for `avr_reorg'. */
9787 /* Expansion of switch/case decision trees leads to code like
9789 cc0 = compare (Reg, Num)
9790 if (cc0 == 0)
9791 goto L1
9793 cc0 = compare (Reg, Num)
9794 if (cc0 > 0)
9795 goto L2
9797 The second comparison is superfluous and can be deleted.
9798 The second jump condition can be transformed from a
9799 "difficult" one to a "simple" one because "cc0 > 0" and
9800 "cc0 >= 0" will have the same effect here.
9802 This function relies on the way switch/case is being expaned
9803 as binary decision tree. For example code see PR 49903.
9805 Return TRUE if optimization performed.
9806 Return FALSE if nothing changed.
9808 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9810 We don't want to do this in text peephole because it is
9811 tedious to work out jump offsets there and the second comparison
9812 might have been transormed by `avr_reorg'.
9814 RTL peephole won't do because peephole2 does not scan across
9815 basic blocks. */
9817 static bool
9818 avr_reorg_remove_redundant_compare (rtx insn1)
9820 rtx comp1, ifelse1, xcond1, branch1;
9821 rtx comp2, ifelse2, xcond2, branch2, insn2;
9822 enum rtx_code code;
9823 rtx jump, target, cond;
9825 /* Look out for: compare1 - branch1 - compare2 - branch2 */
9827 branch1 = next_nonnote_nondebug_insn (insn1);
9828 if (!branch1 || !JUMP_P (branch1))
9829 return false;
9831 insn2 = next_nonnote_nondebug_insn (branch1);
9832 if (!insn2 || !avr_compare_pattern (insn2))
9833 return false;
9835 branch2 = next_nonnote_nondebug_insn (insn2);
9836 if (!branch2 || !JUMP_P (branch2))
9837 return false;
9839 comp1 = avr_compare_pattern (insn1);
9840 comp2 = avr_compare_pattern (insn2);
9841 xcond1 = single_set (branch1);
9842 xcond2 = single_set (branch2);
9844 if (!comp1 || !comp2
9845 || !rtx_equal_p (comp1, comp2)
9846 || !xcond1 || SET_DEST (xcond1) != pc_rtx
9847 || !xcond2 || SET_DEST (xcond2) != pc_rtx
9848 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
9849 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
9851 return false;
9854 comp1 = SET_SRC (comp1);
9855 ifelse1 = SET_SRC (xcond1);
9856 ifelse2 = SET_SRC (xcond2);
9858 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
9860 if (EQ != GET_CODE (XEXP (ifelse1, 0))
9861 || !REG_P (XEXP (comp1, 0))
9862 || !CONST_INT_P (XEXP (comp1, 1))
9863 || XEXP (ifelse1, 2) != pc_rtx
9864 || XEXP (ifelse2, 2) != pc_rtx
9865 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
9866 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
9867 || !COMPARISON_P (XEXP (ifelse2, 0))
9868 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
9869 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
9870 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
9871 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
9873 return false;
9876 /* We filtered the insn sequence to look like
9878 (set (cc0)
9879 (compare (reg:M N)
9880 (const_int VAL)))
9881 (set (pc)
9882 (if_then_else (eq (cc0)
9883 (const_int 0))
9884 (label_ref L1)
9885 (pc)))
9887 (set (cc0)
9888 (compare (reg:M N)
9889 (const_int VAL)))
9890 (set (pc)
9891 (if_then_else (CODE (cc0)
9892 (const_int 0))
9893 (label_ref L2)
9894 (pc)))
9897 code = GET_CODE (XEXP (ifelse2, 0));
9899 /* Map GT/GTU to GE/GEU which is easier for AVR.
9900 The first two instructions compare/branch on EQ
9901 so we may replace the difficult
9903 if (x == VAL) goto L1;
9904 if (x > VAL) goto L2;
9906 with easy
9908 if (x == VAL) goto L1;
9909 if (x >= VAL) goto L2;
9911 Similarly, replace LE/LEU by LT/LTU. */
9913 switch (code)
9915 case EQ:
9916 case LT: case LTU:
9917 case GE: case GEU:
9918 break;
9920 case LE: case LEU:
9921 case GT: case GTU:
9922 code = avr_normalize_condition (code);
9923 break;
9925 default:
9926 return false;
9929 /* Wrap the branches into UNSPECs so they won't be changed or
9930 optimized in the remainder. */
9932 target = XEXP (XEXP (ifelse1, 1), 0);
9933 cond = XEXP (ifelse1, 0);
9934 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
9936 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
9938 target = XEXP (XEXP (ifelse2, 1), 0);
9939 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9940 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
9942 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
9944 /* The comparisons in insn1 and insn2 are exactly the same;
9945 insn2 is superfluous so delete it. */
9947 delete_insn (insn2);
9948 delete_insn (branch1);
9949 delete_insn (branch2);
9951 return true;
9955 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9956 /* Optimize conditional jumps. */
9958 static void
9959 avr_reorg (void)
9961 rtx insn = get_insns();
9963 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
9965 rtx pattern = avr_compare_pattern (insn);
9967 if (!pattern)
9968 continue;
9970 if (optimize
9971 && avr_reorg_remove_redundant_compare (insn))
9973 continue;
9976 if (compare_diff_p (insn))
9978 /* Now we work under compare insn with difficult branch. */
9980 rtx next = next_real_insn (insn);
9981 rtx pat = PATTERN (next);
9983 pattern = SET_SRC (pattern);
9985 if (true_regnum (XEXP (pattern, 0)) >= 0
9986 && true_regnum (XEXP (pattern, 1)) >= 0)
9988 rtx x = XEXP (pattern, 0);
9989 rtx src = SET_SRC (pat);
9990 rtx t = XEXP (src,0);
9991 PUT_CODE (t, swap_condition (GET_CODE (t)));
9992 XEXP (pattern, 0) = XEXP (pattern, 1);
9993 XEXP (pattern, 1) = x;
9994 INSN_CODE (next) = -1;
9996 else if (true_regnum (XEXP (pattern, 0)) >= 0
9997 && XEXP (pattern, 1) == const0_rtx)
9999 /* This is a tst insn, we can reverse it. */
10000 rtx src = SET_SRC (pat);
10001 rtx t = XEXP (src,0);
10003 PUT_CODE (t, swap_condition (GET_CODE (t)));
10004 XEXP (pattern, 1) = XEXP (pattern, 0);
10005 XEXP (pattern, 0) = const0_rtx;
10006 INSN_CODE (next) = -1;
10007 INSN_CODE (insn) = -1;
10009 else if (true_regnum (XEXP (pattern, 0)) >= 0
10010 && CONST_INT_P (XEXP (pattern, 1)))
10012 rtx x = XEXP (pattern, 1);
10013 rtx src = SET_SRC (pat);
10014 rtx t = XEXP (src,0);
10015 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
10017 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
10019 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
10020 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
10021 INSN_CODE (next) = -1;
10022 INSN_CODE (insn) = -1;
10029 /* Returns register number for function return value.*/
10031 static inline unsigned int
10032 avr_ret_register (void)
10034 return 24;
10038 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
10040 static bool
10041 avr_function_value_regno_p (const unsigned int regno)
10043 return (regno == avr_ret_register ());
10047 /* Implement `TARGET_LIBCALL_VALUE'. */
10048 /* Create an RTX representing the place where a
10049 library function returns a value of mode MODE. */
10051 static rtx
10052 avr_libcall_value (enum machine_mode mode,
10053 const_rtx func ATTRIBUTE_UNUSED)
10055 int offs = GET_MODE_SIZE (mode);
10057 if (offs <= 4)
10058 offs = (offs + 1) & ~1;
10060 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
10064 /* Implement `TARGET_FUNCTION_VALUE'. */
10065 /* Create an RTX representing the place where a
10066 function returns a value of data type VALTYPE. */
10068 static rtx
10069 avr_function_value (const_tree type,
10070 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
10071 bool outgoing ATTRIBUTE_UNUSED)
10073 unsigned int offs;
10075 if (TYPE_MODE (type) != BLKmode)
10076 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
10078 offs = int_size_in_bytes (type);
10079 if (offs < 2)
10080 offs = 2;
10081 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
10082 offs = GET_MODE_SIZE (SImode);
10083 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
10084 offs = GET_MODE_SIZE (DImode);
10086 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
10090 test_hard_reg_class (enum reg_class rclass, rtx x)
10092 int regno = true_regnum (x);
10093 if (regno < 0)
10094 return 0;
10096 if (TEST_HARD_REG_CLASS (rclass, regno))
10097 return 1;
10099 return 0;
10103 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
10104 and thus is suitable to be skipped by CPSE, SBRC, etc. */
10106 static bool
10107 avr_2word_insn_p (rtx insn)
10109 if (avr_current_device->errata_skip
10110 || !insn
10111 || 2 != get_attr_length (insn))
10113 return false;
10116 switch (INSN_CODE (insn))
10118 default:
10119 return false;
10121 case CODE_FOR_movqi_insn:
10122 case CODE_FOR_movuqq_insn:
10123 case CODE_FOR_movqq_insn:
10125 rtx set = single_set (insn);
10126 rtx src = SET_SRC (set);
10127 rtx dest = SET_DEST (set);
10129 /* Factor out LDS and STS from movqi_insn. */
10131 if (MEM_P (dest)
10132 && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
10134 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
10136 else if (REG_P (dest)
10137 && MEM_P (src))
10139 return CONSTANT_ADDRESS_P (XEXP (src, 0));
10142 return false;
10145 case CODE_FOR_call_insn:
10146 case CODE_FOR_call_value_insn:
10147 return true;
10153 jump_over_one_insn_p (rtx insn, rtx dest)
10155 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
10156 ? XEXP (dest, 0)
10157 : dest);
10158 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
10159 int dest_addr = INSN_ADDRESSES (uid);
10160 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
10162 return (jump_offset == 1
10163 || (jump_offset == 2
10164 && avr_2word_insn_p (next_active_insn (insn))));
10168 /* Worker function for `HARD_REGNO_MODE_OK'. */
10169 /* Returns 1 if a value of mode MODE can be stored starting with hard
10170 register number REGNO. On the enhanced core, anything larger than
10171 1 byte must start in even numbered register for "movw" to work
10172 (this way we don't have to check for odd registers everywhere). */
10175 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
10177 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
10178 Disallowing QI et al. in these regs might lead to code like
10179 (set (subreg:QI (reg:HI 28) n) ...)
10180 which will result in wrong code because reload does not
10181 handle SUBREGs of hard regsisters like this.
10182 This could be fixed in reload. However, it appears
10183 that fixing reload is not wanted by reload people. */
10185 /* Any GENERAL_REGS register can hold 8-bit values. */
10187 if (GET_MODE_SIZE (mode) == 1)
10188 return 1;
10190 /* FIXME: Ideally, the following test is not needed.
10191 However, it turned out that it can reduce the number
10192 of spill fails. AVR and it's poor endowment with
10193 address registers is extreme stress test for reload. */
10195 if (GET_MODE_SIZE (mode) >= 4
10196 && regno >= REG_X)
10197 return 0;
10199 /* All modes larger than 8 bits should start in an even register. */
10201 return !(regno & 1);
10205 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
10208 avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
10210 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10211 represent valid hard registers like, e.g. HI:29. Returning TRUE
10212 for such registers can lead to performance degradation as mentioned
10213 in PR53595. Thus, report invalid hard registers as FALSE. */
10215 if (!avr_hard_regno_mode_ok (regno, mode))
10216 return 0;
10218 /* Return true if any of the following boundaries is crossed:
10219 17/18, 27/28 and 29/30. */
10221 return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
10222 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
10223 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
10227 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
10229 enum reg_class
10230 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
10231 addr_space_t as, RTX_CODE outer_code,
10232 RTX_CODE index_code ATTRIBUTE_UNUSED)
10234 if (!ADDR_SPACE_GENERIC_P (as))
10236 return POINTER_Z_REGS;
10239 if (!avr_strict_X)
10240 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
10242 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
10246 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
10248 bool
10249 avr_regno_mode_code_ok_for_base_p (int regno,
10250 enum machine_mode mode ATTRIBUTE_UNUSED,
10251 addr_space_t as ATTRIBUTE_UNUSED,
10252 RTX_CODE outer_code,
10253 RTX_CODE index_code ATTRIBUTE_UNUSED)
10255 bool ok = false;
10257 if (!ADDR_SPACE_GENERIC_P (as))
10259 if (regno < FIRST_PSEUDO_REGISTER
10260 && regno == REG_Z)
10262 return true;
10265 if (reg_renumber)
10267 regno = reg_renumber[regno];
10269 if (regno == REG_Z)
10271 return true;
10275 return false;
10278 if (regno < FIRST_PSEUDO_REGISTER
10279 && (regno == REG_X
10280 || regno == REG_Y
10281 || regno == REG_Z
10282 || regno == ARG_POINTER_REGNUM))
10284 ok = true;
10286 else if (reg_renumber)
10288 regno = reg_renumber[regno];
10290 if (regno == REG_X
10291 || regno == REG_Y
10292 || regno == REG_Z
10293 || regno == ARG_POINTER_REGNUM)
10295 ok = true;
10299 if (avr_strict_X
10300 && PLUS == outer_code
10301 && regno == REG_X)
10303 ok = false;
10306 return ok;
10310 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
10311 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10312 CLOBBER_REG is a QI clobber register or NULL_RTX.
10313 LEN == NULL: output instructions.
10314 LEN != NULL: set *LEN to the length of the instruction sequence
10315 (in words) printed with LEN = NULL.
10316 If CLEAR_P is true, OP[0] had been cleard to Zero already.
10317 If CLEAR_P is false, nothing is known about OP[0].
10319 The effect on cc0 is as follows:
10321 Load 0 to any register except ZERO_REG : NONE
10322 Load ld register with any value : NONE
10323 Anything else: : CLOBBER */
10325 static void
10326 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
10328 rtx src = op[1];
10329 rtx dest = op[0];
10330 rtx xval, xdest[4];
10331 int ival[4];
10332 int clobber_val = 1234;
10333 bool cooked_clobber_p = false;
10334 bool set_p = false;
10335 enum machine_mode mode = GET_MODE (dest);
10336 int n, n_bytes = GET_MODE_SIZE (mode);
10338 gcc_assert (REG_P (dest)
10339 && CONSTANT_P (src));
10341 if (len)
10342 *len = 0;
10344 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
10345 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
10347 if (REGNO (dest) < 16
10348 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
10350 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
10353 /* We might need a clobber reg but don't have one. Look at the value to
10354 be loaded more closely. A clobber is only needed if it is a symbol
10355 or contains a byte that is neither 0, -1 or a power of 2. */
10357 if (NULL_RTX == clobber_reg
10358 && !test_hard_reg_class (LD_REGS, dest)
10359 && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
10360 || !avr_popcount_each_byte (src, n_bytes,
10361 (1 << 0) | (1 << 1) | (1 << 8))))
10363 /* We have no clobber register but need one. Cook one up.
10364 That's cheaper than loading from constant pool. */
10366 cooked_clobber_p = true;
10367 clobber_reg = all_regs_rtx[REG_Z + 1];
10368 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
10371 /* Now start filling DEST from LSB to MSB. */
10373 for (n = 0; n < n_bytes; n++)
10375 int ldreg_p;
10376 bool done_byte = false;
10377 int j;
10378 rtx xop[3];
10380 /* Crop the n-th destination byte. */
10382 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
10383 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
10385 if (!CONST_INT_P (src)
10386 && !CONST_FIXED_P (src)
10387 && !CONST_DOUBLE_P (src))
10389 static const char* const asm_code[][2] =
10391 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
10392 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
10393 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
10394 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
10397 xop[0] = xdest[n];
10398 xop[1] = src;
10399 xop[2] = clobber_reg;
10401 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
10403 continue;
10406 /* Crop the n-th source byte. */
10408 xval = simplify_gen_subreg (QImode, src, mode, n);
10409 ival[n] = INTVAL (xval);
10411 /* Look if we can reuse the low word by means of MOVW. */
10413 if (n == 2
10414 && n_bytes >= 4
10415 && AVR_HAVE_MOVW)
10417 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
10418 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
10420 if (INTVAL (lo16) == INTVAL (hi16))
10422 if (0 != INTVAL (lo16)
10423 || !clear_p)
10425 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
10428 break;
10432 /* Don't use CLR so that cc0 is set as expected. */
10434 if (ival[n] == 0)
10436 if (!clear_p)
10437 avr_asm_len (ldreg_p ? "ldi %0,0"
10438 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
10439 : "mov %0,__zero_reg__",
10440 &xdest[n], len, 1);
10441 continue;
10444 if (clobber_val == ival[n]
10445 && REGNO (clobber_reg) == REGNO (xdest[n]))
10447 continue;
10450 /* LD_REGS can use LDI to move a constant value */
10452 if (ldreg_p)
10454 xop[0] = xdest[n];
10455 xop[1] = xval;
10456 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
10457 continue;
10460 /* Try to reuse value already loaded in some lower byte. */
10462 for (j = 0; j < n; j++)
10463 if (ival[j] == ival[n])
10465 xop[0] = xdest[n];
10466 xop[1] = xdest[j];
10468 avr_asm_len ("mov %0,%1", xop, len, 1);
10469 done_byte = true;
10470 break;
10473 if (done_byte)
10474 continue;
10476 /* Need no clobber reg for -1: Use CLR/DEC */
10478 if (-1 == ival[n])
10480 if (!clear_p)
10481 avr_asm_len ("clr %0", &xdest[n], len, 1);
10483 avr_asm_len ("dec %0", &xdest[n], len, 1);
10484 continue;
10486 else if (1 == ival[n])
10488 if (!clear_p)
10489 avr_asm_len ("clr %0", &xdest[n], len, 1);
10491 avr_asm_len ("inc %0", &xdest[n], len, 1);
10492 continue;
10495 /* Use T flag or INC to manage powers of 2 if we have
10496 no clobber reg. */
10498 if (NULL_RTX == clobber_reg
10499 && single_one_operand (xval, QImode))
10501 xop[0] = xdest[n];
10502 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
10504 gcc_assert (constm1_rtx != xop[1]);
10506 if (!set_p)
10508 set_p = true;
10509 avr_asm_len ("set", xop, len, 1);
10512 if (!clear_p)
10513 avr_asm_len ("clr %0", xop, len, 1);
10515 avr_asm_len ("bld %0,%1", xop, len, 1);
10516 continue;
10519 /* We actually need the LD_REGS clobber reg. */
10521 gcc_assert (NULL_RTX != clobber_reg);
10523 xop[0] = xdest[n];
10524 xop[1] = xval;
10525 xop[2] = clobber_reg;
10526 clobber_val = ival[n];
10528 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10529 "mov %0,%2", xop, len, 2);
10532 /* If we cooked up a clobber reg above, restore it. */
10534 if (cooked_clobber_p)
10536 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
10541 /* Reload the constant OP[1] into the HI register OP[0].
10542 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10543 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10544 need a clobber reg or have to cook one up.
10546 PLEN == NULL: Output instructions.
10547 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10548 by the insns printed.
10550 Return "". */
10552 const char*
10553 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
10555 output_reload_in_const (op, clobber_reg, plen, false);
10556 return "";
10560 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10561 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10562 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10563 need a clobber reg or have to cook one up.
10565 LEN == NULL: Output instructions.
10567 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10568 by the insns printed.
10570 Return "". */
10572 const char *
10573 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
10575 if (AVR_HAVE_MOVW
10576 && !test_hard_reg_class (LD_REGS, op[0])
10577 && (CONST_INT_P (op[1])
10578 || CONST_FIXED_P (op[1])
10579 || CONST_DOUBLE_P (op[1])))
10581 int len_clr, len_noclr;
10583 /* In some cases it is better to clear the destination beforehand, e.g.
10585 CLR R2 CLR R3 MOVW R4,R2 INC R2
10587 is shorther than
10589 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10591 We find it too tedious to work that out in the print function.
10592 Instead, we call the print function twice to get the lengths of
10593 both methods and use the shortest one. */
10595 output_reload_in_const (op, clobber_reg, &len_clr, true);
10596 output_reload_in_const (op, clobber_reg, &len_noclr, false);
10598 if (len_noclr - len_clr == 4)
10600 /* Default needs 4 CLR instructions: clear register beforehand. */
10602 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10603 "mov %B0,__zero_reg__" CR_TAB
10604 "movw %C0,%A0", &op[0], len, 3);
10606 output_reload_in_const (op, clobber_reg, len, true);
10608 if (len)
10609 *len += 3;
10611 return "";
10615 /* Default: destination not pre-cleared. */
10617 output_reload_in_const (op, clobber_reg, len, false);
10618 return "";
10621 const char*
10622 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
10624 output_reload_in_const (op, clobber_reg, len, false);
10625 return "";
10629 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10631 void
10632 avr_output_addr_vec_elt (FILE *stream, int value)
10634 if (AVR_HAVE_JMP_CALL)
10635 fprintf (stream, "\t.word gs(.L%d)\n", value);
10636 else
10637 fprintf (stream, "\trjmp .L%d\n", value);
10641 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10642 /* Returns true if SCRATCH are safe to be allocated as a scratch
10643 registers (for a define_peephole2) in the current function. */
10645 static bool
10646 avr_hard_regno_scratch_ok (unsigned int regno)
10648 /* Interrupt functions can only use registers that have already been saved
10649 by the prologue, even if they would normally be call-clobbered. */
10651 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10652 && !df_regs_ever_live_p (regno))
10653 return false;
10655 /* Don't allow hard registers that might be part of the frame pointer.
10656 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10657 and don't care for a frame pointer that spans more than one register. */
10659 if ((!reload_completed || frame_pointer_needed)
10660 && (regno == REG_Y || regno == REG_Y + 1))
10662 return false;
10665 return true;
10669 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10670 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10673 avr_hard_regno_rename_ok (unsigned int old_reg,
10674 unsigned int new_reg)
10676 /* Interrupt functions can only use registers that have already been
10677 saved by the prologue, even if they would normally be
10678 call-clobbered. */
10680 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10681 && !df_regs_ever_live_p (new_reg))
10682 return 0;
10684 /* Don't allow hard registers that might be part of the frame pointer.
10685 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10686 and don't care for a frame pointer that spans more than one register. */
10688 if ((!reload_completed || frame_pointer_needed)
10689 && (old_reg == REG_Y || old_reg == REG_Y + 1
10690 || new_reg == REG_Y || new_reg == REG_Y + 1))
10692 return 0;
10695 return 1;
10698 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10699 or memory location in the I/O space (QImode only).
10701 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10702 Operand 1: register operand to test, or CONST_INT memory address.
10703 Operand 2: bit number.
10704 Operand 3: label to jump to if the test is true. */
10706 const char*
10707 avr_out_sbxx_branch (rtx insn, rtx operands[])
10709 enum rtx_code comp = GET_CODE (operands[0]);
10710 bool long_jump = get_attr_length (insn) >= 4;
10711 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
10713 if (comp == GE)
10714 comp = EQ;
10715 else if (comp == LT)
10716 comp = NE;
10718 if (reverse)
10719 comp = reverse_condition (comp);
10721 switch (GET_CODE (operands[1]))
10723 default:
10724 gcc_unreachable();
10726 case CONST_INT:
10728 if (low_io_address_operand (operands[1], QImode))
10730 if (comp == EQ)
10731 output_asm_insn ("sbis %i1,%2", operands);
10732 else
10733 output_asm_insn ("sbic %i1,%2", operands);
10735 else
10737 output_asm_insn ("in __tmp_reg__,%i1", operands);
10738 if (comp == EQ)
10739 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
10740 else
10741 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
10744 break; /* CONST_INT */
10746 case REG:
10748 if (comp == EQ)
10749 output_asm_insn ("sbrs %T1%T2", operands);
10750 else
10751 output_asm_insn ("sbrc %T1%T2", operands);
10753 break; /* REG */
10754 } /* switch */
10756 if (long_jump)
10757 return ("rjmp .+4" CR_TAB
10758 "jmp %x3");
10760 if (!reverse)
10761 return "rjmp %x3";
10763 return "";
10766 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
10768 static void
10769 avr_asm_out_ctor (rtx symbol, int priority)
10771 fputs ("\t.global __do_global_ctors\n", asm_out_file);
10772 default_ctor_section_asm_out_constructor (symbol, priority);
10776 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
10778 static void
10779 avr_asm_out_dtor (rtx symbol, int priority)
10781 fputs ("\t.global __do_global_dtors\n", asm_out_file);
10782 default_dtor_section_asm_out_destructor (symbol, priority);
10786 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
10788 static bool
10789 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
10791 if (TYPE_MODE (type) == BLKmode)
10793 HOST_WIDE_INT size = int_size_in_bytes (type);
10794 return (size == -1 || size > 8);
10796 else
10797 return false;
10801 /* Implement `CASE_VALUES_THRESHOLD'. */
10802 /* Supply the default for --param case-values-threshold=0 */
10804 static unsigned int
10805 avr_case_values_threshold (void)
10807 /* The exact break-even point between a jump table and an if-else tree
10808 depends on several factors not available here like, e.g. if 8-bit
10809 comparisons can be used in the if-else tree or not, on the
10810 range of the case values, if the case value can be reused, on the
10811 register allocation, etc. '7' appears to be a good choice. */
10813 return 7;
10817 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10819 static enum machine_mode
10820 avr_addr_space_address_mode (addr_space_t as)
10822 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
10826 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10828 static enum machine_mode
10829 avr_addr_space_pointer_mode (addr_space_t as)
10831 return avr_addr_space_address_mode (as);
10835 /* Helper for following function. */
10837 static bool
10838 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
10840 gcc_assert (REG_P (reg));
10842 if (strict)
10844 return REGNO (reg) == REG_Z;
10847 /* Avoid combine to propagate hard regs. */
10849 if (can_create_pseudo_p()
10850 && REGNO (reg) < REG_Z)
10852 return false;
10855 return true;
10859 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
10861 static bool
10862 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
10863 bool strict, addr_space_t as)
10865 bool ok = false;
10867 switch (as)
10869 default:
10870 gcc_unreachable();
10872 case ADDR_SPACE_GENERIC:
10873 return avr_legitimate_address_p (mode, x, strict);
10875 case ADDR_SPACE_FLASH:
10876 case ADDR_SPACE_FLASH1:
10877 case ADDR_SPACE_FLASH2:
10878 case ADDR_SPACE_FLASH3:
10879 case ADDR_SPACE_FLASH4:
10880 case ADDR_SPACE_FLASH5:
10882 switch (GET_CODE (x))
10884 case REG:
10885 ok = avr_reg_ok_for_pgm_addr (x, strict);
10886 break;
10888 case POST_INC:
10889 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
10890 break;
10892 default:
10893 break;
10896 break; /* FLASH */
10898 case ADDR_SPACE_MEMX:
10899 if (REG_P (x))
10900 ok = (!strict
10901 && can_create_pseudo_p());
10903 if (LO_SUM == GET_CODE (x))
10905 rtx hi = XEXP (x, 0);
10906 rtx lo = XEXP (x, 1);
10908 ok = (REG_P (hi)
10909 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
10910 && REG_P (lo)
10911 && REGNO (lo) == REG_Z);
10914 break; /* MEMX */
10917 if (avr_log.legitimate_address_p)
10919 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
10920 "reload_completed=%d reload_in_progress=%d %s:",
10921 ok, mode, strict, reload_completed, reload_in_progress,
10922 reg_renumber ? "(reg_renumber)" : "");
10924 if (GET_CODE (x) == PLUS
10925 && REG_P (XEXP (x, 0))
10926 && CONST_INT_P (XEXP (x, 1))
10927 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
10928 && reg_renumber)
10930 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
10931 true_regnum (XEXP (x, 0)));
10934 avr_edump ("\n%r\n", x);
10937 return ok;
10941 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10943 static rtx
10944 avr_addr_space_legitimize_address (rtx x, rtx old_x,
10945 enum machine_mode mode, addr_space_t as)
10947 if (ADDR_SPACE_GENERIC_P (as))
10948 return avr_legitimize_address (x, old_x, mode);
10950 if (avr_log.legitimize_address)
10952 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
10955 return old_x;
10959 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
10961 static rtx
10962 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
10964 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
10965 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
10967 if (avr_log.progmem)
10968 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
10969 src, type_from, type_to);
10971 /* Up-casting from 16-bit to 24-bit pointer. */
10973 if (as_from != ADDR_SPACE_MEMX
10974 && as_to == ADDR_SPACE_MEMX)
10976 int msb;
10977 rtx sym = src;
10978 rtx reg = gen_reg_rtx (PSImode);
10980 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
10981 sym = XEXP (sym, 0);
10983 /* Look at symbol flags: avr_encode_section_info set the flags
10984 also if attribute progmem was seen so that we get the right
10985 promotion for, e.g. PSTR-like strings that reside in generic space
10986 but are located in flash. In that case we patch the incoming
10987 address space. */
10989 if (SYMBOL_REF == GET_CODE (sym)
10990 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
10992 as_from = ADDR_SPACE_FLASH;
10995 /* Linearize memory: RAM has bit 23 set. */
10997 msb = ADDR_SPACE_GENERIC_P (as_from)
10998 ? 0x80
10999 : avr_addrspace[as_from].segment;
11001 src = force_reg (Pmode, src);
11003 emit_insn (msb == 0
11004 ? gen_zero_extendhipsi2 (reg, src)
11005 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
11007 return reg;
11010 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
11012 if (as_from == ADDR_SPACE_MEMX
11013 && as_to != ADDR_SPACE_MEMX)
11015 rtx new_src = gen_reg_rtx (Pmode);
11017 src = force_reg (PSImode, src);
11019 emit_move_insn (new_src,
11020 simplify_gen_subreg (Pmode, src, PSImode, 0));
11021 return new_src;
11024 return src;
11028 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
11030 static bool
11031 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
11032 addr_space_t superset ATTRIBUTE_UNUSED)
11034 /* Allow any kind of pointer mess. */
11036 return true;
11040 /* Implement `TARGET_CONVERT_TO_TYPE'. */
11042 static tree
11043 avr_convert_to_type (tree type, tree expr)
11045 /* Print a diagnose for pointer conversion that changes the address
11046 space of the pointer target to a non-enclosing address space,
11047 provided -Waddr-space-convert is on.
11049 FIXME: Filter out cases where the target object is known to
11050 be located in the right memory, like in
11052 (const __flash*) PSTR ("text")
11054 Also try to distinguish between explicit casts requested by
11055 the user and implicit casts like
11057 void f (const __flash char*);
11059 void g (const char *p)
11061 f ((const __flash*) p);
11064 under the assumption that an explicit casts means that the user
11065 knows what he is doing, e.g. interface with PSTR or old style
11066 code with progmem and pgm_read_xxx.
11069 if (avr_warn_addr_space_convert
11070 && expr != error_mark_node
11071 && POINTER_TYPE_P (type)
11072 && POINTER_TYPE_P (TREE_TYPE (expr)))
11074 addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
11075 addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));
11077 if (avr_log.progmem)
11078 avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);
11080 if (as_new != ADDR_SPACE_MEMX
11081 && as_new != as_old)
11083 location_t loc = EXPR_LOCATION (expr);
11084 const char *name_old = avr_addrspace[as_old].name;
11085 const char *name_new = avr_addrspace[as_new].name;
11087 warning (OPT_Waddr_space_convert,
11088 "conversion from address space %qs to address space %qs",
11089 ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
11090 ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);
11092 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
11096 return NULL_TREE;
11100 /* Worker function for movmemhi expander.
11101 XOP[0] Destination as MEM:BLK
11102 XOP[1] Source " "
11103 XOP[2] # Bytes to copy
11105 Return TRUE if the expansion is accomplished.
11106 Return FALSE if the operand compination is not supported. */
11108 bool
11109 avr_emit_movmemhi (rtx *xop)
11111 HOST_WIDE_INT count;
11112 enum machine_mode loop_mode;
11113 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
11114 rtx loop_reg, addr1, a_src, a_dest, insn, xas;
11115 rtx a_hi8 = NULL_RTX;
11117 if (avr_mem_flash_p (xop[0]))
11118 return false;
11120 if (!CONST_INT_P (xop[2]))
11121 return false;
11123 count = INTVAL (xop[2]);
11124 if (count <= 0)
11125 return false;
11127 a_src = XEXP (xop[1], 0);
11128 a_dest = XEXP (xop[0], 0);
11130 if (PSImode == GET_MODE (a_src))
11132 gcc_assert (as == ADDR_SPACE_MEMX);
11134 loop_mode = (count < 0x100) ? QImode : HImode;
11135 loop_reg = gen_rtx_REG (loop_mode, 24);
11136 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
11138 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
11139 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
11141 else
11143 int segment = avr_addrspace[as].segment;
11145 if (segment
11146 && avr_current_device->n_flash > 1)
11148 a_hi8 = GEN_INT (segment);
11149 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
11151 else if (!ADDR_SPACE_GENERIC_P (as))
11153 as = ADDR_SPACE_FLASH;
11156 addr1 = a_src;
11158 loop_mode = (count <= 0x100) ? QImode : HImode;
11159 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
11162 xas = GEN_INT (as);
11164 /* FIXME: Register allocator might come up with spill fails if it is left
11165 on its own. Thus, we allocate the pointer registers by hand:
11166 Z = source address
11167 X = destination address */
11169 emit_move_insn (lpm_addr_reg_rtx, addr1);
11170 emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
11172 /* FIXME: Register allocator does a bad job and might spill address
11173 register(s) inside the loop leading to additional move instruction
11174 to/from stack which could clobber tmp_reg. Thus, do *not* emit
11175 load and store as separate insns. Instead, we perform the copy
11176 by means of one monolithic insn. */
11178 gcc_assert (TMP_REGNO == LPM_REGNO);
11180 if (as != ADDR_SPACE_MEMX)
11182 /* Load instruction ([E]LPM or LD) is known at compile time:
11183 Do the copy-loop inline. */
11185 rtx (*fun) (rtx, rtx, rtx)
11186 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
11188 insn = fun (xas, loop_reg, loop_reg);
11190 else
11192 rtx (*fun) (rtx, rtx)
11193 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
11195 emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
11197 insn = fun (xas, GEN_INT (avr_addr.rampz));
11200 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
11201 emit_insn (insn);
11203 return true;
11207 /* Print assembler for movmem_qi, movmem_hi insns...
11208 $0 : Address Space
11209 $1, $2 : Loop register
11210 Z : Source address
11211 X : Destination address
11214 const char*
11215 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
11217 addr_space_t as = (addr_space_t) INTVAL (op[0]);
11218 enum machine_mode loop_mode = GET_MODE (op[1]);
11219 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
11220 rtx xop[3];
11222 if (plen)
11223 *plen = 0;
11225 xop[0] = op[0];
11226 xop[1] = op[1];
11227 xop[2] = tmp_reg_rtx;
11229 /* Loop label */
11231 avr_asm_len ("0:", xop, plen, 0);
11233 /* Load with post-increment */
11235 switch (as)
11237 default:
11238 gcc_unreachable();
11240 case ADDR_SPACE_GENERIC:
11242 avr_asm_len ("ld %2,Z+", xop, plen, 1);
11243 break;
11245 case ADDR_SPACE_FLASH:
11247 if (AVR_HAVE_LPMX)
11248 avr_asm_len ("lpm %2,Z+", xop, plen, 1);
11249 else
11250 avr_asm_len ("lpm" CR_TAB
11251 "adiw r30,1", xop, plen, 2);
11252 break;
11254 case ADDR_SPACE_FLASH1:
11255 case ADDR_SPACE_FLASH2:
11256 case ADDR_SPACE_FLASH3:
11257 case ADDR_SPACE_FLASH4:
11258 case ADDR_SPACE_FLASH5:
11260 if (AVR_HAVE_ELPMX)
11261 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
11262 else
11263 avr_asm_len ("elpm" CR_TAB
11264 "adiw r30,1", xop, plen, 2);
11265 break;
11268 /* Store with post-increment */
11270 avr_asm_len ("st X+,%2", xop, plen, 1);
11272 /* Decrement loop-counter and set Z-flag */
11274 if (QImode == loop_mode)
11276 avr_asm_len ("dec %1", xop, plen, 1);
11278 else if (sbiw_p)
11280 avr_asm_len ("sbiw %1,1", xop, plen, 1);
11282 else
11284 avr_asm_len ("subi %A1,1" CR_TAB
11285 "sbci %B1,0", xop, plen, 2);
11288 /* Loop until zero */
11290 return avr_asm_len ("brne 0b", xop, plen, 1);
11295 /* Helper for __builtin_avr_delay_cycles */
11297 static rtx
11298 avr_mem_clobber (void)
11300 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
11301 MEM_VOLATILE_P (mem) = 1;
11302 return mem;
11305 static void
11306 avr_expand_delay_cycles (rtx operands0)
11308 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
11309 unsigned HOST_WIDE_INT cycles_used;
11310 unsigned HOST_WIDE_INT loop_count;
11312 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
11314 loop_count = ((cycles - 9) / 6) + 1;
11315 cycles_used = ((loop_count - 1) * 6) + 9;
11316 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
11317 avr_mem_clobber()));
11318 cycles -= cycles_used;
11321 if (IN_RANGE (cycles, 262145, 83886081))
11323 loop_count = ((cycles - 7) / 5) + 1;
11324 if (loop_count > 0xFFFFFF)
11325 loop_count = 0xFFFFFF;
11326 cycles_used = ((loop_count - 1) * 5) + 7;
11327 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
11328 avr_mem_clobber()));
11329 cycles -= cycles_used;
11332 if (IN_RANGE (cycles, 768, 262144))
11334 loop_count = ((cycles - 5) / 4) + 1;
11335 if (loop_count > 0xFFFF)
11336 loop_count = 0xFFFF;
11337 cycles_used = ((loop_count - 1) * 4) + 5;
11338 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
11339 avr_mem_clobber()));
11340 cycles -= cycles_used;
11343 if (IN_RANGE (cycles, 6, 767))
11345 loop_count = cycles / 3;
11346 if (loop_count > 255)
11347 loop_count = 255;
11348 cycles_used = loop_count * 3;
11349 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
11350 avr_mem_clobber()));
11351 cycles -= cycles_used;
11354 while (cycles >= 2)
11356 emit_insn (gen_nopv (GEN_INT(2)));
11357 cycles -= 2;
11360 if (cycles == 1)
11362 emit_insn (gen_nopv (GEN_INT(1)));
11363 cycles--;
11368 /* Compute the image of x under f, i.e. perform x --> f(x) */
11370 static int
11371 avr_map (unsigned int f, int x)
11373 return x < 8 ? (f >> (4 * x)) & 0xf : 0;
11377 /* Return some metrics of map A. */
11379 enum
11381 /* Number of fixed points in { 0 ... 7 } */
11382 MAP_FIXED_0_7,
11384 /* Size of preimage of non-fixed points in { 0 ... 7 } */
11385 MAP_NONFIXED_0_7,
11387 /* Mask representing the fixed points in { 0 ... 7 } */
11388 MAP_MASK_FIXED_0_7,
11390 /* Size of the preimage of { 0 ... 7 } */
11391 MAP_PREIMAGE_0_7,
11393 /* Mask that represents the preimage of { f } */
11394 MAP_MASK_PREIMAGE_F
11397 static unsigned
11398 avr_map_metric (unsigned int a, int mode)
11400 unsigned i, metric = 0;
11402 for (i = 0; i < 8; i++)
11404 unsigned ai = avr_map (a, i);
11406 if (mode == MAP_FIXED_0_7)
11407 metric += ai == i;
11408 else if (mode == MAP_NONFIXED_0_7)
11409 metric += ai < 8 && ai != i;
11410 else if (mode == MAP_MASK_FIXED_0_7)
11411 metric |= ((unsigned) (ai == i)) << i;
11412 else if (mode == MAP_PREIMAGE_0_7)
11413 metric += ai < 8;
11414 else if (mode == MAP_MASK_PREIMAGE_F)
11415 metric |= ((unsigned) (ai == 0xf)) << i;
11416 else
11417 gcc_unreachable();
11420 return metric;
11424 /* Return true if IVAL has a 0xf in its hexadecimal representation
11425 and false, otherwise. Only nibbles 0..7 are taken into account.
11426 Used as constraint helper for C0f and Cxf. */
11428 bool
11429 avr_has_nibble_0xf (rtx ival)
11431 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
11432 return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
11436 /* We have a set of bits that are mapped by a function F.
11437 Try to decompose F by means of a second function G so that
11439 F = F o G^-1 o G
11443 cost (F o G^-1) + cost (G) < cost (F)
11445 Example: Suppose builtin insert_bits supplies us with the map
11446 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
11447 nibble of the result, we can just as well rotate the bits before inserting
11448 them and use the map 0x7654ffff which is cheaper than the original map.
11449 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
11451 typedef struct
11453 /* tree code of binary function G */
11454 enum tree_code code;
11456 /* The constant second argument of G */
11457 int arg;
11459 /* G^-1, the inverse of G (*, arg) */
11460 unsigned ginv;
11462 /* The cost of appplying G (*, arg) */
11463 int cost;
11465 /* The composition F o G^-1 (*, arg) for some function F */
11466 unsigned int map;
11468 /* For debug purpose only */
11469 const char *str;
11470 } avr_map_op_t;
11472 static const avr_map_op_t avr_map_op[] =
11474 { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
11475 { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
11476 { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
11477 { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
11478 { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
11479 { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
11480 { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
11481 { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
11482 { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
11483 { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
11484 { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
11485 { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
11486 { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
11487 { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
11488 { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
11492 /* Try to decompose F as F = (F o G^-1) o G as described above.
11493 The result is a struct representing F o G^-1 and G.
11494 If result.cost < 0 then such a decomposition does not exist. */
11496 static avr_map_op_t
11497 avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
11499 int i;
11500 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
11501 avr_map_op_t f_ginv = *g;
11502 unsigned int ginv = g->ginv;
11504 f_ginv.cost = -1;
11506 /* Step 1: Computing F o G^-1 */
11508 for (i = 7; i >= 0; i--)
11510 int x = avr_map (f, i);
11512 if (x <= 7)
11514 x = avr_map (ginv, x);
11516 /* The bit is no element of the image of G: no avail (cost = -1) */
11518 if (x > 7)
11519 return f_ginv;
11522 f_ginv.map = (f_ginv.map << 4) + x;
11525 /* Step 2: Compute the cost of the operations.
11526 The overall cost of doing an operation prior to the insertion is
11527 the cost of the insertion plus the cost of the operation. */
11529 /* Step 2a: Compute cost of F o G^-1 */
11531 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
11533 /* The mapping consists only of fixed points and can be folded
11534 to AND/OR logic in the remainder. Reasonable cost is 3. */
11536 f_ginv.cost = 2 + (val_used_p && !val_const_p);
11538 else
11540 rtx xop[4];
11542 /* Get the cost of the insn by calling the output worker with some
11543 fake values. Mimic effect of reloading xop[3]: Unused operands
11544 are mapped to 0 and used operands are reloaded to xop[0]. */
11546 xop[0] = all_regs_rtx[24];
11547 xop[1] = gen_int_mode (f_ginv.map, SImode);
11548 xop[2] = all_regs_rtx[25];
11549 xop[3] = val_used_p ? xop[0] : const0_rtx;
11551 avr_out_insert_bits (xop, &f_ginv.cost);
11553 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
11556 /* Step 2b: Add cost of G */
11558 f_ginv.cost += g->cost;
11560 if (avr_log.builtin)
11561 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
11563 return f_ginv;
11567 /* Insert bits from XOP[1] into XOP[0] according to MAP.
11568 XOP[0] and XOP[1] don't overlap.
11569 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11570 If FIXP_P = false: Just move the bit if its position in the destination
11571 is different to its source position. */
11573 static void
11574 avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
11576 int bit_dest, b;
11578 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11579 int t_bit_src = -1;
11581 /* We order the operations according to the requested source bit b. */
11583 for (b = 0; b < 8; b++)
11584 for (bit_dest = 0; bit_dest < 8; bit_dest++)
11586 int bit_src = avr_map (map, bit_dest);
11588 if (b != bit_src
11589 || bit_src >= 8
11590 /* Same position: No need to copy as requested by FIXP_P. */
11591 || (bit_dest == bit_src && !fixp_p))
11592 continue;
11594 if (t_bit_src != bit_src)
11596 /* Source bit is not yet in T: Store it to T. */
11598 t_bit_src = bit_src;
11600 xop[3] = GEN_INT (bit_src);
11601 avr_asm_len ("bst %T1%T3", xop, plen, 1);
11604 /* Load destination bit with T. */
11606 xop[3] = GEN_INT (bit_dest);
11607 avr_asm_len ("bld %T0%T3", xop, plen, 1);
11612 /* PLEN == 0: Print assembler code for `insert_bits'.
11613 PLEN != 0: Compute code length in bytes.
11615 OP[0]: Result
11616 OP[1]: The mapping composed of nibbles. If nibble no. N is
11617 0: Bit N of result is copied from bit OP[2].0
11618 ... ...
11619 7: Bit N of result is copied from bit OP[2].7
11620 0xf: Bit N of result is copied from bit OP[3].N
11621 OP[2]: Bits to be inserted
11622 OP[3]: Target value */
11624 const char*
11625 avr_out_insert_bits (rtx *op, int *plen)
11627 unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
11628 unsigned mask_fixed;
11629 bool fixp_p = true;
11630 rtx xop[4];
11632 xop[0] = op[0];
11633 xop[1] = op[2];
11634 xop[2] = op[3];
11636 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
11638 if (plen)
11639 *plen = 0;
11640 else if (flag_print_asm_name)
11641 fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);
11643 /* If MAP has fixed points it might be better to initialize the result
11644 with the bits to be inserted instead of moving all bits by hand. */
11646 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
11648 if (REGNO (xop[0]) == REGNO (xop[1]))
11650 /* Avoid early-clobber conflicts */
11652 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
11653 xop[1] = tmp_reg_rtx;
11654 fixp_p = false;
11657 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
11659 /* XOP[2] is used and reloaded to XOP[0] already */
11661 int n_fix = 0, n_nofix = 0;
11663 gcc_assert (REG_P (xop[2]));
11665 /* Get the code size of the bit insertions; once with all bits
11666 moved and once with fixed points omitted. */
11668 avr_move_bits (xop, map, true, &n_fix);
11669 avr_move_bits (xop, map, false, &n_nofix);
11671 if (fixp_p && n_fix - n_nofix > 3)
11673 xop[3] = gen_int_mode (~mask_fixed, QImode);
11675 avr_asm_len ("eor %0,%1" CR_TAB
11676 "andi %0,%3" CR_TAB
11677 "eor %0,%1", xop, plen, 3);
11678 fixp_p = false;
11681 else
11683 /* XOP[2] is unused */
11685 if (fixp_p && mask_fixed)
11687 avr_asm_len ("mov %0,%1", xop, plen, 1);
11688 fixp_p = false;
11692 /* Move/insert remaining bits. */
11694 avr_move_bits (xop, map, fixp_p, plen);
11696 return "";
11700 /* IDs for all the AVR builtins. */
11702 enum avr_builtin_id
11704 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11705 AVR_BUILTIN_ ## NAME,
11706 #include "builtins.def"
11707 #undef DEF_BUILTIN
11709 AVR_BUILTIN_COUNT
11712 struct GTY(()) avr_builtin_description
11714 enum insn_code icode;
11715 int n_args;
11716 tree fndecl;
11720 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11721 that a built-in's ID can be used to access the built-in by means of
11722 avr_bdesc[ID] */
11724 static GTY(()) struct avr_builtin_description
11725 avr_bdesc[AVR_BUILTIN_COUNT] =
11727 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
11728 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
11729 #include "builtins.def"
11730 #undef DEF_BUILTIN
11734 /* Implement `TARGET_BUILTIN_DECL'. */
11736 static tree
11737 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
11739 if (id < AVR_BUILTIN_COUNT)
11740 return avr_bdesc[id].fndecl;
11742 return error_mark_node;
11746 static void
11747 avr_init_builtin_int24 (void)
11749 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
11750 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
11752 lang_hooks.types.register_builtin_type (int24_type, "__int24");
11753 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
11757 /* Implement `TARGET_INIT_BUILTINS' */
11758 /* Set up all builtin functions for this target. */
11760 static void
11761 avr_init_builtins (void)
11763 tree void_ftype_void
11764 = build_function_type_list (void_type_node, NULL_TREE);
11765 tree uchar_ftype_uchar
11766 = build_function_type_list (unsigned_char_type_node,
11767 unsigned_char_type_node,
11768 NULL_TREE);
11769 tree uint_ftype_uchar_uchar
11770 = build_function_type_list (unsigned_type_node,
11771 unsigned_char_type_node,
11772 unsigned_char_type_node,
11773 NULL_TREE);
11774 tree int_ftype_char_char
11775 = build_function_type_list (integer_type_node,
11776 char_type_node,
11777 char_type_node,
11778 NULL_TREE);
11779 tree int_ftype_char_uchar
11780 = build_function_type_list (integer_type_node,
11781 char_type_node,
11782 unsigned_char_type_node,
11783 NULL_TREE);
11784 tree void_ftype_ulong
11785 = build_function_type_list (void_type_node,
11786 long_unsigned_type_node,
11787 NULL_TREE);
11789 tree uchar_ftype_ulong_uchar_uchar
11790 = build_function_type_list (unsigned_char_type_node,
11791 long_unsigned_type_node,
11792 unsigned_char_type_node,
11793 unsigned_char_type_node,
11794 NULL_TREE);
11796 tree const_memx_void_node
11797 = build_qualified_type (void_type_node,
11798 TYPE_QUAL_CONST
11799 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
11801 tree const_memx_ptr_type_node
11802 = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
11804 tree char_ftype_const_memx_ptr
11805 = build_function_type_list (char_type_node,
11806 const_memx_ptr_type_node,
11807 NULL);
11809 #define ITYP(T) \
11810 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
11812 #define FX_FTYPE_FX(fx) \
11813 tree fx##r_ftype_##fx##r \
11814 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
11815 tree fx##k_ftype_##fx##k \
11816 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
11818 #define FX_FTYPE_FX_INT(fx) \
11819 tree fx##r_ftype_##fx##r_int \
11820 = build_function_type_list (node_##fx##r, node_##fx##r, \
11821 integer_type_node, NULL); \
11822 tree fx##k_ftype_##fx##k_int \
11823 = build_function_type_list (node_##fx##k, node_##fx##k, \
11824 integer_type_node, NULL)
11826 #define INT_FTYPE_FX(fx) \
11827 tree int_ftype_##fx##r \
11828 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
11829 tree int_ftype_##fx##k \
11830 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
11832 #define INTX_FTYPE_FX(fx) \
11833 tree int##fx##r_ftype_##fx##r \
11834 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
11835 tree int##fx##k_ftype_##fx##k \
11836 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
11838 #define FX_FTYPE_INTX(fx) \
11839 tree fx##r_ftype_int##fx##r \
11840 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
11841 tree fx##k_ftype_int##fx##k \
11842 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
11844 tree node_hr = short_fract_type_node;
11845 tree node_nr = fract_type_node;
11846 tree node_lr = long_fract_type_node;
11847 tree node_llr = long_long_fract_type_node;
11849 tree node_uhr = unsigned_short_fract_type_node;
11850 tree node_unr = unsigned_fract_type_node;
11851 tree node_ulr = unsigned_long_fract_type_node;
11852 tree node_ullr = unsigned_long_long_fract_type_node;
11854 tree node_hk = short_accum_type_node;
11855 tree node_nk = accum_type_node;
11856 tree node_lk = long_accum_type_node;
11857 tree node_llk = long_long_accum_type_node;
11859 tree node_uhk = unsigned_short_accum_type_node;
11860 tree node_unk = unsigned_accum_type_node;
11861 tree node_ulk = unsigned_long_accum_type_node;
11862 tree node_ullk = unsigned_long_long_accum_type_node;
11865 /* For absfx builtins. */
11867 FX_FTYPE_FX (h);
11868 FX_FTYPE_FX (n);
11869 FX_FTYPE_FX (l);
11870 FX_FTYPE_FX (ll);
11872 /* For roundfx builtins. */
11874 FX_FTYPE_FX_INT (h);
11875 FX_FTYPE_FX_INT (n);
11876 FX_FTYPE_FX_INT (l);
11877 FX_FTYPE_FX_INT (ll);
11879 FX_FTYPE_FX_INT (uh);
11880 FX_FTYPE_FX_INT (un);
11881 FX_FTYPE_FX_INT (ul);
11882 FX_FTYPE_FX_INT (ull);
11884 /* For countlsfx builtins. */
11886 INT_FTYPE_FX (h);
11887 INT_FTYPE_FX (n);
11888 INT_FTYPE_FX (l);
11889 INT_FTYPE_FX (ll);
11891 INT_FTYPE_FX (uh);
11892 INT_FTYPE_FX (un);
11893 INT_FTYPE_FX (ul);
11894 INT_FTYPE_FX (ull);
11896 /* For bitsfx builtins. */
11898 INTX_FTYPE_FX (h);
11899 INTX_FTYPE_FX (n);
11900 INTX_FTYPE_FX (l);
11901 INTX_FTYPE_FX (ll);
11903 INTX_FTYPE_FX (uh);
11904 INTX_FTYPE_FX (un);
11905 INTX_FTYPE_FX (ul);
11906 INTX_FTYPE_FX (ull);
11908 /* For fxbits builtins. */
11910 FX_FTYPE_INTX (h);
11911 FX_FTYPE_INTX (n);
11912 FX_FTYPE_INTX (l);
11913 FX_FTYPE_INTX (ll);
11915 FX_FTYPE_INTX (uh);
11916 FX_FTYPE_INTX (un);
11917 FX_FTYPE_INTX (ul);
11918 FX_FTYPE_INTX (ull);
11921 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11923 int id = AVR_BUILTIN_ ## NAME; \
11924 const char *Name = "__builtin_avr_" #NAME; \
11925 char *name = (char*) alloca (1 + strlen (Name)); \
11927 gcc_assert (id < AVR_BUILTIN_COUNT); \
11928 avr_bdesc[id].fndecl \
11929 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
11930 BUILT_IN_MD, LIBNAME, NULL_TREE); \
11932 #include "builtins.def"
11933 #undef DEF_BUILTIN
11935 avr_init_builtin_int24 ();
11939 /* Subroutine of avr_expand_builtin to expand vanilla builtins
11940 with non-void result and 1 ... 3 arguments. */
11942 static rtx
11943 avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
11945 rtx pat, xop[3];
11946 int n, n_args = call_expr_nargs (exp);
11947 enum machine_mode tmode = insn_data[icode].operand[0].mode;
11949 gcc_assert (n_args >= 1 && n_args <= 3);
11951 if (target == NULL_RTX
11952 || GET_MODE (target) != tmode
11953 || !insn_data[icode].operand[0].predicate (target, tmode))
11955 target = gen_reg_rtx (tmode);
11958 for (n = 0; n < n_args; n++)
11960 tree arg = CALL_EXPR_ARG (exp, n);
11961 rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11962 enum machine_mode opmode = GET_MODE (op);
11963 enum machine_mode mode = insn_data[icode].operand[n+1].mode;
11965 if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
11967 opmode = HImode;
11968 op = gen_lowpart (HImode, op);
11971 /* In case the insn wants input operands in modes different from
11972 the result, abort. */
11974 gcc_assert (opmode == mode || opmode == VOIDmode);
11976 if (!insn_data[icode].operand[n+1].predicate (op, mode))
11977 op = copy_to_mode_reg (mode, op);
11979 xop[n] = op;
11982 switch (n_args)
11984 case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
11985 case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
11986 case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
11988 default:
11989 gcc_unreachable();
11992 if (pat == NULL_RTX)
11993 return NULL_RTX;
11995 emit_insn (pat);
11997 return target;
12001 /* Implement `TARGET_EXPAND_BUILTIN'. */
12002 /* Expand an expression EXP that calls a built-in function,
12003 with result going to TARGET if that's convenient
12004 (and in mode MODE if that's convenient).
12005 SUBTARGET may be used as the target for computing one of EXP's operands.
12006 IGNORE is nonzero if the value is to be ignored. */
12008 static rtx
12009 avr_expand_builtin (tree exp, rtx target,
12010 rtx subtarget ATTRIBUTE_UNUSED,
12011 enum machine_mode mode ATTRIBUTE_UNUSED,
12012 int ignore)
12014 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
12015 const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
12016 unsigned int id = DECL_FUNCTION_CODE (fndecl);
12017 const struct avr_builtin_description *d = &avr_bdesc[id];
12018 tree arg0;
12019 rtx op0;
12021 gcc_assert (id < AVR_BUILTIN_COUNT);
12023 switch (id)
12025 case AVR_BUILTIN_NOP:
12026 emit_insn (gen_nopv (GEN_INT(1)));
12027 return 0;
12029 case AVR_BUILTIN_DELAY_CYCLES:
12031 arg0 = CALL_EXPR_ARG (exp, 0);
12032 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
12034 if (!CONST_INT_P (op0))
12035 error ("%s expects a compile time integer constant", bname);
12036 else
12037 avr_expand_delay_cycles (op0);
12039 return NULL_RTX;
12042 case AVR_BUILTIN_INSERT_BITS:
12044 arg0 = CALL_EXPR_ARG (exp, 0);
12045 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
12047 if (!CONST_INT_P (op0))
12049 error ("%s expects a compile time long integer constant"
12050 " as first argument", bname);
12051 return target;
12054 break;
12057 case AVR_BUILTIN_ROUNDHR: case AVR_BUILTIN_ROUNDUHR:
12058 case AVR_BUILTIN_ROUNDR: case AVR_BUILTIN_ROUNDUR:
12059 case AVR_BUILTIN_ROUNDLR: case AVR_BUILTIN_ROUNDULR:
12060 case AVR_BUILTIN_ROUNDLLR: case AVR_BUILTIN_ROUNDULLR:
12062 case AVR_BUILTIN_ROUNDHK: case AVR_BUILTIN_ROUNDUHK:
12063 case AVR_BUILTIN_ROUNDK: case AVR_BUILTIN_ROUNDUK:
12064 case AVR_BUILTIN_ROUNDLK: case AVR_BUILTIN_ROUNDULK:
12065 case AVR_BUILTIN_ROUNDLLK: case AVR_BUILTIN_ROUNDULLK:
12067 /* Warn about odd rounding. Rounding points >= FBIT will have
12068 no effect. */
12070 if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
12071 break;
12073 int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));
12075 if (rbit >= (int) GET_MODE_FBIT (mode))
12077 warning (OPT_Wextra, "rounding to %d bits has no effect for "
12078 "fixed-point value with %d fractional bits",
12079 rbit, GET_MODE_FBIT (mode));
12081 return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
12082 EXPAND_NORMAL);
12084 else if (rbit <= - (int) GET_MODE_IBIT (mode))
12086 warning (0, "rounding result will always be 0");
12087 return CONST0_RTX (mode);
12090 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
12092 TR 18037 only specifies results for RP > 0. However, the
12093 remaining cases of -IBIT < RP <= 0 can easily be supported
12094 without any additional overhead. */
12096 break; /* round */
12099 /* No fold found and no insn: Call support function from libgcc. */
12101 if (d->icode == CODE_FOR_nothing
12102 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
12104 return expand_call (exp, target, ignore);
12107 /* No special treatment needed: vanilla expand. */
12109 gcc_assert (d->icode != CODE_FOR_nothing);
12110 gcc_assert (d->n_args == call_expr_nargs (exp));
12112 if (d->n_args == 0)
12114 emit_insn ((GEN_FCN (d->icode)) (target));
12115 return NULL_RTX;
12118 return avr_default_expand_builtin (d->icode, exp, target);
12122 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
12124 static tree
12125 avr_fold_absfx (tree tval)
12127 if (FIXED_CST != TREE_CODE (tval))
12128 return NULL_TREE;
12130 /* Our fixed-points have no padding: Use double_int payload directly. */
12132 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
12133 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
12134 double_int ival = fval.data.sext (bits);
12136 if (!ival.is_negative())
12137 return tval;
12139 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
12141 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
12142 ? double_int::max_value (bits, false)
12143 : -ival;
12145 return build_fixed (TREE_TYPE (tval), fval);
12149 /* Implement `TARGET_FOLD_BUILTIN'. */
12151 static tree
12152 avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
12153 bool ignore ATTRIBUTE_UNUSED)
12155 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
12156 tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
12158 if (!optimize)
12159 return NULL_TREE;
12161 switch (fcode)
12163 default:
12164 break;
12166 case AVR_BUILTIN_SWAP:
12168 return fold_build2 (LROTATE_EXPR, val_type, arg[0],
12169 build_int_cst (val_type, 4));
12172 case AVR_BUILTIN_ABSHR:
12173 case AVR_BUILTIN_ABSR:
12174 case AVR_BUILTIN_ABSLR:
12175 case AVR_BUILTIN_ABSLLR:
12177 case AVR_BUILTIN_ABSHK:
12178 case AVR_BUILTIN_ABSK:
12179 case AVR_BUILTIN_ABSLK:
12180 case AVR_BUILTIN_ABSLLK:
12181 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
12183 return avr_fold_absfx (arg[0]);
12185 case AVR_BUILTIN_BITSHR: case AVR_BUILTIN_HRBITS:
12186 case AVR_BUILTIN_BITSHK: case AVR_BUILTIN_HKBITS:
12187 case AVR_BUILTIN_BITSUHR: case AVR_BUILTIN_UHRBITS:
12188 case AVR_BUILTIN_BITSUHK: case AVR_BUILTIN_UHKBITS:
12190 case AVR_BUILTIN_BITSR: case AVR_BUILTIN_RBITS:
12191 case AVR_BUILTIN_BITSK: case AVR_BUILTIN_KBITS:
12192 case AVR_BUILTIN_BITSUR: case AVR_BUILTIN_URBITS:
12193 case AVR_BUILTIN_BITSUK: case AVR_BUILTIN_UKBITS:
12195 case AVR_BUILTIN_BITSLR: case AVR_BUILTIN_LRBITS:
12196 case AVR_BUILTIN_BITSLK: case AVR_BUILTIN_LKBITS:
12197 case AVR_BUILTIN_BITSULR: case AVR_BUILTIN_ULRBITS:
12198 case AVR_BUILTIN_BITSULK: case AVR_BUILTIN_ULKBITS:
12200 case AVR_BUILTIN_BITSLLR: case AVR_BUILTIN_LLRBITS:
12201 case AVR_BUILTIN_BITSLLK: case AVR_BUILTIN_LLKBITS:
12202 case AVR_BUILTIN_BITSULLR: case AVR_BUILTIN_ULLRBITS:
12203 case AVR_BUILTIN_BITSULLK: case AVR_BUILTIN_ULLKBITS:
12205 gcc_assert (TYPE_PRECISION (val_type)
12206 == TYPE_PRECISION (TREE_TYPE (arg[0])));
12208 return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);
12210 case AVR_BUILTIN_INSERT_BITS:
12212 tree tbits = arg[1];
12213 tree tval = arg[2];
12214 tree tmap;
12215 tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
12216 unsigned int map;
12217 bool changed = false;
12218 unsigned i;
12219 avr_map_op_t best_g;
12221 if (TREE_CODE (arg[0]) != INTEGER_CST)
12223 /* No constant as first argument: Don't fold this and run into
12224 error in avr_expand_builtin. */
12226 break;
12229 tmap = double_int_to_tree (map_type, tree_to_double_int (arg[0]));
12230 map = TREE_INT_CST_LOW (tmap);
12232 if (TREE_CODE (tval) != INTEGER_CST
12233 && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
12235 /* There are no F in the map, i.e. 3rd operand is unused.
12236 Replace that argument with some constant to render
12237 respective input unused. */
12239 tval = build_int_cst (val_type, 0);
12240 changed = true;
12243 if (TREE_CODE (tbits) != INTEGER_CST
12244 && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
12246 /* Similar for the bits to be inserted. If they are unused,
12247 we can just as well pass 0. */
12249 tbits = build_int_cst (val_type, 0);
12252 if (TREE_CODE (tbits) == INTEGER_CST)
12254 /* Inserting bits known at compile time is easy and can be
12255 performed by AND and OR with appropriate masks. */
12257 int bits = TREE_INT_CST_LOW (tbits);
12258 int mask_ior = 0, mask_and = 0xff;
12260 for (i = 0; i < 8; i++)
12262 int mi = avr_map (map, i);
12264 if (mi < 8)
12266 if (bits & (1 << mi)) mask_ior |= (1 << i);
12267 else mask_and &= ~(1 << i);
12271 tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
12272 build_int_cst (val_type, mask_ior));
12273 return fold_build2 (BIT_AND_EXPR, val_type, tval,
12274 build_int_cst (val_type, mask_and));
12277 if (changed)
12278 return build_call_expr (fndecl, 3, tmap, tbits, tval);
12280 /* If bits don't change their position we can use vanilla logic
12281 to merge the two arguments. */
12283 if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
12285 int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
12286 tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
12288 tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
12289 tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
12290 return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
12293 /* Try to decomposing map to reduce overall cost. */
12295 if (avr_log.builtin)
12296 avr_edump ("\n%?: %x\n%?: ROL cost: ", map);
12298 best_g = avr_map_op[0];
12299 best_g.cost = 1000;
12301 for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
12303 avr_map_op_t g
12304 = avr_map_decompose (map, avr_map_op + i,
12305 TREE_CODE (tval) == INTEGER_CST);
12307 if (g.cost >= 0 && g.cost < best_g.cost)
12308 best_g = g;
12311 if (avr_log.builtin)
12312 avr_edump ("\n");
12314 if (best_g.arg == 0)
12315 /* No optimization found */
12316 break;
12318 /* Apply operation G to the 2nd argument. */
12320 if (avr_log.builtin)
12321 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
12322 best_g.str, best_g.arg, best_g.map, best_g.cost);
12324 /* Do right-shifts arithmetically: They copy the MSB instead of
12325 shifting in a non-usable value (0) as with logic right-shift. */
12327 tbits = fold_convert (signed_char_type_node, tbits);
12328 tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
12329 build_int_cst (val_type, best_g.arg));
12330 tbits = fold_convert (val_type, tbits);
12332 /* Use map o G^-1 instead of original map to undo the effect of G. */
12334 tmap = double_int_to_tree (map_type,
12335 double_int::from_uhwi (best_g.map));
12337 return build_call_expr (fndecl, 3, tmap, tbits, tval);
12338 } /* AVR_BUILTIN_INSERT_BITS */
12341 return NULL_TREE;
12346 /* Initialize the GCC target structure. */
12348 #undef TARGET_ASM_ALIGNED_HI_OP
12349 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
12350 #undef TARGET_ASM_ALIGNED_SI_OP
12351 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
12352 #undef TARGET_ASM_UNALIGNED_HI_OP
12353 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
12354 #undef TARGET_ASM_UNALIGNED_SI_OP
12355 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
12356 #undef TARGET_ASM_INTEGER
12357 #define TARGET_ASM_INTEGER avr_assemble_integer
12358 #undef TARGET_ASM_FILE_START
12359 #define TARGET_ASM_FILE_START avr_file_start
12360 #undef TARGET_ASM_FILE_END
12361 #define TARGET_ASM_FILE_END avr_file_end
12363 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
12364 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
12365 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
12366 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
12368 #undef TARGET_FUNCTION_VALUE
12369 #define TARGET_FUNCTION_VALUE avr_function_value
12370 #undef TARGET_LIBCALL_VALUE
12371 #define TARGET_LIBCALL_VALUE avr_libcall_value
12372 #undef TARGET_FUNCTION_VALUE_REGNO_P
12373 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
12375 #undef TARGET_ATTRIBUTE_TABLE
12376 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
12377 #undef TARGET_INSERT_ATTRIBUTES
12378 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
12379 #undef TARGET_SECTION_TYPE_FLAGS
12380 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
12382 #undef TARGET_ASM_NAMED_SECTION
12383 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
12384 #undef TARGET_ASM_INIT_SECTIONS
12385 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
12386 #undef TARGET_ENCODE_SECTION_INFO
12387 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
12388 #undef TARGET_ASM_SELECT_SECTION
12389 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
12391 #undef TARGET_REGISTER_MOVE_COST
12392 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
12393 #undef TARGET_MEMORY_MOVE_COST
12394 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
12395 #undef TARGET_RTX_COSTS
12396 #define TARGET_RTX_COSTS avr_rtx_costs
12397 #undef TARGET_ADDRESS_COST
12398 #define TARGET_ADDRESS_COST avr_address_cost
12399 #undef TARGET_MACHINE_DEPENDENT_REORG
12400 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
12401 #undef TARGET_FUNCTION_ARG
12402 #define TARGET_FUNCTION_ARG avr_function_arg
12403 #undef TARGET_FUNCTION_ARG_ADVANCE
12404 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
12406 #undef TARGET_SET_CURRENT_FUNCTION
12407 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
12409 #undef TARGET_RETURN_IN_MEMORY
12410 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
12412 #undef TARGET_STRICT_ARGUMENT_NAMING
12413 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
12415 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
12416 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
12418 #undef TARGET_HARD_REGNO_SCRATCH_OK
12419 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
12420 #undef TARGET_CASE_VALUES_THRESHOLD
12421 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
12423 #undef TARGET_FRAME_POINTER_REQUIRED
12424 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
12425 #undef TARGET_CAN_ELIMINATE
12426 #define TARGET_CAN_ELIMINATE avr_can_eliminate
12428 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
12429 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
12431 #undef TARGET_WARN_FUNC_RETURN
12432 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
12434 #undef TARGET_CLASS_LIKELY_SPILLED_P
12435 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
12437 #undef TARGET_OPTION_OVERRIDE
12438 #define TARGET_OPTION_OVERRIDE avr_option_override
12440 #undef TARGET_CANNOT_MODIFY_JUMPS_P
12441 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
12443 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
12444 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
12446 #undef TARGET_INIT_BUILTINS
12447 #define TARGET_INIT_BUILTINS avr_init_builtins
12449 #undef TARGET_BUILTIN_DECL
12450 #define TARGET_BUILTIN_DECL avr_builtin_decl
12452 #undef TARGET_EXPAND_BUILTIN
12453 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
12455 #undef TARGET_FOLD_BUILTIN
12456 #define TARGET_FOLD_BUILTIN avr_fold_builtin
12458 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
12459 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
12461 #undef TARGET_SCALAR_MODE_SUPPORTED_P
12462 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
12464 #undef TARGET_BUILD_BUILTIN_VA_LIST
12465 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
12467 #undef TARGET_FIXED_POINT_SUPPORTED_P
12468 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
12470 #undef TARGET_CONVERT_TO_TYPE
12471 #define TARGET_CONVERT_TO_TYPE avr_convert_to_type
12473 #undef TARGET_ADDR_SPACE_SUBSET_P
12474 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
12476 #undef TARGET_ADDR_SPACE_CONVERT
12477 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
12479 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
12480 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
12482 #undef TARGET_ADDR_SPACE_POINTER_MODE
12483 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
12485 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
12486 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
12487 avr_addr_space_legitimate_address_p
12489 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
12490 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
12492 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
12493 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
12495 #undef TARGET_SECONDARY_RELOAD
12496 #define TARGET_SECONDARY_RELOAD avr_secondary_reload
12498 #undef TARGET_PRINT_OPERAND
12499 #define TARGET_PRINT_OPERAND avr_print_operand
12500 #undef TARGET_PRINT_OPERAND_ADDRESS
12501 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
12502 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
12503 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
12505 struct gcc_target targetm = TARGET_INITIALIZER;
12508 #include "gt-avr.h"